diff --git a/doc/conf.py b/doc/conf.py index 32a712446b..043d477612 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -1,5 +1,3 @@ -# emacs: -*- coding: utf-8; mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- -# vi: set fileencoding=utf-8 ft=python sts=4 ts=4 sw=4 et: # # nipype documentation build configuration file, created by # sphinx-quickstart on Mon Jul 20 12:30:18 2009. @@ -151,8 +149,8 @@ master_doc = "index" # General information about the project. -project = u"nipype" -copyright = u"2009-21, Neuroimaging in Python team" +project = "nipype" +copyright = "2009-21, Neuroimaging in Python team" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/nipype/__init__.py b/nipype/__init__.py index 09728b62d1..22e6f97c2d 100644 --- a/nipype/__init__.py +++ b/nipype/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -26,14 +25,14 @@ import faulthandler faulthandler.enable() -except (ImportError, IOError) as e: +except (ImportError, OSError) as e: pass config = NipypeConfig() logging = Logging(config) -class NipypeTester(object): +class NipypeTester: def __call__(self, doctests=True, parallel=False): try: import pytest diff --git a/nipype/algorithms/__init__.py b/nipype/algorithms/__init__.py index e34dc850ab..a701f6fe59 100644 --- a/nipype/algorithms/__init__.py +++ b/nipype/algorithms/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py index 763c7020bb..c33b7d3abe 100644 --- a/nipype/algorithms/confounds.py +++ b/nipype/algorithms/confounds.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -150,7 +149,7 @@ class ComputeDVARS(BaseInterface): def __init__(self, **inputs): self._results = {} - super(ComputeDVARS, self).__init__(**inputs) + super().__init__(**inputs) def _gen_fname(self, suffix, ext=None): fname, in_ext = op.splitext(op.basename(self.inputs.in_file)) @@ -165,7 +164,7 @@ def _gen_fname(self, suffix, ext=None): if ext.startswith("."): ext = ext[1:] - return op.abspath("{}_{}.{}".format(fname, suffix, ext)) + return op.abspath(f"{fname}_{suffix}.{ext}") def _run_interface(self, runtime): dvars = compute_dvars( @@ -584,7 +583,7 @@ class CompCor(SimpleInterface): def __init__(self, *args, **kwargs): """exactly the same as compcor except the header""" - super(CompCor, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._header = "CompCor" def _run_interface(self, runtime): @@ -713,7 +712,7 @@ def _run_interface(self, runtime): self.inputs.pre_filter ] ncols = filter_basis.shape[1] if filter_basis.size > 0 else 0 - header = ["{}{:02d}".format(ftype, i) for i in range(ncols)] + header = [f"{ftype}{i:02d}" for i in range(ncols)] if skip_vols: old_basis = filter_basis # nrows defined above @@ -724,7 +723,7 @@ def _run_interface(self, runtime): filter_basis[skip_vols:, :ncols] = old_basis filter_basis[:skip_vols, -skip_vols:] = np.eye(skip_vols) header.extend( - ["NonSteadyStateOutlier{:02d}".format(i) for i in range(skip_vols)] + [f"NonSteadyStateOutlier{i:02d}" for i in range(skip_vols)] ) np.savetxt( 
self._results["pre_filter_file"], @@ -747,7 +746,7 @@ def _run_interface(self, runtime): not_retained = np.where(np.logical_not(metadata["retained"])) components_names[retained] = components_header components_names[not_retained] = [ - "dropped{}".format(i) for i in range(len(not_retained[0])) + f"dropped{i}" for i in range(len(not_retained[0])) ] with open(self._results["metadata_file"], "w") as f: f.write("\t".join(["component"] + list(metadata.keys())) + "\n") @@ -768,7 +767,7 @@ def _make_headers(self, num_col): if isdefined(self.inputs.header_prefix) else self._header ) - headers = ["{}{:02d}".format(header, i) for i in range(num_col)] + headers = [f"{header}{i:02d}" for i in range(num_col)] return headers @@ -781,7 +780,7 @@ class ACompCor(CompCor): def __init__(self, *args, **kwargs): """exactly the same as compcor except the header""" - super(ACompCor, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._header = "aCompCor" @@ -807,7 +806,7 @@ class TCompCorInputSpec(CompCorInputSpec): class TCompCorOutputSpec(CompCorOutputSpec): # and all the fields in CompCorOutputSpec high_variance_masks = OutputMultiPath( - File(exists=True), desc=(("voxels exceeding the variance" " threshold")) + File(exists=True), desc=("voxels exceeding the variance" " threshold") ) @@ -832,7 +831,7 @@ class TCompCor(CompCor): def __init__(self, *args, **kwargs): """exactly the same as compcor except the header""" - super(TCompCor, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._header = "tCompCor" self._mask_files = [] @@ -854,7 +853,7 @@ def _process_masks(self, mask_images, timeseries=None): out_image = nb.Nifti1Image(mask_data, affine=img.affine, header=img.header) # save mask - mask_file = os.path.abspath("mask_{:03d}.nii.gz".format(i)) + mask_file = os.path.abspath(f"mask_{i:03d}.nii.gz") out_image.to_filename(mask_file) IFLOGGER.debug( "tCompcor computed and saved mask of shape %s to " "mask_file %s", @@ -866,7 +865,7 @@ def _process_masks(self, mask_images, timeseries=None): return out_images def _list_outputs(self): - outputs = super(TCompCor, self)._list_outputs() + outputs = super()._list_outputs() outputs["high_variance_masks"] = self._mask_files return outputs @@ -1136,7 +1135,7 @@ def plot_confound(tseries, figsize, name, units=None, series_tr=None, normalize= xlabel = "Frame #" if series_tr is not None: - xlabel = "Frame # ({} sec TR)".format(series_tr) + xlabel = f"Frame # ({series_tr} sec TR)" ax.set_xlabel(xlabel) ylim = ax.get_ylim() @@ -1280,17 +1279,15 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None): mask_index = 0 else: raise ValueError( - ( - "When more than one mask file is provided, " - "one of merge_method or mask_index must be " - "set" - ) + "When more than one mask file is provided, " + "one of merge_method or mask_index must be " + "set" ) if mask_index < len(mask_files): mask = nb.load(mask_files[mask_index]) return [mask] raise ValueError( - ("mask_index {0} must be less than number of mask " "files {1}").format( + ("mask_index {} must be less than number of mask " "files {}").format( mask_index, len(mask_files) ) ) diff --git a/nipype/algorithms/icc.py b/nipype/algorithms/icc.py index 8e5c6b150c..2ea5f43d87 100644 --- a/nipype/algorithms/icc.py +++ b/nipype/algorithms/icc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os from functools import lru_cache import numpy as np diff --git a/nipype/algorithms/mesh.py b/nipype/algorithms/mesh.py index 188cc3ec7c..d5a3b8a5b7 100644 --- 
a/nipype/algorithms/mesh.py +++ b/nipype/algorithms/mesh.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -30,7 +29,7 @@ class TVTKBaseInterface(BaseInterface): def __init__(self, **inputs): if VTKInfo.no_tvtk(): raise ImportError("This interface requires tvtk to run.") - super(TVTKBaseInterface, self).__init__(**inputs) + super().__init__(**inputs) class WarpPointsInputSpec(BaseInterfaceInputSpec): @@ -92,7 +91,7 @@ def _gen_fname(self, in_file, suffix="generated", ext=None): if ext[0] == ".": ext = ext[1:] - return op.abspath("%s_%s.%s" % (fname, suffix, ext)) + return op.abspath(f"{fname}_{suffix}.{ext}") def _run_interface(self, runtime): import nibabel as nb @@ -423,7 +422,7 @@ class P2PDistance(ComputeMeshWarp): """ def __init__(self, **inputs): - super(P2PDistance, self).__init__(**inputs) + super().__init__(**inputs) IFLOGGER.warning( "This interface has been deprecated since 1.0, please " "use ComputeMeshWarp" diff --git a/nipype/algorithms/metrics.py b/nipype/algorithms/metrics.py index 155598bcd3..1cbcce9409 100644 --- a/nipype/algorithms/metrics.py +++ b/nipype/algorithms/metrics.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py index 839696144e..0c715a8c77 100644 --- a/nipype/algorithms/misc.py +++ b/nipype/algorithms/misc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Miscellaneous algorithms.""" @@ -494,7 +493,7 @@ def merge_csvs(in_list): try: in_array = np.loadtxt(in_file, delimiter=",", skiprows=1) except ValueError: - with open(in_file, "r") as first: + with open(in_file) as first: header_line = first.readline() header_list = header_line.split(",") @@ -671,7 +670,7 @@ def _run_interface(self, runtime): iflogger.info( 'Row headings have been provided. Adding "labels"' "column header." 
) - prefix = '"{p}","'.format(p=self.inputs.row_heading_title) + prefix = f'"{self.inputs.row_heading_title}","' csv_headings = prefix + '","'.join(itertools.chain(headings)) + '"\n' rowheadingsBool = True else: @@ -772,7 +771,7 @@ class AddCSVColumn(BaseInterface): output_spec = AddCSVColumnOutputSpec def _run_interface(self, runtime): - in_file = open(self.inputs.in_file, "r") + in_file = open(self.inputs.in_file) _, name, ext = split_filename(self.inputs.out_file) if not ext == ".csv": ext = ".csv" @@ -808,12 +807,12 @@ class AddCSVRowInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): def __setattr__(self, key, value): if key not in self.copyable_trait_names(): if not isdefined(value): - super(AddCSVRowInputSpec, self).__setattr__(key, value) + super().__setattr__(key, value) self._outputs[key] = value else: if key in self._outputs: self._outputs[key] = value - super(AddCSVRowInputSpec, self).__setattr__(key, value) + super().__setattr__(key, value) class AddCSVRowOutputSpec(TraitedSpec): @@ -850,7 +849,7 @@ class AddCSVRow(BaseInterface): output_spec = AddCSVRowOutputSpec def __init__(self, infields=None, force_run=True, **kwargs): - super(AddCSVRow, self).__init__(**kwargs) + super().__init__(**kwargs) undefined_traits = {} self._infields = infields self._have_lock = False @@ -882,10 +881,8 @@ def _run_interface(self, runtime): from warnings import warn warn( - ( - "Python module filelock was not found: AddCSVRow will not be" - " thread-safe in multi-processor execution" - ) + "Python module filelock was not found: AddCSVRow will not be" + " thread-safe in multi-processor execution" ) input_dict = {} @@ -926,7 +923,7 @@ def _list_outputs(self): return outputs def _outputs(self): - return self._add_output_traits(super(AddCSVRow, self)._outputs()) + return self._add_output_traits(super()._outputs()) def _add_output_traits(self, base): return base @@ -1070,7 +1067,7 @@ def _run_interface(self, runtime): def _gen_output_filename(self): if not isdefined(self.inputs.out_file): _, base, ext = split_filename(self.inputs.in_file) - out_file = os.path.abspath("%s_SNR%03.2f%s" % (base, self.inputs.snr, ext)) + out_file = os.path.abspath(f"{base}_SNR{self.inputs.snr:03.2f}{ext}") else: out_file = self.inputs.out_file @@ -1121,7 +1118,7 @@ def gen_noise(self, image, mask=None, snr_db=10.0, dist="normal", bg_dist="norma im_noise = np.sqrt((image + stde_1) ** 2 + (stde_2) ** 2) else: raise NotImplementedError( - ("Only normal and rician distributions " "are supported") + "Only normal and rician distributions " "are supported" ) return im_noise @@ -1547,7 +1544,7 @@ class CalculateMedian(BaseInterface): output_spec = CalculateMedianOutputSpec def __init__(self, *args, **kwargs): - super(CalculateMedian, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self._median_files = [] def _gen_fname(self, suffix, idx=None, ext=None): @@ -1569,10 +1566,10 @@ def _gen_fname(self, suffix, idx=None, ext=None): if self.inputs.median_file: outname = self.inputs.median_file else: - outname = "{}_{}".format(fname, suffix) + outname = f"{fname}_{suffix}" if idx: outname += str(idx) - return op.abspath("{}.{}".format(outname, ext)) + return op.abspath(f"{outname}.{ext}") def _run_interface(self, runtime): total = None diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py index afd6841c59..4ab54ef7e9 100644 --- a/nipype/algorithms/modelgen.py +++ b/nipype/algorithms/modelgen.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; 
indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -167,7 +166,7 @@ def bids_gen_info( condition_column = "_trial_type" for i in events: i.update({condition_column: "ev0"}) - conditions = sorted(set([i[condition_column] for i in events])) + conditions = sorted({i[condition_column] for i in events}) runinfo = Bunch(conditions=[], onsets=[], durations=[], amplitudes=[]) for condition in conditions: selected_events = [i for i in events if i[condition_column] == condition] @@ -488,7 +487,7 @@ def _generate_design(self, infolist=None): for filename in self.inputs.outlier_files: try: outindices = np.loadtxt(filename, dtype=int) - except IOError: + except OSError: outliers.append([]) else: if outindices.size == 1: @@ -616,8 +615,8 @@ def _concatenate_info(self, infolist): else: raise ValueError( "Mismatch in number of onsets and \ - durations for run {0}, condition \ - {1}".format( + durations for run {}, condition \ + {}".format( i + 2, j + 1 ) ) @@ -651,7 +650,7 @@ def _generate_design(self, infolist=None): not isdefined(self.inputs.concatenate_runs) or not self.inputs.concatenate_runs ): - super(SpecifySPMModel, self)._generate_design(infolist=infolist) + super()._generate_design(infolist=infolist) return if isdefined(self.inputs.subject_info): @@ -682,7 +681,7 @@ def _generate_design(self, infolist=None): for i, filename in enumerate(self.inputs.outlier_files): try: out = np.loadtxt(filename) - except IOError: + except OSError: iflogger.warning("Error reading outliers file %s", filename) out = np.array([]) @@ -789,7 +788,7 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): dt = TA / 10.0 durations = np.round(np.array(i_durations) * 1000) if len(durations) == 1: - durations = durations * np.ones((len(i_onsets))) + durations = durations * np.ones(len(i_onsets)) onsets = np.round(np.array(i_onsets) * 1000) dttemp = math.gcd(TA, math.gcd(SILENCE, TR)) if dt < dttemp: @@ -801,8 +800,8 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): iflogger.info("Setting dt = %d ms\n", dt) npts = int(np.ceil(total_time / dt)) times = np.arange(0, total_time, dt) * 1e-3 - timeline = np.zeros((npts)) - timeline2 = np.zeros((npts)) + timeline = np.zeros(npts) + timeline2 = np.zeros(npts) if isdefined(self.inputs.model_hrf) and self.inputs.model_hrf: hrf = spm_hrf(dt * 1e-3) reg_scale = 1.0 @@ -839,7 +838,7 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): if not self.inputs.stimuli_as_impulses: if durations[i] == 0: durations[i] = TA * nvol - stimdur = np.ones((int(durations[i] / dt))) + stimdur = np.ones(int(durations[i] / dt)) timeline2 = np.convolve(timeline2, stimdur)[0 : len(timeline2)] timeline += timeline2 timeline2[:] = 0 @@ -866,7 +865,7 @@ def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans): ): plt.plot(times, timederiv) # sample timeline - timeline2 = np.zeros((npts)) + timeline2 = np.zeros(npts) reg = [] regderiv = [] for i, trial in enumerate(np.arange(nscans) / nvol): @@ -977,7 +976,7 @@ def _generate_design(self, infolist=None): else: infolist = gen_info(self.inputs.event_files) sparselist = self._generate_clustered_design(infolist) - super(SpecifySparseModel, self)._generate_design(infolist=sparselist) + super()._generate_design(infolist=sparselist) def _list_outputs(self): outputs = self._outputs().get() diff --git a/nipype/algorithms/rapidart.py b/nipype/algorithms/rapidart.py index e764dc9243..3c3a481d11 100644 --- a/nipype/algorithms/rapidart.py +++ b/nipype/algorithms/rapidart.py @@ -1,4 +1,3 
@@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -378,7 +377,7 @@ class ArtifactDetect(BaseInterface): output_spec = ArtifactDetectOutputSpec def __init__(self, **inputs): - super(ArtifactDetect, self).__init__(**inputs) + super().__init__(**inputs) def _get_output_filenames(self, motionfile, output_dir): """Generate output files based on motion filenames diff --git a/nipype/algorithms/stats.py b/nipype/algorithms/stats.py index 29ce8d6be4..9fadd6fcf3 100644 --- a/nipype/algorithms/stats.py +++ b/nipype/algorithms/stats.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/algorithms/tests/__init__.py b/nipype/algorithms/tests/__init__.py index 99fb243f19..349937997e 100644 --- a/nipype/algorithms/tests/__init__.py +++ b/nipype/algorithms/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/algorithms/tests/test_CompCor.py b/nipype/algorithms/tests/test_CompCor.py index 51b1ea60f7..60e1ba34b2 100644 --- a/nipype/algorithms/tests/test_CompCor.py +++ b/nipype/algorithms/tests/test_CompCor.py @@ -261,7 +261,7 @@ def run_cc( assert os.path.exists(expected_file) assert os.path.getsize(expected_file) > 0 - with open(ccresult.outputs.components_file, "r") as components_file: + with open(ccresult.outputs.components_file) as components_file: header = components_file.readline().rstrip().split("\t") components_data = np.loadtxt(components_file, delimiter="\t") @@ -283,7 +283,7 @@ def run_cc( assert os.path.exists(expected_metadata_file) assert os.path.getsize(expected_metadata_file) > 0 - with open(ccresult.outputs.metadata_file, "r") as metadata_file: + with open(ccresult.outputs.metadata_file) as metadata_file: components_metadata = [ line.rstrip().split("\t") for line in metadata_file ] diff --git a/nipype/algorithms/tests/test_ErrorMap.py b/nipype/algorithms/tests/test_ErrorMap.py index faae860a5a..c1b3f1cac8 100644 --- a/nipype/algorithms/tests/test_ErrorMap.py +++ b/nipype/algorithms/tests/test_ErrorMap.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- import pytest from nipype.testing import example_data diff --git a/nipype/algorithms/tests/test_Overlap.py b/nipype/algorithms/tests/test_Overlap.py index ea3b5a3f5d..ee3900e82e 100644 --- a/nipype/algorithms/tests/test_Overlap.py +++ b/nipype/algorithms/tests/test_Overlap.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/algorithms/tests/test_confounds.py b/nipype/algorithms/tests/test_confounds.py index 29f18c9221..9698b6d2c1 100644 --- a/nipype/algorithms/tests/test_confounds.py +++ b/nipype/algorithms/tests/test_confounds.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- import os import pytest diff --git a/nipype/algorithms/tests/test_icc_anova.py b/nipype/algorithms/tests/test_icc_anova.py index bd0fe3525b..34e8c5c3e9 100644 --- a/nipype/algorithms/tests/test_icc_anova.py +++ b/nipype/algorithms/tests/test_icc_anova.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import numpy as np from nipype.algorithms.icc import ICC_rep_anova diff --git a/nipype/algorithms/tests/test_mesh_ops.py 
b/nipype/algorithms/tests/test_mesh_ops.py index 8be59e08c0..484925ddbb 100644 --- a/nipype/algorithms/tests/test_mesh_ops.py +++ b/nipype/algorithms/tests/test_mesh_ops.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/algorithms/tests/test_modelgen.py b/nipype/algorithms/tests/test_modelgen.py index 5931fd894e..9bc0e8376d 100644 --- a/nipype/algorithms/tests/test_modelgen.py +++ b/nipype/algorithms/tests/test_modelgen.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/algorithms/tests/test_moments.py b/nipype/algorithms/tests/test_moments.py index 91e6313193..6fe60c4e61 100644 --- a/nipype/algorithms/tests/test_moments.py +++ b/nipype/algorithms/tests/test_moments.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import numpy as np from nipype.algorithms.misc import calc_moments diff --git a/nipype/algorithms/tests/test_normalize_tpms.py b/nipype/algorithms/tests/test_normalize_tpms.py index 99aa5950ae..5f3f2456ca 100644 --- a/nipype/algorithms/tests/test_normalize_tpms.py +++ b/nipype/algorithms/tests/test_normalize_tpms.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/algorithms/tests/test_rapidart.py b/nipype/algorithms/tests/test_rapidart.py index fdf0716805..322d32ad2e 100644 --- a/nipype/algorithms/tests/test_rapidart.py +++ b/nipype/algorithms/tests/test_rapidart.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import numpy as np diff --git a/nipype/algorithms/tests/test_splitmerge.py b/nipype/algorithms/tests/test_splitmerge.py index 3060ef0611..5a8f098c18 100644 --- a/nipype/algorithms/tests/test_splitmerge.py +++ b/nipype/algorithms/tests/test_splitmerge.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- from nipype.testing import example_data diff --git a/nipype/algorithms/tests/test_stats.py b/nipype/algorithms/tests/test_stats.py index 752fadf307..ed698d47cf 100644 --- a/nipype/algorithms/tests/test_stats.py +++ b/nipype/algorithms/tests/test_stats.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -10,7 +9,7 @@ def test_ActivationCount(tmpdir): tmpdir.chdir() - in_files = ["{:d}.nii".format(i) for i in range(3)] + in_files = [f"{i:d}.nii" for i in range(3)] for fname in in_files: nb.Nifti1Image(np.random.normal(size=(5, 5, 5)), np.eye(4)).to_filename(fname) @@ -32,7 +31,7 @@ def test_ActivationCount(tmpdir): ) def test_ActivationCount_normaldistr(tmpdir, threshold, above_thresh): tmpdir.chdir() - in_files = ["{:d}.nii".format(i) for i in range(3)] + in_files = [f"{i:d}.nii" for i in range(3)] for fname in in_files: nb.Nifti1Image(np.random.normal(size=(100, 100, 100)), np.eye(4)).to_filename( fname diff --git a/nipype/caching/__init__.py b/nipype/caching/__init__.py index 1e99ed4428..db0261ebea 100644 --- a/nipype/caching/__init__.py +++ b/nipype/caching/__init__.py @@ -1,2 +1 @@ -# -*- coding: utf-8 -*- from .memory import Memory diff --git a/nipype/caching/memory.py b/nipype/caching/memory.py index d2946710f1..c941b48b41 100644 --- 
a/nipype/caching/memory.py +++ b/nipype/caching/memory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Using nipype with persistence and lazy recomputation but without explicit name-steps pipeline: getting back scope in command-line based programming. @@ -18,7 +17,7 @@ # PipeFunc object: callable interface to nipype.interface objects -class PipeFunc(object): +class PipeFunc: """Callable interface to nipype.interface objects Use this to wrap nipype.interface object and call them @@ -53,7 +52,7 @@ def __init__(self, interface, base_dir, callback=None): if not os.path.exists(base_dir) and os.path.isdir(base_dir): raise ValueError("base_dir should be an existing directory") self.base_dir = base_dir - doc = "%s\n%s" % (self.interface.__doc__, self.interface.help(returnhelp=True)) + doc = f"{self.interface.__doc__}\n{self.interface.help(returnhelp=True)}" self.__doc__ = doc self.callback = callback @@ -66,7 +65,7 @@ def __call__(self, **kwargs): inputs = interface.inputs.get_hashval() hasher = hashlib.new("md5") hasher.update(pickle.dumps(inputs)) - dir_name = "%s-%s" % ( + dir_name = "{}-{}".format( interface.__class__.__module__.replace(".", "-"), interface.__class__.__name__, ) @@ -103,7 +102,7 @@ def read_log(filename, run_dict=None): if run_dict is None: run_dict = dict() - with open(filename, "r") as logfile: + with open(filename) as logfile: for line in logfile: dir_name, job_name = line[:-1].split("/") jobs = run_dict.get(dir_name, set()) @@ -137,7 +136,7 @@ def rm_all_but(base_dir, dirs_to_keep, warn=False): shutil.rmtree(dir_name) -class _MemoryCallback(object): +class _MemoryCallback: "An object to avoid closures and have everything pickle" def __init__(self, memory): @@ -147,7 +146,7 @@ def __call__(self, dir_name, job_name): self.memory._log_name(dir_name, job_name) -class Memory(object): +class Memory: """Memory context to provide caching for interfaces Parameters @@ -224,7 +223,7 @@ def _log_name(self, dir_name, job_name): # immediately to avoid race conditions in parallel computing: # file appends are atomic with open(os.path.join(base_dir, "log.current"), "a") as currentlog: - currentlog.write("%s/%s\n" % (dir_name, job_name)) + currentlog.write(f"{dir_name}/{job_name}\n") t = time.localtime() year_dir = os.path.join(base_dir, "log.%i" % t.tm_year) @@ -239,7 +238,7 @@ def _log_name(self, dir_name, job_name): "Dir exists" with open(os.path.join(month_dir, "%02i.log" % t.tm_mday), "a") as rotatefile: - rotatefile.write("%s/%s\n" % (dir_name, job_name)) + rotatefile.write(f"{dir_name}/{job_name}\n") def clear_previous_runs(self, warn=True): """Remove all the cache that where not used in the latest run of @@ -295,4 +294,4 @@ def _clear_all_but(self, runs, warn=True): rm_all_but(os.path.join(self.base_dir, dir_name), job_names, warn=warn) def __repr__(self): - return "{}(base_dir={})".format(self.__class__.__name__, self.base_dir) + return f"{self.__class__.__name__}(base_dir={self.base_dir})" diff --git a/nipype/caching/tests/__init__.py b/nipype/caching/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/caching/tests/__init__.py +++ b/nipype/caching/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/caching/tests/test_memory.py b/nipype/caching/tests/test_memory.py index ef80869f03..5bd9fad528 100644 --- a/nipype/caching/tests/test_memory.py +++ b/nipype/caching/tests/test_memory.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Test the nipype interface caching mechanism """ @@ -16,7 +15,7 @@ class SideEffectInterface(EngineTestInterface): 
def _run_interface(self, runtime): global nb_runs nb_runs += 1 - return super(SideEffectInterface, self)._run_interface(runtime) + return super()._run_interface(runtime) def test_caching(tmpdir): diff --git a/nipype/external/__init__.py b/nipype/external/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/external/__init__.py +++ b/nipype/external/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/external/cloghandler.py b/nipype/external/cloghandler.py index c861654990..1ac2081c91 100644 --- a/nipype/external/cloghandler.py +++ b/nipype/external/cloghandler.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # Copyright 2008 Lowell Alleman # # Licensed under the Apache License, Version 2.0 (the "License"); you may not @@ -40,7 +39,6 @@ """ -from builtins import range __version__ = "$Id: cloghandler.py 6175 2009-11-02 18:40:35Z lowell $" __author__ = "Lowell Alleman" @@ -191,7 +189,7 @@ def release(self): self.stream.flush() if self._rotateFailed: self.stream.close() - except IOError: + except OSError: if self._rotateFailed: self.stream.close() finally: @@ -266,7 +264,7 @@ def doRollover(self): try: # Do a rename test to determine if we can successfully rename the log file os.rename(self.baseFilename, tmpname) - except (IOError, OSError): + except OSError: exc_value = sys.exc_info()[1] self._degrade( True, "rename failed. File in use? " "exception=%s", exc_value @@ -319,7 +317,7 @@ def _shouldRollover(self): if self.maxBytes > 0: # are we rolling over? try: self.stream.seek(0, 2) # due to non-posix-compliant Windows feature - except IOError: + except OSError: return True if self.stream.tell() >= self.maxBytes: return True diff --git a/nipype/external/due.py b/nipype/external/due.py index fc436d5d45..47a0ae4e0f 100644 --- a/nipype/external/due.py +++ b/nipype/external/due.py @@ -27,7 +27,7 @@ __version__ = "0.0.5" -class InactiveDueCreditCollector(object): +class InactiveDueCreditCollector: """Just a stub at the Collector which would not do anything""" def _donothing(self, *args, **kwargs): @@ -45,7 +45,7 @@ def nondecorating_decorator(func): cite = load = add = _donothing def __repr__(self): - return "{}()".format(self.__class__.__name__) + return f"{self.__class__.__name__}()" def _donothing_func(*args, **kwargs): diff --git a/nipype/external/fsl_imglob.py b/nipype/external/fsl_imglob.py index 3707e4750d..3f131c33b3 100755 --- a/nipype/external/fsl_imglob.py +++ b/nipype/external/fsl_imglob.py @@ -65,7 +65,6 @@ # innovation@isis.ox.ac.uk quoting reference DE/9564. 
import sys import glob -from builtins import range def usage(): diff --git a/nipype/info.py b/nipype/info.py index 1a040830dc..192ba4368f 100644 --- a/nipype/info.py +++ b/nipype/info.py @@ -45,7 +45,7 @@ def get_nipype_gitversion(): if __version__.endswith("-dev"): gitversion = get_nipype_gitversion() if gitversion: - __version__ = "{}+{}".format(__version__, gitversion) + __version__ = f"{__version__}+{gitversion}" CLASSIFIERS = [ "Development Status :: 5 - Production/Stable", diff --git a/nipype/interfaces/__init__.py b/nipype/interfaces/__init__.py index fe1bf9c9e5..d72a463882 100644 --- a/nipype/interfaces/__init__.py +++ b/nipype/interfaces/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/interfaces/afni/__init__.py b/nipype/interfaces/afni/__init__.py index 3629090ac0..7e6df345bc 100644 --- a/nipype/interfaces/afni/__init__.py +++ b/nipype/interfaces/afni/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/interfaces/afni/base.py b/nipype/interfaces/afni/base.py index e3b910f8c5..3dcf209428 100644 --- a/nipype/interfaces/afni/base.py +++ b/nipype/interfaces/afni/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provide a base interface to AFNI commands.""" @@ -123,9 +122,7 @@ class AFNICommandBase(CommandLine): def _run_interface(self, runtime, correct_return_codes=(0,)): if platform == "darwin": runtime.environ["DYLD_FALLBACK_LIBRARY_PATH"] = "/usr/local/afni/" - return super(AFNICommandBase, self)._run_interface( - runtime, correct_return_codes - ) + return super()._run_interface(runtime, correct_return_codes) class AFNICommandInputSpec(CommandLineInputSpec): @@ -213,7 +210,7 @@ def set_default_output_type(cls, outputtype): def __init__(self, **inputs): """Instantiate an AFNI command tool wrapper.""" - super(AFNICommand, self).__init__(**inputs) + super().__init__(**inputs) self.inputs.on_trait_change(self._output_update, "outputtype") if hasattr(self.inputs, "num_threads"): @@ -248,7 +245,7 @@ def _overload_extension(self, value, name=None): ) def _list_outputs(self): - outputs = super(AFNICommand, self)._list_outputs() + outputs = super()._list_outputs() metadata = dict(name_source=lambda t: t is not None) out_names = list(self.inputs.traits(**metadata).keys()) if out_names: @@ -318,13 +315,13 @@ class AFNIPythonCommand(AFNICommand): @property def cmd(self): """Revise the command path.""" - orig_cmd = super(AFNIPythonCommand, self).cmd + orig_cmd = super().cmd found = shutil.which(orig_cmd) return found if found is not None else orig_cmd @property def _cmd_prefix(self): - return "{} ".format(self.inputs.py27_path) + return f"{self.inputs.py27_path} " def no_afni(): diff --git a/nipype/interfaces/afni/model.py b/nipype/interfaces/afni/model.py index 1235f8afff..3eb9358811 100644 --- a/nipype/interfaces/afni/model.py +++ b/nipype/interfaces/afni/model.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft = python sts = 4 ts = 4 sw = 4 et: """ @@ -305,7 +304,7 @@ def _format_arg(self, name, trait_spec, value): if val.startswith("SYM: "): value[n] = val.lstrip("SYM: ") - return super(Deconvolve, 
self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): if skip is None: @@ -317,7 +316,7 @@ def _parse_inputs(self, skip=None): if not isdefined(self.inputs.out_file): self.inputs.out_file = "Decon.nii" - return super(Deconvolve, self)._parse_inputs(skip) + return super()._parse_inputs(skip) def _list_outputs(self): outputs = self.output_spec().get() @@ -637,7 +636,7 @@ class Remlfit(AFNICommand): def _parse_inputs(self, skip=None): if skip is None: skip = [] - return super(Remlfit, self)._parse_inputs(skip) + return super()._parse_inputs(skip) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/afni/preprocess.py b/nipype/interfaces/afni/preprocess.py index 0eec60cd62..819ce663b4 100644 --- a/nipype/interfaces/afni/preprocess.py +++ b/nipype/interfaces/afni/preprocess.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """AFNI preprocessing interfaces.""" @@ -584,7 +583,7 @@ class Allineate(AFNICommand): output_spec = AllineateOutputSpec def _list_outputs(self): - outputs = super(Allineate, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.out_weight_file: outputs["out_weight_file"] = op.abspath(self.inputs.out_weight_file) @@ -1140,7 +1139,7 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): if runtime is None: try: clip_val = load_json(outfile)["stat"] - except IOError: + except OSError: return self.run().outputs else: clip_val = [] @@ -1217,7 +1216,7 @@ class DegreeCentrality(AFNICommand): # Re-define generated inputs def _list_outputs(self): # Update outputs dictionary if oned file is defined - outputs = super(DegreeCentrality, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.oned_file: outputs["oned_file"] = os.path.abspath(self.inputs.oned_file) @@ -1554,7 +1553,7 @@ class Hist(AFNICommandBase): _redirect_x = True def __init__(self, **inputs): - super(Hist, self).__init__(**inputs) + super().__init__(**inputs) if not no_afni(): version = Info.version() @@ -1567,10 +1566,10 @@ def _parse_inputs(self, skip=None): if skip is None: skip = [] skip += ["out_show"] - return super(Hist, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _list_outputs(self): - outputs = super(Hist, self)._list_outputs() + outputs = super()._list_outputs() outputs["out_file"] += ".niml.hist" if not self.inputs.showhist: outputs["out_show"] = Undefined @@ -1833,12 +1832,10 @@ def _parse_inputs(self, skip=None): if not self.inputs.save_outliers: skip += ["outliers_file"] - return super(OutlierCount, self)._parse_inputs(skip) + return super()._parse_inputs(skip) def _run_interface(self, runtime, correct_return_codes=(0,)): - runtime = super(OutlierCount, self)._run_interface( - runtime, correct_return_codes - ) + runtime = super()._run_interface(runtime, correct_return_codes) # Read from runtime.stdout or runtime.merged with open(op.abspath(self.inputs.out_file), "w") as outfh: @@ -2106,7 +2103,7 @@ def _format_arg(self, name, trait_spec, value): } if name == "stat": value = [_stat_dict[v] for v in value] - return super(ROIStats, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) class RetroicorInputSpec(AFNICommandInputSpec): @@ -2206,7 +2203,7 @@ def _format_arg(self, name, trait_spec, value): if name == "in_file": if not isdefined(self.inputs.card) and not 
isdefined(self.inputs.resp): return None - return super(Retroicor, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) class SegInputSpec(CommandLineInputSpec): @@ -2351,7 +2348,7 @@ class SkullStrip(AFNICommand): output_spec = AFNICommandOutputSpec def __init__(self, **inputs): - super(SkullStrip, self).__init__(**inputs) + super().__init__(**inputs) if not no_afni(): v = Info.version() @@ -2546,7 +2543,7 @@ def _format_arg(self, name, trait_spec, value): elif name == "histogram": return trait_spec.argstr % (self.inputs.histogram_bin_numbers, value) else: - return super(TCorrMap, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) class NetCorrInputSpec(AFNICommandInputSpec): @@ -3287,7 +3284,7 @@ def _format_arg(self, name, trait_spec, value): ) elif name == "slice_timing" and isinstance(value, list): value = self._write_slice_timing() - return super(TShift, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _write_slice_timing(self): slice_timing = list(self.inputs.slice_timing) @@ -3300,7 +3297,7 @@ def _write_slice_timing(self): return fname def _list_outputs(self): - outputs = super(TShift, self)._list_outputs() + outputs = super()._list_outputs() if isdefined(self.inputs.slice_timing): if isinstance(self.inputs.slice_timing, list): outputs["timing_file"] = os.path.abspath("slice_timing.1D") @@ -3507,7 +3504,7 @@ class Volreg(AFNICommand): def _format_arg(self, name, trait_spec, value): if name == "in_weight_volume" and not isinstance(value, tuple): value = (value, 0) - return super(Volreg, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) class WarpInputSpec(AFNICommandInputSpec): @@ -3603,17 +3600,17 @@ class Warp(AFNICommand): output_spec = WarpOutputSpec def _run_interface(self, runtime, correct_return_codes=(0,)): - runtime = super(Warp, self)._run_interface(runtime, correct_return_codes) + runtime = super()._run_interface(runtime, correct_return_codes) if self.inputs.save_warp: import numpy as np warp_file = self._list_outputs()["warp_file"] - np.savetxt(warp_file, [runtime.stdout], fmt=str("%s")) + np.savetxt(warp_file, [runtime.stdout], fmt="%s") return runtime def _list_outputs(self): - outputs = super(Warp, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.save_warp: outputs["warp_file"] = fname_presuffix( outputs["out_file"], suffix="_transform.mat", use_ext=False @@ -4367,7 +4364,7 @@ class Qwarp(AFNICommand): def _format_arg(self, name, trait_spec, value): if name == "allineate_opts": return trait_spec.argstr % ("'" + value + "'") - return super(Qwarp, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/afni/svm.py b/nipype/interfaces/afni/svm.py index 13c83af51c..e7bd3c520a 100644 --- a/nipype/interfaces/afni/svm.py +++ b/nipype/interfaces/afni/svm.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """AFNI's svm interfaces.""" @@ -107,7 +106,7 @@ class SVMTrain(AFNICommand): _additional_metadata = ["suffix"] def _format_arg(self, name, trait_spec, value): - return super(SVMTrain, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) class SVMTestInputSpec(AFNICommandInputSpec): diff 
--git a/nipype/interfaces/afni/tests/__init__.py b/nipype/interfaces/afni/tests/__init__.py index 99fb243f19..349937997e 100644 --- a/nipype/interfaces/afni/tests/__init__.py +++ b/nipype/interfaces/afni/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/afni/utils.py b/nipype/interfaces/afni/utils.py index 3075cffce3..26711c95a7 100644 --- a/nipype/interfaces/afni/utils.py +++ b/nipype/interfaces/afni/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """AFNI utility interfaces.""" @@ -163,7 +162,7 @@ def _overload_extension(self, value, name=None): return os.path.join(path, base + ext) def _gen_filename(self, name): - return os.path.abspath(super(AFNItoNIFTI, self)._gen_filename(name)) + return os.path.abspath(super()._gen_filename(name)) class AutoboxInputSpec(AFNICommandInputSpec): @@ -224,7 +223,7 @@ class Autobox(AFNICommand): output_spec = AutoboxOutputSpec def aggregate_outputs(self, runtime=None, needed_outputs=None): - outputs = super(Autobox, self).aggregate_outputs(runtime, needed_outputs) + outputs = super().aggregate_outputs(runtime, needed_outputs) pattern = ( r"x=(?P-?\d+)\.\.(?P-?\d+) " r"y=(?P-?\d+)\.\.(?P-?\d+) " @@ -309,7 +308,7 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): if runtime is None: try: min_val = load_json(outfile)["stat"] - except IOError: + except OSError: return self.run().outputs else: min_val = [] @@ -418,7 +417,7 @@ class Bucket(AFNICommand): def _format_arg(self, name, spec, value): if name == "in_file": return spec.argstr % (" ".join([i[0] + "'" + i[1] + "'" for i in value])) - return super(Bucket, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class CalcInputSpec(AFNICommandInputSpec): @@ -492,11 +491,11 @@ def _format_arg(self, name, trait_spec, value): if isdefined(self.inputs.single_idx): arg += "[%d]" % (self.inputs.single_idx) return arg - return super(Calc, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): """Skip the arguments without argstr metadata""" - return super(Calc, self)._parse_inputs(skip=("start_idx", "stop_idx", "other")) + return super()._parse_inputs(skip=("start_idx", "stop_idx", "other")) class CatInputSpec(AFNICommandInputSpec): @@ -654,9 +653,9 @@ def _format_arg(self, name, spec, value): if name == "in_file": # Concatenate a series of filenames, with optional opkeys return " ".join( - "%s -%s" % (mfile, opkey) if opkey else mfile for mfile, opkey in value + f"{mfile} -{opkey}" if opkey else mfile for mfile, opkey in value ) - return super(CatMatvec, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class CenterMassInputSpec(CommandLineInputSpec): @@ -747,7 +746,7 @@ class CenterMass(AFNICommandBase): output_spec = CenterMassOutputSpec def _list_outputs(self): - outputs = super(CenterMass, self)._list_outputs() + outputs = super()._list_outputs() outputs["out_file"] = os.path.abspath(self.inputs.in_file) outputs["cm_file"] = os.path.abspath(self.inputs.cm_file) sout = np.loadtxt(outputs["cm_file"], ndmin=2) @@ -1125,11 +1124,11 @@ def _format_arg(self, name, trait_spec, value): if isdefined(self.inputs.single_idx): arg += "[%d]" % (self.inputs.single_idx) return arg - return super(Eval, 
self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): """Skip the arguments without argstr metadata""" - return super(Eval, self)._parse_inputs(skip=("start_idx", "stop_idx", "other")) + return super()._parse_inputs(skip=("start_idx", "stop_idx", "other")) class FWHMxInputSpec(CommandLineInputSpec): @@ -1360,7 +1359,7 @@ def _parse_inputs(self, skip=None): if skip is None: skip = [] skip += ["out_detrend"] - return super(FWHMx, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _format_arg(self, name, trait_spec, value): if name == "detrend": @@ -1381,10 +1380,10 @@ def _format_arg(self, name, trait_spec, value): return trait_spec.argstr + " %s %f" % value elif isinstance(value, (str, bytes)): return trait_spec.argstr + " " + value - return super(FWHMx, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _list_outputs(self): - outputs = super(FWHMx, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.detrend: fname, ext = op.splitext(self.inputs.in_file) @@ -1550,7 +1549,7 @@ def _format_arg(self, name, spec, value): if name == "neighborhood" and value[0] == "RECT": value = ("RECT", "%s,%s,%s" % value[1]) - return super(LocalBistat, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class LocalstatInputSpec(AFNICommandInputSpec): @@ -1761,7 +1760,7 @@ def _format_arg(self, name, spec, value): if len(value) == 3: value = "%s %s %s" % value - return super(Localstat, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class MaskToolInputSpec(AFNICommandInputSpec): @@ -2011,7 +2010,7 @@ def _parse_inputs(self, skip=None): if skip is None: skip = [] skip += ["out_file"] - return super(NwarpAdjust, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() @@ -2230,7 +2229,7 @@ def _format_arg(self, name, spec, value): ] ) ) - return super(NwarpCat, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): if name == "out_file": @@ -2581,7 +2580,7 @@ class ReHo(AFNICommandBase): output_spec = ReHoOutputSpec def _list_outputs(self): - outputs = super(ReHo, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.label_set: outputs["out_vals"] = outputs["out_file"] + "_ROI_reho.vals" return outputs @@ -2590,7 +2589,7 @@ def _format_arg(self, name, spec, value): _neigh_dict = {"faces": 7, "edges": 19, "vertices": 27} if name == "neighborhood": value = _neigh_dict[value] - return super(ReHo, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class ResampleInputSpec(AFNICommandInputSpec): @@ -2622,7 +2621,7 @@ class ResampleInputSpec(AFNICommandInputSpec): voxel_size = traits.Tuple( *[traits.Float()] * 3, argstr="-dxyz %f %f %f", - desc="resample to new dx, dy and dz" + desc="resample to new dx, dy and dz", ) master = File(argstr="-master %s", desc="align dataset grid to a reference file") @@ -3237,7 +3236,7 @@ class GCOR(CommandLine): output_spec = GCOROutputSpec def _run_interface(self, runtime): - runtime = super(GCOR, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) gcor_line = [ line.strip() diff --git a/nipype/interfaces/ants/__init__.py b/nipype/interfaces/ants/__init__.py index dc96642f23..9671f1b31d 100644 --- a/nipype/interfaces/ants/__init__.py +++ 
b/nipype/interfaces/ants/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Top-level namespace for ants.""" diff --git a/nipype/interfaces/ants/base.py b/nipype/interfaces/ants/base.py index a868e54b97..c484104d28 100644 --- a/nipype/interfaces/ants/base.py +++ b/nipype/interfaces/ants/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The ants module provides basic functions for interfacing with ANTS tools.""" @@ -74,7 +73,7 @@ class ANTSCommand(CommandLine): _num_threads = LOCAL_DEFAULT_NUMBER_OF_THREADS def __init__(self, **inputs): - super(ANTSCommand, self).__init__(**inputs) + super().__init__(**inputs) self.inputs.on_trait_change(self._num_threads_update, "num_threads") if not isdefined(self.inputs.num_threads): diff --git a/nipype/interfaces/ants/legacy.py b/nipype/interfaces/ants/legacy.py index 6be07f35a2..3a68a93247 100644 --- a/nipype/interfaces/ants/legacy.py +++ b/nipype/interfaces/ants/legacy.py @@ -1,11 +1,9 @@ -# -*- coding: utf-8 -*- """ANTS Legacy Interfaces These interfaces are for programs that have been deprecated by ANTs, but are preserved for backwards compatibility. """ -from builtins import range import os from glob import glob @@ -335,7 +333,7 @@ def _format_arg(self, opt, spec, val): else: start = "" return start + " ".join(name for name in val) - return super(buildtemplateparallel, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -366,7 +364,7 @@ def _list_outputs(self): outputs["subject_outfiles"] = [] for filename in self.inputs.in_files: _, base, _ = split_filename(filename) - temp = glob(os.path.realpath("%s%s*" % (self.inputs.out_prefix, base))) + temp = glob(os.path.realpath(f"{self.inputs.out_prefix}{base}*")) for file_ in temp: outputs["subject_outfiles"].append(file_) return outputs diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py index 4b870b53ab..de6798ccf8 100644 --- a/nipype/interfaces/ants/registration.py +++ b/nipype/interfaces/ants/registration.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """The ants module provides basic functions for interfacing with ants functions. 
""" @@ -203,7 +202,7 @@ def _transformation_constructor(self): return "".join(retval) def _regularization_constructor(self): - return "--regularization {0}[{1},{2}]".format( + return "--regularization {}[{},{}]".format( self.inputs.regularization, self.inputs.regularization_gradient_field_sigma, self.inputs.regularization_deformation_field_sigma, @@ -237,7 +236,7 @@ def _format_arg(self, opt, spec, val): return "--use-Histogram-Matching 1" else: return "--use-Histogram-Matching 0" - return super(ANTS, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -1012,12 +1011,12 @@ class Registration(ANTSCommand): ] def __init__(self, **inputs): - super(Registration, self).__init__(**inputs) + super().__init__(**inputs) self._elapsed_time = None self._metric_value = None def _run_interface(self, runtime, correct_return_codes=(0,)): - runtime = super(Registration, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) # Parse some profiling info output = runtime.stdout or runtime.merged @@ -1078,7 +1077,7 @@ def _format_metric(self, index): indexes = list(range(0, len(name_input))) specs = list() for i in indexes: - temp = dict([(k, v[i]) for k, v in items]) + temp = {k: v[i] for k, v in items} if len(self.inputs.fixed_image) == 1: temp["fixed_image"] = self.inputs.fixed_image[0] else: @@ -1195,7 +1194,7 @@ def _format_registration(self): moving_mask = moving_masks[ii if len(moving_masks) > 1 else 0] else: moving_mask = "NULL" - retval.append("--masks [ %s, %s ]" % (fixed_mask, moving_mask)) + retval.append(f"--masks [ {fixed_mask}, {moving_mask} ]") return " ".join(retval) def _get_outputfilenames(self, inverse=False): @@ -1248,7 +1247,7 @@ def _format_winsorize_image_intensities(self): ) ) self._quantilesDone = True - return "--winsorize-image-intensities [ %s, %s ]" % ( + return "--winsorize-image-intensities [ {}, {} ]".format( self.inputs.winsorize_lower_quantile, self.inputs.winsorize_upper_quantile, ) @@ -1275,7 +1274,7 @@ def _get_initial_transform_filenames(self): def _format_arg(self, opt, spec, val): if opt == "fixed_image_mask": if isdefined(self.inputs.moving_image_mask): - return "--masks [ %s, %s ]" % ( + return "--masks [ {}, {} ]".format( self.inputs.fixed_image_mask, self.inputs.moving_image_mask, ) @@ -1303,7 +1302,7 @@ def _format_arg(self, opt, spec, val): "Gaussian", "GenericLabel", ] and isdefined(self.inputs.interpolation_parameters): - return "--interpolation %s[ %s ]" % ( + return "--interpolation {}[ {} ]".format( self.inputs.interpolation, ", ".join( [str(param) for param in self.inputs.interpolation_parameters] @@ -1315,13 +1314,13 @@ def _format_arg(self, opt, spec, val): out_filename = self._get_outputfilenames(inverse=False) inv_out_filename = self._get_outputfilenames(inverse=True) if out_filename and inv_out_filename: - return "--output [ %s, %s, %s ]" % ( + return "--output [ {}, {}, {} ]".format( self.inputs.output_transform_prefix, out_filename, inv_out_filename, ) elif out_filename: - return "--output [ %s, %s ]" % ( + return "--output [ {}, {} ]".format( self.inputs.output_transform_prefix, out_filename, ) @@ -1336,7 +1335,7 @@ def _format_arg(self, opt, spec, val): # This feature was removed from recent versions of antsRegistration due to corrupt outputs. 
# elif opt == 'collapse_linear_transforms_to_fixed_image_header': # return self._formatCollapseLinearTransformsToFixedImageHeader() - return super(Registration, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _output_filenames(self, prefix, count, transform, inverse=False): self.low_dimensional_transform_map = { @@ -1626,7 +1625,7 @@ def _format_arg(self, opt, spec, val): return self._metric_constructor() elif opt == "fixed_image_mask": return self._mask_constructor() - return super(MeasureImageSimilarity, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() @@ -1772,7 +1771,7 @@ def _num_threads_update(self): def _format_arg(self, name, spec, value): if name == "precision_type": return spec.argstr % value[0] - return super(RegistrationSynQuick, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -1868,13 +1867,13 @@ def _format_arg(self, name, spec, value): return "" if name == "out_file" and self.inputs.process == "disassemble": return "" - return super(CompositeTransformUtil, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() if self.inputs.process == "disassemble": outputs["affine_transform"] = os.path.abspath( - "00_{}_AffineTransform.mat".format(self.inputs.output_prefix) + f"00_{self.inputs.output_prefix}_AffineTransform.mat" ) outputs["displacement_field"] = os.path.abspath( "01_{}_DisplacementFieldTransform.nii.gz".format( diff --git a/nipype/interfaces/ants/resampling.py b/nipype/interfaces/ants/resampling.py index 8738c2dada..1ff1f5c029 100644 --- a/nipype/interfaces/ants/resampling.py +++ b/nipype/interfaces/ants/resampling.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ANTS Apply Transforms interface """ import os @@ -130,9 +129,7 @@ def _format_arg(self, opt, spec, val): ) return " ".join(series) - return super(WarpTimeSeriesImageMultiTransform, self)._format_arg( - opt, spec, val - ) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -143,9 +140,7 @@ def _list_outputs(self): return outputs def _run_interface(self, runtime, correct_return_codes=[0]): - runtime = super(WarpTimeSeriesImageMultiTransform, self)._run_interface( - runtime, correct_return_codes=[0, 1] - ) + runtime = super()._run_interface(runtime, correct_return_codes=[0, 1]) if "100 % complete" not in runtime.stdout: self.raise_exception(runtime) return runtime @@ -290,7 +285,7 @@ def _format_arg(self, opt, spec, val): return " ".join(series) - return super(WarpImageMultiTransform, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -509,7 +504,7 @@ def _format_arg(self, opt, spec, val): "MultiLabel", "Gaussian", ] and isdefined(self.inputs.interpolation_parameters): - return "--interpolation %s[ %s ]" % ( + return "--interpolation {}[ {} ]".format( self.inputs.interpolation, ", ".join( [str(param) for param in self.inputs.interpolation_parameters] @@ -517,7 +512,7 @@ def _format_arg(self, opt, spec, val): ) else: return "--interpolation %s" % self.inputs.interpolation - return super(ApplyTransforms, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ 
-614,10 +609,8 @@ def _get_transform_filenames(self): ) else: raise Exception( - ( - "ERROR: The useInverse list must have the same number " - "of entries as the transformsFileName list." - ) + "ERROR: The useInverse list must have the same number " + "of entries as the transformsFileName list." ) else: retval.append("--transform %s" % self.inputs.transforms[ii]) @@ -626,4 +619,4 @@ def _get_transform_filenames(self): def _format_arg(self, opt, spec, val): if opt == "transforms": return self._get_transform_filenames() - return super(ApplyTransformsToPoints, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py index 8af76d95b9..11119cbde7 100644 --- a/nipype/interfaces/ants/segmentation.py +++ b/nipype/interfaces/ants/segmentation.py @@ -199,7 +199,7 @@ def _format_arg(self, opt, spec, val): self.inputs.prior_probability_threshold ): brackets.append("%g" % self.inputs.prior_probability_threshold) - return "--initialization %s[%s]" % (val, ",".join(brackets)) + return "--initialization {}[{}]".format(val, ",".join(brackets)) if opt == "mrf_smoothing_factor": retval = "--mrf [%g" % val if isdefined(self.inputs.mrf_radius): @@ -227,7 +227,7 @@ def _format_arg(self, opt, spec, val): if isdefined(self.inputs.save_posteriors): retval += ",%s" % self.inputs.output_posteriors_name_template return retval + "]" - return super(Atropos, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _gen_filename(self, name): if name == "out_classified_image_name": @@ -499,11 +499,11 @@ class N4BiasFieldCorrection(ANTSCommand, CopyHeaderInterface): def __init__(self, *args, **kwargs): """Instantiate the N4BiasFieldCorrection interface.""" self._out_bias_file = None - super(N4BiasFieldCorrection, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) def _format_arg(self, name, trait_spec, value): if name == "output_image" and self._out_bias_file: - newval = "[ %s, %s ]" % (value, self._out_bias_file) + newval = f"[ {value}, {self._out_bias_file} ]" return trait_spec.argstr % newval if name == "bspline_fitting_distance": @@ -515,7 +515,7 @@ def _format_arg(self, name, trait_spec, value): if name == "n_iterations": if isdefined(self.inputs.convergence_threshold): - newval = "[ %s, %g ]" % ( + newval = "[ {}, {:g} ]".format( self._format_xarray([str(elt) for elt in value]), self.inputs.convergence_threshold, ) @@ -523,7 +523,7 @@ def _format_arg(self, name, trait_spec, value): newval = "[ %s ]" % self._format_xarray([str(elt) for elt in value]) return trait_spec.argstr % newval - return super(N4BiasFieldCorrection, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): skip = (skip or []) + ["save_bias", "bias_image"] @@ -535,10 +535,10 @@ def _parse_inputs(self, skip=None): os.path.basename(self.inputs.input_image), suffix="_bias" ) self._out_bias_file = bias_image - return super(N4BiasFieldCorrection, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _list_outputs(self): - outputs = super(N4BiasFieldCorrection, self)._list_outputs() + outputs = super()._list_outputs() if self._out_bias_file: outputs["bias_image"] = os.path.abspath(self._out_bias_file) return outputs @@ -767,7 +767,7 @@ def _format_arg(self, opt, spec, val): _, _, ext = split_filename(self.inputs.segmentation_priors[0]) retval = "-p nipype_priors/BrainSegmentationPrior%02d" + ext return 
retval - return super(CorticalThickness, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _run_interface(self, runtime, correct_return_codes=[0]): priors_directory = os.path.join(os.getcwd(), "nipype_priors") @@ -783,7 +783,7 @@ def _run_interface(self, runtime, correct_return_codes=[0]): and os.path.realpath(target) == os.path.abspath(f) ): copyfile(os.path.abspath(f), target) - runtime = super(CorticalThickness, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) return runtime def _list_outputs(self): @@ -1006,7 +1006,7 @@ def _run_interface(self, runtime, correct_return_codes=(0,)): self.inputs.environ.update({"ANTSPATH": ants_path}) runtime.environ.update({"ANTSPATH": ants_path}) - runtime = super(BrainExtraction, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) # Still, double-check if it didn't found N4 if "we can't find" in runtime.stdout: @@ -1235,13 +1235,13 @@ def _format_arg(self, name, trait_spec, value): if (name == "output_image") and ( self.inputs.save_noise or isdefined(self.inputs.noise_image) ): - newval = "[ %s, %s ]" % ( + newval = "[ {}, {} ]".format( self._filename_from_source("output_image"), self._filename_from_source("noise_image"), ) return trait_spec.argstr % newval - return super(DenoiseImage, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) class JointFusionInputSpec(ANTSCommandInputSpec): @@ -1482,16 +1482,16 @@ def _format_arg(self, opt, spec, val): retval = [] for ii in range(len(self.inputs.exclusion_image_label)): retval.append( - "-e {0}[{1}]".format( + "-e {}[{}]".format( self.inputs.exclusion_image_label[ii], self.inputs.exclusion_image[ii], ) ) return " ".join(retval) if opt == "patch_radius": - return "-p {0}".format(self._format_xarray(val)) + return f"-p {self._format_xarray(val)}" if opt == "search_radius": - return "-s {0}".format(self._format_xarray(val)) + return f"-s {self._format_xarray(val)}" if opt == "out_label_fusion": args = [self.inputs.out_label_fusion] for option in ( @@ -1508,19 +1508,19 @@ def _format_arg(self, opt, spec, val): return "-o [{}]".format(", ".join(args)) if opt == "out_intensity_fusion_name_format": if not isdefined(self.inputs.out_label_fusion): - return "-o {0}".format(self.inputs.out_intensity_fusion_name_format) + return f"-o {self.inputs.out_intensity_fusion_name_format}" return "" if opt == "atlas_image": return " ".join( [ - "-g [{0}]".format(", ".join("'%s'" % fn for fn in ai)) + "-g [{}]".format(", ".join("'%s'" % fn for fn in ai)) for ai in self.inputs.atlas_image ] ) if opt == "target_image": return " ".join( [ - "-t [{0}]".format(", ".join("'%s'" % fn for fn in ai)) + "-t [{}]".format(", ".join("'%s'" % fn for fn in ai)) for ai in self.inputs.target_image ] ) @@ -1528,14 +1528,12 @@ def _format_arg(self, opt, spec, val): if len(val) != len(self.inputs.atlas_image): raise ValueError( "Number of specified segmentations should be identical to the number " - "of atlas image sets {0}!={1}".format( + "of atlas image sets {}!={}".format( len(val), len(self.inputs.atlas_image) ) ) - return " ".join( - ["-l {0}".format(fn) for fn in self.inputs.atlas_segmentation_image] - ) + return " ".join([f"-l {fn}" for fn in self.inputs.atlas_segmentation_image]) return super(AntsJointFusion, self)._format_arg(opt, spec, val) def _list_outputs(self): @@ -1760,7 +1758,7 @@ def _parse_inputs(self, skip=None): if skip is None: skip = [] skip += ["warped_white_matter", "gray_matter_label", 
"white_matter_label"] - return super(KellyKapowski, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _gen_filename(self, name): if name == "cortical_thickness": @@ -1779,7 +1777,7 @@ def _gen_filename(self, name): def _format_arg(self, opt, spec, val): if opt == "segmentation_image": - newval = "[{0},{1},{2}]".format( + newval = "[{},{},{}]".format( self.inputs.segmentation_image, self.inputs.gray_matter_label, self.inputs.white_matter_label, @@ -1789,7 +1787,7 @@ def _format_arg(self, opt, spec, val): if opt == "cortical_thickness": ct = self._gen_filename("cortical_thickness") wm = self._gen_filename("warped_white_matter") - newval = "[{},{}]".format(ct, wm) + newval = f"[{ct},{wm}]" return spec.argstr % newval - return super(KellyKapowski, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) diff --git a/nipype/interfaces/ants/tests/__init__.py b/nipype/interfaces/ants/tests/__init__.py index 99fb243f19..349937997e 100644 --- a/nipype/interfaces/ants/tests/__init__.py +++ b/nipype/interfaces/ants/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/ants/utils.py b/nipype/interfaces/ants/utils.py index c68e98b479..2bc70b641f 100644 --- a/nipype/interfaces/ants/utils.py +++ b/nipype/interfaces/ants/utils.py @@ -196,7 +196,7 @@ class ImageMath(ANTSCommand, CopyHeaderInterface): ) def __init__(self, **inputs): - super(ImageMath, self).__init__(**inputs) + super().__init__(**inputs) if self.inputs.operation in self._no_copy_header_operation: self.inputs.copy_header = False @@ -304,7 +304,7 @@ def _format_arg(self, name, trait_spec, value): value = " ".join(["%g" % d for d in value]) - return super(ResampleImageBySpacing, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) class ThresholdImageInputSpec(ANTSCommandInputSpec): @@ -508,7 +508,7 @@ class AI(ANTSCommand): output_spec = AIOuputSpec def _run_interface(self, runtime, correct_return_codes=(0,)): - runtime = super(AI, self)._run_interface(runtime, correct_return_codes) + runtime = super()._run_interface(runtime, correct_return_codes) self._output = { "output_transform": os.path.join( @@ -527,14 +527,14 @@ def _format_arg(self, opt, spec, val): return spec.argstr % val if opt == "search_grid": - fmtval = "[%s,%s]" % (val[0], "x".join("%g" % v for v in val[1])) + fmtval = "[{},{}]".format(val[0], "x".join("%g" % v for v in val[1])) return spec.argstr % fmtval if opt == "fixed_image_mask": if isdefined(self.inputs.moving_image_mask): - return spec.argstr % ("[%s,%s]" % (val, self.inputs.moving_image_mask)) + return spec.argstr % (f"[{val},{self.inputs.moving_image_mask}]") - return super(AI, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): return getattr(self, "_output") @@ -582,7 +582,7 @@ class AverageAffineTransform(ANTSCommand): output_spec = AverageAffineTransformOutputSpec def _format_arg(self, opt, spec, val): - return super(AverageAffineTransform, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -643,7 +643,7 @@ class AverageImages(ANTSCommand): output_spec = AverageImagesOutputSpec def _format_arg(self, opt, spec, val): - return super(AverageImages, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def 
_list_outputs(self): outputs = self._outputs().get() @@ -699,7 +699,7 @@ class MultiplyImages(ANTSCommand): output_spec = MultiplyImagesOutputSpec def _format_arg(self, opt, spec, val): - return super(MultiplyImages, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -751,7 +751,7 @@ class CreateJacobianDeterminantImage(ANTSCommand): output_spec = CreateJacobianDeterminantImageOutputSpec def _format_arg(self, opt, spec, val): - return super(CreateJacobianDeterminantImage, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() diff --git a/nipype/interfaces/ants/visualization.py b/nipype/interfaces/ants/visualization.py index 0fcf9a6b47..c73b64c632 100644 --- a/nipype/interfaces/ants/visualization.py +++ b/nipype/interfaces/ants/visualization.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """The ants visualisation module provides basic functions based on ITK. """ @@ -98,7 +97,7 @@ class ConvertScalarImageToRGB(ANTSCommand): output_spec = ConvertScalarImageToRGBOutputSpec def _format_arg(self, opt, spec, val): - return super(ConvertScalarImageToRGB, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() diff --git a/nipype/interfaces/base/__init__.py b/nipype/interfaces/base/__init__.py index 7c70f9768d..baf54e2b30 100644 --- a/nipype/interfaces/base/__init__.py +++ b/nipype/interfaces/base/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py index fd2e2c54ca..33e583ccd3 100644 --- a/nipype/interfaces/base/core.py +++ b/nipype/interfaces/base/core.py @@ -61,7 +61,7 @@ __docformat__ = "restructuredtext" -class Interface(object): +class Interface: """This is an abstract definition for Interface objects. It provides no functionality. It defines the necessary attributes @@ -562,7 +562,7 @@ class SimpleInterface(BaseInterface): """ def __init__(self, from_file=None, resource_monitor=None, **inputs): - super(SimpleInterface, self).__init__( + super().__init__( from_file=from_file, resource_monitor=resource_monitor, **inputs ) self._results = {} @@ -628,7 +628,7 @@ def set_default_terminal_output(cls, output_type): def __init__( self, command=None, terminal_output=None, write_cmdline=False, **inputs ): - super(CommandLine, self).__init__(**inputs) + super().__init__(**inputs) self._environ = None # Set command. Input argument takes precedence self._cmd = command or getattr(self, "_cmd", None) @@ -751,7 +751,7 @@ def _run_interface(self, runtime, correct_return_codes=(0,)): cmd_path = which(executable_name, env=runtime.environ) if cmd_path is None: - raise IOError( + raise OSError( 'No command "%s" found on host %s. Please check that the ' "corresponding package is installed." 
% (executable_name, runtime.hostname) @@ -994,7 +994,7 @@ def cmdline(self): result.append("mpiexec") if self.inputs.n_procs: result.append("-n %d" % self.inputs.n_procs) - result.append(super(MpiCommandLine, self).cmdline) + result.append(super().cmdline) return " ".join(result) @@ -1033,7 +1033,7 @@ def _format_arg(self, name, spec, value): value = os.path.abspath(self._outputs_filenames[name]) else: return "" - return super(SEMLikeCommandLine, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class LibraryBaseInterface(BaseInterface): @@ -1041,7 +1041,7 @@ class LibraryBaseInterface(BaseInterface): imports = () def __init__(self, check_import=True, *args, **kwargs): - super(LibraryBaseInterface, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) if check_import: import pkgutil @@ -1065,10 +1065,10 @@ def version(self): self._version = importlib.import_module(self._pkg).__version__ except (ImportError, AttributeError): pass - return super(LibraryBaseInterface, self).version + return super().version -class PackageInfo(object): +class PackageInfo: _version = None version_cmd = None version_file = None @@ -1083,13 +1083,13 @@ def version(klass): resource_monitor=False, terminal_output="allatonce", ).run() - except IOError: + except OSError: return None raw_info = clout.runtime.stdout elif klass.version_file is not None: try: - with open(klass.version_file, "rt") as fobj: + with open(klass.version_file) as fobj: raw_info = fobj.read() except OSError: return None diff --git a/nipype/interfaces/base/specs.py b/nipype/interfaces/base/specs.py index 01ef126abb..7db8388cd8 100644 --- a/nipype/interfaces/base/specs.py +++ b/nipype/interfaces/base/specs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -63,7 +62,7 @@ def __init__(self, **kwargs): # arguments. HasTraits does not define an __init__ and # therefore these args were being ignored. # super(TraitedSpec, self).__init__(*args, **kwargs) - super(BaseTraitedSpec, self).__init__(**kwargs) + super().__init__(**kwargs) traits.push_exception_handler(reraise_exceptions=True) undefined_traits = {} for trait in self.copyable_trait_names(): @@ -82,7 +81,7 @@ def __repr__(self): """Return a well-formatted representation of the traits""" outstr = [] for name, value in sorted(self.trait_get().items()): - outstr.append("%s = %s" % (name, value)) + outstr.append(f"{name} = {value}") return "\n{}\n".format("\n".join(outstr)) def _generate_handlers(self): @@ -115,13 +114,13 @@ def _xor_warn(self, obj, name, old, new): 'Input "%s" is mutually exclusive with input "%s", ' "which is already set" ) % (name, trait_name) - raise IOError(msg) + raise OSError(msg) def _deprecated_warn(self, obj, name, old, new): """Checks if a user assigns a value to a deprecated trait""" if isdefined(new): trait_spec = self.traits()[name] - msg1 = "Input %s in interface %s is deprecated." % ( + msg1 = "Input {} in interface {} is deprecated.".format( name, self.__class__.__name__.split("InputSpec")[0], ) @@ -142,7 +141,7 @@ def _deprecated_warn(self, obj, name, old, new): raise TraitError(msg) else: if trait_spec.new_name: - msg += "Unsetting old value %s; setting new value %s." 
% ( + msg += "Unsetting old value {}; setting new value {}.".format( name, trait_spec.new_name, ) @@ -150,7 +149,7 @@ def _deprecated_warn(self, obj, name, old, new): if trait_spec.new_name: self.trait_set( trait_change_notify=False, - **{"%s" % name: Undefined, "%s" % trait_spec.new_name: new} + **{"%s" % name: Undefined, "%s" % trait_spec.new_name: new}, ) def trait_get(self, **kwargs): @@ -159,7 +158,7 @@ def trait_get(self, **kwargs): Augments the trait get function to return a dictionary without notification handles """ - out = super(BaseTraitedSpec, self).trait_get(**kwargs) + out = super().trait_get(**kwargs) out = self._clean_container(out, Undefined) return out @@ -172,7 +171,7 @@ def get_traitsfree(self, **kwargs): any traits. The dictionary does not contain any attributes that were Undefined """ - out = super(BaseTraitedSpec, self).trait_get(**kwargs) + out = super().trait_get(**kwargs) out = self._clean_container(out, skipundefined=True) return out @@ -348,7 +347,7 @@ def __getstate__(self): [4] """ - state = super(BaseTraitedSpec, self).__getstate__() + state = super().__getstate__() for key in self.__all__: _trait_spec = self.trait(key) if _trait_spec.is_trait_type(OutputMultiObject): diff --git a/nipype/interfaces/base/support.py b/nipype/interfaces/base/support.py index 14c8a55da1..c6c223b668 100644 --- a/nipype/interfaces/base/support.py +++ b/nipype/interfaces/base/support.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -129,10 +128,10 @@ def __init__(self, value): self.value = value def __str__(self): - return "{}".format(self.value) + return f"{self.value}" -class Bunch(object): +class Bunch: """ Dictionary-like class that provides attribute-style access to its items. @@ -203,11 +202,11 @@ def __repr__(self): if isinstance(v, dict): pairs = [] for key, value in sorted(v.items()): - pairs.append("'%s': %s" % (key, value)) + pairs.append(f"'{key}': {value}") v = "{" + ", ".join(pairs) + "}" - outstr.append("%s=%s" % (k, v)) + outstr.append(f"{k}={v}") else: - outstr.append("%s=%r" % (k, v)) + outstr.append(f"{k}={v!r}") first = False outstr.append(")") return "".join(outstr) @@ -289,7 +288,7 @@ def _hash_bunch_dict(adict, key): return [(afile, hash_infile(afile)) for afile in stuff] -class InterfaceResult(object): +class InterfaceResult: """Object that contains the results of running a particular Interface. 
Attributes @@ -448,7 +447,7 @@ def get_trait_desc(inputs, name, spec): default = "" if spec.usedefault: default = ", nipype default value: %s" % str(spec.default_value()[1]) - line = "(%s%s)" % (type_info, default) + line = f"({type_info}{default})" manhelpstr = wrap( line, @@ -468,7 +467,7 @@ def get_trait_desc(inputs, name, spec): pos = spec.position if pos is not None: manhelpstr += wrap( - "argument: ``%s``, position: %s" % (argstr, pos), + f"argument: ``{argstr}``, position: {pos}", HELP_LINEWIDTH, initial_indent="\t\t", subsequent_indent="\t\t", diff --git a/nipype/interfaces/base/tests/__init__.py b/nipype/interfaces/base/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/base/tests/__init__.py +++ b/nipype/interfaces/base/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/base/tests/test_core.py b/nipype/interfaces/base/tests/test_core.py index 0c3f5a8dc1..d86142ff3b 100644 --- a/nipype/interfaces/base/tests/test_core.py +++ b/nipype/interfaces/base/tests/test_core.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -117,7 +116,7 @@ class DerivedInterface(nib.BaseInterface): input_spec = InputSpec def __init__(self, **inputs): - super(DerivedInterface, self).__init__(**inputs) + super().__init__(**inputs) inputs_dict = {"input1": 12, "input3": True, "input4": "some string"} bif = DerivedInterface(**inputs_dict) @@ -571,13 +570,13 @@ class OOPCLI(nib.CommandLine): ci.run() class OOPShell(nib.CommandLine): - _cmd_prefix = "bash {}/".format(oop) + _cmd_prefix = f"bash {oop}/" ci = OOPShell(command=script_name) ci.run() class OOPBadShell(nib.CommandLine): - _cmd_prefix = "shell_dne {}/".format(oop) + _cmd_prefix = f"shell_dne {oop}/" ci = OOPBadShell(command=script_name) with pytest.raises(IOError): diff --git a/nipype/interfaces/base/tests/test_resource_monitor.py b/nipype/interfaces/base/tests/test_resource_monitor.py index 79ca422385..802e8e6ec9 100644 --- a/nipype/interfaces/base/tests/test_resource_monitor.py +++ b/nipype/interfaces/base/tests/test_resource_monitor.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/interfaces/base/tests/test_specs.py b/nipype/interfaces/base/tests/test_specs.py index b088c95716..44a9c014c4 100644 --- a/nipype/interfaces/base/tests/test_specs.py +++ b/nipype/interfaces/base/tests/test_specs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/base/tests/test_support.py b/nipype/interfaces/base/tests/test_support.py index 878794b04f..3997a88280 100644 --- a/nipype/interfaces/base/tests/test_support.py +++ b/nipype/interfaces/base/tests/test_support.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -50,7 +49,7 @@ def test_bunch_hash(): assert bhash == "d1f46750044c3de102efc847720fc35f" # Make sure the hash stored in the json file for `infile` is correct. 
jshash = md5() - with open(json_pth, "r") as fp: + with open(json_pth) as fp: jshash.update(fp.read().encode("utf-8")) assert newbdict["infile"][0][1] == jshash.hexdigest() assert newbdict["yat"] is True diff --git a/nipype/interfaces/base/traits_extension.py b/nipype/interfaces/base/traits_extension.py index cadce596fb..db41244fc4 100644 --- a/nipype/interfaces/base/traits_extension.py +++ b/nipype/interfaces/base/traits_extension.py @@ -50,7 +50,7 @@ "nifti2": (".nii", ".nii.gz"), "nrrd": (".nrrd", ".nhdr"), } -IMG_ZIP_FMT = set([".nii.gz", "tar.gz", ".gii.gz", ".mgz", ".mgh.gz", "img.gz"]) +IMG_ZIP_FMT = {".nii.gz", "tar.gz", ".gii.gz", ".mgz", ".mgh.gz", "img.gz"} """ The functions that pop-up the Traits GUIs, edit_traits and @@ -121,7 +121,7 @@ def __init__(self, value=Undefined, exists=False, resolve=False, **metadata): """Create a BasePath trait.""" self.exists = exists self.resolve = resolve - super(BasePath, self).__init__(value, **metadata) + super().__init__(value, **metadata) def validate(self, objekt, name, value, return_pathlike=False): """Validate a value change.""" @@ -309,15 +309,10 @@ def __init__( extensions = list(set(extensions) - IMG_ZIP_FMT) self._exts = sorted( - set( - [ - ".%s" % ext if not ext.startswith(".") else ext - for ext in extensions - ] - ) + {".%s" % ext if not ext.startswith(".") else ext for ext in extensions} ) - super(File, self).__init__( + super().__init__( value=value, exists=exists, resolve=resolve, @@ -327,10 +322,10 @@ def __init__( def validate(self, objekt, name, value, return_pathlike=False): """Validate a value change.""" - value = super(File, self).validate(objekt, name, value, return_pathlike=True) + value = super().validate(objekt, name, value, return_pathlike=True) if self._exts: fname = value.name - if not any((fname.endswith(e) for e in self._exts)): + if not any(fname.endswith(e) for e in self._exts): self.error(objekt, name, str(value)) if not return_pathlike: @@ -366,7 +361,7 @@ def __init__( ) extensions = [ext for t in types for ext in IMG_FORMATS[t]] - super(ImageFile, self).__init__( + super().__init__( value=value, exists=exists, extensions=extensions, @@ -421,7 +416,7 @@ def validate(self, objekt, name, value): and not isinstance(value[0], list) ): newvalue = [value] - value = super(MultiObject, self).validate(objekt, name, newvalue) + value = super().validate(objekt, name, newvalue) if value: return value diff --git a/nipype/interfaces/brainsuite/__init__.py b/nipype/interfaces/brainsuite/__init__.py index 45bcf5fc65..5fb27d6ae1 100644 --- a/nipype/interfaces/brainsuite/__init__.py +++ b/nipype/interfaces/brainsuite/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .brainsuite import ( Bse, Bfc, diff --git a/nipype/interfaces/brainsuite/brainsuite.py b/nipype/interfaces/brainsuite/brainsuite.py index 91f762f47d..846132724d 100644 --- a/nipype/interfaces/brainsuite/brainsuite.py +++ b/nipype/interfaces/brainsuite/brainsuite.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """This script provides interfaces for BrainSuite command line tools. Please see brainsuite.org for more information. 
@@ -269,7 +268,7 @@ def _format_arg(self, name, spec, value): }[value] ) - return super(Bfc, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): return l_outputs(self) @@ -777,12 +776,12 @@ def _format_arg(self, name, spec, value): return ( spec.argstr % { - "greater_than": "".join(("-gt %f" % threshold)), - "less_than": "".join(("-lt %f" % threshold)), - "equal_to": "".join(("-eq %f" % threshold)), + "greater_than": "".join("-gt %f" % threshold), + "less_than": "".join("-lt %f" % threshold), + "equal_to": "".join("-eq %f" % threshold), }[value] ) - return super(Dfs, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): inputs = self.inputs.get() @@ -1206,7 +1205,7 @@ def _format_arg(self, name, spec, value): return spec.argstr % os.path.expanduser(value) if name == "dataSinkDelay": return spec.argstr % "" - return super(SVReg, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class BDPInputSpec(CommandLineInputSpec): @@ -1756,7 +1755,7 @@ def _format_arg(self, name, spec, value): return spec.argstr % (value[0], value[1]) if name == "dataSinkDelay": return spec.argstr % "" - return super(BDP, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class ThicknessPVCInputSpec(CommandLineInputSpec): diff --git a/nipype/interfaces/brainsuite/tests/__init__.py b/nipype/interfaces/brainsuite/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/brainsuite/tests/__init__.py +++ b/nipype/interfaces/brainsuite/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/bru2nii.py b/nipype/interfaces/bru2nii.py index 7ed9c77651..4635bbdc9d 100644 --- a/nipype/interfaces/bru2nii.py +++ b/nipype/interfaces/bru2nii.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """The bru2nii module provides basic functions for dicom conversion """ diff --git a/nipype/interfaces/c3.py b/nipype/interfaces/c3.py index e9a3aed5eb..8112da3dd1 100644 --- a/nipype/interfaces/c3.py +++ b/nipype/interfaces/c3.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Convert3D is a command-line tool for converting 3D images between common file formats.""" import os from glob import glob @@ -197,7 +196,7 @@ class C3d(CommandLine): _cmd = "c3d" def __init__(self, **inputs): - super(C3d, self).__init__(**inputs) + super().__init__(**inputs) self.inputs.on_trait_change(self._is_4d, "is_4d") if self.inputs.is_4d: self._is_4d() @@ -211,7 +210,7 @@ def _run_interface(self, runtime): # Convert3d does not want to override file, by default # so we define a new output file self._gen_outfile() - runtime = super(C3d, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) self._cmd = cmd return runtime @@ -225,7 +224,7 @@ def _gen_outfile(self): self.inputs.out_file = fn + "_generated" + ext # if generated file will overwrite, raise error if os.path.exists(os.path.abspath(self.inputs.out_file)): - raise IOError("File already found - to overwrite, use `out_file`.") + raise OSError("File already found - to overwrite, use `out_file`.") iflogger.info("Generating `out_file`.") def _list_outputs(self): diff --git a/nipype/interfaces/camino/__init__.py b/nipype/interfaces/camino/__init__.py index e90cc6f375..766fa9c906 100644 --- a/nipype/interfaces/camino/__init__.py +++ b/nipype/interfaces/camino/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; 
indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Camino top level namespace diff --git a/nipype/interfaces/camino/calib.py b/nipype/interfaces/camino/calib.py index 0c44b4abea..0c432c00b4 100644 --- a/nipype/interfaces/camino/calib.py +++ b/nipype/interfaces/camino/calib.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import os from ...utils.filemanip import split_filename diff --git a/nipype/interfaces/camino/connectivity.py b/nipype/interfaces/camino/connectivity.py index 6fdc8239f7..3421afced2 100644 --- a/nipype/interfaces/camino/connectivity.py +++ b/nipype/interfaces/camino/connectivity.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os from ...utils.filemanip import split_filename diff --git a/nipype/interfaces/camino/convert.py b/nipype/interfaces/camino/convert.py index 0dd998c0c6..d81ed9449b 100644 --- a/nipype/interfaces/camino/convert.py +++ b/nipype/interfaces/camino/convert.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import os import glob @@ -446,10 +444,10 @@ class ProcStreamlines(StdOutCommandLine): def _format_arg(self, name, spec, value): if name == "outputroot": return spec.argstr % self._get_actual_outputroot(value) - return super(ProcStreamlines, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def __init__(self, *args, **kwargs): - super(ProcStreamlines, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) self.outputroot_files = [] def _run_interface(self, runtime): @@ -459,13 +457,13 @@ def _run_interface(self, runtime): base, filename, ext = split_filename(actual_outputroot) if not os.path.exists(base): os.makedirs(base) - new_runtime = super(ProcStreamlines, self)._run_interface(runtime) + new_runtime = super()._run_interface(runtime) self.outputroot_files = glob.glob( os.path.join(os.getcwd(), actual_outputroot + "*") ) return new_runtime else: - new_runtime = super(ProcStreamlines, self)._run_interface(runtime) + new_runtime = super()._run_interface(runtime) return new_runtime def _get_actual_outputroot(self, outputroot): diff --git a/nipype/interfaces/camino/dti.py b/nipype/interfaces/camino/dti.py index 3e223532f6..dbf4424600 100644 --- a/nipype/interfaces/camino/dti.py +++ b/nipype/interfaces/camino/dti.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import os from ...utils.filemanip import split_filename @@ -938,7 +936,7 @@ class TrackDT(Track): def __init__(self, command=None, **inputs): inputs["inputmodel"] = "dt" - return super(TrackDT, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class TrackPICoInputSpec(TrackInputSpec): @@ -975,7 +973,7 @@ class TrackPICo(Track): def __init__(self, command=None, **inputs): inputs["inputmodel"] = "pico" - return super(TrackPICo, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class TrackBedpostxDeterInputSpec(TrackInputSpec): @@ -1025,7 +1023,7 @@ class TrackBedpostxDeter(Track): def __init__(self, command=None, **inputs): inputs["inputmodel"] = "bedpostx_dyad" - return super(TrackBedpostxDeter, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class TrackBedpostxProbaInputSpec(TrackInputSpec): @@ -1086,7 +1084,7 @@ class TrackBedpostxProba(Track): def __init__(self, command=None, **inputs): inputs["inputmodel"] = "bedpostx" - return super(TrackBedpostxProba, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class TrackBayesDiracInputSpec(TrackInputSpec): @@ -1187,7 +1185,7 @@ class TrackBayesDirac(Track): def __init__(self, 
command=None, **inputs): inputs["inputmodel"] = "bayesdirac" - return super(TrackBayesDirac, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class TrackBallStick(Track): @@ -1206,7 +1204,7 @@ class TrackBallStick(Track): def __init__(self, command=None, **inputs): inputs["inputmodel"] = "ballstick" - return super(TrackBallStick, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class TrackBootstrapInputSpec(TrackInputSpec): @@ -1268,7 +1266,7 @@ class TrackBootstrap(Track): input_spec = TrackBootstrapInputSpec def __init__(self, command=None, **inputs): - return super(TrackBootstrap, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class ComputeMeanDiffusivityInputSpec(CommandLineInputSpec): diff --git a/nipype/interfaces/camino/odf.py b/nipype/interfaces/camino/odf.py index 3825cf1bb6..de50073202 100644 --- a/nipype/interfaces/camino/odf.py +++ b/nipype/interfaces/camino/odf.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import os from ...utils.filemanip import split_filename diff --git a/nipype/interfaces/camino/tests/__init__.py b/nipype/interfaces/camino/tests/__init__.py index 99fb243f19..349937997e 100644 --- a/nipype/interfaces/camino/tests/__init__.py +++ b/nipype/interfaces/camino/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/camino/utils.py b/nipype/interfaces/camino/utils.py index 201e4e05d0..cc1416575c 100644 --- a/nipype/interfaces/camino/utils.py +++ b/nipype/interfaces/camino/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os from ..base import ( diff --git a/nipype/interfaces/camino2trackvis/__init__.py b/nipype/interfaces/camino2trackvis/__init__.py index b132a20f0c..ce31d60610 100644 --- a/nipype/interfaces/camino2trackvis/__init__.py +++ b/nipype/interfaces/camino2trackvis/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Camino-Trackvis allows interoperability between Camino and TrackVis.""" diff --git a/nipype/interfaces/camino2trackvis/convert.py b/nipype/interfaces/camino2trackvis/convert.py index a4db0b59ef..8d1db28b95 100644 --- a/nipype/interfaces/camino2trackvis/convert.py +++ b/nipype/interfaces/camino2trackvis/convert.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Provides interfaces to various commands provided by Camino-Trackvis.""" import os diff --git a/nipype/interfaces/camino2trackvis/tests/__init__.py b/nipype/interfaces/camino2trackvis/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/camino2trackvis/tests/__init__.py +++ b/nipype/interfaces/camino2trackvis/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/cat12/preprocess.py b/nipype/interfaces/cat12/preprocess.py index 1d962bc08b..1f9189b376 100644 --- a/nipype/interfaces/cat12/preprocess.py +++ b/nipype/interfaces/cat12/preprocess.py @@ -525,7 +525,7 @@ def _format_arg(self, opt, spec, val): elif opt in ["tpm", "shooting_tpm"]: return Cell2Str(val) - return super(CAT12Segment, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -726,7 +726,7 @@ def _format_arg(self, opt, spec, val): if opt == "spm_type": type_map = {"same": 0, "uint8": 2, "uint16": 512, "float32": 16} val = 
type_map[val] - return super(CAT12SANLMDenoising, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() diff --git a/nipype/interfaces/cat12/surface.py b/nipype/interfaces/cat12/surface.py index 5e74f8d015..852150d539 100644 --- a/nipype/interfaces/cat12/surface.py +++ b/nipype/interfaces/cat12/surface.py @@ -163,9 +163,7 @@ def _list_outputs(self): def _format_arg(self, opt, spec, val): if opt == "left_central_surfaces": return Cell2Str(val) - return super(ExtractAdditionalSurfaceParameters, self)._format_arg( - opt, spec, val - ) + return super()._format_arg(opt, spec, val) class ExtractROIBasedSurfaceMeasuresInputSpec(SPMCommandInputSpec): @@ -257,7 +255,7 @@ def _format_arg(self, opt, spec, val): elif opt == "lh_roi_atlas": return Cell2Str(val) - return super(ExtractROIBasedSurfaceMeasures, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() diff --git a/nipype/interfaces/cmtk/__init__.py b/nipype/interfaces/cmtk/__init__.py index fc45bc986e..426130e1a5 100644 --- a/nipype/interfaces/cmtk/__init__.py +++ b/nipype/interfaces/cmtk/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """CMP implements a full processing pipeline for creating connectomes with dMRI data.""" from .cmtk import ROIGen, CreateMatrix, CreateNodes from .nx import NetworkXMetrics, AverageNetworks diff --git a/nipype/interfaces/cmtk/base.py b/nipype/interfaces/cmtk/base.py index 17d3070504..d0c226dc49 100644 --- a/nipype/interfaces/cmtk/base.py +++ b/nipype/interfaces/cmtk/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Base interface for cmtk """ diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py index bc5f2de2a5..e49c7c8547 100644 --- a/nipype/interfaces/cmtk/cmtk.py +++ b/nipype/interfaces/cmtk/cmtk.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import pickle diff --git a/nipype/interfaces/cmtk/convert.py b/nipype/interfaces/cmtk/convert.py index a45daddcd6..72d105b715 100644 --- a/nipype/interfaces/cmtk/convert.py +++ b/nipype/interfaces/cmtk/convert.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import os import os.path as op import datetime @@ -134,7 +132,7 @@ def _run_interface(self, runtime): for ntwk in self.inputs.graphml_networks: # There must be a better way to deal with the unique name problem # (i.e. 
tracks and networks can't use the same name, and previously we were pulling them both from the input files) - ntwk_name = "Network {cnt}".format(cnt=count) + ntwk_name = f"Network {count}" a.add_connectome_network_from_graphml(ntwk_name, ntwk) count += 1 diff --git a/nipype/interfaces/cmtk/nbs.py b/nipype/interfaces/cmtk/nbs.py index a2bd42abee..4b4be74ebb 100644 --- a/nipype/interfaces/cmtk/nbs.py +++ b/nipype/interfaces/cmtk/nbs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -42,9 +41,7 @@ def ntwks_to_matrices(in_files, edge_key): edge_key ] # Setting the edge requested edge value as weight value except: - raise KeyError( - "the graph edges do not have {} attribute".format(edge_key) - ) + raise KeyError(f"the graph edges do not have {edge_key} attribute") matrix[:, :, idx] = nx.to_numpy_array(graph) # Retrieve the matrix return matrix diff --git a/nipype/interfaces/cmtk/nx.py b/nipype/interfaces/cmtk/nx.py index 15449515f4..bbf082ce03 100644 --- a/nipype/interfaces/cmtk/nx.py +++ b/nipype/interfaces/cmtk/nx.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os.path as op diff --git a/nipype/interfaces/cmtk/parcellation.py b/nipype/interfaces/cmtk/parcellation.py index ae5f3223db..6195715065 100644 --- a/nipype/interfaces/cmtk/parcellation.py +++ b/nipype/interfaces/cmtk/parcellation.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -264,7 +263,7 @@ def create_annot_label(subject_id, subjects_dir, fs_dir, parcellation_name): log = cmp_config.get_logger() for out in comp: - mris_cmd = 'mris_ca_label %s %s "%s/surf/%s.sphere.reg" "%s" "%s" ' % ( + mris_cmd = 'mris_ca_label {} {} "{}/surf/{}.sphere.reg" "{}" "{}" '.format( subject_id, out[0], op.join(subjects_dir, subject_id), @@ -277,11 +276,13 @@ def create_annot_label(subject_id, subjects_dir, fs_dir, parcellation_name): annot = '--annotation "%s"' % out[4] - mri_an_cmd = 'mri_annotation2label --subject %s --hemi %s --outdir "%s" %s' % ( - subject_id, - out[0], - op.join(output_dir, out[3]), - annot, + mri_an_cmd = ( + 'mri_annotation2label --subject {} --hemi {} --outdir "{}" {}'.format( + subject_id, + out[0], + op.join(output_dir, out[3]), + annot, + ) ) iflogger.info(mri_an_cmd) runCmd(mri_an_cmd, log) @@ -316,12 +317,12 @@ def create_annot_label(subject_id, subjects_dir, fs_dir, parcellation_name): ) runCmd(mri_cmd, log) runCmd("mris_volmask %s" % subject_id, log) - mri_cmd = 'mri_convert -i "%s/mri/ribbon.mgz" -o "%s/mri/ribbon.nii.gz"' % ( + mri_cmd = 'mri_convert -i "{}/mri/ribbon.mgz" -o "{}/mri/ribbon.nii.gz"'.format( op.join(subjects_dir, subject_id), op.join(subjects_dir, subject_id), ) runCmd(mri_cmd, log) - mri_cmd = 'mri_convert -i "%s/mri/aseg.mgz" -o "%s/mri/aseg.nii.gz"' % ( + mri_cmd = 'mri_convert -i "{}/mri/aseg.mgz" -o "{}/mri/aseg.nii.gz"'.format( op.join(subjects_dir, subject_id), op.join(subjects_dir, subject_id), ) @@ -407,15 +408,17 @@ def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name, dilation): labelpath = op.join(output_dir, parval["fs_label_subdir_name"] % hemi) # construct .label file name - fname = "%s.%s.label" % (hemi, brv["dn_fsname"]) + fname = "{}.{}.label".format(hemi, brv["dn_fsname"]) # execute fs mri_label2vol to generate volume roi from 
the label file # store it in temporary file to be overwritten for each region - mri_cmd = 'mri_label2vol --label "%s" --temp "%s" --o "%s" --identity' % ( - op.join(labelpath, fname), - op.join(fs_dir, "mri", "orig.mgz"), - op.join(output_dir, "tmp.nii.gz"), + mri_cmd = ( + 'mri_label2vol --label "{}" --temp "{}" --o "{}" --identity'.format( + op.join(labelpath, fname), + op.join(fs_dir, "mri", "orig.mgz"), + op.join(output_dir, "tmp.nii.gz"), + ) ) runCmd(mri_cmd, log) @@ -656,7 +659,7 @@ def crop_and_move_datasets( raise Exception("File %s does not exist." % d[0]) # reslice to original volume because the roi creation with freesurfer # changed to 256x256x256 resolution - mri_cmd = 'mri_convert -rl "%s" -rt nearest "%s" -nc "%s"' % (orig, d[0], d[1]) + mri_cmd = f'mri_convert -rl "{orig}" -rt nearest "{d[0]}" -nc "{d[1]}"' runCmd(mri_cmd, log) diff --git a/nipype/interfaces/cmtk/tests/__init__.py b/nipype/interfaces/cmtk/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/cmtk/tests/__init__.py +++ b/nipype/interfaces/cmtk/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/dcm2nii.py b/nipype/interfaces/dcm2nii.py index 6b6de041bf..07eb1bb4db 100644 --- a/nipype/interfaces/dcm2nii.py +++ b/nipype/interfaces/dcm2nii.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """dcm2nii converts images from the proprietary scanner DICOM format to NIfTI.""" import os import re @@ -150,11 +149,11 @@ def _format_arg(self, opt, spec, val): val = True if opt == "source_names": return spec.argstr % val[0] - return super(Dcm2nii, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _run_interface(self, runtime): self._config_created = False - new_runtime = super(Dcm2nii, self)._run_interface(runtime) + new_runtime = super()._run_interface(runtime) ( self.output_files, self.reoriented_files, @@ -202,7 +201,7 @@ def _parse_stdout(self, stdout): # just above for l in (bvecs, bvals): l[-1] = os.path.join( - os.path.dirname(l[-1]), "x%s" % (os.path.basename(l[-1]),) + os.path.dirname(l[-1]), f"x{os.path.basename(l[-1])}" ) elif re.search(".*->(.*)", line): val = re.search(".*->(.*)", line) @@ -443,13 +442,11 @@ def _format_arg(self, opt, spec, val): val = True if opt == "source_names": return spec.argstr % (os.path.dirname(val[0]) or ".") - return super(Dcm2niix, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _run_interface(self, runtime): # may use return code 1 despite conversion - runtime = super(Dcm2niix, self)._run_interface( - runtime, correct_return_codes=(0, 1) - ) + runtime = super()._run_interface(runtime, correct_return_codes=(0, 1)) self._parse_files(self._parse_stdout(runtime.stdout)) return runtime diff --git a/nipype/interfaces/dcmstack.py b/nipype/interfaces/dcmstack.py index 49acf9f509..6ddd252749 100644 --- a/nipype/interfaces/dcmstack.py +++ b/nipype/interfaces/dcmstack.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """dcmstack allows series of DICOM images to be stacked into multi-dimensional arrays.""" import os @@ -250,7 +249,7 @@ def _make_name_map(self): def _outputs(self): self._make_name_map() - outputs = super(LookupMeta, self)._outputs() + outputs = super()._outputs() undefined_traits = {} for out_name in list(self._meta_keys.values()): outputs.add_trait(out_name, traits.Any) diff --git a/nipype/interfaces/diffusion_toolkit/__init__.py b/nipype/interfaces/diffusion_toolkit/__init__.py index c3927800a3..ef8dce7e4e 100644 --- 
a/nipype/interfaces/diffusion_toolkit/__init__.py +++ b/nipype/interfaces/diffusion_toolkit/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Diffusion Toolkit performs data reconstruction and fiber tracking on diffusion MR images.""" from .base import Info from .postproc import SplineFilter, TrackMerge diff --git a/nipype/interfaces/diffusion_toolkit/base.py b/nipype/interfaces/diffusion_toolkit/base.py index 2068f18988..051d80e0f1 100644 --- a/nipype/interfaces/diffusion_toolkit/base.py +++ b/nipype/interfaces/diffusion_toolkit/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The dtk module provides basic functions for interfacing with @@ -19,7 +18,7 @@ __docformat__ = "restructuredtext" -class Info(object): +class Info: """Handle dtk output type and version information. Examples diff --git a/nipype/interfaces/diffusion_toolkit/dti.py b/nipype/interfaces/diffusion_toolkit/dti.py index c42db76d5b..37a76ab6f0 100644 --- a/nipype/interfaces/diffusion_toolkit/dti.py +++ b/nipype/interfaces/diffusion_toolkit/dti.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit @@ -107,15 +106,15 @@ def _create_gradient_matrix(self, bvecs_file, bvals_file): with open(_gradient_matrix_file, "w") as gradient_matrix_f: for i in range(len(bvals)): gradient_matrix_f.write( - "%s, %s, %s, %s\n" % (bvecs_x[i], bvecs_y[i], bvecs_z[i], bvals[i]) + f"{bvecs_x[i]}, {bvecs_y[i]}, {bvecs_z[i]}, {bvals[i]}\n" ) return _gradient_matrix_file def _format_arg(self, name, spec, value): if name == "bvecs": new_val = self._create_gradient_matrix(self.inputs.bvecs, self.inputs.bvals) - return super(DTIRecon, self)._format_arg("bvecs", spec, new_val) - return super(DTIRecon, self)._format_arg(name, spec, value) + return super()._format_arg("bvecs", spec, new_val) + return super()._format_arg(name, spec, value) def _list_outputs(self): out_prefix = self.inputs.out_prefix @@ -277,7 +276,7 @@ def _run_interface(self, runtime): copy=False, ) - return super(DTITracker, self)._run_interface(runtime) + return super()._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/diffusion_toolkit/odf.py b/nipype/interfaces/diffusion_toolkit/odf.py index e1819912b6..3b30cc7c69 100644 --- a/nipype/interfaces/diffusion_toolkit/odf.py +++ b/nipype/interfaces/diffusion_toolkit/odf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit @@ -109,15 +108,15 @@ def _create_gradient_matrix(self, bvecs_file, bvals_file): for i in range(len(bvals)): if int(bvals[i]) == 0: continue - gradient_matrix_f.write("%s %s %s\n" % (bvecs_x[i], bvecs_y[i], bvecs_z[i])) + gradient_matrix_f.write(f"{bvecs_x[i]} {bvecs_y[i]} {bvecs_z[i]}\n") gradient_matrix_f.close() return _gradient_matrix_file def _format_arg(self, name, spec, value): if name == "bvecs": new_val = self._create_gradient_matrix(self.inputs.bvecs, self.inputs.bvals) - return super(HARDIMat, self)._format_arg("bvecs", spec, new_val) - return super(HARDIMat, self)._format_arg(name, spec, value) + return super()._format_arg("bvecs", spec, new_val) + return 
super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -388,7 +387,7 @@ def _run_interface(self, runtime): copy=False, ) - return super(ODFTracker, self)._run_interface(runtime) + return super()._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/diffusion_toolkit/postproc.py b/nipype/interfaces/diffusion_toolkit/postproc.py index 534b747a0d..5190843875 100644 --- a/nipype/interfaces/diffusion_toolkit/postproc.py +++ b/nipype/interfaces/diffusion_toolkit/postproc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by diffusion toolkit diff --git a/nipype/interfaces/diffusion_toolkit/tests/__init__.py b/nipype/interfaces/diffusion_toolkit/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/diffusion_toolkit/tests/__init__.py +++ b/nipype/interfaces/diffusion_toolkit/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/dipy/__init__.py b/nipype/interfaces/dipy/__init__.py index ec840871ba..d12c13844f 100644 --- a/nipype/interfaces/dipy/__init__.py +++ b/nipype/interfaces/dipy/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """DIPY is a computational neuroimaging tool for diffusion MRI.""" from .tracks import StreamlineTractography, TrackDensityMap from .tensors import TensorMode, DTI diff --git a/nipype/interfaces/dipy/anisotropic_power.py b/nipype/interfaces/dipy/anisotropic_power.py index 7ad82fb678..c222ea8f6a 100644 --- a/nipype/interfaces/dipy/anisotropic_power.py +++ b/nipype/interfaces/dipy/anisotropic_power.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import numpy as np import nibabel as nb diff --git a/nipype/interfaces/dipy/base.py b/nipype/interfaces/dipy/base.py index 161ed33227..064b2fc744 100644 --- a/nipype/interfaces/dipy/base.py +++ b/nipype/interfaces/dipy/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Base interfaces for dipy """ import os.path as op @@ -134,10 +133,9 @@ def convert_to_traits_type(dipy_type, is_file=False): return traits.Complex, is_mandatory else: msg = ( - "Error during convert_to_traits_type({0}).".format(dipy_type) - + "Unknown DIPY type." + f"Error during convert_to_traits_type({dipy_type})." + "Unknown DIPY type." 
) - raise IOError(msg) + raise OSError(msg) def create_interface_specs(class_name, params=None, BaseClass=TraitedSpec): @@ -218,13 +216,13 @@ def dipy_to_nipype_interface(cls_name, dipy_flow, BaseClass=DipyBaseInterface): input_parameters = parser.positional_parameters + optional_params input_spec = create_interface_specs( - "{}InputSpec".format(cls_name), + f"{cls_name}InputSpec", input_parameters, BaseClass=BaseInterfaceInputSpec, ) output_spec = create_interface_specs( - "{}OutputSpec".format(cls_name), output_parameters, BaseClass=TraitedSpec + f"{cls_name}OutputSpec", output_parameters, BaseClass=TraitedSpec ) def _run_interface(self, runtime): diff --git a/nipype/interfaces/dipy/preprocess.py b/nipype/interfaces/dipy/preprocess.py index 867ba79d81..12b25323cd 100644 --- a/nipype/interfaces/dipy/preprocess.py +++ b/nipype/interfaces/dipy/preprocess.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import os.path as op import nibabel as nb import numpy as np @@ -103,7 +101,7 @@ def _gen_outfilename(self): if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext - return op.abspath("%s_reslice%s" % (fname, fext)) + return op.abspath(f"{fname}_reslice{fext}") class DenoiseInputSpec(TraitedSpec): @@ -204,7 +202,7 @@ def _gen_outfilename(self): if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext - return op.abspath("%s_denoise%s" % (fname, fext)) + return op.abspath(f"{fname}_denoise{fext}") def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): @@ -218,7 +216,7 @@ def resample_proxy(in_file, order=3, new_zooms=None, out_file=None): if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext - out_file = op.abspath("./%s_reslice%s" % (fname, fext)) + out_file = op.abspath(f"./{fname}_reslice{fext}") img = nb.load(in_file) hdr = img.header.copy() @@ -258,7 +256,7 @@ def nlmeans_proxy(in_file, settings, snr=None, smask=None, nmask=None, out_file= if fext == ".gz": fname, fext2 = op.splitext(fname) fext = fext2 + fext - out_file = op.abspath("./%s_denoise%s" % (fname, fext)) + out_file = op.abspath(f"./{fname}_denoise{fext}") img = nb.load(in_file) hdr = img.header diff --git a/nipype/interfaces/dipy/reconstruction.py b/nipype/interfaces/dipy/reconstruction.py index 14a2dff462..6e36e695f2 100644 --- a/nipype/interfaces/dipy/reconstruction.py +++ b/nipype/interfaces/dipy/reconstruction.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Interfaces to the reconstruction algorithms in dipy diff --git a/nipype/interfaces/dipy/setup.py b/nipype/interfaces/dipy/setup.py index 082d88f841..38a7622894 100644 --- a/nipype/interfaces/dipy/setup.py +++ b/nipype/interfaces/dipy/setup.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/dipy/simulate.py b/nipype/interfaces/dipy/simulate.py index e1867342bb..8ed13a52a8 100644 --- a/nipype/interfaces/dipy/simulate.py +++ b/nipype/interfaces/dipy/simulate.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from multiprocessing import Pool, cpu_count import os.path as op @@ -134,7 +133,7 @@ def _run_interface(self, runtime): nsticks = len(self.inputs.in_dirs) if len(self.inputs.in_frac) != nsticks: raise RuntimeError( - ("Number of sticks and their volume fractions" " must match.") + "Number of sticks and their volume fractions" " must match." 
) # Volume fractions of isotropic compartments @@ -256,9 +255,7 @@ def _run_interface(self, runtime): ) result = np.array(pool.map(_compute_voxel, args)) if np.shape(result)[1] != ndirs: - raise RuntimeError( - ("Computed directions do not match number" "of b-values.") - ) + raise RuntimeError("Computed directions do not match number" "of b-values.") signal = np.zeros((shape[0], shape[1], shape[2], ndirs)) signal[msk > 0] = result diff --git a/nipype/interfaces/dipy/tensors.py b/nipype/interfaces/dipy/tensors.py index f2b197f372..f8be2b5c70 100644 --- a/nipype/interfaces/dipy/tensors.py +++ b/nipype/interfaces/dipy/tensors.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import numpy as np import nibabel as nb @@ -77,7 +75,7 @@ def _list_outputs(self): outputs["out_file"] = self._gen_filename("dti") for metric in ["fa", "md", "rd", "ad", "color_fa"]: - outputs["{}_file".format(metric)] = self._gen_filename(metric) + outputs[f"{metric}_file"] = self._gen_filename(metric) return outputs diff --git a/nipype/interfaces/dipy/tests/__init__.py b/nipype/interfaces/dipy/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/dipy/tests/__init__.py +++ b/nipype/interfaces/dipy/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/dipy/tracks.py b/nipype/interfaces/dipy/tracks.py index e97250dd26..b1a912e63b 100644 --- a/nipype/interfaces/dipy/tracks.py +++ b/nipype/interfaces/dipy/tracks.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import os.path as op import numpy as np import nibabel as nb @@ -226,7 +224,7 @@ def _run_interface(self, runtime): if not (isdefined(self.inputs.in_model) or isdefined(self.inputs.in_peaks)): raise RuntimeError( - ("At least one of in_model or in_peaks should " "be supplied") + "At least one of in_model or in_peaks should " "be supplied" ) img = nb.load(self.inputs.in_file) diff --git a/nipype/interfaces/dtitk/base.py b/nipype/interfaces/dtitk/base.py index efbd79ad98..2b6a2af6a4 100644 --- a/nipype/interfaces/dtitk/base.py +++ b/nipype/interfaces/dtitk/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The dtitk module provides classes for interfacing with the `DTITK @@ -36,7 +35,7 @@ LOGGER = logging.getLogger("nipype.interface") -class DTITKRenameMixin(object): +class DTITKRenameMixin: def __init__(self, *args, **kwargs): classes = [cls.__name__ for cls in self.__class__.mro()] dep_name = classes[0] @@ -50,7 +49,7 @@ def __init__(self, *args, **kwargs): "".format(dep_name, new_name), DeprecationWarning, ) - super(DTITKRenameMixin, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) class CommandLineDtitk(CommandLine): diff --git a/nipype/interfaces/dtitk/registration.py b/nipype/interfaces/dtitk/registration.py index 4a50d5b1ad..c0da9ebc8d 100644 --- a/nipype/interfaces/dtitk/registration.py +++ b/nipype/interfaces/dtitk/registration.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """DTITK registration interfaces @@ -118,7 +117,7 @@ class Rigid(CommandLineDtitk): return super(Rigid, self)._format_arg(name, spec, value)""" def _run_interface(self, runtime): - runtime = super(Rigid, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if """.aff doesn't exist or can't be opened""" in runtime.stderr: self.raise_exception(runtime) return runtime @@ -445,7 
+444,7 @@ class AffScalarVol(CommandLineDtitk): def _format_arg(self, name, spec, value): if name == "interpolation": value = {"trilinear": 0, "NN": 1}[value] - return super(AffScalarVol, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class DiffeoSymTensor3DVolInputSpec(CommandLineInputSpec): @@ -530,7 +529,7 @@ class DiffeoSymTensor3DVol(CommandLineDtitk): def _format_arg(self, name, spec, value): if name == "resampling_type": value = {"forward": 0, "backward": 1}[value] - return super(DiffeoSymTensor3DVol, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class DiffeoScalarVolInputSpec(CommandLineInputSpec): @@ -607,7 +606,7 @@ def _format_arg(self, name, spec, value): value = {"forward": 0, "backward": 1}[value] elif name == "interpolation": value = {"trilinear": 0, "NN": 1}[value] - return super(DiffeoScalarVol, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class RigidTask(DTITKRenameMixin, Rigid): diff --git a/nipype/interfaces/dtitk/tests/__init__.py b/nipype/interfaces/dtitk/tests/__init__.py index 99fb243f19..349937997e 100644 --- a/nipype/interfaces/dtitk/tests/__init__.py +++ b/nipype/interfaces/dtitk/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/dtitk/utils.py b/nipype/interfaces/dtitk/utils.py index da030439de..0a81a8e8f4 100644 --- a/nipype/interfaces/dtitk/utils.py +++ b/nipype/interfaces/dtitk/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """DTITK utility interfaces diff --git a/nipype/interfaces/dynamic_slicer.py b/nipype/interfaces/dynamic_slicer.py index 6dc6a7e154..1fede10507 100644 --- a/nipype/interfaces/dynamic_slicer.py +++ b/nipype/interfaces/dynamic_slicer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Experimental Slicer wrapper - Work in progress.""" @@ -39,7 +38,7 @@ def _grab_xml(self, module): raise Exception(cmd.cmdline + " failed:\n%s" % ret.runtime.stderr) def _outputs(self): - base = super(SlicerCommandLine, self)._outputs() + base = super()._outputs() undefined_output_traits = {} for key in [ node.getElementsByTagName("name")[0].firstChild.nodeValue @@ -53,9 +52,7 @@ def _outputs(self): def __init__(self, module, **inputs): warnings.warn("slicer is Not fully implemented", RuntimeWarning) - super(SlicerCommandLine, self).__init__( - command="Slicer3 --launch %s " % module, name=module, **inputs - ) + super().__init__(command="Slicer3 --launch %s " % module, name=module, **inputs) dom = self._grab_xml(module) self._outputs_filenames = {} @@ -193,7 +190,7 @@ def _format_arg(self, name, spec, value): else: fname = value return spec.argstr % fname - return super(SlicerCommandLine, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) # test = SlicerCommandLine(module="BRAINSFit") diff --git a/nipype/interfaces/elastix/__init__.py b/nipype/interfaces/elastix/__init__.py index 8f60ed8ff1..1f1116af69 100644 --- a/nipype/interfaces/elastix/__init__.py +++ b/nipype/interfaces/elastix/__init__.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set 
ft=python sts=4 ts=4 sw=4 et: """elastix is a toolbox for rigid and nonrigid registration of images.""" diff --git a/nipype/interfaces/elastix/base.py b/nipype/interfaces/elastix/base.py index 6e26937793..61fe288ff6 100644 --- a/nipype/interfaces/elastix/base.py +++ b/nipype/interfaces/elastix/base.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The :py:mod:`nipype.interfaces.elastix` provides the interface to diff --git a/nipype/interfaces/elastix/registration.py b/nipype/interfaces/elastix/registration.py index 9c6074014b..44076e4118 100644 --- a/nipype/interfaces/elastix/registration.py +++ b/nipype/interfaces/elastix/registration.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -83,7 +81,7 @@ def _list_outputs(self): for i, params in enumerate(self.inputs.parameters): config = {} - with open(params, "r") as f: + with open(params) as f: for line in f.readlines(): line = line.strip() if not line.startswith("//") and line: diff --git a/nipype/interfaces/elastix/tests/__init__.py b/nipype/interfaces/elastix/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/elastix/tests/__init__.py +++ b/nipype/interfaces/elastix/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/elastix/utils.py b/nipype/interfaces/elastix/utils.py index 0cddcaba5d..fdd1889e3d 100644 --- a/nipype/interfaces/elastix/utils.py +++ b/nipype/interfaces/elastix/utils.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# coding: utf-8 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -96,7 +94,7 @@ def _run_interface(self, runtime): contents = "" - with open(self.inputs.transform_file, "r") as f: + with open(self.inputs.transform_file) as f: contents = f.read() if isdefined(self.inputs.output_type): diff --git a/nipype/interfaces/freesurfer/__init__.py b/nipype/interfaces/freesurfer/__init__.py index 4efa90039a..b6863c9ded 100644 --- a/nipype/interfaces/freesurfer/__init__.py +++ b/nipype/interfaces/freesurfer/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """FreeSurfer is an open source software suite for processing and analyzing brain MRI images.""" diff --git a/nipype/interfaces/freesurfer/base.py b/nipype/interfaces/freesurfer/base.py index 6e9bb5942d..0baedd4b82 100644 --- a/nipype/interfaces/freesurfer/base.py +++ b/nipype/interfaces/freesurfer/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The freesurfer module provides basic functions for interfacing with @@ -127,7 +126,7 @@ class FSCommand(CommandLine): _subjects_dir = None def __init__(self, **inputs): - super(FSCommand, self).__init__(**inputs) + super().__init__(**inputs) self.inputs.on_trait_change(self._subjects_dir_update, "subjects_dir") if not self._subjects_dir: self._subjects_dir = Info.subjectsdir() @@ -147,7 +146,7 @@ def run(self, **inputs): if "subjects_dir" in inputs: self.inputs.subjects_dir = inputs["subjects_dir"] self._subjects_dir_update() - return super(FSCommand, self).run(**inputs) + return super().run(**inputs) def 
_gen_fname(self, basename, fname=None, cwd=None, suffix="_fs", use_ext=True): """Define a generic mapping for a single outfile @@ -242,7 +241,7 @@ class FSCommandOpenMP(FSCommand): _num_threads = None def __init__(self, **inputs): - super(FSCommandOpenMP, self).__init__(**inputs) + super().__init__(**inputs) self.inputs.on_trait_change(self._num_threads_update, "num_threads") if not self._num_threads: self._num_threads = os.environ.get("OMP_NUM_THREADS", None) @@ -262,7 +261,7 @@ def run(self, **inputs): if "num_threads" in inputs: self.inputs.num_threads = inputs["num_threads"] self._num_threads_update() - return super(FSCommandOpenMP, self).run(**inputs) + return super().run(**inputs) def no_freesurfer(): diff --git a/nipype/interfaces/freesurfer/longitudinal.py b/nipype/interfaces/freesurfer/longitudinal.py index 086d0a96c8..8cf388f9e4 100644 --- a/nipype/interfaces/freesurfer/longitudinal.py +++ b/nipype/interfaces/freesurfer/longitudinal.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various longitudinal commands provided by freesurfer @@ -157,7 +156,7 @@ def _format_arg(self, name, spec, value): return spec.argstr % {"mean": 0, "median": 1}[value] if name in ("transform_outputs", "scaled_intensity_outputs"): value = self._list_outputs()[name] - return super(RobustTemplate, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -245,7 +244,7 @@ def _format_arg(self, name, spec, value): if name in ("in_segmentations", "in_segmentations_noCC", "in_norms"): # return enumeration value return spec.argstr % os.path.basename(value[0]) - return super(FuseSegmentations, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/freesurfer/model.py b/nipype/interfaces/freesurfer/model.py index 5c06a09238..6fea9d0ec1 100644 --- a/nipype/interfaces/freesurfer/model.py +++ b/nipype/interfaces/freesurfer/model.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The freesurfer module provides basic functions for interfacing with @@ -137,7 +136,7 @@ def _list_outputs(self): outputs["out_file"] = outfile if not isdefined(outfile): outputs["out_file"] = os.path.join( - os.getcwd(), "concat_%s_%s.mgz" % (self.inputs.hemi, self.inputs.target) + os.getcwd(), f"concat_{self.inputs.hemi}_{self.inputs.target}.mgz" ) return outputs @@ -231,7 +230,7 @@ def run(self, **inputs): if isdefined(self.inputs.surf_measure_file): copy2subjdir(self, self.inputs.surf_measure_file, folder) - return super(MRISPreprocReconAll, self).run(**inputs) + return super().run(**inputs) def _format_arg(self, name, spec, value): # mris_preproc looks for these files in the surf dir @@ -241,7 +240,7 @@ def _format_arg(self, name, spec, value): if name == "surf_measure_file": basename = os.path.basename(value) return spec.argstr % basename.lstrip("rh.").lstrip("lh.") - return super(MRISPreprocReconAll, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class GLMFitInputSpec(FSTraitedSpec): @@ -491,7 +490,7 @@ def _format_arg(self, name, spec, value): if name == "surf": _si = self.inputs return spec.argstr % (_si.subject_id, _si.hemi, _si.surf_geo) - return 
super(GLMFit, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -565,7 +564,7 @@ def _gen_filename(self, name): class OneSampleTTest(GLMFit): def __init__(self, **kwargs): - super(OneSampleTTest, self).__init__(**kwargs) + super().__init__(**kwargs) self.inputs.one_sample = True @@ -697,7 +696,7 @@ def _format_arg(self, name, spec, value): return spec.argstr % fname if name == "out_type": return "" - return super(Binarize, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): if name == "binary_file": @@ -1066,7 +1065,7 @@ def _format_arg(self, name, spec, value): ".mgz", "" ) return spec.argstr % (value, intensity_name) - return super(SegStats, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): if name == "summary_file": @@ -1158,7 +1157,7 @@ class SegStatsReconAll(SegStats): def _format_arg(self, name, spec, value): if name == "brainmask_file": return spec.argstr % os.path.basename(value) - return super(SegStatsReconAll, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def run(self, **inputs): if self.inputs.copy_inputs: @@ -1182,7 +1181,7 @@ def run(self, **inputs): ) copy2subjdir(self, self.inputs.in_intensity, "mri") copy2subjdir(self, self.inputs.brainmask_file, "mri") - return super(SegStatsReconAll, self).run(**inputs) + return super().run(**inputs) class Label2VolInputSpec(FSTraitedSpec): @@ -1411,7 +1410,7 @@ def _format_arg(self, name, spec, value): else: return "" # TODO: Fix bug when boolean values are set explicitly to false - return super(MS_LDA, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): pass @@ -1519,22 +1518,20 @@ def run(self, **inputs): if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir hemi = self.inputs.hemisphere - copy2subjdir( - self, self.inputs.sphere_reg, "surf", "{0}.sphere.reg".format(hemi) - ) - copy2subjdir(self, self.inputs.white, "surf", "{0}.white".format(hemi)) + copy2subjdir(self, self.inputs.sphere_reg, "surf", f"{hemi}.sphere.reg") + copy2subjdir(self, self.inputs.white, "surf", f"{hemi}.white") copy2subjdir( self, self.inputs.source_sphere_reg, "surf", - "{0}.sphere.reg".format(hemi), + f"{hemi}.sphere.reg", subject_id=self.inputs.source_subject, ) copy2subjdir( self, self.inputs.source_white, "surf", - "{0}.white".format(hemi), + f"{hemi}.white", subject_id=self.inputs.source_subject, ) @@ -1545,7 +1542,7 @@ def run(self, **inputs): if not os.path.isdir(label_dir): os.makedirs(label_dir) - return super(Label2Label, self).run(**inputs) + return super().run(**inputs) class Label2AnnotInputSpec(FSTraitedSpec): @@ -1618,7 +1615,7 @@ def run(self, **inputs): self, self.inputs.orig, folder="surf", - basename="{0}.orig".format(self.inputs.hemisphere), + basename=f"{self.inputs.hemisphere}.orig", ) # label dir must exist in order for output file to be written label_dir = os.path.join( @@ -1626,7 +1623,7 @@ def run(self, **inputs): ) if not os.path.isdir(label_dir): os.makedirs(label_dir) - return super(Label2Annot, self).run(**inputs) + return super().run(**inputs) def _list_outputs(self): outputs = self._outputs().get() @@ -1716,7 +1713,7 @@ def _format_arg(self, name, spec, value): for item in ["lh.", "rh."]: surf = surf.replace(item, "") return spec.argstr % surf - return super(SphericalAverage, 
self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): if name == "in_average": diff --git a/nipype/interfaces/freesurfer/petsurfer.py b/nipype/interfaces/freesurfer/petsurfer.py index 272cba6e21..7536fac1a2 100644 --- a/nipype/interfaces/freesurfer/petsurfer.py +++ b/nipype/interfaces/freesurfer/petsurfer.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands for running PET analyses provided by FreeSurfer @@ -517,7 +516,7 @@ def _format_arg(self, name, spec, val): ) if name == 'mg': return spec.argstr % (val[0], ' '.join(val[1])) - return super(GTMPVC, self)._format_arg(name, spec, val) + return super()._format_arg(name, spec, val) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py index 6970a70ce5..9505f5cd67 100644 --- a/nipype/interfaces/freesurfer/preprocess.py +++ b/nipype/interfaces/freesurfer/preprocess.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various commands provided by FreeSurfer @@ -546,7 +545,7 @@ def _format_arg(self, name, spec, value): if name in ["in_type", "out_type", "template_type"]: if value == "niigz": return spec.argstr % "nii" - return super(MRIConvert, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _get_outfilename(self): outfile = self.inputs.out_file @@ -728,19 +727,19 @@ def cmdline(self): outdir = self._get_outdir() cmd = [] if not os.path.exists(outdir): - cmdstr = "%s -c \"import os; os.makedirs('%s')\"" % ( + cmdstr = "{} -c \"import os; os.makedirs('{}')\"".format( op.basename(sys.executable), outdir, ) cmd.extend([cmdstr]) infofile = os.path.join(outdir, "shortinfo.txt") if not os.path.exists(infofile): - cmdstr = "dcmdir-info-mgh %s > %s" % (self.inputs.dicom_dir, infofile) + cmdstr = f"dcmdir-info-mgh {self.inputs.dicom_dir} > {infofile}" cmd.extend([cmdstr]) files = self._get_filelist(outdir) for infile, outfile in files: if not os.path.exists(outfile): - single_cmd = "%s%s %s %s" % ( + single_cmd = "{}{} {} {}".format( self._cmd_prefix, self.cmd, infile, @@ -1572,11 +1571,11 @@ def _format_arg(self, name, trait_spec, value): ) ): return None - return super(ReconAll, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) @property def cmdline(self): - cmd = super(ReconAll, self).cmdline + cmd = super().cmdline # Adds '-expert' flag if expert flags are passed # Mutually exclusive with 'expert' input parameter @@ -1619,8 +1618,8 @@ def cmdline(self): no_run = True flags = [] for step, outfiles, infiles in steps: - flag = "-{}".format(step) - noflag = "-no{}".format(step) + flag = f"-{step}" + noflag = f"-no{step}" if noflag in cmd: continue elif flag in cmd: @@ -1652,7 +1651,7 @@ def _prep_expert_file(self): for binary in self._binaries: args = getattr(self.inputs, binary) if isdefined(args): - lines.append("{} {}\n".format(binary, args)) + lines.append(f"{binary} {args}\n") if lines == []: return "" @@ -1664,7 +1663,7 @@ def _prep_expert_file(self): expert_fname = os.path.abspath("expert.opts") with open(expert_fname, "w") as fobj: fobj.write(contents) - return " -expert {}".format(expert_fname) + return f" -expert 
{expert_fname}" def _get_expert_file(self): # Read pre-existing options file, if it exists @@ -1678,7 +1677,7 @@ def _get_expert_file(self): ) if not os.path.exists(xopts_file): return "" - with open(xopts_file, "r") as fobj: + with open(xopts_file) as fobj: return fobj.read() @property @@ -1890,7 +1889,7 @@ def _format_arg(self, name, spec, value): "init_cost_file", ) and isinstance(value, bool): value = self._list_outputs()[name] - return super(BBRegister, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): if name == "out_reg_file": @@ -2353,7 +2352,7 @@ def _format_arg(self, name, spec, value): ) if name in options and isinstance(value, bool): value = self._list_outputs()[name] - return super(RobustRegister, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -2441,7 +2440,7 @@ def _format_arg(self, name, spec, value): cmd = " ".join((cmd, "-at %s" % self.inputs.xfm_list[i])) cmd = " ".join((cmd, file)) return cmd - return super(FitMSParams, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -2894,7 +2893,7 @@ class CARegister(FSCommandOpenMP): def _format_arg(self, name, spec, value): if name == "l_files" and len(value) == 1: value.append("identity.nofile") - return super(CARegister, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_fname(self, name): if name == "out_file": @@ -3115,19 +3114,19 @@ def run(self, **inputs): self, self.inputs.smoothwm, folder="surf", - basename="{0}.smoothwm".format(self.inputs.hemisphere), + basename=f"{self.inputs.hemisphere}.smoothwm", ) copy2subjdir( self, self.inputs.curv, folder="surf", - basename="{0}.curv".format(self.inputs.hemisphere), + basename=f"{self.inputs.hemisphere}.curv", ) copy2subjdir( self, self.inputs.sulc, folder="surf", - basename="{0}.sulc".format(self.inputs.hemisphere), + basename=f"{self.inputs.hemisphere}.sulc", ) # The label directory must exist in order for an output to be written @@ -3137,7 +3136,7 @@ def run(self, **inputs): if not os.path.isdir(label_dir): os.makedirs(label_dir) - return super(MRIsCALabel, self).run(**inputs) + return super().run(**inputs) def _list_outputs(self): outputs = self.output_spec().get() @@ -3231,7 +3230,7 @@ def _format_arg(self, name, spec, value): # mri_cc can't use abspaths just the basename basename = os.path.basename(value) return spec.argstr % basename - return super(SegmentCC, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -3246,7 +3245,7 @@ def run(self, **inputs): inputs["subjects_dir"] = self.inputs.subjects_dir for originalfile in [self.inputs.in_file, self.inputs.in_norm]: copy2subjdir(self, originalfile, folder="mri") - return super(SegmentCC, self).run(**inputs) + return super().run(**inputs) def aggregate_outputs(self, runtime=None, needed_outputs=None): # it is necessary to find the output files and move @@ -3273,7 +3272,7 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): if not os.path.isdir(os.path.dirname(out_tmp)): os.makedirs(os.path.dirname(out_tmp)) shutil.move(out_tmp, out_file) - return super(SegmentCC, self).aggregate_outputs(runtime, needed_outputs) + return super().aggregate_outputs(runtime, needed_outputs) class SegmentWMInputSpec(FSTraitedSpec): @@ 
-3487,4 +3486,4 @@ class ConcatenateLTA(FSCommand): def _format_arg(self, name, spec, value): if name == "out_type": value = {"VOX2VOX": 0, "RAS2RAS": 1}[value] - return super(ConcatenateLTA, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) diff --git a/nipype/interfaces/freesurfer/registration.py b/nipype/interfaces/freesurfer/registration.py index 8b1624c87d..948714fb05 100644 --- a/nipype/interfaces/freesurfer/registration.py +++ b/nipype/interfaces/freesurfer/registration.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Provides interfaces to various longitudinal commands provided by freesurfer @@ -71,7 +70,7 @@ class MPRtoMNI305(FSScriptCommand): output_spec = MPRtoMNI305OutputSpec def __init__(self, **inputs): - super(MPRtoMNI305, self).__init__(**inputs) + super().__init__(**inputs) self.inputs.on_trait_change(self._environ_update, "target") self.inputs.on_trait_change(self._environ_update, "reference_dir") @@ -85,7 +84,7 @@ def _format_arg(self, opt, spec, val): val, os.path.abspath(retval + ext), copy=True, hashmethod="content" ) return retval - return super(MPRtoMNI305, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _environ_update(self): # refdir = os.path.join(Info.home(), val) @@ -98,7 +97,7 @@ def _get_fname(self, fname): return split_filename(fname)[1] def _list_outputs(self): - outputs = super(MPRtoMNI305, self)._list_outputs() + outputs = super()._list_outputs() fullname = "_".join( [ self._get_fname(self.inputs.in_file), @@ -308,7 +307,7 @@ class Register(FSCommand): def _format_arg(self, opt, spec, val): if opt == "curv": return spec.argstr - return super(Register, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _gen_filename(self, name): if name == "out_file": @@ -388,7 +387,7 @@ def _format_arg(self, opt, spec, val): if opt == "template": if isdefined(self.inputs.template_param): return spec.argstr % (val + "#" + str(self.inputs.template_param)) - return super(Paint, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self.output_spec().get() @@ -587,7 +586,7 @@ def _format_arg(self, opt, spec, val): val = self._list_outputs()[opt] elif opt == "reference_mask" and val is False: return "--no-ref-mask" - return super(MRICoreg, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/freesurfer/tests/__init__.py b/nipype/interfaces/freesurfer/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/freesurfer/tests/__init__.py +++ b/nipype/interfaces/freesurfer/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/freesurfer/tests/test_model.py b/nipype/interfaces/freesurfer/tests/test_model.py index 73a2d1f5c6..0c56a87522 100644 --- a/nipype/interfaces/freesurfer/tests/test_model.py +++ b/nipype/interfaces/freesurfer/tests/test_model.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/freesurfer/tests/test_preprocess.py b/nipype/interfaces/freesurfer/tests/test_preprocess.py index 00df9393b6..a6e2c3cbf9 100644 --- a/nipype/interfaces/freesurfer/tests/test_preprocess.py +++ 
b/nipype/interfaces/freesurfer/tests/test_preprocess.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -66,7 +65,7 @@ def test_fitmsparams(create_files_in_directory): # .inputs based parameters setting fit.inputs.in_files = filelist fit.inputs.out_dir = outdir - assert fit.cmdline == "mri_ms_fitparms %s %s %s" % ( + assert fit.cmdline == "mri_ms_fitparms {} {} {}".format( filelist[0], filelist[1], outdir, @@ -137,7 +136,7 @@ def test_mandatory_outvol(create_files_in_directory): ext = ext2 + ext assert mni.cmdline == ( - "mri_nu_correct.mni --i %s --n 4 --o %s_output%s" % (filelist[0], base, ext) + f"mri_nu_correct.mni --i {filelist[0]} --n 4 --o {base}_output{ext}" ) # test with custom outfile diff --git a/nipype/interfaces/freesurfer/tests/test_utils.py b/nipype/interfaces/freesurfer/tests/test_utils.py index 09584a404e..323c04166d 100644 --- a/nipype/interfaces/freesurfer/tests/test_utils.py +++ b/nipype/interfaces/freesurfer/tests/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/freesurfer/utils.py b/nipype/interfaces/freesurfer/utils.py index 921bb7488f..94da167bd2 100644 --- a/nipype/interfaces/freesurfer/utils.py +++ b/nipype/interfaces/freesurfer/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Interfaces to assorted Freesurfer utility programs. @@ -341,7 +340,7 @@ def _format_arg(self, name, spec, value): else: range = "%.3f" % range method = dict(point="", max="-max", average="-avg")[value] - return "--proj%s%s %s" % (units, method, range) + return f"--proj{units}{method} {range}" if name == "reg_header": return spec.argstr % self.inputs.subject_id @@ -373,7 +372,7 @@ def _format_arg(self, name, spec, value): if value is True: return spec.argstr % "sphere.reg" - return super(SampleToSurface, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _get_outfilename(self, opt="out_file"): outfile = getattr(self.inputs, opt) @@ -608,7 +607,7 @@ def _format_arg(self, name, spec, value): ) if value in implicit_filetypes: return "" - return super(SurfaceTransform, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -652,7 +651,7 @@ def _list_outputs(self): use_ext = False outputs["out_file"] = fname_presuffix( source, - suffix=".%s%s" % (self.inputs.target_subject, ext), + suffix=f".{self.inputs.target_subject}{ext}", newpath=os.getcwd(), use_ext=use_ext, ) @@ -998,7 +997,7 @@ def _format_arg(self, name, spec, value): if len(value) == 2: return "-fminmax %.3f %.3f" % value else: - return "-fminmax %.3f %.3f -fmid %.3f" % ( + return "-fminmax {:.3f} {:.3f} -fmid {:.3f}".format( value[0], value[2], value[1], @@ -1011,11 +1010,11 @@ def _format_arg(self, name, spec, value): if re.match(r"%s[\.\-_]" % self.inputs.hemi, value[:3]): value = value[3:] return "-annotation %s" % value - return super(SurfaceSnapshots, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _run_interface(self, runtime): if not isdefined(self.inputs.screenshot_stem): - stem = "%s_%s_%s" % ( + stem = "{}_{}_{}".format( self.inputs.subject_id, self.inputs.hemi, 
self.inputs.surface, @@ -1031,7 +1030,7 @@ def _run_interface(self, runtime): raise RuntimeError("Graphics are not enabled -- cannot run tksurfer") runtime.environ["_SNAPSHOT_STEM"] = stem self._write_tcl_script() - runtime = super(SurfaceSnapshots, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) # If a display window can't be opened, this will crash on # aggregate_outputs. Let's try to parse stderr and raise a # better exception here if that happened. @@ -1085,7 +1084,7 @@ def _write_tcl_script(self): def _list_outputs(self): outputs = self._outputs().get() if not isdefined(self.inputs.screenshot_stem): - stem = "%s_%s_%s" % ( + stem = "{}_{}_{}".format( self.inputs.subject_id, self.inputs.hemi, self.inputs.surface, @@ -1133,7 +1132,7 @@ class ImageInfo(FSCommand): output_spec = ImageInfoOutputSpec def info_regexp(self, info, field, delim="\n"): - m = re.search(r"%s\s*:\s+(.+?)%s" % (field, delim), info) + m = re.search(fr"{field}\s*:\s+(.+?){delim}", info) if m: return m.group(1) else: @@ -1299,7 +1298,7 @@ class MRIsConvert(FSCommand): def _format_arg(self, name, spec, value): if name == "out_file" and not os.path.isabs(value): value = os.path.abspath(value) - return super(MRIsConvert, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -1764,7 +1763,7 @@ def _run_interface(self, runtime): # The returncode is meaningless in BET. So check the output # in stderr and if it's set, then update the returncode # accordingly. - runtime = super(SmoothTessellation, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if "failed" in runtime.stderr: self.raise_exception(runtime) return runtime @@ -1968,7 +1967,7 @@ def _format_arg(self, name, spec, value): spec = "--lta-inv %s" if name in ("fsl_out", "lta_out") and value is True: value = self._list_outputs()[name] - return super(Tkregister2, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -2064,7 +2063,7 @@ def _format_arg(self, name, spec, value): return value # os.path.abspath(value) # if name == 'copy_name' and value: # self.input_spec.transform - return super(AddXFormToHeader, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -2543,24 +2542,24 @@ def run(self, **inputs): self, self.inputs.in_orig, folder="surf", - basename="{0}.orig".format(hemi), + basename=f"{hemi}.orig", ) copy2subjdir( self, self.inputs.in_inflated, folder="surf", - basename="{0}.inflated".format(hemi), + basename=f"{hemi}.inflated", ) copy2subjdir(self, self.inputs.in_brain, folder="mri", basename="brain.mgz") copy2subjdir(self, self.inputs.in_wm, folder="mri", basename="wm.mgz") - return super(FixTopology, self).run(**inputs) + return super().run(**inputs) def _format_arg(self, name, spec, value): if name == "sphere": # get the basename and take out the hemisphere suffix = os.path.basename(value).split(".", 1)[1] return spec.argstr % suffix - return super(FixTopology, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -2793,7 +2792,7 @@ def run(self, **inputs): self, self.inputs.in_white, "surf", - "{0}.white".format(self.inputs.hemisphere), + f"{self.inputs.hemisphere}.white", ) for originalfile in [self.inputs.in_aseg, self.inputs.in_T1]: 
copy2subjdir(self, originalfile, folder="mri") @@ -2808,7 +2807,7 @@ def run(self, **inputs): self, self.inputs.in_label, "label", - "{0}.aparc.annot".format(self.inputs.hemisphere), + f"{self.inputs.hemisphere}.aparc.annot", ) else: os.makedirs( @@ -2816,7 +2815,7 @@ def run(self, **inputs): self.inputs.subjects_dir, self.inputs.subject_id, "label" ) ) - return super(MakeSurfaces, self).run(**inputs) + return super().run(**inputs) def _format_arg(self, name, spec, value): if name in ["in_T1", "in_aseg"]: @@ -2845,7 +2844,7 @@ def _format_arg(self, name, spec, value): basename = os.path.basename(value) suffix = basename.split(".")[1] return spec.argstr % suffix - return super(MakeSurfaces, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -2948,7 +2947,7 @@ def _format_arg(self, name, spec, value): if name == "in_file": basename = os.path.basename(value) return spec.argstr % basename - return super(Curvature, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -3069,7 +3068,7 @@ def _format_arg(self, name, spec, value): if name in ["surface", "curvfile1", "curvfile2"]: prefix = os.path.basename(value).split(".")[1] return spec.argstr % prefix - return super(CurvatureStats, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -3084,7 +3083,7 @@ def run(self, **inputs): copy2subjdir(self, self.inputs.surface, "surf") copy2subjdir(self, self.inputs.curvfile1, "surf") copy2subjdir(self, self.inputs.curvfile2, "surf") - return super(CurvatureStats, self).run(**inputs) + return super().run(**inputs) class JacobianInputSpec(FSTraitedSpec): @@ -3316,12 +3315,12 @@ def run(self, **inputs): copy2subjdir(self, self.inputs.in_aseg, "mri") copy2subjdir(self, self.inputs.aseg, "mri", "aseg.mgz") - return super(VolumeMask, self).run(**inputs) + return super().run(**inputs) def _format_arg(self, name, spec, value): if name == "in_aseg": return spec.argstr % os.path.basename(value).rstrip(".mgz") - return super(VolumeMask, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -3504,17 +3503,17 @@ def run(self, **inputs): self, self.inputs.thickness, "surf", - "{0}.thickness".format(self.inputs.hemisphere), + f"{self.inputs.hemisphere}.thickness", ) if isdefined(self.inputs.cortex_label): copy2subjdir( self, self.inputs.cortex_label, "label", - "{0}.cortex.label".format(self.inputs.hemisphere), + f"{self.inputs.hemisphere}.cortex.label", ) createoutputdirs(self._list_outputs()) - return super(ParcellationStats, self).run(**inputs) + return super().run(**inputs) def _gen_filename(self, name): if name in ["out_table", "out_color"]: @@ -3665,21 +3664,15 @@ def run(self, **inputs): if "subjects_dir" in inputs: inputs["subjects_dir"] = self.inputs.subjects_dir hemi = self.inputs.hemisphere - copy2subjdir( - self, self.inputs.annotation, "label", "{0}.aparc.annot".format(hemi) - ) - copy2subjdir( - self, self.inputs.cortex, "label", "{0}.cortex.label".format(hemi) - ) - copy2subjdir(self, self.inputs.white, "surf", "{0}.white".format(hemi)) - copy2subjdir( - self, self.inputs.thickness, "surf", "{0}.thickness".format(hemi) - ) + copy2subjdir(self, self.inputs.annotation, "label", f"{hemi}.aparc.annot") + copy2subjdir(self, self.inputs.cortex, 
"label", f"{hemi}.cortex.label") + copy2subjdir(self, self.inputs.white, "surf", f"{hemi}.white") + copy2subjdir(self, self.inputs.thickness, "surf", f"{hemi}.thickness") copy2subjdir(self, self.inputs.orig, "mri", "orig.mgz") copy2subjdir(self, self.inputs.rawavg, "mri", "rawavg.mgz") # need to create output directories createoutputdirs(self._list_outputs()) - return super(Contrast, self).run(**inputs) + return super().run(**inputs) def _list_outputs(self): outputs = self._outputs().get() @@ -3907,7 +3900,7 @@ def run(self, **inputs): copy2subjdir(self, self.inputs.lh_annotation, "label") copy2subjdir(self, self.inputs.rh_annotation, "label") - return super(Aparc2Aseg, self).run(**inputs) + return super().run(**inputs) def _format_arg(self, name, spec, value): if name == "aseg": @@ -3917,7 +3910,7 @@ def _format_arg(self, name, spec, value): elif name == "out_file": return spec.argstr % os.path.abspath(value) - return super(Aparc2Aseg, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -4201,7 +4194,7 @@ class LTAConvert(CommandLine): def _format_arg(self, name, spec, value): if name.startswith("out_") and value is True: value = self._list_outputs()[name] - return super(LTAConvert, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/fsl/__init__.py b/nipype/interfaces/fsl/__init__.py index 0d2e9664d2..ffeb9b39fc 100644 --- a/nipype/interfaces/fsl/__init__.py +++ b/nipype/interfaces/fsl/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/interfaces/fsl/aroma.py b/nipype/interfaces/fsl/aroma.py index f4cab884a2..85033a3f88 100644 --- a/nipype/interfaces/fsl/aroma.py +++ b/nipype/interfaces/fsl/aroma.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """This commandline module provides classes for interfacing with the @@ -146,7 +145,7 @@ class ICA_AROMA(CommandLine): def _format_arg(self, name, trait_spec, value): if name == "out_dir": return trait_spec.argstr % os.path.abspath(value) - return super(ICA_AROMA, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/fsl/base.py b/nipype/interfaces/fsl/base.py index dbafde7f87..3b117b4cc5 100644 --- a/nipype/interfaces/fsl/base.py +++ b/nipype/interfaces/fsl/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL @@ -168,7 +167,7 @@ class FSLCommand(CommandLine): ] def __init__(self, **inputs): - super(FSLCommand, self).__init__(**inputs) + super().__init__(**inputs) self.inputs.on_trait_change(self._output_update, "output_type") if self._output_type is None: diff --git a/nipype/interfaces/fsl/dti.py b/nipype/interfaces/fsl/dti.py index 7253b7023b..40b48c770a 100644 --- a/nipype/interfaces/fsl/dti.py +++ b/nipype/interfaces/fsl/dti.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: 
"""The fsl module provides classes for interfacing with the `FSL @@ -290,7 +289,7 @@ class FSLXCommand(FSLCommand): def _run_interface(self, runtime): self._out_dir = os.getcwd() - runtime = super(FSLXCommand, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime @@ -452,7 +451,7 @@ class BEDPOSTX5(FSLXCommand): _can_resume = True def __init__(self, **inputs): - super(BEDPOSTX5, self).__init__(**inputs) + super().__init__(**inputs) self.inputs.on_trait_change(self._cuda_update, "use_gpu") def _cuda_update(self): @@ -475,7 +474,7 @@ def _run_interface(self, runtime): _, _, ext = split_filename(self.inputs.grad_dev) copyfile(self.inputs.grad_dev, os.path.join(subjectdir, "grad_dev" + ext)) - retval = super(BEDPOSTX5, self)._run_interface(runtime) + retval = super()._run_interface(runtime) self._out_dir = subjectdir + ".bedpostX" return retval @@ -816,7 +815,7 @@ def __init__(self, **inputs): ("Deprecated: Please use create_bedpostx_pipeline " "instead"), DeprecationWarning, ) - return super(ProbTrackX, self).__init__(**inputs) + return super().__init__(**inputs) def _run_interface(self, runtime): for i in range(1, len(self.inputs.thsamples) + 1): @@ -853,7 +852,7 @@ def _run_interface(self, runtime): f.write("%s\n" % seed) f.close() - runtime = super(ProbTrackX, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime @@ -861,12 +860,12 @@ def _run_interface(self, runtime): def _format_arg(self, name, spec, value): if name == "target_masks" and isdefined(value): fname = "targets.txt" - return super(ProbTrackX, self)._format_arg(name, spec, [fname]) + return super()._format_arg(name, spec, [fname]) elif name == "seed" and isinstance(value, list): fname = "seeds.txt" - return super(ProbTrackX, self)._format_arg(name, spec, fname) + return super()._format_arg(name, spec, fname) else: - return super(ProbTrackX, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -1068,7 +1067,7 @@ class ProbTrackX2(ProbTrackX): output_spec = ProbTrackX2OutputSpec def _list_outputs(self): - outputs = super(ProbTrackX2, self)._list_outputs() + outputs = super()._list_outputs() if not isdefined(self.inputs.out_dir): out_dir = os.getcwd() @@ -1200,7 +1199,7 @@ def _run_interface(self, runtime): self.inputs.out_file = self._gen_fname( base_name, cwd=os.path.abspath(pth), suffix="_vreg" ) - return super(VecReg, self)._run_interface(runtime) + return super()._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() @@ -1274,7 +1273,7 @@ def _list_outputs(self): self._gen_fname( base_name, cwd=cwd, - suffix="_proj_seg_thr_{}".format(self.inputs.threshold), + suffix=f"_proj_seg_thr_{self.inputs.threshold}", ) ) return outputs @@ -1328,7 +1327,7 @@ class FindTheBiggest(FSLCommand): def _run_interface(self, runtime): if not isdefined(self.inputs.out_file): self.inputs.out_file = self._gen_fname("biggestSegmentation", suffix="") - return super(FindTheBiggest, self)._run_interface(runtime) + return super()._run_interface(runtime) def _list_outputs(self): outputs = self.output_spec().get() @@ -1440,7 +1439,7 @@ def _format_arg(self, name, spec, value): return spec.argstr % self._list_outputs()["skeleton_file"] else: return spec.argstr % value - return super(TractSkeleton, self)._format_arg(name, spec, value) + return super()._format_arg(name, 
spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -1514,7 +1513,7 @@ def _format_arg(self, name, spec, value): if name == "local_max_file": if isinstance(value, bool): return spec.argstr % self._list_outputs()["local_max_file"] - return super(DistanceMap, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/fsl/epi.py b/nipype/interfaces/fsl/epi.py index 26ce76ca4a..35d36f17d0 100644 --- a/nipype/interfaces/fsl/epi.py +++ b/nipype/interfaces/fsl/epi.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL @@ -105,7 +104,7 @@ def _parse_inputs(self, skip=None): if not isdefined(self.inputs.nocheck) or not self.inputs.nocheck: skip += ["nocheck"] - return super(PrepareFieldmap, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() @@ -113,7 +112,7 @@ def _list_outputs(self): return outputs def _run_interface(self, runtime): - runtime = super(PrepareFieldmap, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if runtime.returncode == 0: out_file = self.inputs.out_fieldmap @@ -356,10 +355,10 @@ def _format_arg(self, name, trait_spec, value): if path != "": if not os.path.exists(path): raise ValueError("out_base path must exist if provided") - return super(TOPUP, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _list_outputs(self): - outputs = super(TOPUP, self)._list_outputs() + outputs = super()._list_outputs() del outputs["out_base"] base_path = None if isdefined(self.inputs.out_base): @@ -408,10 +407,8 @@ def _generate_encfile(self): if len(self.inputs.encoding_direction) != len(durations): if len(self.inputs.readout_times) != 1: raise ValueError( - ( - "Readout time must be a float or match the" - "length of encoding directions" - ) + "Readout time must be a float or match the " + "length of encoding directions" ) durations = durations * len(self.inputs.encoding_direction) @@ -430,7 +427,7 @@ def _generate_encfile(self): def _overload_extension(self, value, name=None): if name == "out_base": return value - return super(TOPUP, self)._overload_extension(value, name) + return super()._overload_extension(value, name) class ApplyTOPUPInputSpec(FSLCommandInputSpec): @@ -538,12 +535,12 @@ def _parse_inputs(self, skip=None): if not isdefined(self.inputs.in_index): self.inputs.in_index = list(range(1, len(self.inputs.in_files) + 1)) - return super(ApplyTOPUP, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _format_arg(self, name, spec, value): if name == "in_topup_fieldcoef": return spec.argstr % value.split("_fieldcoef")[0] - return super(ApplyTOPUP, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class EddyInputSpec(FSLCommandInputSpec): @@ -951,7 +948,7 @@ class Eddy(FSLCommand): _num_threads = 1 def __init__(self, **inputs): - super(Eddy, self).__init__(**inputs) + super().__init__(**inputs) self.inputs.on_trait_change(self._num_threads_update, "num_threads") if not isdefined(self.inputs.num_threads): self.inputs.num_threads = self._num_threads @@ -984,7 +981,7 @@ def _run_interface(self, runtime): ) ): self._cmd = "eddy" - runtime = super(Eddy, 
self)._run_interface(runtime) + runtime = super()._run_interface(runtime) # Restore command to avoid side-effects self._cmd = cmd @@ -997,7 +994,7 @@ def _format_arg(self, name, spec, value): return spec.argstr % fname_presuffix(value, use_ext=False) if name == "out_base": return spec.argstr % os.path.abspath(value) - return super(Eddy, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -1411,10 +1408,10 @@ def __init__(self, **inputs): ), DeprecationWarning, ) - return super(EPIDeWarp, self).__init__(**inputs) + return super().__init__(**inputs) def _run_interface(self, runtime): - runtime = super(EPIDeWarp, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime @@ -1514,10 +1511,10 @@ def __init__(self, **inputs): ("Deprecated: Please use nipype.interfaces.fsl.epi.Eddy " "instead"), DeprecationWarning, ) - return super(EddyCorrect, self).__init__(**inputs) + return super().__init__(**inputs) def _run_interface(self, runtime): - runtime = super(EddyCorrect, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime diff --git a/nipype/interfaces/fsl/fix.py b/nipype/interfaces/fsl/fix.py index 1b6d62a5bc..503355d4b1 100644 --- a/nipype/interfaces/fsl/fix.py +++ b/nipype/interfaces/fsl/fix.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fix module provides classes for interfacing with the `FSL FIX @@ -375,7 +374,7 @@ class Cleaner(CommandLine): def _get_cleaned_functional_filename(self, artifacts_list_filename): """extract the proper filename from the first line of the artifacts file""" - artifacts_list_file = open(artifacts_list_filename, "r") + artifacts_list_file = open(artifacts_list_filename) functional_filename, extension = artifacts_list_file.readline().split(".") artifacts_list_file_path, artifacts_list_filename = os.path.split( artifacts_list_filename diff --git a/nipype/interfaces/fsl/maths.py b/nipype/interfaces/fsl/maths.py index d8669c4422..d146a7795c 100644 --- a/nipype/interfaces/fsl/maths.py +++ b/nipype/interfaces/fsl/maths.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -117,7 +116,7 @@ def _format_arg(self, name, spec, value): arg += "p" arg += " %.10f" % value return arg - return super(Threshold, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class StdImageInput(MathsInput): @@ -345,7 +344,7 @@ def _format_arg(self, name, spec, value): if name == "fwhm": sigma = float(value) / np.sqrt(8 * np.log(2)) return spec.argstr % sigma - return super(IsotropicSmooth, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class ApplyMaskInput(MathsInput): @@ -416,7 +415,7 @@ class DilateImage(MathsCommand): def _format_arg(self, name, spec, value): if name == "operation": return spec.argstr % dict(mean="M", modal="D", max="F")[value] - return super(DilateImage, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class ErodeInput(KernelInput): @@ -440,7 +439,7 @@ def _format_arg(self, name, spec, value): if value: return "-eroF" return "-ero" - return super(ErodeImage, self)._format_arg(name, spec, 
value) + return super()._format_arg(name, spec, value) class SpatialFilterInput(KernelInput): @@ -501,7 +500,7 @@ class UnaryMaths(MathsCommand): def _list_outputs(self): self._suffix = "_" + self.inputs.operation - return super(UnaryMaths, self)._list_outputs() + return super()._list_outputs() class BinaryMathsInput(MathsInput): @@ -579,7 +578,7 @@ class MultiImageMaths(MathsCommand): def _format_arg(self, name, spec, value): if name == "op_string": return value % tuple(self.inputs.operand_files) - return super(MultiImageMaths, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class TemporalFilterInput(MathsInput): diff --git a/nipype/interfaces/fsl/model.py b/nipype/interfaces/fsl/model.py index 24adb907cf..087a3caa1f 100644 --- a/nipype/interfaces/fsl/model.py +++ b/nipype/interfaces/fsl/model.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL @@ -144,10 +143,10 @@ class Level1Design(BaseInterface): output_spec = Level1DesignOutputSpec def _create_ev_file(self, evfname, evinfo): - f = open(evfname, "wt") + f = open(evfname, "w") for i in evinfo: if len(i) == 3: - f.write("%f %f %f\n" % (i[0], i[1], i[2])) + f.write(f"{i[0]:f} {i[1]:f} {i[2]:f}\n") else: f.write("%f\n" % i[0]) f.close() @@ -466,7 +465,7 @@ def _list_outputs(self): outputs = self._outputs().get() is_ica = False outputs["feat_dir"] = None - with open(self.inputs.fsf_file, "rt") as fp: + with open(self.inputs.fsf_file) as fp: text = fp.read() if "set fmri(inmelodic) 1" in text: is_ica = True @@ -523,13 +522,11 @@ class FEATModel(FSLCommand): def _format_arg(self, name, trait_spec, value): if name == "fsf_file": - return super(FEATModel, self)._format_arg( - name, trait_spec, self._get_design_root(value) - ) + return super()._format_arg(name, trait_spec, self._get_design_root(value)) elif name == "ev_files": return "" else: - return super(FEATModel, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _get_design_root(self, infile): _, fname = os.path.split(infile) @@ -827,7 +824,7 @@ class FILMGLS(FSLCommand): def _get_pe_files(self, cwd): files = None if isdefined(self.inputs.design_file): - fp = open(self.inputs.design_file, "rt") + fp = open(self.inputs.design_file) for line in fp.readlines(): if line.startswith("/NumWaves"): numpes = int(line.split()[-1]) @@ -842,14 +839,14 @@ def _get_numcons(self): numtcons = 0 numfcons = 0 if isdefined(self.inputs.tcon_file): - fp = open(self.inputs.tcon_file, "rt") + fp = open(self.inputs.tcon_file) for line in fp.readlines(): if line.startswith("/NumContrasts"): numtcons = int(line.split()[-1]) break fp.close() if isdefined(self.inputs.fcon_file): - fp = open(self.inputs.fcon_file, "rt") + fp = open(self.inputs.fcon_file) for line in fp.readlines(): if line.startswith("/NumContrasts"): numfcons = int(line.split()[-1]) @@ -952,7 +949,7 @@ def _run_interface(self, runtime): for i, rundir in enumerate(ensure_list(self.inputs.feat_dirs)): fsf_txt += fsf_dirs.substitute(runno=i + 1, rundir=os.path.abspath(rundir)) fsf_txt += fsf_footer.substitute() - f = open(os.path.join(os.getcwd(), "register.fsf"), "wt") + f = open(os.path.join(os.getcwd(), "register.fsf"), "w") f.write(fsf_txt) f.close() @@ -1136,7 +1133,7 @@ def _run_interface(self, runtime): if os.access(os.path.join(cwd, log_dir), os.F_OK): rmtree(os.path.join(cwd, log_dir)) - 
return super(FLAMEO, self)._run_interface(runtime) + return super()._run_interface(runtime) # ohinds: 2010-04-06 # made these compatible with flameo @@ -1280,7 +1277,7 @@ def _run_interface(self, runtime): # The returncode is meaningless in ContrastMgr. So check the output # in stderr and if it's set, then update the returncode # accordingly. - runtime = super(ContrastMgr, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime @@ -1292,7 +1289,7 @@ def _format_arg(self, name, trait_spec, value): path, _ = os.path.split(value) return path else: - return super(ContrastMgr, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _get_design_root(self, infile): _, fname = os.path.split(infile) @@ -1302,14 +1299,14 @@ def _get_numcons(self): numtcons = 0 numfcons = 0 if isdefined(self.inputs.tcon_file): - fp = open(self.inputs.tcon_file, "rt") + fp = open(self.inputs.tcon_file) for line in fp.readlines(): if line.startswith("/NumContrasts"): numtcons = int(line.split()[-1]) break fp.close() if isdefined(self.inputs.fcon_file): - fp = open(self.inputs.fcon_file, "rt") + fp = open(self.inputs.fcon_file) for line in fp.readlines(): if line.startswith("/NumContrasts"): numfcons = int(line.split()[-1]) @@ -1386,7 +1383,7 @@ def _run_interface(self, runtime): cwd = os.getcwd() mat_txt = [ "/NumWaves 1", - "/NumPoints {:d}".format(self.inputs.num_copes), + f"/NumPoints {self.inputs.num_copes:d}", "/PPheights 1", "", "/Matrix", @@ -1410,7 +1407,7 @@ def _run_interface(self, runtime): grp_txt = [ "/NumWaves 1", - "/NumPoints {:d}".format(self.inputs.num_copes), + f"/NumPoints {self.inputs.num_copes:d}", "", "/Matrix", ] @@ -1422,7 +1419,7 @@ def _run_interface(self, runtime): # write design files for i, name in enumerate(["design.mat", "design.con", "design.grp"]): - f = open(os.path.join(cwd, name), "wt") + f = open(os.path.join(cwd, name), "w") f.write(txt[name]) f.close() @@ -1591,7 +1588,7 @@ def _run_interface(self, runtime): if ("fts" in key) and (nfcons == 0): continue filename = key.replace("_", ".") - f = open(os.path.join(cwd, filename), "wt") + f = open(os.path.join(cwd, filename), "w") f.write(val) f.close() @@ -2098,7 +2095,7 @@ def _format_arg(self, name, spec, value): else: fname = value return spec.argstr % fname - return super(Cluster, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class DualRegressionInputSpec(FSLCommandInputSpec): @@ -2338,21 +2335,17 @@ def _list_outputs(self): prefix = "clusterm" if prefix: outputs["t_p_files"] = glob( - self._gen_fname("%s_%s_p_tstat*" % (self.inputs.base_name, prefix)) + self._gen_fname(f"{self.inputs.base_name}_{prefix}_p_tstat*") ) outputs["t_corrected_p_files"] = glob( - self._gen_fname( - "%s_%s_corrp_tstat*.nii" % (self.inputs.base_name, prefix) - ) + self._gen_fname(f"{self.inputs.base_name}_{prefix}_corrp_tstat*.nii") ) outputs["f_p_files"] = glob( - self._gen_fname("%s_%s_p_fstat*.nii" % (self.inputs.base_name, prefix)) + self._gen_fname(f"{self.inputs.base_name}_{prefix}_p_fstat*.nii") ) outputs["f_corrected_p_files"] = glob( - self._gen_fname( - "%s_%s_corrp_fstat*.nii" % (self.inputs.base_name, prefix) - ) + self._gen_fname(f"{self.inputs.base_name}_{prefix}_corrp_fstat*.nii") ) return outputs @@ -2509,7 +2502,7 @@ class GLM(FSLCommand): output_spec = GLMOutputSpec def _list_outputs(self): - outputs = super(GLM, self)._list_outputs() + outputs = super()._list_outputs() if 
isdefined(self.inputs.out_cope): outputs["out_cope"] = os.path.abspath(self.inputs.out_cope) diff --git a/nipype/interfaces/fsl/possum.py b/nipype/interfaces/fsl/possum.py index 88797aaecd..494be7490c 100644 --- a/nipype/interfaces/fsl/possum.py +++ b/nipype/interfaces/fsl/possum.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/interfaces/fsl/preprocess.py b/nipype/interfaces/fsl/preprocess.py index 8899c6f975..804ea3cfc6 100644 --- a/nipype/interfaces/fsl/preprocess.py +++ b/nipype/interfaces/fsl/preprocess.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL @@ -160,13 +159,13 @@ def _run_interface(self, runtime): # The returncode is meaningless in BET. So check the output # in stderr and if it's set, then update the returncode # accordingly. - runtime = super(BET, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if runtime.stderr: self.raise_exception(runtime) return runtime def _format_arg(self, name, spec, value): - formatted = super(BET, self)._format_arg(name, spec, value) + formatted = super()._format_arg(name, spec, value) if name == "in_file": # Convert to relative path to prevent BET failure # with long paths. @@ -391,7 +390,7 @@ class FAST(FSLCommand): def _format_arg(self, name, spec, value): # first do what should be done in general - formatted = super(FAST, self)._format_arg(name, spec, value) + formatted = super()._format_arg(name, spec, value) if name == "in_files": # FAST needs the -S parameter value to correspond to the number # of input images, otherwise it will ignore all but the first @@ -739,7 +738,7 @@ class FLIRT(FSLCommand): _log_written = False def aggregate_outputs(self, runtime=None, needed_outputs=None): - outputs = super(FLIRT, self).aggregate_outputs( + outputs = super().aggregate_outputs( runtime=runtime, needed_outputs=needed_outputs ) if self.inputs.save_log and not self._log_written: @@ -761,7 +760,7 @@ def _parse_inputs(self, skip=None): "uses_qform arguments to run" ) skip.append("save_log") - return super(FLIRT, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) class ApplyXFMInputSpec(FLIRTInputSpec): @@ -901,7 +900,7 @@ def _format_arg(self, name, spec, value): return "" else: return spec.argstr % value - return super(MCFLIRT, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -1336,7 +1335,7 @@ def _format_arg(self, name, spec, value): return spec.argstr % value[0] if name in list(self.filemap.keys()): return spec.argstr % self._list_outputs()[name] - return super(FNIRT, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_filename(self, name): if name in ["warped_file", "log_file"]: @@ -1354,7 +1353,7 @@ def write_config(self, configfile): """ try: fid = open(configfile, "w+") - except IOError: + except OSError: print("unable to create config_file %s" % (configfile)) for item in list(self.inputs.get().items()): @@ -1481,7 +1480,7 @@ class ApplyWarp(FSLCommand): def _format_arg(self, name, spec, value): if name == "superlevel": return spec.argstr % str(value) - return super(ApplyWarp, self)._format_arg(name, spec, value) + return 
super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -1681,7 +1680,7 @@ def _format_arg(self, name, spec, value): for filename, thresh in value: arglist.extend([filename, "%.10f" % thresh]) return " ".join(arglist) - return super(SUSAN, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -1912,10 +1911,7 @@ def _parse_inputs(self, skip=None): if not input_phase and not input_vsm and not input_fmap: raise RuntimeError( - ( - "Either phasemap_in_file, shift_in_file or fmap_in_file must " - "be set." - ) + "Either phasemap_in_file, shift_in_file or fmap_in_file must " "be set." ) if not isdefined(self.inputs.in_file): @@ -1956,10 +1952,8 @@ def _parse_inputs(self, skip=None): trait_spec.name_source = "shift_in_file" else: raise RuntimeError( - ( - "Either phasemap_in_file, shift_in_file or " - "fmap_in_file must be set." - ) + "Either phasemap_in_file, shift_in_file or " + "fmap_in_file must be set." ) if vsm_save_unmasked: @@ -1991,10 +1985,8 @@ def _parse_inputs(self, skip=None): trait_spec.name_source = "fmap_in_file" else: raise RuntimeError( - ( - "Either phasemap_in_file, shift_in_file or " - "fmap_in_file must be set." - ) + "Either phasemap_in_file, shift_in_file or " + "fmap_in_file must be set." ) if fmap_save_unmasked: @@ -2004,7 +1996,7 @@ def _parse_inputs(self, skip=None): else: skip += ["save_fmap", "save_unmasked_fmap", "fmap_out_file"] - return super(FUGUE, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) class PRELUDEInputSpec(FSLCommandInputSpec): @@ -2096,7 +2088,7 @@ class PRELUDE(FSLCommand): _cmd = "prelude" def __init__(self, **kwargs): - super(PRELUDE, self).__init__(**kwargs) + super().__init__(**kwargs) warn("This has not been fully tested. 
Please report any failures.") def _list_outputs(self): @@ -2270,9 +2262,9 @@ def _gen_fname(self, basename): method = thres.replace(".", "") if basename == "original_segmentations": - return op.abspath("%s_all_%s_origsegs.nii.gz" % (outname, method)) + return op.abspath(f"{outname}_all_{method}_origsegs.nii.gz") if basename == "segmentation_file": - return op.abspath("%s_all_%s_firstseg.nii.gz" % (outname, method)) + return op.abspath(f"{outname}_all_{method}_firstseg.nii.gz") return None diff --git a/nipype/interfaces/fsl/tests/__init__.py b/nipype/interfaces/fsl/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/fsl/tests/__init__.py +++ b/nipype/interfaces/fsl/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/fsl/tests/test_FILMGLS.py b/nipype/interfaces/fsl/tests/test_FILMGLS.py index a308e9da50..ce2cc57ffd 100644 --- a/nipype/interfaces/fsl/tests/test_FILMGLS.py +++ b/nipype/interfaces/fsl/tests/test_FILMGLS.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from nipype.interfaces.fsl.model import FILMGLS, FILMGLSInputSpec diff --git a/nipype/interfaces/fsl/tests/test_Level1Design_functions.py b/nipype/interfaces/fsl/tests/test_Level1Design_functions.py index f13ddfaccf..157a217dbc 100644 --- a/nipype/interfaces/fsl/tests/test_Level1Design_functions.py +++ b/nipype/interfaces/fsl/tests/test_Level1Design_functions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os from ...base import Undefined from ..model import Level1Design @@ -35,4 +34,4 @@ def test_level1design(tmpdir): do_tempfilter, key, ) - assert "set fmri(convolve1) {0}".format(val) in output_txt + assert f"set fmri(convolve1) {val}" in output_txt diff --git a/nipype/interfaces/fsl/tests/test_base.py b/nipype/interfaces/fsl/tests/test_base.py index b030a28a18..c1cc56fc36 100644 --- a/nipype/interfaces/fsl/tests/test_base.py +++ b/nipype/interfaces/fsl/tests/test_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/fsl/tests/test_dti.py b/nipype/interfaces/fsl/tests/test_dti.py index 0a6a2e8d63..5e5297d608 100644 --- a/nipype/interfaces/fsl/tests/test_dti.py +++ b/nipype/interfaces/fsl/tests/test_dti.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -371,7 +370,7 @@ def test_tbss_skeleton(create_files_in_directory): bones.inputs.data_file = "b.nii" # Even though that's silly # Now we get a command line - assert bones.cmdline == "tbss_skeleton -i a.nii -p 0.200 b.nii %s b.nii %s" % ( + assert bones.cmdline == "tbss_skeleton -i a.nii -p 0.200 b.nii {} b.nii {}".format( Info.standard_image("LowerCingulum_1mm.nii.gz"), os.path.join(newdir, "b_skeletonised.nii"), ) @@ -409,7 +408,7 @@ def test_distancemap(create_files_in_directory): # And we should be able to write out a maxima map mapper.inputs.local_max_file = True - assert mapper.cmdline == "distancemap --out=%s --in=a.nii --localmax=%s" % ( + assert mapper.cmdline == "distancemap --out={} --in=a.nii --localmax={}".format( os.path.join(newdir, "a_dstmap.nii"), os.path.join(newdir, "a_lclmax.nii"), ) diff --git a/nipype/interfaces/fsl/tests/test_epi.py b/nipype/interfaces/fsl/tests/test_epi.py index e8f408de45..6e796324a8 100644 --- a/nipype/interfaces/fsl/tests/test_epi.py +++ b/nipype/interfaces/fsl/tests/test_epi.py @@ -1,4 +1,3 @@ -# -*- coding: 
utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/fsl/tests/test_maths.py b/nipype/interfaces/fsl/tests/test_maths.py index d082910e42..189fff8b3f 100644 --- a/nipype/interfaces/fsl/tests/test_maths.py +++ b/nipype/interfaces/fsl/tests/test_maths.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -28,10 +27,10 @@ def test_maths_base(create_files_in_directory_plus_output_type): # Set an in file maths.inputs.in_file = "a.nii" - out_file = "a_maths{}".format(out_ext) + out_file = f"a_maths{out_ext}" # Now test the most basic command line - assert maths.cmdline == "fslmaths a.nii {}".format(os.path.join(testdir, out_file)) + assert maths.cmdline == f"fslmaths a.nii {os.path.join(testdir, out_file)}" # Now test that we can set the various data types dtypes = ["float", "char", "int", "short", "double", "input"] @@ -103,9 +102,9 @@ def test_threshold(create_files_in_directory_plus_output_type): cmdline = "fslmaths a.nii {} b.nii" for val in [0, 0.0, -1, -1.5, -0.5, 0.5, 3, 400, 400.5]: thresh.inputs.thresh = val - assert thresh.cmdline == cmdline.format("-thr {:.10f}".format(val)) + assert thresh.cmdline == cmdline.format(f"-thr {val:.10f}") - val = "{:.10f}".format(42) + val = f"{42:.10f}" thresh = fsl.Threshold( in_file="a.nii", out_file="b.nii", thresh=42, use_robust_range=True ) @@ -144,7 +143,7 @@ def test_meanimage(create_files_in_directory_plus_output_type): # Test the auto naming meaner = fsl.MeanImage(in_file="a.nii") assert meaner.cmdline == "fslmaths a.nii -Tmean {}".format( - os.path.join(testdir, "a_mean{}".format(out_ext)) + os.path.join(testdir, f"a_mean{out_ext}") ) @@ -196,7 +195,7 @@ def test_maximage(create_files_in_directory_plus_output_type): # Test the auto naming maxer = fsl.MaxImage(in_file="a.nii") assert maxer.cmdline == "fslmaths a.nii -Tmax {}".format( - os.path.join(testdir, "a_max{}".format(out_ext)) + os.path.join(testdir, f"a_max{out_ext}") ) @@ -226,7 +225,7 @@ def test_smooth(create_files_in_directory_plus_output_type): # Test automatic naming smoother = fsl.IsotropicSmooth(in_file="a.nii", sigma=5) assert smoother.cmdline == "fslmaths a.nii -s {:.5f} {}".format( - 5, os.path.join(testdir, "a_smooth{}".format(out_ext)) + 5, os.path.join(testdir, f"a_smooth{out_ext}") ) @@ -251,7 +250,7 @@ def test_mask(create_files_in_directory_plus_output_type): # Test auto name generation masker = fsl.ApplyMask(in_file="a.nii", mask_file="b.nii") assert masker.cmdline == "fslmaths a.nii -mas b.nii " + os.path.join( - testdir, "a_masked{}".format(out_ext) + testdir, f"a_masked{out_ext}" ) @@ -273,7 +272,7 @@ def test_dilation(create_files_in_directory_plus_output_type): for op in ["mean", "modal", "max"]: cv = dict(mean="M", modal="D", max="F") diller.inputs.operation = op - assert diller.cmdline == "fslmaths a.nii -dil{} b.nii".format(cv[op]) + assert diller.cmdline == f"fslmaths a.nii -dil{cv[op]} b.nii" # Now test the different kernel options for k in ["3D", "2D", "box", "boxv", "gauss", "sphere"]: @@ -281,8 +280,7 @@ def test_dilation(create_files_in_directory_plus_output_type): diller.inputs.kernel_shape = k diller.inputs.kernel_size = size assert ( - diller.cmdline - == "fslmaths a.nii -kernel {} {:.4f} -dilF b.nii".format(k, size) + diller.cmdline == f"fslmaths a.nii -kernel {k} {size:.4f} -dilF b.nii" ) # Test that we can use a file kernel 
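# --- Editor's note: illustration only, not part of the diff ---
# The hunks above and below in test_maths.py all make the same mechanical change:
# "%"-interpolation and str.format() calls are rewritten as f-strings. The two
# spellings produce identical command strings, which is why only the test
# expectations change and not the fslmaths interfaces themselves. A minimal,
# self-contained sketch (testdir and out_ext here are hypothetical values):
import os

testdir, out_ext = "/tmp/testdir", ".nii.gz"
old_style = "fslmaths a.nii -dilF {}".format(os.path.join(testdir, "a_dil{}".format(out_ext)))
new_style = f"fslmaths a.nii -dilF {os.path.join(testdir, f'a_dil{out_ext}')}"
assert old_style == new_style  # same command line, only the formatting syntax differs
# ---------------------------------------------------------------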
@@ -296,7 +294,7 @@ def test_dilation(create_files_in_directory_plus_output_type): # Test that we don't need to request an out name dil = fsl.DilateImage(in_file="a.nii", operation="max") assert dil.cmdline == "fslmaths a.nii -dilF {}".format( - os.path.join(testdir, "a_dil{}".format(out_ext)) + os.path.join(testdir, f"a_dil{out_ext}") ) @@ -320,7 +318,7 @@ def test_erosion(create_files_in_directory_plus_output_type): # Test that we don't need to request an out name erode = fsl.ErodeImage(in_file="a.nii") assert erode.cmdline == "fslmaths a.nii -ero {}".format( - os.path.join(testdir, "a_ero{}".format(out_ext)) + os.path.join(testdir, f"a_ero{out_ext}") ) @@ -341,12 +339,12 @@ def test_spatial_filter(create_files_in_directory_plus_output_type): # Test the different operations for op in ["mean", "meanu", "median"]: filter.inputs.operation = op - assert filter.cmdline == "fslmaths a.nii -f{} b.nii".format(op) + assert filter.cmdline == f"fslmaths a.nii -f{op} b.nii" # Test that we don't need to ask for an out name filter = fsl.SpatialFilter(in_file="a.nii", operation="mean") assert filter.cmdline == "fslmaths a.nii -fmean {}".format( - os.path.join(testdir, "a_filt{}".format(out_ext)) + os.path.join(testdir, f"a_filt{out_ext}") ) @@ -368,13 +366,13 @@ def test_unarymaths(create_files_in_directory_plus_output_type): ops = ["exp", "log", "sin", "cos", "sqr", "sqrt", "recip", "abs", "bin", "index"] for op in ops: maths.inputs.operation = op - assert maths.cmdline == "fslmaths a.nii -{} b.nii".format(op) + assert maths.cmdline == f"fslmaths a.nii -{op} b.nii" # Test that we don't need to ask for an out file for op in ops: maths = fsl.UnaryMaths(in_file="a.nii", operation=op) assert maths.cmdline == "fslmaths a.nii -{} {}".format( - op, os.path.join(testdir, "a_{}{}".format(op, out_ext)) + op, os.path.join(testdir, f"a_{op}{out_ext}") ) @@ -400,7 +398,7 @@ def test_binarymaths(create_files_in_directory_plus_output_type): maths = fsl.BinaryMaths(in_file="a.nii", out_file="c.nii", operation=op) if ent == "b.nii": maths.inputs.operand_file = ent - assert maths.cmdline == "fslmaths a.nii -{} b.nii c.nii".format(op) + assert maths.cmdline == f"fslmaths a.nii -{op} b.nii c.nii" else: maths.inputs.operand_value = ent assert maths.cmdline == "fslmaths a.nii -{} {:.8f} c.nii".format( @@ -411,7 +409,7 @@ def test_binarymaths(create_files_in_directory_plus_output_type): for op in ops: maths = fsl.BinaryMaths(in_file="a.nii", operation=op, operand_file="b.nii") assert maths.cmdline == "fslmaths a.nii -{} b.nii {}".format( - op, os.path.join(testdir, "a_maths{}".format(out_ext)) + op, os.path.join(testdir, f"a_maths{out_ext}") ) @@ -470,5 +468,5 @@ def test_tempfilt(create_files_in_directory_plus_output_type): # Test that we don't need to ask for an out file filt = fsl.TemporalFilter(in_file="a.nii", highpass_sigma=64) assert filt.cmdline == "fslmaths a.nii -bptf 64.000000 -1.000000 {}".format( - os.path.join(testdir, "a_filt{}".format(out_ext)) + os.path.join(testdir, f"a_filt{out_ext}") ) diff --git a/nipype/interfaces/fsl/tests/test_model.py b/nipype/interfaces/fsl/tests/test_model.py index 8c12f04fa4..7278f1f6aa 100644 --- a/nipype/interfaces/fsl/tests/test_model.py +++ b/nipype/interfaces/fsl/tests/test_model.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/fsl/tests/test_preprocess.py b/nipype/interfaces/fsl/tests/test_preprocess.py index 
a5752d8d49..b322077675 100644 --- a/nipype/interfaces/fsl/tests/test_preprocess.py +++ b/nipype/interfaces/fsl/tests/test_preprocess.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -43,12 +42,12 @@ def test_bet(setup_infile): # Test generated outfile name better.inputs.in_file = tmp_infile outfile = fsl_name(better, "foo_brain") - realcmd = "bet %s %s" % (tmp_infile, outfile) + realcmd = f"bet {tmp_infile} {outfile}" assert better.cmdline == realcmd # Test specified outfile name outfile = fsl_name(better, "/newdata/bar") better.inputs.out_file = outfile - realcmd = "bet %s %s" % (tmp_infile, outfile) + realcmd = f"bet {tmp_infile} {outfile}" assert better.cmdline == realcmd # infile foo.nii doesn't exist @@ -104,13 +103,13 @@ def test_fast(setup_infile): assert faster.inputs.manual_seg == Undefined assert faster.inputs != fasted.inputs assert fasted.cmdline == "fast -v -S 1 %s" % (tmp_infile) - assert fasted2.cmdline == "fast -v -S 2 %s %s" % (tmp_infile, tmp_infile) + assert fasted2.cmdline == f"fast -v -S 2 {tmp_infile} {tmp_infile}" faster = fsl.FAST() faster.inputs.in_files = tmp_infile assert faster.cmdline == "fast -S 1 %s" % (tmp_infile) faster.inputs.in_files = [tmp_infile, tmp_infile] - assert faster.cmdline == "fast -S 2 %s %s" % (tmp_infile, tmp_infile) + assert faster.cmdline == f"fast -S 2 {tmp_infile} {tmp_infile}" # Our options and some test values for them # Should parallel the opt_map structure in the class for clarity @@ -123,7 +122,7 @@ def test_fast(setup_infile): "segments": ("-g", True), "init_transform": ("-a %s" % (tmp_infile), "%s" % (tmp_infile)), "other_priors": ( - "-A %s %s %s" % (tmp_infile, tmp_infile, tmp_infile), + f"-A {tmp_infile} {tmp_infile} {tmp_infile}", (["%s" % (tmp_infile), "%s" % (tmp_infile), "%s" % (tmp_infile)]), ), "no_pve": ("--nopve", True), @@ -242,7 +241,7 @@ def test_flirt(setup_flirt): pth, fname, ext = split_filename(infile) outfile = fsl_name(flirter, "%s_flirt" % fname) outmat = "%s_flirt.mat" % fname - realcmd = "flirt -in %s -ref %s -out %s -omat %s" % ( + realcmd = "flirt -in {} -ref {} -out {} -omat {}".format( infile, reffile, outfile, @@ -303,7 +302,7 @@ def test_flirt(setup_flirt): else: value = trait_spec.default param = trait_spec.argstr % value - cmdline = "flirt -in %s -ref %s" % (infile, reffile) + cmdline = f"flirt -in {infile} -ref {reffile}" # Handle autogeneration of outfile pth, fname, ext = split_filename(infile) outfile = fsl_name(fsl.FLIRT(), "%s_flirt" % fname) diff --git a/nipype/interfaces/fsl/tests/test_utils.py b/nipype/interfaces/fsl/tests/test_utils.py index ca52354dd4..8c295a9fb4 100644 --- a/nipype/interfaces/fsl/tests/test_utils.py +++ b/nipype/interfaces/fsl/tests/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -75,7 +74,7 @@ def test_fslmerge(create_files_in_directory_plus_output_type): # verify that providing a tr value updates the dimension to tr merger.inputs.tr = 2.25 - assert merger.cmdline == "fslmerge -tr foo_merged.nii %s %.2f" % ( + assert merger.cmdline == "fslmerge -tr foo_merged.nii {} {:.2f}".format( " ".join(filelist), 2.25, ) @@ -89,7 +88,7 @@ def test_fslmerge(create_files_in_directory_plus_output_type): tr=2.25, ) - assert merger2.cmdline == "fslmerge -tr foo_merged.nii %s %.2f" % ( + assert merger2.cmdline == "fslmerge -tr foo_merged.nii {} 
{:.2f}".format( " ".join(filelist), 2.25, ) @@ -168,9 +167,12 @@ def test_overlay(create_files_in_directory_plus_output_type): auto_thresh_bg=True, out_file="foo2_overlay.nii", ) - assert overlay2.cmdline == "overlay 1 0 %s -a %s 2.50 10.00 foo2_overlay.nii" % ( - filelist[1], - filelist[0], + assert ( + overlay2.cmdline + == "overlay 1 0 {} -a {} 2.50 10.00 foo2_overlay.nii".format( + filelist[1], + filelist[0], + ) ) @@ -196,9 +198,12 @@ def test_slicer(create_files_in_directory_plus_output_type): slicer.inputs.all_axial = True slicer.inputs.image_width = 750 slicer.inputs.out_file = "foo_bar.png" - assert slicer.cmdline == "slicer %s %s -L -i 10.000 20.000 -A 750 foo_bar.png" % ( - filelist[0], - filelist[1], + assert ( + slicer.cmdline + == "slicer {} {} -L -i 10.000 20.000 -A 750 foo_bar.png".format( + filelist[0], + filelist[1], + ) ) # .run based parameter setting @@ -313,7 +318,7 @@ def test_convertxfm(create_files_in_directory_plus_output_type): cvt2 = fsl.ConvertXFM( in_file=filelist[0], in_file2=filelist[1], concat_xfm=True, out_file="bar.mat" ) - assert cvt2.cmdline == "convert_xfm -omat bar.mat -concat %s %s" % ( + assert cvt2.cmdline == "convert_xfm -omat bar.mat -concat {} {}".format( filelist[1], filelist[0], ) diff --git a/nipype/interfaces/fsl/utils.py b/nipype/interfaces/fsl/utils.py index 734eccc74d..c9366cc014 100644 --- a/nipype/interfaces/fsl/utils.py +++ b/nipype/interfaces/fsl/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The fsl module provides classes for interfacing with the `FSL @@ -261,8 +260,8 @@ class Smooth(FSLCommand): def _format_arg(self, name, trait_spec, value): if name == "fwhm": sigma = float(value) / np.sqrt(8 * np.log(2)) - return super(Smooth, self)._format_arg(name, trait_spec, sigma) - return super(Smooth, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, sigma) + return super()._format_arg(name, trait_spec, value) class SliceInputSpec(FSLCommandInputSpec): @@ -410,7 +409,7 @@ def _format_arg(self, name, spec, value): if isdefined(self.inputs.tr): return "-tr" return spec.argstr % value - return super(Merge, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class ExtractROIInputSpec(FSLCommandInputSpec): @@ -484,7 +483,7 @@ class ExtractROI(FSLCommand): def _format_arg(self, name, spec, value): if name == "crop_list": return " ".join(map(str, sum(list(map(list, value)), []))) - return super(ExtractROI, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): """Create a Bunch which contains all possible files generated @@ -630,7 +629,7 @@ def _gen_filename(self, name): return None def _parse_inputs(self, skip=None): - return super(ImageMaths, self)._parse_inputs(skip=["suffix"]) + return super()._parse_inputs(skip=["suffix"]) def _list_outputs(self): suffix = "_maths" # ohinds: build suffix @@ -716,7 +715,7 @@ def _format_arg(self, name, trait_spec, value): except IndexError: n_cols = 1 return trait_spec.argstr % ",".join(map(str, list(range(1, n_cols + 1)))) - return super(FilterRegressor, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() @@ -805,7 +804,7 @@ def _format_arg(self, name, trait_spec, value): return self.inputs.op_string % self.inputs.mask_file else: raise ValueError("-k %s option 
in op_string requires mask_file") - return super(ImageStats, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def aggregate_outputs(self, runtime=None, needed_outputs=None): outputs = self._outputs() @@ -814,7 +813,7 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): if runtime is None: try: out_stat = load_json(outfile)["stat"] - except IOError: + except OSError: return self.run().outputs else: out_stat = [] @@ -883,7 +882,7 @@ class AvScale(CommandLine): _cmd = "avscale" def _run_interface(self, runtime): - runtime = super(AvScale, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) expr = re.compile( r"Rotation & Translation Matrix:\n(?P[0-9\. \n-]+)[\s\n]*" @@ -1070,12 +1069,12 @@ def _format_arg(self, name, spec, value): else: return "1" if name == "show_negative_stats": - return "%s %.2f %.2f" % ( + return "{} {:.2f} {:.2f}".format( self.inputs.stat_image, self.inputs.stat_thresh[0] * -1, self.inputs.stat_thresh[1] * -1, ) - return super(Overlay, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -1085,7 +1084,7 @@ def _list_outputs(self): not isdefined(self.inputs.show_negative_stats) or not self.inputs.show_negative_stats ): - stem = "%s_and_%s" % ( + stem = "{}_and_{}".format( split_filename(self.inputs.stat_image)[1], split_filename(self.inputs.stat_image2)[1], ) @@ -1233,7 +1232,7 @@ def _format_arg(self, name, spec, value): return "-L" else: return "" - return super(Slicer, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -1350,7 +1349,7 @@ def _format_arg(self, name, spec, value): return "--ymin=%d --ymax=%d" % value elif name == "plot_size": return "-h %d -w %d" % value - return super(PlotTimeSeries, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -1444,7 +1443,7 @@ def _format_arg(self, name, spec, value): if self.inputs.plot_type == "displacement": title = "-t 'MCFLIRT estimated mean displacement (mm)'" labels = "-a abs,rel" - return "%s %s" % (title, labels) + return f"{title} {labels}" # Get the right starting and ending position depending on source # package @@ -1453,17 +1452,17 @@ def _format_arg(self, name, spec, value): ) # Format the title properly - sfstr = "--start=%d --finish=%d" % sfdict["%s_%s" % (source, value[:3])] + sfstr = "--start=%d --finish=%d" % sfdict[f"{source}_{value[:3]}"] titledict = dict(fsl="MCFLIRT", spm="Realign") unitdict = dict(rot="radians", tra="mm") - title = "'%s estimated %s (%s)'" % ( + title = "'{} estimated {} ({})'".format( titledict[source], value, unitdict[value[:3]], ) - return "-t %s %s -a x,y,z" % (title, sfstr) + return f"-t {title} {sfstr} -a x,y,z" elif name == "plot_size": return "-h %d -w %d" % value elif name == "in_file": @@ -1473,7 +1472,7 @@ def _format_arg(self, name, spec, value): else: return "-i %s" % value - return super(PlotMotionParams, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _list_outputs(self): outputs = self._outputs().get() @@ -1577,7 +1576,7 @@ def _list_outputs(self): if self.inputs.concat_xfm: _, infile2, _ = split_filename(self.inputs.in_file2) outfile = fname_presuffix( - "%s_%s" % (infile1, infile2), + f"{infile1}_{infile2}", suffix=".mat", newpath=os.getcwd(), use_ext=False, @@ -2026,7 
+2025,7 @@ def _parse_inputs(self, skip=None): skip += self.inputs._ofs[:1] + self.inputs._ofs[3:] else: skip += self.inputs._ofs[1:] - return super(Complex, self)._parse_inputs(skip) + return super()._parse_inputs(skip) def _gen_filename(self, name): if name == "complex_out_file": @@ -2241,7 +2240,7 @@ def _parse_inputs(self, skip=None): skip += ["out_jacobian"] skip += ["write_jacobian"] - return super(WarpUtils, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) class ConvertWarpInputSpec(FSLCommandInputSpec): @@ -2505,21 +2504,19 @@ def __init__(self, command=None, **inputs): self._in_file = None self._outformat = None - super(WarpPoints, self).__init__(command=command, **inputs) + super().__init__(command=command, **inputs) def _format_arg(self, name, trait_spec, value): if name == "out_file": return "" - return super(WarpPoints, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): fname, ext = op.splitext(self.inputs.in_coords) setattr(self, "_in_file", fname) setattr(self, "_outformat", ext[1:]) - first_args = super(WarpPoints, self)._parse_inputs( - skip=["in_coords", "out_file"] - ) + first_args = super()._parse_inputs(skip=["in_coords", "out_file"]) second_args = fname + ".txt" @@ -2584,7 +2581,7 @@ def _coords_to_trk(self, points, out_file): def _overload_extension(self, value, name): if name == "out_file": - return "%s.%s" % (value, getattr(self, "_outformat")) + return "{}.{}".format(value, getattr(self, "_outformat")) def _run_interface(self, runtime): fname = getattr(self, "_in_file") @@ -2598,7 +2595,7 @@ def _run_interface(self, runtime): tmpfile = self._tmpfile self._trk_to_coords(fname, out_file=tmpfile) - runtime = super(WarpPoints, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) newpoints = np.fromstring("\n".join(runtime.stdout.split("\n")[1:]), sep=" ") if tmpfile is not None: diff --git a/nipype/interfaces/image.py b/nipype/interfaces/image.py index 1535b1baf8..f86ae7ef15 100644 --- a/nipype/interfaces/image.py +++ b/nipype/interfaces/image.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py index 4f6e455959..0152e200e5 100644 --- a/nipype/interfaces/io.py +++ b/nipype/interfaces/io.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Set of interfaces that allow interaction with data. Currently @@ -85,7 +84,7 @@ def copytree(src, dst, use_hardlink=False): hashmethod="content", use_hardlink=use_hardlink, ) - except (IOError, os.error) as why: + except (OSError, os.error) as why: errors.append((srcname, dstname, str(why))) # catch the Error from the recursive copytree so that we can # continue with other files @@ -135,12 +134,12 @@ def _get_head_bucket(s3_resource, bucket_name): ) raise Exception(err_msg) else: - err_msg = "Unable to connect to bucket: %s. Error message:\n%s" % ( + err_msg = "Unable to connect to bucket: {}. Error message:\n{}".format( bucket_name, exc, ) except Exception as exc: - err_msg = "Unable to connect to bucket: %s. Error message:\n%s" % ( + err_msg = "Unable to connect to bucket: {}. 
Error message:\n{}".format( bucket_name, exc, ) @@ -155,14 +154,14 @@ def _list_outputs(self): raise NotImplementedError def _outputs(self): - return self._add_output_traits(super(IOBase, self)._outputs()) + return self._add_output_traits(super()._outputs()) def _add_output_traits(self, base): return base # Class to track percentage of S3 file upload -class ProgressPercentage(object): +class ProgressPercentage: """ Callable class instsance (via __call__ method) that displays upload percentage of a file to S3 @@ -256,12 +255,12 @@ class DataSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): def __setattr__(self, key, value): if key not in self.copyable_trait_names(): if not isdefined(value): - super(DataSinkInputSpec, self).__setattr__(key, value) + super().__setattr__(key, value) self._outputs[key] = value else: if key in self._outputs: self._outputs[key] = value - super(DataSinkInputSpec, self).__setattr__(key, value) + super().__setattr__(key, value) # DataSink outputs @@ -346,7 +345,7 @@ def __init__(self, infields=None, force_run=True, **kwargs): Indicates the input fields to be dynamically created """ - super(DataSink, self).__init__(**kwargs) + super().__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check self._infields = infields @@ -476,7 +475,7 @@ def _return_aws_keys(self): # Check if creds exist if creds_path and os.path.exists(creds_path): - with open(creds_path, "r") as creds_in: + with open(creds_path) as creds_in: # Grab csv rows row1 = creds_in.readline() row2 = creds_in.readline() @@ -886,7 +885,7 @@ def __init__(self, infields=None, outfields=None, **kwargs): """ if not outfields: outfields = ["outfiles"] - super(S3DataGrabber, self).__init__(**kwargs) + super().__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check self._infields = infields @@ -964,12 +963,12 @@ def _list_outputs(self): if re.match(template, fname): filelist.append(fname) if len(filelist) == 0: - msg = "Output key: %s Template: %s returned no files" % ( + msg = "Output key: {} Template: {} returned no files".format( key, template, ) if self.inputs.raise_on_empty: - raise IOError(msg) + raise OSError(msg) else: warn(msg) else: @@ -1012,12 +1011,12 @@ def _list_outputs(self): if re.match(filledtemplate, fname): outfiles.append(fname) if len(outfiles) == 0: - msg = "Output key: %s Template: %s returned no files" % ( + msg = "Output key: {} Template: {} returned no files".format( key, filledtemplate, ) if self.inputs.raise_on_empty: - raise IOError(msg) + raise OSError(msg) else: warn(msg) outputs[key].append(None) @@ -1171,7 +1170,7 @@ def __init__(self, infields=None, outfields=None, **kwargs): """ if not outfields: outfields = ["outfiles"] - super(DataGrabber, self).__init__(**kwargs) + super().__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check self._infields = infields @@ -1239,12 +1238,12 @@ def _list_outputs(self): if not args: filelist = glob.glob(template) if len(filelist) == 0: - msg = "Output key: %s Template: %s returned no files" % ( + msg = "Output key: {} Template: {} returned no files".format( key, template, ) if self.inputs.raise_on_empty: - raise IOError(msg) + raise OSError(msg) else: warn(msg) else: @@ -1284,12 +1283,12 @@ def _list_outputs(self): ) outfiles = glob.glob(filledtemplate) if len(outfiles) == 0: - msg = "Output key: %s Template: %s returned no files" % ( + msg = "Output key: {} Template: {} returned no files".format( key, filledtemplate, ) if self.inputs.raise_on_empty: - raise IOError(msg) + raise 
OSError(msg) else: warn(msg) outputs[key].append(None) @@ -1392,14 +1391,14 @@ def __init__(self, templates, **kwargs): used to select files. """ - super(SelectFiles, self).__init__(**kwargs) + super().__init__(**kwargs) # Infer the infields and outfields from the template infields = [] for name, template in list(templates.items()): for _, field_name, _, _ in string.Formatter().parse(template): if field_name is not None: - field_name = re.match("\w+", field_name).group() + field_name = re.match(r"\w+", field_name).group() if field_name not in infields: infields.append(field_name) @@ -1421,13 +1420,9 @@ def _add_output_traits(self, base): def _list_outputs(self): """Find the files and expose them as interface outputs.""" outputs = {} - info = dict( - [ - (k, v) - for k, v in list(self.inputs.__dict__.items()) - if k in self._infields - ] - ) + info = { + k: v for k, v in list(self.inputs.__dict__.items()) if k in self._infields + } force_lists = self.inputs.force_lists if isinstance(force_lists, bool): @@ -1461,12 +1456,12 @@ def _list_outputs(self): # Handle the case where nothing matched if not filelist: - msg = "No files were found matching %s template: %s" % ( + msg = "No files were found matching {} template: {}".format( field, filled_template, ) if self.inputs.raise_on_empty: - raise IOError(msg) + raise OSError(msg) else: warn(msg) @@ -1920,7 +1915,7 @@ def __init__(self, infields=None, outfields=None, **kwargs): See class examples for usage """ - super(XNATSource, self).__init__(**kwargs) + super().__init__(**kwargs) undefined_traits = {} # used for mandatory inputs check self._infields = infields @@ -1992,7 +1987,7 @@ def _list_outputs(self): if not args: file_objects = xnat.select(template).get("obj") if file_objects == []: - raise IOError("Template %s returned no files" % template) + raise OSError("Template %s returned no files" % template) outputs[key] = simplify_list( [ str(file_object.get()) @@ -2027,7 +2022,7 @@ def _list_outputs(self): file_objects = xnat.select(target).get("obj") if file_objects == []: - raise IOError("Template %s " "returned no files" % target) + raise OSError("Template %s " "returned no files" % target) outfiles = simplify_list( [ @@ -2040,7 +2035,7 @@ def _list_outputs(self): file_objects = xnat.select(template).get("obj") if file_objects == []: - raise IOError("Template %s " "returned no files" % template) + raise OSError("Template %s " "returned no files" % template) outfiles = simplify_list( [ @@ -2104,7 +2099,7 @@ def __setattr__(self, key, value): if key not in self.copyable_trait_names(): self._outputs[key] = value else: - super(XNATSinkInputSpec, self).__setattr__(key, value) + super().__setattr__(key, value) class XNATSink(LibraryBaseInterface, IOBase): @@ -2232,7 +2227,7 @@ def push_file(self, xnat, file_name, out_key, uri_template_args): uri_template_args["container_id"] += "_results" # define resource level - uri_template_args["resource_label"] = "%s_%s" % ( + uri_template_args["resource_label"] = "{}_{}".format( uri_template_args["container_id"], out_key.split(".")[0], ) @@ -2306,7 +2301,7 @@ class SQLiteSink(LibraryBaseInterface, IOBase): _pkg = "sqlite3" def __init__(self, input_names, **inputs): - super(SQLiteSink, self).__init__(**inputs) + super().__init__(**inputs) self._input_names = ensure_list(input_names) add_traits(self.inputs, [name for name in self._input_names]) @@ -2368,7 +2363,7 @@ class MySQLSink(IOBase): input_spec = MySQLSinkInputSpec def __init__(self, input_names, **inputs): - super(MySQLSink, self).__init__(**inputs) + 
super().__init__(**inputs) self._input_names = ensure_list(input_names) add_traits(self.inputs, [name for name in self._input_names]) @@ -2509,7 +2504,7 @@ def __init__(self, infields=None, outfields=None, **kwargs): kwargs = kwargs.copy() kwargs["infields"] = infields kwargs["outfields"] = outfields - super(SSHDataGrabber, self).__init__(**kwargs) + super().__init__(**kwargs) if None in (self.inputs.username, self.inputs.password): raise ValueError( "either both username and password " "are provided or none of them" @@ -2544,7 +2539,7 @@ def _get_files_over_ssh(self, template): # no files msg = "Output template: %s returned no files" % template if self.inputs.raise_on_empty: - raise IOError(msg) + raise OSError(msg) else: warn(msg) @@ -2575,7 +2570,7 @@ def _get_files_over_ssh(self, template): for f in files_to_download: try: sftp.get(os.path.join(template_dir, f), f) - except IOError: + except OSError: iflogger.info("remote file %s not found" % f) # return value @@ -2726,7 +2721,7 @@ def _list_outputs(self): outputs = {} if isdefined(self.inputs.in_file): - with open(self.inputs.in_file, "r") as f: + with open(self.inputs.in_file) as f: data = simplejson.load(f) if not isinstance(data, dict): @@ -2752,12 +2747,12 @@ class JSONFileSinkInputSpec(DynamicTraitedSpec, BaseInterfaceInputSpec): def __setattr__(self, key, value): if key not in self.copyable_trait_names(): if not isdefined(value): - super(JSONFileSinkInputSpec, self).__setattr__(key, value) + super().__setattr__(key, value) self._outputs[key] = value else: if key in self._outputs: self._outputs[key] = value - super(JSONFileSinkInputSpec, self).__setattr__(key, value) + super().__setattr__(key, value) class JSONFileSinkOutputSpec(TraitedSpec): @@ -2796,7 +2791,7 @@ class JSONFileSink(IOBase): output_spec = JSONFileSinkOutputSpec def __init__(self, infields=[], force_run=True, **inputs): - super(JSONFileSink, self).__init__(**inputs) + super().__init__(**inputs) self._input_names = infields undefined_traits = {} @@ -2917,7 +2912,7 @@ def __init__(self, infields=None, **kwargs): infields : list of str Indicates the input fields to be dynamically created """ - super(BIDSDataGrabber, self).__init__(**kwargs) + super().__init__(**kwargs) if not isdefined(self.inputs.output_query): self.inputs.output_query = { @@ -2938,7 +2933,7 @@ def __init__(self, infields=None, **kwargs): from bids import layout as bidslayout bids_config = join(dirname(bidslayout.__file__), "config", "bids.json") - bids_config = json.load(open(bids_config, "r")) + bids_config = json.load(open(bids_config)) infields = [i["name"] for i in bids_config["entities"]] self._infields = infields or [] @@ -2980,7 +2975,7 @@ def _list_outputs(self): if len(filelist) == 0: msg = "Output key: %s returned no files" % key if self.inputs.raise_on_empty: - raise IOError(msg) + raise OSError(msg) else: iflogger.warning(msg) filelist = Undefined diff --git a/nipype/interfaces/matlab.py b/nipype/interfaces/matlab.py index f68c5ea43d..de959988f4 100644 --- a/nipype/interfaces/matlab.py +++ b/nipype/interfaces/matlab.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Interfaces to run MATLAB scripts.""" @@ -97,7 +96,7 @@ def __init__(self, matlab_cmd=None, **inputs): """initializes interface to matlab (default 'matlab -nodesktop -nosplash') """ - super(MatlabCommand, self).__init__(**inputs) + super().__init__(**inputs) if matlab_cmd and isdefined(matlab_cmd): self._cmd = 
matlab_cmd elif self._default_matlab_cmd: @@ -153,7 +152,7 @@ def set_default_paths(cls, paths): def _run_interface(self, runtime): self.terminal_output = "allatonce" - runtime = super(MatlabCommand, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) try: # Matlab can leave the terminal in a barbbled state os.system("stty sane") @@ -170,7 +169,7 @@ def _format_arg(self, name, trait_spec, value): if self.inputs.uses_mcr: argstr = "%s" return self._gen_matlab_command(argstr, value) - return super(MatlabCommand, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _gen_matlab_command(self, argstr, script_lines): """Generates commands and, if mfile specified, writes it to disk.""" @@ -208,12 +207,12 @@ def _gen_matlab_command(self, argstr, script_lines): script_lines = "\n".join(prescript) + script_lines + "\n".join(postscript) if mfile: - with open(os.path.join(cwd, self.inputs.script_file), "wt") as mfile: + with open(os.path.join(cwd, self.inputs.script_file), "w") as mfile: mfile.write(script_lines) if self.inputs.uses_mcr: script = "%s" % (os.path.join(cwd, self.inputs.script_file)) else: - script = "addpath('%s');%s" % ( + script = "addpath('{}');{}".format( cwd, self.inputs.script_file.split(".")[0], ) diff --git a/nipype/interfaces/meshfix.py b/nipype/interfaces/meshfix.py index 505426bfe2..097fdf4010 100644 --- a/nipype/interfaces/meshfix.py +++ b/nipype/interfaces/meshfix.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """MeshFix corrects topological errors in polygonal meshes.""" diff --git a/nipype/interfaces/minc/__init__.py b/nipype/interfaces/minc/__init__.py index a69e38eeb2..b05ef82b5d 100644 --- a/nipype/interfaces/minc/__init__.py +++ b/nipype/interfaces/minc/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The MINC (McConnell Brain Imaging Centre, Montreal Neurological Institute) toolkit. diff --git a/nipype/interfaces/minc/base.py b/nipype/interfaces/minc/base.py index 7fe817dcaa..8731627693 100644 --- a/nipype/interfaces/minc/base.py +++ b/nipype/interfaces/minc/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The minc module provides classes for interfacing with the `MINC @@ -28,7 +27,7 @@ def no_minc(): return not check_minc() -class Info(object): +class Info: """Handle MINC version information. 
version refers to the version of MINC on the system @@ -52,7 +51,7 @@ def version(): clout = CommandLine( command="mincinfo", args="-version", terminal_output="allatonce" ).run() - except IOError: + except OSError: return None out = clout.runtime.stdout diff --git a/nipype/interfaces/minc/minc.py b/nipype/interfaces/minc/minc.py index 4e740afab4..4742c64e72 100644 --- a/nipype/interfaces/minc/minc.py +++ b/nipype/interfaces/minc/minc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The minc module provides classes for interfacing with the `MINC @@ -691,7 +690,7 @@ def _format_arg(self, name, spec, value): return "-p %d,%d" % (value[0], value[1]) else: raise ValueError("Invalid precision argument: " + str(value)) - return super(Dump, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class AverageInputSpec(CommandLineInputSpec): @@ -1633,10 +1632,10 @@ def _format_arg(self, name, spec, value): if isinstance(value, bool) and value: return "--title" elif isinstance(value, str): - return "--title --title_text %s" % (value,) + return f"--title --title_text {value}" else: raise ValueError('Unknown value for "title" argument: ' + str(value)) - return super(Pik, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class BlurInputSpec(CommandLineInputSpec): @@ -1803,7 +1802,7 @@ def _list_outputs(self): @property def cmdline(self): output_file_base = self.inputs.output_file_base - orig_cmdline = super(Blur, self).cmdline + orig_cmdline = super().cmdline if isdefined(output_file_base): return orig_cmdline @@ -1811,7 +1810,7 @@ def cmdline(self): # FIXME this seems like a bit of a hack. Can we force output_file # to show up in cmdline by default, even if it isn't specified in # the instantiation of Pik? 
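# --- Editor's note: illustration only, not part of the diff ---
# The most frequent rewrite in this changeset is super(SomeClass, self) -> super().
# Python 3's zero-argument super() resolves the class from the enclosing __class__
# cell and the instance from the first positional argument, so the two forms are
# interchangeable inside a method body. Hypothetical minimal classes:
class Base:
    def label(self):
        return "base"

class OldStyle(Base):
    def label(self):
        return "old:" + super(OldStyle, self).label()  # Python 2 compatible spelling

class NewStyle(Base):
    def label(self):
        return "new:" + super().label()  # Python 3 only, identical resolution

assert OldStyle().label() == "old:base" and NewStyle().label() == "new:base"
# ---------------------------------------------------------------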
- return "%s %s" % (orig_cmdline, self._gen_output_base()) + return f"{orig_cmdline} {self._gen_output_base()}" class MathInputSpec(CommandLineInputSpec): @@ -2205,13 +2204,13 @@ def _format_arg(self, name, spec, value): if isinstance(value, bool) and value: return spec.argstr elif isinstance(value, bool) and not value: - raise ValueError("Does not make sense to specify %s=False" % (name,)) + raise ValueError(f"Does not make sense to specify {name}=False") elif isinstance(value, float): - return "%s -const %s" % (spec.argstr, value) + return f"{spec.argstr} -const {value}" else: - raise ValueError("Invalid %s argument: %s" % (name, value)) + raise ValueError(f"Invalid {name} argument: {value}") - return super(Math, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _parse_inputs(self): """A number of the command line options expect precisely one or two files.""" @@ -2269,7 +2268,7 @@ def _parse_inputs(self): % (n, nr_input_files) ) - return super(Math, self)._parse_inputs() + return super()._parse_inputs() class ResampleInputSpec(CommandLineInputSpec): @@ -3154,7 +3153,7 @@ class Gennlxfm(CommandLine): _cmd = "gennlxfm" def _list_outputs(self): - outputs = super(Gennlxfm, self)._list_outputs() + outputs = super()._list_outputs() outputs["output_grid"] = re.sub( ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"] ) @@ -3219,10 +3218,10 @@ class XfmConcat(CommandLine): _cmd = "xfmconcat" def _list_outputs(self): - outputs = super(XfmConcat, self)._list_outputs() + outputs = super()._list_outputs() if os.path.exists(outputs["output_file"]): - if "grid" in open(outputs["output_file"], "r").read(): + if "grid" in open(outputs["output_file"]).read(): outputs["output_grids"] = glob.glob( re.sub(".(nlxfm|xfm)$", "_grid_*.mnc", outputs["output_file"]) ) @@ -3410,7 +3409,7 @@ def _list_outputs(self): outputs["output_xfm"] = os.path.abspath(self._gen_filename("output_xfm")) assert os.path.exists(outputs["output_xfm"]) - if "grid" in open(outputs["output_xfm"], "r").read(): + if "grid" in open(outputs["output_xfm"]).read(): outputs["output_grid"] = re.sub( ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_xfm"] ) @@ -3512,7 +3511,7 @@ def _list_outputs(self): outputs["output_file"] = os.path.abspath(self._gen_outfilename()) assert os.path.exists(outputs["output_file"]) - if "grid" in open(outputs["output_file"], "r").read(): + if "grid" in open(outputs["output_file"]).read(): outputs["output_grid"] = re.sub( ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"] ) @@ -3583,7 +3582,7 @@ def _list_outputs(self): outputs["output_file"] = os.path.abspath(self._gen_outfilename()) assert os.path.exists(outputs["output_file"]) - if "grid" in open(outputs["output_file"], "r").read(): + if "grid" in open(outputs["output_file"]).read(): outputs["output_grid"] = re.sub( ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["output_file"] ) @@ -3843,11 +3842,11 @@ class VolSymm(CommandLine): _cmd = "volsymm" def _list_outputs(self): - outputs = super(VolSymm, self)._list_outputs() + outputs = super()._list_outputs() # Have to manually check for the grid files. 
if os.path.exists(outputs["trans_file"]): - if "grid" in open(outputs["trans_file"], "r").read(): + if "grid" in open(outputs["trans_file"]).read(): outputs["output_grid"] = re.sub( ".(nlxfm|xfm)$", "_grid_0.mnc", outputs["trans_file"] ) diff --git a/nipype/interfaces/minc/testdata.py b/nipype/interfaces/minc/testdata.py index 1c33fe2b2b..9bd06355d1 100644 --- a/nipype/interfaces/minc/testdata.py +++ b/nipype/interfaces/minc/testdata.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import os from ...testing import example_data diff --git a/nipype/interfaces/minc/tests/__init__.py b/nipype/interfaces/minc/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/minc/tests/__init__.py +++ b/nipype/interfaces/minc/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mipav/__init__.py b/nipype/interfaces/mipav/__init__.py index 85cc052c1e..51dd200dce 100644 --- a/nipype/interfaces/mipav/__init__.py +++ b/nipype/interfaces/mipav/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """MIPAV enables quantitative analysis and visualization of multimodal medical images.""" from .developer import ( JistLaminarVolumetricLayering, diff --git a/nipype/interfaces/mipav/developer.py b/nipype/interfaces/mipav/developer.py index 03069fcf98..f0d67eb9ba 100644 --- a/nipype/interfaces/mipav/developer.py +++ b/nipype/interfaces/mipav/developer.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/mipav/generate_classes.py b/nipype/interfaces/mipav/generate_classes.py index 55f0f6a5db..ab91e48150 100644 --- a/nipype/interfaces/mipav/generate_classes.py +++ b/nipype/interfaces/mipav/generate_classes.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - if __name__ == "__main__": from nipype.interfaces.slicer.generate_classes import generate_all_classes diff --git a/nipype/interfaces/mipav/tests/__init__.py b/nipype/interfaces/mipav/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/mipav/tests/__init__.py +++ b/nipype/interfaces/mipav/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mixins/reporting.py b/nipype/interfaces/mixins/reporting.py index 182738ca64..90ca804618 100644 --- a/nipype/interfaces/mixins/reporting.py +++ b/nipype/interfaces/mixins/reporting.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ class mixin and utilities for enabling reports for nipype interfaces """ @@ -31,11 +30,11 @@ class ReportCapableInterface(BaseInterface): _out_report = None def __init__(self, generate_report=False, **kwargs): - super(ReportCapableInterface, self).__init__(**kwargs) + super().__init__(**kwargs) self.generate_report = generate_report def _post_run_hook(self, runtime): - runtime = super(ReportCapableInterface, self)._post_run_hook(runtime) + runtime = super()._post_run_hook(runtime) # leave early if there's nothing to do if not self.generate_report: @@ -53,7 +52,7 @@ def _post_run_hook(self, runtime): def _list_outputs(self): try: - outputs = super(ReportCapableInterface, self)._list_outputs() + outputs = super()._list_outputs() except NotImplementedError: outputs = {} if self._out_report is not None: diff --git a/nipype/interfaces/mixins/tests/__init__.py b/nipype/interfaces/mixins/tests/__init__.py index 
40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/mixins/tests/__init__.py +++ b/nipype/interfaces/mixins/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mne/__init__.py b/nipype/interfaces/mne/__init__.py index 820780e54d..4a4750e8a4 100644 --- a/nipype/interfaces/mne/__init__.py +++ b/nipype/interfaces/mne/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- """MNE is a software for exploring, visualizing, and analyzing human neurophysiological data.""" from .base import WatershedBEM diff --git a/nipype/interfaces/mne/base.py b/nipype/interfaces/mne/base.py index 9fa880d44c..45f7e0e9c8 100644 --- a/nipype/interfaces/mne/base.py +++ b/nipype/interfaces/mne/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os.path as op import glob diff --git a/nipype/interfaces/mne/tests/__init__.py b/nipype/interfaces/mne/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/mne/tests/__init__.py +++ b/nipype/interfaces/mne/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mrtrix/__init__.py b/nipype/interfaces/mrtrix/__init__.py index 3aafdc1db7..232dc119cf 100644 --- a/nipype/interfaces/mrtrix/__init__.py +++ b/nipype/interfaces/mrtrix/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """MRTrix version 2 (DEPRECATED) -- tools to perform various types of diffusion MRI analyses.""" diff --git a/nipype/interfaces/mrtrix/convert.py b/nipype/interfaces/mrtrix/convert.py index 8fa2d3a058..8a0e14d3eb 100644 --- a/nipype/interfaces/mrtrix/convert.py +++ b/nipype/interfaces/mrtrix/convert.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os.path as op @@ -119,7 +118,7 @@ def track_gen(track_points): if len(pts_str) < (n_pts * bytesize): if not n_streams == stream_count: raise nb.trackvis.HeaderError( - "Expecting %s points, found only %s" % (stream_count, n_streams) + f"Expecting {stream_count} points, found only {n_streams}" ) iflogger.error( "Expecting %s points, found only %s", stream_count, n_streams diff --git a/nipype/interfaces/mrtrix/preprocess.py b/nipype/interfaces/mrtrix/preprocess.py index c79f9016e9..93bfbbcfd6 100644 --- a/nipype/interfaces/mrtrix/preprocess.py +++ b/nipype/interfaces/mrtrix/preprocess.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/mrtrix/tensors.py b/nipype/interfaces/mrtrix/tensors.py index 70b789d4e3..3680282b89 100644 --- a/nipype/interfaces/mrtrix/tensors.py +++ b/nipype/interfaces/mrtrix/tensors.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/mrtrix/tests/__init__.py b/nipype/interfaces/mrtrix/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/mrtrix/tests/__init__.py +++ b/nipype/interfaces/mrtrix/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mrtrix/tracking.py b/nipype/interfaces/mrtrix/tracking.py index 4624d2c780..53e805eeb6 100644 --- a/nipype/interfaces/mrtrix/tracking.py +++ b/nipype/interfaces/mrtrix/tracking.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; 
py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -433,7 +432,7 @@ class DiffusionTensorStreamlineTrack(StreamlineTrack): def __init__(self, command=None, **inputs): inputs["inputmodel"] = "DT_STREAM" - return super(DiffusionTensorStreamlineTrack, self).__init__(command, **inputs) + return super().__init__(command, **inputs) class ProbabilisticSphericallyDeconvolutedStreamlineTrackInputSpec( @@ -467,9 +466,7 @@ class ProbabilisticSphericallyDeconvolutedStreamlineTrack(StreamlineTrack): def __init__(self, command=None, **inputs): inputs["inputmodel"] = "SD_PROB" - return super( - ProbabilisticSphericallyDeconvolutedStreamlineTrack, self - ).__init__(command, **inputs) + return super().__init__(command, **inputs) class SphericallyDeconvolutedStreamlineTrack(StreamlineTrack): @@ -494,6 +491,4 @@ class SphericallyDeconvolutedStreamlineTrack(StreamlineTrack): def __init__(self, command=None, **inputs): inputs["inputmodel"] = "SD_STREAM" - return super(SphericallyDeconvolutedStreamlineTrack, self).__init__( - command, **inputs - ) + return super().__init__(command, **inputs) diff --git a/nipype/interfaces/mrtrix3/base.py b/nipype/interfaces/mrtrix3/base.py index 1a0ad94c4d..ba8c253e73 100644 --- a/nipype/interfaces/mrtrix3/base.py +++ b/nipype/interfaces/mrtrix3/base.py @@ -110,7 +110,7 @@ def _format_arg(self, name, trait_spec, value): if name == "out_bvec": return trait_spec.argstr % (value, self.inputs.out_bval) - return super(MRTrix3Base, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _parse_inputs(self, skip=None): if skip is None: @@ -131,7 +131,7 @@ def _parse_inputs(self, skip=None): except AttributeError: pass - return super(MRTrix3Base, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) @property def version(self): diff --git a/nipype/interfaces/mrtrix3/connectivity.py b/nipype/interfaces/mrtrix3/connectivity.py index 95e3546266..c111afe969 100644 --- a/nipype/interfaces/mrtrix3/connectivity.py +++ b/nipype/interfaces/mrtrix3/connectivity.py @@ -221,7 +221,7 @@ def _parse_inputs(self, skip=None): "src/dwi/tractography/connectomics/" "example_configs/fs_default.txt", ) - return super(LabelConfig, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() @@ -313,7 +313,7 @@ def _parse_inputs(self, skip=None): "src/dwi/tractography/connectomics/" "example_configs/fs_default.txt", ) - return super(LabelConvert, self)._parse_inputs(skip=skip) + return super()._parse_inputs(skip=skip) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/mrtrix3/tests/__init__.py b/nipype/interfaces/mrtrix3/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/mrtrix3/tests/__init__.py +++ b/nipype/interfaces/mrtrix3/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/mrtrix3/tracking.py b/nipype/interfaces/mrtrix3/tracking.py index 301f5deeff..bc724b42a6 100644 --- a/nipype/interfaces/mrtrix3/tracking.py +++ b/nipype/interfaces/mrtrix3/tracking.py @@ -367,7 +367,7 @@ def _format_arg(self, name, trait_spec, value): value = ["%f" % v for v in value] return trait_spec.argstr % ",".join(value) - return super(Tractography, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _list_outputs(self): outputs = self.output_spec().get() diff --git a/nipype/interfaces/niftyfit/base.py 
b/nipype/interfaces/niftyfit/base.py index cdd116eb38..1239fa967b 100644 --- a/nipype/interfaces/niftyfit/base.py +++ b/nipype/interfaces/niftyfit/base.py @@ -32,7 +32,7 @@ class NiftyFitCommand(CommandLine): def __init__(self, **inputs): """Init method calling super. No version to be checked.""" - super(NiftyFitCommand, self).__init__(**inputs) + super().__init__(**inputs) def _gen_fname(self, basename, out_dir=None, suffix=None, ext=None): if basename == "": diff --git a/nipype/interfaces/niftyfit/dwi.py b/nipype/interfaces/niftyfit/dwi.py index 9adb6b3817..b065e15169 100644 --- a/nipype/interfaces/niftyfit/dwi.py +++ b/nipype/interfaces/niftyfit/dwi.py @@ -338,7 +338,7 @@ def _format_arg(self, name, trait_spec, value): return "" if name == "tenmap2_file" and self.inputs.ten_type != "lower-tri": return "" - return super(FitDwi, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) class DwiToolInputSpec(CommandLineInputSpec): @@ -600,4 +600,4 @@ def _format_arg(self, name, trait_spec, value): self.inputs.dti_flag2 ): return "" - return super(DwiTool, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) diff --git a/nipype/interfaces/niftyfit/tests/test_asl.py b/nipype/interfaces/niftyfit/tests/test_asl.py index b500a9aa68..b919e0a483 100644 --- a/nipype/interfaces/niftyfit/tests/test_asl.py +++ b/nipype/interfaces/niftyfit/tests/test_asl.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/niftyfit/tests/test_qt1.py b/nipype/interfaces/niftyfit/tests/test_qt1.py index 930176467f..794e6c5130 100644 --- a/nipype/interfaces/niftyfit/tests/test_qt1.py +++ b/nipype/interfaces/niftyfit/tests/test_qt1.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/niftyreg/__init__.py b/nipype/interfaces/niftyreg/__init__.py index 1bc01a9ad5..e22eae03ed 100644 --- a/nipype/interfaces/niftyreg/__init__.py +++ b/nipype/interfaces/niftyreg/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/interfaces/niftyreg/base.py b/nipype/interfaces/niftyreg/base.py index f62a92b84a..c507f27d72 100644 --- a/nipype/interfaces/niftyreg/base.py +++ b/nipype/interfaces/niftyreg/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -61,7 +60,7 @@ class NiftyRegCommand(CommandLine): def __init__(self, required_version=None, **inputs): self.num_threads = 1 - super(NiftyRegCommand, self).__init__(**inputs) + super().__init__(**inputs) self.required_version = required_version _version = self.version if _version: @@ -120,7 +119,7 @@ def exists(self): def _format_arg(self, name, spec, value): if name == "omp_core_val": self.numthreads = value - return super(NiftyRegCommand, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _gen_fname(self, basename, out_dir=None, suffix=None, ext=None): if basename == "": diff --git a/nipype/interfaces/niftyreg/reg.py b/nipype/interfaces/niftyreg/reg.py index 1fc357227f..2d31d874e3 100644 --- a/nipype/interfaces/niftyreg/reg.py +++ 
b/nipype/interfaces/niftyreg/reg.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -153,12 +152,12 @@ class RegAladin(NiftyRegCommand): output_spec = RegAladinOutputSpec def _list_outputs(self): - outputs = super(RegAladin, self)._list_outputs() + outputs = super()._list_outputs() # Make a list of the linear transformation file and the input image aff = os.path.abspath(outputs["aff_file"]) flo = os.path.abspath(self.inputs.flo_file) - outputs["avg_output"] = "%s %s" % (aff, flo) + outputs["avg_output"] = f"{aff} {flo}" return outputs @@ -386,7 +385,7 @@ def _remove_extension(in_file): return os.path.join(dn, bn) def _list_outputs(self): - outputs = super(RegF3D, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.vel_flag is True: res_name = self._remove_extension(outputs["res_file"]) @@ -398,7 +397,7 @@ def _list_outputs(self): if self.inputs.vel_flag is True and isdefined(self.inputs.aff_file): cpp_file = os.path.abspath(outputs["cpp_file"]) flo_file = os.path.abspath(self.inputs.flo_file) - outputs["avg_output"] = "%s %s %s" % ( + outputs["avg_output"] = "{} {} {}".format( self.inputs.aff_file, cpp_file, flo_file, @@ -406,6 +405,6 @@ def _list_outputs(self): else: cpp_file = os.path.abspath(outputs["cpp_file"]) flo_file = os.path.abspath(self.inputs.flo_file) - outputs["avg_output"] = "%s %s" % (cpp_file, flo_file) + outputs["avg_output"] = f"{cpp_file} {flo_file}" return outputs diff --git a/nipype/interfaces/niftyreg/regutils.py b/nipype/interfaces/niftyreg/regutils.py index 30799d8f2e..2d02586ec5 100644 --- a/nipype/interfaces/niftyreg/regutils.py +++ b/nipype/interfaces/niftyreg/regutils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The regutils module provides classes for interfacing with the `niftyreg @@ -117,12 +116,12 @@ def _format_arg(self, name, spec, value): inter_val = {"NN": 0, "LIN": 1, "CUB": 3, "SINC": 4} return spec.argstr % inter_val[value] else: - return super(RegResample, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) suffix = self.inputs.type - return os.path.join(path, "{0}_{1}.nii.gz".format(base, suffix)) + return os.path.join(path, f"{base}_{suffix}.nii.gz") class RegJacobianInputSpec(NiftyRegCommandInputSpec): @@ -190,7 +189,7 @@ class RegJacobian(NiftyRegCommand): def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) suffix = self.inputs.type - return os.path.join(path, "{0}_{1}.nii.gz".format(base, suffix)) + return os.path.join(path, f"{base}_{suffix}.nii.gz") class RegToolsInputSpec(NiftyRegCommandInputSpec): @@ -339,7 +338,7 @@ def _format_arg(self, name, spec, value): inter_val = {"NN": 0, "LIN": 1, "CUB": 3, "SINC": 4} return spec.argstr % inter_val[value] else: - return super(RegTools, self)._format_arg(name, spec, value) + return super()._format_arg(name, spec, value) class RegAverageInputSpec(NiftyRegCommandInputSpec): @@ -523,11 +522,11 @@ def _list_outputs(self): @property def cmdline(self): """Rewrite the cmdline to write options in text_file.""" - argv = super(RegAverage, self).cmdline + argv = super().cmdline reg_average_cmd = os.path.join(os.getcwd(), "reg_average_cmd") with open(reg_average_cmd, "w") as f: f.write(argv) - return "%s 
--cmd_file %s" % (self.cmd, reg_average_cmd) + return f"{self.cmd} --cmd_file {reg_average_cmd}" class RegTransformInputSpec(NiftyRegCommandInputSpec): @@ -960,4 +959,4 @@ class RegMeasure(NiftyRegCommand): def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) suffix = self.inputs.measure_type - return os.path.join(path, "{0}_{1}.txt".format(base, suffix)) + return os.path.join(path, f"{base}_{suffix}.txt") diff --git a/nipype/interfaces/niftyreg/tests/__init__.py b/nipype/interfaces/niftyreg/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/niftyreg/tests/__init__.py +++ b/nipype/interfaces/niftyreg/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/niftyreg/tests/test_reg.py b/nipype/interfaces/niftyreg/tests/test_reg.py index 59773c880e..4af3e15bdb 100644 --- a/nipype/interfaces/niftyreg/tests/test_reg.py +++ b/nipype/interfaces/niftyreg/tests/test_reg.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/niftyreg/tests/test_regutils.py b/nipype/interfaces/niftyreg/tests/test_regutils.py index 3efc9efb56..86ec9e5d3a 100644 --- a/nipype/interfaces/niftyreg/tests/test_regutils.py +++ b/nipype/interfaces/niftyreg/tests/test_regutils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -219,7 +218,7 @@ def test_reg_average(): argv = f_obj.read() os.remove(reg_average_cmd) - expected_argv = "%s %s -avg %s %s %s -omp 1" % ( + expected_argv = "{} {} -avg {} {} {} -omp 1".format( get_custom_path("reg_average"), os.path.join(os.getcwd(), "avg_out.nii.gz"), one_file, @@ -230,7 +229,7 @@ def test_reg_average(): assert argv.decode("utf-8") == expected_argv # Test command line with text file - expected_cmd = "%s --cmd_file %s" % ( + expected_cmd = "{} --cmd_file {}".format( get_custom_path("reg_average"), reg_average_cmd, ) @@ -252,7 +251,7 @@ def test_reg_average(): argv = f_obj.read() os.remove(reg_average_cmd) - expected_argv = "%s %s -avg %s %s %s -omp 1" % ( + expected_argv = "{} {} -avg {} {} {} -omp 1".format( get_custom_path("reg_average"), os.path.join(os.getcwd(), "avg_out.txt"), one_file, @@ -277,7 +276,7 @@ def test_reg_average(): argv = f_obj.read() os.remove(reg_average_cmd) - expected_argv = "%s %s -avg_lts %s %s %s -omp 1" % ( + expected_argv = "{} {} -avg_lts {} {} {} -omp 1".format( get_custom_path("reg_average"), os.path.join(os.getcwd(), "avg_out.txt"), one_file, @@ -314,7 +313,7 @@ def test_reg_average(): argv = f_obj.read() os.remove(reg_average_cmd) - expected_argv = "%s %s -avg_tran %s -omp 1 %s %s %s %s %s %s" % ( + expected_argv = "{} {} -avg_tran {} -omp 1 {} {} {} {} {} {}".format( get_custom_path("reg_average"), os.path.join(os.getcwd(), "avg_out.nii.gz"), ref_file, @@ -361,7 +360,7 @@ def test_reg_average(): argv = f_obj.read() os.remove(reg_average_cmd) - expected_argv = "%s %s -demean3 %s -omp 1 %s %s %s %s %s %s %s %s %s" % ( + expected_argv = "{} {} -demean3 {} -omp 1 {} {} {} {} {} {} {} {} {}".format( get_custom_path("reg_average"), os.path.join(os.getcwd(), "avg_out.nii.gz"), ref_file, diff --git a/nipype/interfaces/niftyseg/base.py b/nipype/interfaces/niftyseg/base.py index 65f1f9ff14..efc6c51721 100644 --- a/nipype/interfaces/niftyseg/base.py +++ b/nipype/interfaces/niftyseg/base.py @@ -1,4 +1,3 @@ -# -*- coding: 
utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -28,9 +27,7 @@ class NiftySegCommand(NiftyFitCommand): _min_version = None def __init__(self, **inputs): - super(NiftySegCommand, self).__init__(**inputs) + super().__init__(**inputs) def get_version(self): - return super(NiftySegCommand, self).version_from_command( - cmd="seg_EM", flag="--version" - ) + return super().version_from_command(cmd="seg_EM", flag="--version") diff --git a/nipype/interfaces/niftyseg/em.py b/nipype/interfaces/niftyseg/em.py index d6fb4d5180..eb45fbdb26 100644 --- a/nipype/interfaces/niftyseg/em.py +++ b/nipype/interfaces/niftyseg/em.py @@ -170,4 +170,4 @@ def _format_arg(self, opt, spec, val): _nb_priors = len(self.inputs.priors) return "-priors %d %s" % (_nb_priors, " ".join(self.inputs.priors)) else: - return super(EM, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) diff --git a/nipype/interfaces/niftyseg/label_fusion.py b/nipype/interfaces/niftyseg/label_fusion.py index aa255247d2..232468508c 100644 --- a/nipype/interfaces/niftyseg/label_fusion.py +++ b/nipype/interfaces/niftyseg/label_fusion.py @@ -186,7 +186,7 @@ def _format_arg(self, opt, spec, val): if opt == "classifier_type" and val == "STEPS": return self.get_steps_args() - return super(LabelFusion, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def get_steps_args(self): if not isdefined(self.inputs.template_file): @@ -283,7 +283,7 @@ def _overload_extension(self, value, name=None): path, base, _ = split_filename(value) _, _, ext = split_filename(self.inputs.in_file) suffix = self.inputs.classifier_type.lower() - return os.path.join(path, "{0}_{1}{2}".format(base, suffix, ext)) + return os.path.join(path, f"{base}_{suffix}{ext}") class CalcTopNCCInputSpec(CommandLineInputSpec): @@ -344,7 +344,7 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None): if runtime is None or not runtime.stdout: try: out_files = load_json(outfile)["files"] - except IOError: + except OSError: return self.run().outputs else: out_files = [] diff --git a/nipype/interfaces/niftyseg/maths.py b/nipype/interfaces/niftyseg/maths.py index 4f5edfe0f4..726dba3e7a 100644 --- a/nipype/interfaces/niftyseg/maths.py +++ b/nipype/interfaces/niftyseg/maths.py @@ -89,7 +89,7 @@ def _overload_extension(self, value, name=None): if suffix != "_merged" and isdefined(self.inputs.operation): suffix = "_" + self.inputs.operation - return os.path.join(path, "{0}{1}{2}".format(base, suffix, ext)) + return os.path.join(path, f"{base}{suffix}{ext}") class UnaryMathsInput(MathsInput): @@ -356,12 +356,12 @@ def _format_arg(self, opt, spec, val): # Only float if val in ["pow", "thr", "uthr", "smo", "edge", "sobel3", "sobel5", "smol"]: if not isdefined(self.inputs.operand_value): - err = "operand_value not set for {0}.".format(val) + err = f"operand_value not set for {val}." raise NipypeInterfaceError(err) # only files elif val in ["min", "llsnorm", "masknan", "hdr_copy"]: if not isdefined(self.inputs.operand_file): - err = "operand_file not set for {0}.".format(val) + err = f"operand_file not set for {val}." 
raise NipypeInterfaceError(err) # splitinter: elif val == "splitinter": @@ -372,16 +372,16 @@ def _format_arg(self, opt, spec, val): if opt == "operand_value" and float(val) == 0.0: return "0" - return super(BinaryMaths, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _overload_extension(self, value, name=None): if self.inputs.operation == "hdr_copy": path, base, _ = split_filename(value) _, base, ext = split_filename(self.inputs.operand_file) suffix = self.inputs.operation - return os.path.join(path, "{0}{1}{2}".format(base, suffix, ext)) + return os.path.join(path, f"{base}{suffix}{ext}") else: - return super(BinaryMaths, self)._overload_extension(value, name) + return super()._overload_extension(value, name) class BinaryMathsInputInteger(MathsInput): @@ -600,4 +600,4 @@ def _format_arg(self, opt, spec, val): if opt == "merge_files": return "-merge %d %d %s" % (len(val), self.inputs.dimension, " ".join(val)) - return super(Merge, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) diff --git a/nipype/interfaces/niftyseg/stats.py b/nipype/interfaces/niftyseg/stats.py index 88e173ac5d..8db7764fce 100644 --- a/nipype/interfaces/niftyseg/stats.py +++ b/nipype/interfaces/niftyseg/stats.py @@ -67,7 +67,7 @@ def _parse_stdout(self, stdout): return np.array(out).squeeze() def _run_interface(self, runtime): - new_runtime = super(StatsCommand, self)._run_interface(runtime) + new_runtime = super()._run_interface(runtime) self.output = self._parse_stdout(new_runtime.stdout) return new_runtime diff --git a/nipype/interfaces/niftyseg/tests/test_stats.py b/nipype/interfaces/niftyseg/tests/test_stats.py index 7f3824e01a..b3bb9a3bb0 100644 --- a/nipype/interfaces/niftyseg/tests/test_stats.py +++ b/nipype/interfaces/niftyseg/tests/test_stats.py @@ -28,7 +28,7 @@ def test_unary_stats(): unarys.inputs.in_file = in_file unarys.inputs.operation = "a" - expected_cmd = "{cmd} {in_file} -a".format(cmd=cmd, in_file=in_file) + expected_cmd = f"{cmd} {in_file} -a" assert unarys.cmdline == expected_cmd @@ -53,6 +53,6 @@ def test_binary_stats(): binarys.inputs.operand_value = 2 binarys.inputs.operation = "sa" - expected_cmd = "{cmd} {in_file} -sa 2.00000000".format(cmd=cmd, in_file=in_file) + expected_cmd = f"{cmd} {in_file} -sa 2.00000000" assert binarys.cmdline == expected_cmd diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py index 95494e7f5f..9622baecf0 100644 --- a/nipype/interfaces/nilearn.py +++ b/nipype/interfaces/nilearn.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Nilearn is a Python library for fast and easy statistical learning on NeuroImaging data.""" diff --git a/nipype/interfaces/nipy/__init__.py b/nipype/interfaces/nipy/__init__.py index 72317edae0..c7fee0b8d8 100644 --- a/nipype/interfaces/nipy/__init__.py +++ b/nipype/interfaces/nipy/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """NIPY is a python project for analysis of structural and functional neuroimaging data.""" from .model import FitGLM, EstimateContrast from .preprocess import ComputeMask, SpaceTimeRealigner diff --git a/nipype/interfaces/nipy/base.py b/nipype/interfaces/nipy/base.py index 2d742d3e90..25aef8b873 100644 --- a/nipype/interfaces/nipy/base.py +++ b/nipype/interfaces/nipy/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 
sw=4 et: """ Base interface for nipy """ diff --git a/nipype/interfaces/nipy/model.py b/nipype/interfaces/nipy/model.py index d232fcea1b..6efef33dbb 100644 --- a/nipype/interfaces/nipy/model.py +++ b/nipype/interfaces/nipy/model.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os from .base import NipyBaseInterface diff --git a/nipype/interfaces/nipy/preprocess.py b/nipype/interfaces/nipy/preprocess.py index d9e7f65ade..b6ef21cb5b 100644 --- a/nipype/interfaces/nipy/preprocess.py +++ b/nipype/interfaces/nipy/preprocess.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os import nibabel as nb @@ -61,7 +60,7 @@ def _run_interface(self, runtime): brain_mask = compute_mask(**args) _, name, ext = split_filename(self.inputs.mean_volume) - self._brain_mask_path = os.path.abspath("%s_mask.%s" % (name, ext)) + self._brain_mask_path = os.path.abspath(f"{name}_mask.{ext}") nb.save( nb.Nifti1Image(brain_mask.astype(np.uint8), nii.affine), self._brain_mask_path, diff --git a/nipype/interfaces/nipy/tests/__init__.py b/nipype/interfaces/nipy/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/nipy/tests/__init__.py +++ b/nipype/interfaces/nipy/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/nipy/utils.py b/nipype/interfaces/nipy/utils.py index 08eb80e0b2..cdbb5ea46a 100644 --- a/nipype/interfaces/nipy/utils.py +++ b/nipype/interfaces/nipy/utils.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import warnings import numpy as np import nibabel as nb @@ -63,7 +61,7 @@ def __init__(self, **inputs): ), DeprecationWarning, ) - super(Similarity, self).__init__(**inputs) + super().__init__(**inputs) def _run_interface(self, runtime): from nipy.algorithms.registration.histogram_registration import ( diff --git a/nipype/interfaces/nitime/__init__.py b/nipype/interfaces/nitime/__init__.py index f3fc84079a..883d417ba0 100644 --- a/nipype/interfaces/nitime/__init__.py +++ b/nipype/interfaces/nitime/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Nitime is a library for time-series analysis of data from neuroscience experiments.""" diff --git a/nipype/interfaces/nitime/analysis.py b/nipype/interfaces/nitime/analysis.py index d5b6c3a3de..351b981077 100644 --- a/nipype/interfaces/nitime/analysis.py +++ b/nipype/interfaces/nitime/analysis.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -264,7 +263,7 @@ def _make_output_files(self): fid.write("," + ",".join(self.ROIs) + "\n") # this writes ROI and data to a line for r, line in zip(self.ROIs, open(tmp_f)): - fid.write("%s,%s" % (r, line)) + fid.write(f"{r},{line}") fid.close() def _make_output_figures(self): diff --git a/nipype/interfaces/nitime/base.py b/nipype/interfaces/nitime/base.py index fb31cafc75..7e434f1d3e 100644 --- a/nipype/interfaces/nitime/base.py +++ b/nipype/interfaces/nitime/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Base interface for nitime """ diff --git a/nipype/interfaces/nitime/tests/__init__.py b/nipype/interfaces/nitime/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/nitime/tests/__init__.py +++ b/nipype/interfaces/nitime/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git 
a/nipype/interfaces/nitime/tests/test_nitime.py b/nipype/interfaces/nitime/tests/test_nitime.py index 2f94ccd2d2..64bb8366a0 100644 --- a/nipype/interfaces/nitime/tests/test_nitime.py +++ b/nipype/interfaces/nitime/tests/test_nitime.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/petpvc.py b/nipype/interfaces/petpvc.py index 283677c59a..dbd1d805ed 100644 --- a/nipype/interfaces/petpvc.py +++ b/nipype/interfaces/petpvc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """PETPVC is a toolbox for partial volume correction in positron emission tomography.""" @@ -183,7 +182,7 @@ def _list_outputs(self): if not isdefined(outputs["out_file"]): method_name = self.inputs.pvc.lower() outputs["out_file"] = self._gen_fname( - self.inputs.in_file, suffix="_{}_pvc".format(method_name) + self.inputs.in_file, suffix=f"_{method_name}_pvc" ) outputs["out_file"] = os.path.abspath(outputs["out_file"]) diff --git a/nipype/interfaces/quickshear.py b/nipype/interfaces/quickshear.py index feb9ee22f4..a56a8841bc 100644 --- a/nipype/interfaces/quickshear.py +++ b/nipype/interfaces/quickshear.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Quickshear is a simple geometric defacing algorithm.""" from .base import CommandLineInputSpec, CommandLine, traits, TraitedSpec, File diff --git a/nipype/interfaces/r.py b/nipype/interfaces/r.py index a586de183c..780484bdf8 100644 --- a/nipype/interfaces/r.py +++ b/nipype/interfaces/r.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Interfaces to run R scripts.""" @@ -57,7 +56,7 @@ def __init__(self, r_cmd=None, **inputs): """initializes interface to r (default 'R') """ - super(RCommand, self).__init__(**inputs) + super().__init__(**inputs) if r_cmd and isdefined(r_cmd): self._cmd = r_cmd @@ -83,7 +82,7 @@ def set_default_rfile(self, rfile): def _run_interface(self, runtime): self.terminal_output = "allatonce" - runtime = super(RCommand, self)._run_interface(runtime) + runtime = super()._run_interface(runtime) if "R code threw an exception" in runtime.stderr: self.raise_exception(runtime) return runtime @@ -92,7 +91,7 @@ def _format_arg(self, name, trait_spec, value): if name in ["script"]: argstr = trait_spec.argstr return self._gen_r_command(argstr, value) - return super(RCommand, self)._format_arg(name, trait_spec, value) + return super()._format_arg(name, trait_spec, value) def _gen_r_command(self, argstr, script_lines): """Generates commands and, if rfile specified, writes it to disk.""" @@ -110,7 +109,7 @@ def _gen_r_command(self, argstr, script_lines): script = script.replace("$", "\\$") else: script_path = os.path.join(os.getcwd(), self.inputs.script_file) - with open(script_path, "wt") as rfile: + with open(script_path, "w") as rfile: rfile.write(script_lines) script = "source('%s')" % script_path diff --git a/nipype/interfaces/semtools/__init__.py b/nipype/interfaces/semtools/__init__.py index a09c926c37..ef43a28900 100644 --- a/nipype/interfaces/semtools/__init__.py +++ b/nipype/interfaces/semtools/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """SEM Tools are useful tools for Structural Equation Modeling.""" from .diffusion import * from .featurecreator import GenerateCsfClippedFromClassifiedImage 
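
Note (not part of the patch): the hunks above repeat two mechanical rewrites — dropping the explicit class/instance arguments from super() calls and replacing %-interpolation or str.format with f-strings. The following minimal sketch, with made-up class and variable names, shows that the old and new spellings are equivalent on Python 3:

class Base:
    def __init__(self, **inputs):
        self.inputs = inputs


class Derived(Base):
    def __init__(self, **inputs):
        # Zero-argument form; on Python 3 this is identical to
        # super(Derived, self).__init__(**inputs).
        super().__init__(**inputs)


fname, suffix, ext = "sub-01_bold", "dvars", "tsv"
# The three spellings below build the same string; the patch standardizes
# on the f-string form.
assert (
    "%s_%s.%s" % (fname, suffix, ext)
    == "{}_{}.{}".format(fname, suffix, ext)
    == f"{fname}_{suffix}.{ext}"
)

print(Derived(in_file=f"{fname}.nii.gz").inputs)
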
diff --git a/nipype/interfaces/semtools/brains/__init__.py b/nipype/interfaces/semtools/brains/__init__.py index dd369fb168..9177db7e3d 100644 --- a/nipype/interfaces/semtools/brains/__init__.py +++ b/nipype/interfaces/semtools/brains/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .segmentation import SimilarityIndex, BRAINSTalairach, BRAINSTalairachMask from .utilities import ( HistogramMatchingFilter, diff --git a/nipype/interfaces/semtools/brains/classify.py b/nipype/interfaces/semtools/brains/classify.py index bc46613693..e1305471f0 100644 --- a/nipype/interfaces/semtools/brains/classify.py +++ b/nipype/interfaces/semtools/brains/classify.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/brains/segmentation.py b/nipype/interfaces/semtools/brains/segmentation.py index 79e25c2bda..75105fc0aa 100644 --- a/nipype/interfaces/semtools/brains/segmentation.py +++ b/nipype/interfaces/semtools/brains/segmentation.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/brains/tests/__init__.py b/nipype/interfaces/semtools/brains/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/brains/tests/__init__.py +++ b/nipype/interfaces/semtools/brains/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/brains/utilities.py b/nipype/interfaces/semtools/brains/utilities.py index 78ee3c25c5..a1241aca19 100644 --- a/nipype/interfaces/semtools/brains/utilities.py +++ b/nipype/interfaces/semtools/brains/utilities.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/converters.py b/nipype/interfaces/semtools/converters.py index 4df811eff5..65fd1d4739 100644 --- a/nipype/interfaces/semtools/converters.py +++ b/nipype/interfaces/semtools/converters.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/__init__.py b/nipype/interfaces/semtools/diffusion/__init__.py index 28044fe337..1f56f11145 100644 --- a/nipype/interfaces/semtools/diffusion/__init__.py +++ b/nipype/interfaces/semtools/diffusion/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .diffusion import dtiaverage, dtiestim, dtiprocess, DWIConvert from .tractography import * from .gtract import ( diff --git a/nipype/interfaces/semtools/diffusion/diffusion.py b/nipype/interfaces/semtools/diffusion/diffusion.py index d352adf276..e990606e77 100644 --- a/nipype/interfaces/semtools/diffusion/diffusion.py +++ b/nipype/interfaces/semtools/diffusion/diffusion.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/gtract.py b/nipype/interfaces/semtools/diffusion/gtract.py index 2e5a5816c5..cb9bbaf638 100644 --- a/nipype/interfaces/semtools/diffusion/gtract.py +++ 
b/nipype/interfaces/semtools/diffusion/gtract.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/maxcurvature.py b/nipype/interfaces/semtools/diffusion/maxcurvature.py index be6bfd10e6..66c147ad81 100644 --- a/nipype/interfaces/semtools/diffusion/maxcurvature.py +++ b/nipype/interfaces/semtools/diffusion/maxcurvature.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/tests/__init__.py b/nipype/interfaces/semtools/diffusion/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/diffusion/tests/__init__.py +++ b/nipype/interfaces/semtools/diffusion/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/diffusion/tractography/__init__.py b/nipype/interfaces/semtools/diffusion/tractography/__init__.py index ac45b2050f..809910cf28 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/__init__.py +++ b/nipype/interfaces/semtools/diffusion/tractography/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .commandlineonly import fiberstats from .fiberprocess import fiberprocess from .fibertrack import fibertrack diff --git a/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py b/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py index 6544282a00..fe4520f9df 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py +++ b/nipype/interfaces/semtools/diffusion/tractography/commandlineonly.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py b/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py index e069c8d6b7..d20a7cba53 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py +++ b/nipype/interfaces/semtools/diffusion/tractography/fiberprocess.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py b/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py index cd8f1a5cd3..663010632c 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py +++ b/nipype/interfaces/semtools/diffusion/tractography/fibertrack.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py b/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py +++ b/nipype/interfaces/semtools/diffusion/tractography/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py 
b/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py index 5cd092caa6..4be8a97811 100644 --- a/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py +++ b/nipype/interfaces/semtools/diffusion/tractography/ukftractography.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/featurecreator.py b/nipype/interfaces/semtools/featurecreator.py index 1e5b01f252..85c72a993f 100644 --- a/nipype/interfaces/semtools/featurecreator.py +++ b/nipype/interfaces/semtools/featurecreator.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/filtering/__init__.py b/nipype/interfaces/semtools/filtering/__init__.py index b5b7eccb20..159dc2c490 100644 --- a/nipype/interfaces/semtools/filtering/__init__.py +++ b/nipype/interfaces/semtools/filtering/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .denoising import UnbiasedNonLocalMeans from .featuredetection import ( GenerateSummedGradientImage, diff --git a/nipype/interfaces/semtools/filtering/denoising.py b/nipype/interfaces/semtools/filtering/denoising.py index 24b9055afc..22575b7842 100644 --- a/nipype/interfaces/semtools/filtering/denoising.py +++ b/nipype/interfaces/semtools/filtering/denoising.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/filtering/featuredetection.py b/nipype/interfaces/semtools/filtering/featuredetection.py index df22f88810..7e72c2c2b4 100644 --- a/nipype/interfaces/semtools/filtering/featuredetection.py +++ b/nipype/interfaces/semtools/filtering/featuredetection.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/filtering/tests/__init__.py b/nipype/interfaces/semtools/filtering/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/filtering/tests/__init__.py +++ b/nipype/interfaces/semtools/filtering/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/legacy/__init__.py b/nipype/interfaces/semtools/legacy/__init__.py index 343704adb7..de11d37760 100644 --- a/nipype/interfaces/semtools/legacy/__init__.py +++ b/nipype/interfaces/semtools/legacy/__init__.py @@ -1,2 +1 @@ -# -*- coding: utf-8 -*- from .registration import scalartransform diff --git a/nipype/interfaces/semtools/legacy/registration.py b/nipype/interfaces/semtools/legacy/registration.py index 959a1b1dc0..e899aa44ab 100644 --- a/nipype/interfaces/semtools/legacy/registration.py +++ b/nipype/interfaces/semtools/legacy/registration.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/legacy/tests/__init__.py b/nipype/interfaces/semtools/legacy/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/legacy/tests/__init__.py +++ 
b/nipype/interfaces/semtools/legacy/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/registration/__init__.py b/nipype/interfaces/semtools/registration/__init__.py index 6d52169ab3..808e0f67eb 100644 --- a/nipype/interfaces/semtools/registration/__init__.py +++ b/nipype/interfaces/semtools/registration/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .specialized import VBRAINSDemonWarp, BRAINSDemonWarp, BRAINSTransformFromFiducials from .brainsresample import BRAINSResample from .brainsfit import BRAINSFit diff --git a/nipype/interfaces/semtools/registration/brainsfit.py b/nipype/interfaces/semtools/registration/brainsfit.py index 56c9da54f2..41d8b2c77f 100644 --- a/nipype/interfaces/semtools/registration/brainsfit.py +++ b/nipype/interfaces/semtools/registration/brainsfit.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/registration/brainsresample.py b/nipype/interfaces/semtools/registration/brainsresample.py index a77a52dca3..f7351f17c4 100644 --- a/nipype/interfaces/semtools/registration/brainsresample.py +++ b/nipype/interfaces/semtools/registration/brainsresample.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/registration/brainsresize.py b/nipype/interfaces/semtools/registration/brainsresize.py index 8de4d616b9..ae4c50fe83 100644 --- a/nipype/interfaces/semtools/registration/brainsresize.py +++ b/nipype/interfaces/semtools/registration/brainsresize.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/registration/specialized.py b/nipype/interfaces/semtools/registration/specialized.py index 85f8509a5b..61e71611e7 100644 --- a/nipype/interfaces/semtools/registration/specialized.py +++ b/nipype/interfaces/semtools/registration/specialized.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/registration/tests/__init__.py b/nipype/interfaces/semtools/registration/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/registration/tests/__init__.py +++ b/nipype/interfaces/semtools/registration/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/segmentation/__init__.py b/nipype/interfaces/semtools/segmentation/__init__.py index 0cc6090203..8d11e465ef 100644 --- a/nipype/interfaces/semtools/segmentation/__init__.py +++ b/nipype/interfaces/semtools/segmentation/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .specialized import ( BRAINSCut, BRAINSROIAuto, diff --git a/nipype/interfaces/semtools/segmentation/specialized.py b/nipype/interfaces/semtools/segmentation/specialized.py index a7744775c4..ba33e7a268 100644 --- a/nipype/interfaces/semtools/segmentation/specialized.py +++ b/nipype/interfaces/semtools/segmentation/specialized.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If 
you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/segmentation/tests/__init__.py b/nipype/interfaces/semtools/segmentation/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/segmentation/tests/__init__.py +++ b/nipype/interfaces/semtools/segmentation/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/testing/__init__.py b/nipype/interfaces/semtools/testing/__init__.py index d06a7ea2df..aa4b5619c8 100644 --- a/nipype/interfaces/semtools/testing/__init__.py +++ b/nipype/interfaces/semtools/testing/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .featuredetection import SphericalCoordinateGeneration from .landmarkscompare import LandmarksCompare from .generateaveragelmkfile import GenerateAverageLmkFile diff --git a/nipype/interfaces/semtools/testing/featuredetection.py b/nipype/interfaces/semtools/testing/featuredetection.py index 16735b3bb8..59d7dd3ea7 100644 --- a/nipype/interfaces/semtools/testing/featuredetection.py +++ b/nipype/interfaces/semtools/testing/featuredetection.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/testing/generateaveragelmkfile.py b/nipype/interfaces/semtools/testing/generateaveragelmkfile.py index fe3aa71521..b1c701a119 100644 --- a/nipype/interfaces/semtools/testing/generateaveragelmkfile.py +++ b/nipype/interfaces/semtools/testing/generateaveragelmkfile.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/testing/landmarkscompare.py b/nipype/interfaces/semtools/testing/landmarkscompare.py index 9a5ad26883..a03ce653af 100644 --- a/nipype/interfaces/semtools/testing/landmarkscompare.py +++ b/nipype/interfaces/semtools/testing/landmarkscompare.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/tests/__init__.py b/nipype/interfaces/semtools/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/tests/__init__.py +++ b/nipype/interfaces/semtools/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/semtools/utilities/__init__.py b/nipype/interfaces/semtools/utilities/__init__.py index 2209064909..02a5540951 100644 --- a/nipype/interfaces/semtools/utilities/__init__.py +++ b/nipype/interfaces/semtools/utilities/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .brains import ( BRAINSConstellationModeler, landmarksConstellationWeights, diff --git a/nipype/interfaces/semtools/utilities/brains.py b/nipype/interfaces/semtools/utilities/brains.py index b5964e3555..a7b2591f09 100644 --- a/nipype/interfaces/semtools/utilities/brains.py +++ b/nipype/interfaces/semtools/utilities/brains.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/semtools/utilities/tests/__init__.py b/nipype/interfaces/semtools/utilities/tests/__init__.py index 
40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/semtools/utilities/tests/__init__.py +++ b/nipype/interfaces/semtools/utilities/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/__init__.py b/nipype/interfaces/slicer/__init__.py index 91c56b131f..d874ab88c4 100644 --- a/nipype/interfaces/slicer/__init__.py +++ b/nipype/interfaces/slicer/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ 3D Slicer is a platform for medical image informatics processing and visualization. diff --git a/nipype/interfaces/slicer/base.py b/nipype/interfaces/slicer/base.py index aae54ec00b..de00883265 100644 --- a/nipype/interfaces/slicer/base.py +++ b/nipype/interfaces/slicer/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from ..base import SEMLikeCommandLine diff --git a/nipype/interfaces/slicer/converters.py b/nipype/interfaces/slicer/converters.py index 6d4a824eea..f9aa9e458b 100644 --- a/nipype/interfaces/slicer/converters.py +++ b/nipype/interfaces/slicer/converters.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/diffusion/__init__.py b/nipype/interfaces/slicer/diffusion/__init__.py index d7f3089de5..807fecdde2 100644 --- a/nipype/interfaces/slicer/diffusion/__init__.py +++ b/nipype/interfaces/slicer/diffusion/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .diffusion import ( ResampleDTIVolume, DWIRicianLMMSEFilter, diff --git a/nipype/interfaces/slicer/diffusion/diffusion.py b/nipype/interfaces/slicer/diffusion/diffusion.py index d5ffadae17..56d398b3b3 100644 --- a/nipype/interfaces/slicer/diffusion/diffusion.py +++ b/nipype/interfaces/slicer/diffusion/diffusion.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/diffusion/tests/__init__.py b/nipype/interfaces/slicer/diffusion/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/diffusion/tests/__init__.py +++ b/nipype/interfaces/slicer/diffusion/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/filtering/__init__.py b/nipype/interfaces/slicer/filtering/__init__.py index 13b79f8705..8d7a6c0da4 100644 --- a/nipype/interfaces/slicer/filtering/__init__.py +++ b/nipype/interfaces/slicer/filtering/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .morphology import GrayscaleGrindPeakImageFilter, GrayscaleFillHoleImageFilter from .denoising import ( GradientAnisotropicDiffusion, diff --git a/nipype/interfaces/slicer/filtering/arithmetic.py b/nipype/interfaces/slicer/filtering/arithmetic.py index f57d7adf37..47fb8da82b 100644 --- a/nipype/interfaces/slicer/filtering/arithmetic.py +++ b/nipype/interfaces/slicer/filtering/arithmetic.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/checkerboardfilter.py b/nipype/interfaces/slicer/filtering/checkerboardfilter.py index 6566f15248..429fdc83b8 100644 --- a/nipype/interfaces/slicer/filtering/checkerboardfilter.py +++ b/nipype/interfaces/slicer/filtering/checkerboardfilter.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: 
utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/denoising.py b/nipype/interfaces/slicer/filtering/denoising.py index 6f3af80d2b..fa8518c1bc 100644 --- a/nipype/interfaces/slicer/filtering/denoising.py +++ b/nipype/interfaces/slicer/filtering/denoising.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/extractskeleton.py b/nipype/interfaces/slicer/filtering/extractskeleton.py index d6cca550e8..d0ec5ffe98 100644 --- a/nipype/interfaces/slicer/filtering/extractskeleton.py +++ b/nipype/interfaces/slicer/filtering/extractskeleton.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/histogrammatching.py b/nipype/interfaces/slicer/filtering/histogrammatching.py index f67e9a9e1a..c9eb9af89c 100644 --- a/nipype/interfaces/slicer/filtering/histogrammatching.py +++ b/nipype/interfaces/slicer/filtering/histogrammatching.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/imagelabelcombine.py b/nipype/interfaces/slicer/filtering/imagelabelcombine.py index b8990f3e21..b8e2202d4d 100644 --- a/nipype/interfaces/slicer/filtering/imagelabelcombine.py +++ b/nipype/interfaces/slicer/filtering/imagelabelcombine.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/morphology.py b/nipype/interfaces/slicer/filtering/morphology.py index 40e4960d2b..8338f56407 100644 --- a/nipype/interfaces/slicer/filtering/morphology.py +++ b/nipype/interfaces/slicer/filtering/morphology.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py b/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py index 1de5e87e2f..ccf066f4da 100644 --- a/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py +++ b/nipype/interfaces/slicer/filtering/n4itkbiasfieldcorrection.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py b/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py index 98c0baa178..74db29363a 100644 --- a/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py +++ b/nipype/interfaces/slicer/filtering/resamplescalarvectordwivolume.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/tests/__init__.py 
b/nipype/interfaces/slicer/filtering/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/filtering/tests/__init__.py +++ b/nipype/interfaces/slicer/filtering/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py b/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py index 2fdfc76d52..0bcf5b413c 100644 --- a/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py +++ b/nipype/interfaces/slicer/filtering/thresholdscalarvolume.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py b/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py index d8756b0b05..73c7ed5b47 100644 --- a/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py +++ b/nipype/interfaces/slicer/filtering/votingbinaryholefillingimagefilter.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/generate_classes.py b/nipype/interfaces/slicer/generate_classes.py index 89125a963f..83f76c45b2 100644 --- a/nipype/interfaces/slicer/generate_classes.py +++ b/nipype/interfaces/slicer/generate_classes.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """This script generates Slicer Interfaces based on the CLI modules XML. CLI modules are selected from the hardcoded list below and generated code is placed in the cli_modules.py file (and imported in __init__.py). 
For this to work @@ -46,7 +45,7 @@ def add_class_to_package(class_codes, class_names, module_name, package_dir): import os\n\n\n""" f_m.write(imports) f_m.write("\n\n".join(class_codes)) - f_i.write("from %s import %s\n" % (module_name, ", ".join(class_names))) + f_i.write("from {} import {}\n".format(module_name, ", ".join(class_names))) f_m.close() f_i.close() @@ -127,7 +126,7 @@ def generate_all_classes( all_code = {} for module in modules_list: print("=" * 80) - print("Generating Definition for module {0}".format(module)) + print(f"Generating Definition for module {module}") print("^" * 80) package, code, module = generate_class( module, launcher, redirect_x=redirect_x, mipav_hacks=mipav_hacks @@ -331,7 +330,7 @@ def generate_class( ]: if not param.getElementsByTagName("channel"): raise RuntimeError( - "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field.\n{0}".format( + "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field.\n{}".format( traitsParams ) ) @@ -382,7 +381,7 @@ def generate_class( ) else: raise RuntimeError( - "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field to be in ['input','output'].\n{0}".format( + "Insufficient XML specification: each element of type 'file', 'directory', 'image', 'geometry', 'transform', or 'table' requires 'channel' field to be in ['input','output'].\n{}".format( traitsParams ) ) @@ -417,7 +416,7 @@ def generate_class( output_filenames_code = "_outputs_filenames = {" output_filenames_code += ",".join( - ["'%s':'%s'" % (key, value) for key, value in outputs_filenames.items()] + [f"'{key}':'{value}'" for key, value in outputs_filenames.items()] ) output_filenames_code += "}" @@ -431,7 +430,7 @@ def generate_class( output_spec = %module_name%OutputSpec _cmd = "%launcher% %name% " %output_filenames_code%\n""" - template += " _redirect_x = {0}\n".format(str(redirect_x)) + template += f" _redirect_x = {str(redirect_x)}\n" main_class = ( template.replace("%class_str%", class_string) @@ -494,9 +493,9 @@ def parse_params(params): list = [] for key, value in params.items(): if isinstance(value, (str, bytes)): - list.append('%s="%s"' % (key, value.replace('"', "'"))) + list.append('{}="{}"'.format(key, value.replace('"', "'"))) else: - list.append("%s=%s" % (key, value)) + list.append(f"{key}={value}") return ", ".join(list) diff --git a/nipype/interfaces/slicer/legacy/__init__.py b/nipype/interfaces/slicer/legacy/__init__.py index f65d44f058..4c0da2e0a0 100644 --- a/nipype/interfaces/slicer/legacy/__init__.py +++ b/nipype/interfaces/slicer/legacy/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .diffusion import * from .segmentation import OtsuThresholdSegmentation from .filtering import OtsuThresholdImageFilter, ResampleScalarVolume diff --git a/nipype/interfaces/slicer/legacy/converters.py b/nipype/interfaces/slicer/legacy/converters.py index 490eb5b23c..053415b189 100644 --- a/nipype/interfaces/slicer/legacy/converters.py +++ b/nipype/interfaces/slicer/legacy/converters.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/legacy/diffusion/__init__.py 
b/nipype/interfaces/slicer/legacy/diffusion/__init__.py index 9a9143d214..b40040d9e7 100644 --- a/nipype/interfaces/slicer/legacy/diffusion/__init__.py +++ b/nipype/interfaces/slicer/legacy/diffusion/__init__.py @@ -1,2 +1 @@ -# -*- coding: utf-8 -*- from .denoising import DWIUnbiasedNonLocalMeansFilter diff --git a/nipype/interfaces/slicer/legacy/diffusion/denoising.py b/nipype/interfaces/slicer/legacy/diffusion/denoising.py index 2e5345c34c..5adda02a5a 100644 --- a/nipype/interfaces/slicer/legacy/diffusion/denoising.py +++ b/nipype/interfaces/slicer/legacy/diffusion/denoising.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py b/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py +++ b/nipype/interfaces/slicer/legacy/diffusion/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/legacy/filtering.py b/nipype/interfaces/slicer/legacy/filtering.py index dda29920b2..749ba3942c 100644 --- a/nipype/interfaces/slicer/legacy/filtering.py +++ b/nipype/interfaces/slicer/legacy/filtering.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/legacy/registration.py b/nipype/interfaces/slicer/legacy/registration.py index 109b5c0464..7eee2b069e 100644 --- a/nipype/interfaces/slicer/legacy/registration.py +++ b/nipype/interfaces/slicer/legacy/registration.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/legacy/segmentation.py b/nipype/interfaces/slicer/legacy/segmentation.py index 8440c776c0..0f51afe35c 100644 --- a/nipype/interfaces/slicer/legacy/segmentation.py +++ b/nipype/interfaces/slicer/legacy/segmentation.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/legacy/tests/__init__.py b/nipype/interfaces/slicer/legacy/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/legacy/tests/__init__.py +++ b/nipype/interfaces/slicer/legacy/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/quantification/__init__.py b/nipype/interfaces/slicer/quantification/__init__.py index c0e1e870e7..c34d46d1b5 100644 --- a/nipype/interfaces/slicer/quantification/__init__.py +++ b/nipype/interfaces/slicer/quantification/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- from .changequantification import IntensityDifferenceMetric from .petstandarduptakevaluecomputation import PETStandardUptakeValueComputation diff --git a/nipype/interfaces/slicer/quantification/changequantification.py b/nipype/interfaces/slicer/quantification/changequantification.py index 8b529fa33b..0e0f5dc20a 100644 --- a/nipype/interfaces/slicer/quantification/changequantification.py +++ b/nipype/interfaces/slicer/quantification/changequantification.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- 
"""Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py b/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py index 2fe281f09f..2c4c337dac 100644 --- a/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py +++ b/nipype/interfaces/slicer/quantification/petstandarduptakevaluecomputation.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/quantification/tests/__init__.py b/nipype/interfaces/slicer/quantification/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/quantification/tests/__init__.py +++ b/nipype/interfaces/slicer/quantification/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/registration/__init__.py b/nipype/interfaces/slicer/registration/__init__.py index faa3c92b2f..ddb3988eae 100644 --- a/nipype/interfaces/slicer/registration/__init__.py +++ b/nipype/interfaces/slicer/registration/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from .specialized import ( ACPCTransform, FiducialRegistration, diff --git a/nipype/interfaces/slicer/registration/brainsfit.py b/nipype/interfaces/slicer/registration/brainsfit.py index 5780d29096..3baf06648c 100644 --- a/nipype/interfaces/slicer/registration/brainsfit.py +++ b/nipype/interfaces/slicer/registration/brainsfit.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/registration/brainsresample.py b/nipype/interfaces/slicer/registration/brainsresample.py index 9512a05195..201bbeab55 100644 --- a/nipype/interfaces/slicer/registration/brainsresample.py +++ b/nipype/interfaces/slicer/registration/brainsresample.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/registration/specialized.py b/nipype/interfaces/slicer/registration/specialized.py index 1de0c117ea..e03f1a95b1 100644 --- a/nipype/interfaces/slicer/registration/specialized.py +++ b/nipype/interfaces/slicer/registration/specialized.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/registration/tests/__init__.py b/nipype/interfaces/slicer/registration/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/registration/tests/__init__.py +++ b/nipype/interfaces/slicer/registration/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/segmentation/__init__.py b/nipype/interfaces/slicer/segmentation/__init__.py index 48fdc62f8c..4c7e36f0c1 100644 --- a/nipype/interfaces/slicer/segmentation/__init__.py +++ b/nipype/interfaces/slicer/segmentation/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- from .specialized import RobustStatisticsSegmenter, EMSegmentCommandLine, BRAINSROIAuto from .simpleregiongrowingsegmentation import 
SimpleRegionGrowingSegmentation diff --git a/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py b/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py index 6d58a63ea9..98af4fdc1b 100644 --- a/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py +++ b/nipype/interfaces/slicer/segmentation/simpleregiongrowingsegmentation.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/segmentation/specialized.py b/nipype/interfaces/slicer/segmentation/specialized.py index da0bff4dd1..90374ddd0c 100644 --- a/nipype/interfaces/slicer/segmentation/specialized.py +++ b/nipype/interfaces/slicer/segmentation/specialized.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/segmentation/tests/__init__.py b/nipype/interfaces/slicer/segmentation/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/segmentation/tests/__init__.py +++ b/nipype/interfaces/slicer/segmentation/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/surface.py b/nipype/interfaces/slicer/surface.py index 6d7a7b2382..7a4c77af70 100644 --- a/nipype/interfaces/slicer/surface.py +++ b/nipype/interfaces/slicer/surface.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/slicer/tests/__init__.py b/nipype/interfaces/slicer/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/slicer/tests/__init__.py +++ b/nipype/interfaces/slicer/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/slicer/utilities.py b/nipype/interfaces/slicer/utilities.py index 01f469f259..2e0eae1165 100644 --- a/nipype/interfaces/slicer/utilities.py +++ b/nipype/interfaces/slicer/utilities.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf8 -*- """Autogenerated file - DO NOT EDIT If you spot a bug, please report it on the mailing list and/or change the generator.""" diff --git a/nipype/interfaces/spm/__init__.py b/nipype/interfaces/spm/__init__.py index 1823bef4da..160bbae150 100644 --- a/nipype/interfaces/spm/__init__.py +++ b/nipype/interfaces/spm/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """SPM is a software package for the analysis of brain imaging data sequences.""" diff --git a/nipype/interfaces/spm/base.py b/nipype/interfaces/spm/base.py index 4e9b78e1f1..7e1662db50 100644 --- a/nipype/interfaces/spm/base.py +++ b/nipype/interfaces/spm/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The spm module provides basic functions for interfacing with SPM tools. 
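Note on the nipype/interfaces/spm/base.py hunks that follow (an illustrative aside, not part of the patch; the class names below are made up): the two rewrites they apply recur throughout this diff and are behavior-preserving in Python 3, where IOError is merely an alias of OSError and the zero-argument form of super() resolves the enclosing class automatically.

    # Both facts the patch relies on, in a minimal runnable sketch:
    assert IOError is OSError  # an "except OSError" clause still catches IOError

    class Base:
        def run(self):
            return "base"

    class Child(Base):
        def run(self):
            # identical to the removed spelling: super(Child, self).run()
            return super().run() + "/child"

    print(Child().run())  # -> base/child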
@@ -224,7 +223,7 @@ def getinfo(klass, matlab_cmd=None, paths=None, use_mcr=None): """ try: out = mlab.run() - except (IOError, RuntimeError) as e: + except (OSError, RuntimeError) as e: # if no Matlab at all -- exception could be raised # No Matlab -- no spm logger.debug("%s", e) @@ -306,7 +305,7 @@ class SPMCommand(BaseInterface): ] def __init__(self, **inputs): - super(SPMCommand, self).__init__(**inputs) + super().__init__(**inputs) self.inputs.on_trait_change( self._matlab_cmd_update, ["matlab_cmd", "mfile", "paths", "use_mcr"] ) @@ -360,7 +359,9 @@ def version(self): use_mcr=self.inputs.use_mcr, ) if info_dict: - return "%s.%s" % (info_dict["name"].split("SPM")[-1], info_dict["release"]) + return "{}.{}".format( + info_dict["name"].split("SPM")[-1], info_dict["release"] + ) @property def jobtype(self): @@ -484,7 +485,7 @@ def _generate_job(self, prefix="", contents=None): return jobstring if isinstance(contents, dict): for key, value in list(contents.items()): - newprefix = "%s.%s" % (prefix, key) + newprefix = f"{prefix}.{key}" jobstring += self._generate_job(newprefix, value) return jobstring if isinstance(contents, np.ndarray): @@ -503,9 +504,9 @@ def _generate_job(self, prefix="", contents=None): "{}" if not isinstance(el, (str, bytes)) else "'{}'" ] val_format = ", ".join(items_format).format - jobstring += "[{}];...\n".format(val_format(*val)) + jobstring += f"[{val_format(*val)}];...\n" elif isinstance(val, (str, bytes)): - jobstring += "'{}';...\n".format(val) + jobstring += f"'{val}';...\n" else: jobstring += "%s;...\n" % str(val) jobstring += "};\n" @@ -519,9 +520,9 @@ def _generate_job(self, prefix="", contents=None): jobstring += self._generate_job(newprefix, val[field]) return jobstring if isinstance(contents, (str, bytes)): - jobstring += "%s = '%s';\n" % (prefix, contents) + jobstring += f"{prefix} = '{contents}';\n" return jobstring - jobstring += "%s = %s;\n" % (prefix, str(contents)) + jobstring += f"{prefix} = {str(contents)};\n" return jobstring def _make_matlab_command(self, contents, postscript=None): @@ -561,7 +562,7 @@ def _make_matlab_command(self, contents, postscript=None): if self.mlab.inputs.mfile: if isdefined(self.inputs.use_v8struct) and self.inputs.use_v8struct: mscript += self._generate_job( - "jobs{1}.spm.%s.%s" % (self.jobtype, self.jobname), contents[0] + f"jobs{{1}}.spm.{self.jobtype}.{self.jobname}", contents[0] ) else: if self.jobname in [ @@ -576,13 +577,13 @@ def _make_matlab_command(self, contents, postscript=None): ]: # parentheses mscript += self._generate_job( - "jobs{1}.%s{1}.%s(1)" % (self.jobtype, self.jobname), + f"jobs{{1}}.{self.jobtype}{{1}}.{self.jobname}(1)", contents[0], ) else: # curly brackets mscript += self._generate_job( - "jobs{1}.%s{1}.%s{1}" % (self.jobtype, self.jobname), + f"jobs{{1}}.{self.jobtype}{{1}}.{self.jobname}{{1}}", contents[0], ) else: @@ -620,11 +621,11 @@ def __init__( self, value=NoDefaultSpecified, exists=False, resolve=False, **metadata ): """Create an ImageFileSPM trait.""" - super(ImageFileSPM, self).__init__( + super().__init__( value=value, exists=exists, types=["nifti1", "nifti2"], allow_compressed=False, resolve=resolve, - **metadata + **metadata, ) diff --git a/nipype/interfaces/spm/model.py b/nipype/interfaces/spm/model.py index 68a2484c86..8df967573e 100644 --- a/nipype/interfaces/spm/model.py +++ b/nipype/interfaces/spm/model.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The spm 
module provides basic functions for interfacing with matlab @@ -153,13 +152,11 @@ def _format_arg(self, opt, spec, val): return [val] else: return val - return super(Level1Design, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm realign options if set to None ignore""" - einputs = super(Level1Design, self)._parse_inputs( - skip=("mask_threshold", "flags") - ) + einputs = super()._parse_inputs(skip=("mask_threshold", "flags")) if isdefined(self.inputs.flags): einputs[0].update({flag: val for (flag, val) in self.inputs.flags.items()}) for sessinfo in einputs[0]["sess"]: @@ -191,9 +188,7 @@ def _make_matlab_command(self, content): postscript += "save SPM SPM;\n" else: postscript = None - return super(Level1Design, self)._make_matlab_command( - content, postscript=postscript - ) + return super()._make_matlab_command(content, postscript=postscript) def _list_outputs(self): outputs = self._outputs().get() @@ -276,14 +271,14 @@ def _format_arg(self, opt, spec, val): return np.array([str(val)], dtype=object) if opt == "estimation_method": if isinstance(val, (str, bytes)): - return {"{}".format(val): 1} + return {f"{val}": 1} else: return val - return super(EstimateModel, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm realign options if set to None ignore""" - einputs = super(EstimateModel, self)._parse_inputs(skip=("flags")) + einputs = super()._parse_inputs(skip=("flags")) if isdefined(self.inputs.flags): einputs[0].update({flag: val for (flag, val) in self.inputs.flags.items()}) return einputs @@ -301,26 +296,22 @@ def _list_outputs(self): "Bayesian" in self.inputs.estimation_method.keys() or "Bayesian2" in self.inputs.estimation_method.keys() ): - outputs["labels"] = os.path.join(pth, "labels.{}".format(outtype)) + outputs["labels"] = os.path.join(pth, f"labels.{outtype}") outputs["SDerror"] = glob(os.path.join(pth, "Sess*_SDerror*")) outputs["ARcoef"] = glob(os.path.join(pth, "Sess*_AR_*")) if betas: - outputs["Cbetas"] = [ - os.path.join(pth, "C{}".format(beta)) for beta in betas - ] - outputs["SDbetas"] = [ - os.path.join(pth, "SD{}".format(beta)) for beta in betas - ] + outputs["Cbetas"] = [os.path.join(pth, f"C{beta}") for beta in betas] + outputs["SDbetas"] = [os.path.join(pth, f"SD{beta}") for beta in betas] if "Classical" in self.inputs.estimation_method.keys(): - outputs["residual_image"] = os.path.join(pth, "ResMS.{}".format(outtype)) - outputs["RPVimage"] = os.path.join(pth, "RPV.{}".format(outtype)) + outputs["residual_image"] = os.path.join(pth, f"ResMS.{outtype}") + outputs["RPVimage"] = os.path.join(pth, f"RPV.{outtype}") if self.inputs.write_residuals: outputs["residual_images"] = glob(os.path.join(pth, "Res_*")) if betas: outputs["beta_images"] = [os.path.join(pth, beta) for beta in betas] - outputs["mask_image"] = os.path.join(pth, "mask.{}".format(outtype)) + outputs["mask_image"] = os.path.join(pth, f"mask.{outtype}") outputs["spm_mat_file"] = os.path.join(pth, "SPM.mat") return outputs @@ -1055,11 +1046,11 @@ def _format_arg(self, opt, spec, val): outdict[mapping[key]] = keyval outlist.append(outdict) return outlist - return super(FactorialDesign, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm realign options if set to None ignore""" - einputs = super(FactorialDesign, self)._parse_inputs() + einputs = super()._parse_inputs() if not 
isdefined(self.inputs.spm_mat_dir): einputs[0]["dir"] = np.array([str(os.getcwd())], dtype=object) return einputs @@ -1098,7 +1089,7 @@ def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["in_files"]: return np.array(val, dtype=object) - return super(OneSampleTTestDesign, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) class TwoSampleTTestDesignInputSpec(FactorialDesignInputSpec): @@ -1145,7 +1136,7 @@ def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["group1_files", "group2_files"]: return np.array(val, dtype=object) - return super(TwoSampleTTestDesign, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) class PairedTTestDesignInputSpec(FactorialDesignInputSpec): @@ -1181,7 +1172,7 @@ def _format_arg(self, opt, spec, val): """Convert input to appropriate format for spm""" if opt in ["paired_files"]: return [dict(scans=np.array(files, dtype=object)) for files in val] - return super(PairedTTestDesign, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) class MultipleRegressionDesignInputSpec(FactorialDesignInputSpec): @@ -1231,4 +1222,4 @@ def _format_arg(self, opt, spec, val): outdict[mapping[key]] = keyval outlist.append(outdict) return outlist - return super(MultipleRegressionDesign, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) diff --git a/nipype/interfaces/spm/preprocess.py b/nipype/interfaces/spm/preprocess.py index a612865039..3cc65ce39f 100644 --- a/nipype/interfaces/spm/preprocess.py +++ b/nipype/interfaces/spm/preprocess.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """SPM wrappers for preprocessing data @@ -236,12 +235,12 @@ def _format_arg(self, opt, spec, val): if opt in ["phase_file", "magnitude_file", "anat_file", "epi_file"]: return scans_for_fname(ensure_list(val)) - return super(FieldMap, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm fieldmap options if set to None ignore""" - einputs = super(FieldMap, self)._parse_inputs() + einputs = super()._parse_inputs() return [{"calculatevdm": einputs[0]}] def _list_outputs(self): @@ -340,12 +339,12 @@ def _format_arg(self, opt, spec, val): ) if opt == 'vdmfile': return scans_for_fname(ensure_list(val)) - return super(ApplyVDM, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm fieldmap options if set to None ignore""" - einputs = super(ApplyVDM, self)._parse_inputs() + einputs = super()._parse_inputs() return [{"applyvdm": einputs[0]}] @@ -460,7 +459,7 @@ def _format_arg(self, opt, spec, val): return scans_for_fnames( ensure_list(val), keep4d=False, separate_sessions=True ) - return super(SliceTiming, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -612,11 +611,11 @@ def _format_arg(self, opt, spec, val): return scans_for_fnames( val, keep4d=False, separate_sessions=separate_sessions ) - return super(Realign, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm realign options if set to None ignore""" - einputs = super(Realign, self)._parse_inputs() + einputs = super()._parse_inputs() return [{"%s" % 
(self.inputs.jobtype): einputs[0]}] def _list_outputs(self): @@ -872,10 +871,10 @@ def _format_arg(self, opt, spec, val): return scans_for_fnames( ensure_list(val), keep4d=False, separate_sessions=True ) - return super(RealignUnwarp, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self, skip=()): - spmdict = super(RealignUnwarp, self)._parse_inputs(skip=())[0] + spmdict = super()._parse_inputs(skip=())[0] if isdefined(self.inputs.phase_map): pmscan = spmdict["data"]["pmscan"] @@ -1051,16 +1050,14 @@ def _format_arg(self, opt, spec, val): return scans_for_fnames(val + self.inputs.apply_to_files) else: return scans_for_fnames(val) - return super(Coregister, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): """validate spm coregister options if set to None ignore""" if self.inputs.jobtype == "write": - einputs = super(Coregister, self)._parse_inputs( - skip=("jobtype", "apply_to_files") - ) + einputs = super()._parse_inputs(skip=("jobtype", "apply_to_files")) else: - einputs = super(Coregister, self)._parse_inputs(skip=("jobtype")) + einputs = super()._parse_inputs(skip=("jobtype")) jobtype = self.inputs.jobtype return [{"%s" % (jobtype): einputs[0]}] @@ -1228,13 +1225,11 @@ def _format_arg(self, opt, spec, val): if opt in ["write_wrap"]: if len(val) != 3: raise ValueError("%s must have 3 elements" % opt) - return super(Normalize, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): """Validate spm normalize options if set to None ignore""" - einputs = super(Normalize, self)._parse_inputs( - skip=("jobtype", "apply_to_files") - ) + einputs = super()._parse_inputs(skip=("jobtype", "apply_to_files")) if isdefined(self.inputs.apply_to_files): inputfiles = deepcopy(self.inputs.apply_to_files) if isdefined(self.inputs.source): @@ -1461,13 +1456,11 @@ def _format_arg(self, opt, spec, val): if opt in ["nonlinear_regularization"]: if len(val) != 5: raise ValueError("%s must have 5 elements" % opt) - return super(Normalize12, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self, skip=()): """validate spm normalize options if set to None ignore""" - einputs = super(Normalize12, self)._parse_inputs( - skip=("jobtype", "apply_to_files") - ) + einputs = super()._parse_inputs(skip=("jobtype", "apply_to_files")) if isdefined(self.inputs.apply_to_files): inputfiles = deepcopy(self.inputs.apply_to_files) if isdefined(self.inputs.image_to_align): @@ -1713,7 +1706,7 @@ def _format_arg(self, opt, spec, val): return scans_for_fname(val) if opt == "clean_masks": return clean_masks_dict[val] - return super(Segment, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -1726,7 +1719,7 @@ def _list_outputs(self): [("modulated", "mw"), ("normalized", "w"), ("native", "")] ): if getattr(self.inputs, outtype)[idx]: - outfield = "%s_%s_image" % (image, tissue) + outfield = f"{image}_{tissue}_image" outputs[outfield] = fname_presuffix( f, prefix="%sc%d" % (prefix, tidx + 1) ) @@ -1901,11 +1894,9 @@ def _format_arg(self, opt, spec, val): new_tissues.append(new_tissue) return new_tissues elif opt == "write_deformation_fields": - return super(NewSegment, self)._format_arg( - opt, spec, [int(val[0]), int(val[1])] - ) + return super()._format_arg(opt, spec, [int(val[0]), int(val[1])]) else: - return super(NewSegment, self)._format_arg(opt, 
spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -2151,11 +2142,9 @@ def _format_arg(self, opt, spec, val): new_tissues.append(new_tissue) return new_tissues elif opt == "write_deformation_fields": - return super(MultiChannelNewSegment, self)._format_arg( - opt, spec, [int(val[0]), int(val[1])] - ) + return super()._format_arg(opt, spec, [int(val[0]), int(val[1])]) else: - return super(MultiChannelNewSegment, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -2292,7 +2281,7 @@ def _format_arg(self, opt, spec, val): else: return val - return super(Smooth, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -2414,7 +2403,7 @@ def _format_arg(self, opt, spec, val): new_param["its"] = val[2] return [new_param] else: - return super(DARTEL, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -2430,7 +2419,7 @@ def _list_outputs(self): for filename in self.inputs.image_files[0]: pth, base, ext = split_filename(filename) outputs["dartel_flow_fields"].append( - os.path.realpath("u_%s_%s%s" % (base, self.inputs.template_prefix, ext)) + os.path.realpath(f"u_{base}_{self.inputs.template_prefix}{ext}") ) return outputs @@ -2534,7 +2523,7 @@ def _format_arg(self, opt, spec, val): else: return [val, val, val] else: - return super(DARTELNorm2MNI, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -2548,9 +2537,7 @@ def _list_outputs(self): prefix = "s" + prefix for filename in self.inputs.apply_to_files: pth, base, ext = split_filename(filename) - outputs["normalized_files"].append( - os.path.realpath("%s%s%s" % (prefix, base, ext)) - ) + outputs["normalized_files"].append(os.path.realpath(f"{prefix}{base}{ext}")) return outputs @@ -2617,7 +2604,7 @@ def _format_arg(self, opt, spec, val): if opt in ["flowfield_files"]: return scans_for_fnames(val, keep4d=True) else: - return super(CreateWarped, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -2625,9 +2612,9 @@ def _list_outputs(self): for filename in self.inputs.image_files: pth, base, ext = split_filename(filename) if isdefined(self.inputs.modulate) and self.inputs.modulate: - outputs["warped_files"].append(os.path.realpath("mw%s%s" % (base, ext))) + outputs["warped_files"].append(os.path.realpath(f"mw{base}{ext}")) else: - outputs["warped_files"].append(os.path.realpath("w%s%s" % (base, ext))) + outputs["warped_files"].append(os.path.realpath(f"w{base}{ext}")) return outputs @@ -2664,7 +2651,7 @@ def _format_arg(self, opt, spec, val): return scans_for_fnames(val, keep4d=False, separate_sessions=False) else: - return super(ApplyDeformations, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): outputs = self._outputs().get() @@ -2691,7 +2678,7 @@ class VBMSegmentInputSpec(SPMCommandInputSpec): (2, 2, 2, 3, 4, 2), *([traits.Int()] * 6), usedefault=True, - desc="number of gaussians for each tissue class" + desc="number of gaussians for each tissue class", ) bias_regularization = traits.Enum( 0.0001, @@ -2973,7 +2960,7 @@ def _list_outputs(self): ) if self.inputs.pve_label_normalized: 
outputs["pve_label_normalized_images"].append( - os.path.join(pth, "w%sp0%s.nii" % (dartel_px, base)) + os.path.join(pth, f"w{dartel_px}p0{base}.nii") ) if self.inputs.pve_label_dartel == 1: outputs["pve_label_registered_images"].append( @@ -2990,16 +2977,16 @@ def _list_outputs(self): ) if self.inputs.bias_corrected_normalized: outputs["normalized_bias_corrected_images"].append( - os.path.join(pth, "wm%s%s.nii" % (dartel_px, base)) + os.path.join(pth, f"wm{dartel_px}{base}.nii") ) if self.inputs.deformation_field[0]: outputs["forward_deformation_field"].append( - os.path.join(pth, "y_%s%s.nii" % (dartel_px, base)) + os.path.join(pth, f"y_{dartel_px}{base}.nii") ) if self.inputs.deformation_field[1]: outputs["inverse_deformation_field"].append( - os.path.join(pth, "iy_%s%s.nii" % (dartel_px, base)) + os.path.join(pth, f"iy_{dartel_px}{base}.nii") ) if self.inputs.jacobian_determinant and do_dartel: @@ -3018,18 +3005,16 @@ def _format_arg(self, opt, spec, val): elif opt in ["dartel_template"]: return np.array([val], dtype=object) elif opt in ["deformation_field"]: - return super(VBMSegment, self)._format_arg( - opt, spec, [int(val[0]), int(val[1])] - ) + return super()._format_arg(opt, spec, [int(val[0]), int(val[1])]) else: - return super(VBMSegment, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _parse_inputs(self): if self.inputs.spatial_normalization == "low": - einputs = super(VBMSegment, self)._parse_inputs( + einputs = super()._parse_inputs( skip=("spatial_normalization", "dartel_template") ) einputs[0]["estwrite"]["extopts"]["dartelwarp"] = {"normlow": 1} return einputs else: - return super(VBMSegment, self)._parse_inputs(skip=("spatial_normalization")) + return super()._parse_inputs(skip=("spatial_normalization")) diff --git a/nipype/interfaces/spm/tests/__init__.py b/nipype/interfaces/spm/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/spm/tests/__init__.py +++ b/nipype/interfaces/spm/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/spm/tests/test_base.py b/nipype/interfaces/spm/tests/test_base.py index c2c991d742..1f653c0a86 100644 --- a/nipype/interfaces/spm/tests/test_base.py +++ b/nipype/interfaces/spm/tests/test_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/spm/tests/test_model.py b/nipype/interfaces/spm/tests/test_model.py index a960d06fb8..fd9a0236d7 100644 --- a/nipype/interfaces/spm/tests/test_model.py +++ b/nipype/interfaces/spm/tests/test_model.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/spm/tests/test_preprocess.py b/nipype/interfaces/spm/tests/test_preprocess.py index de5c79caba..74608749ec 100644 --- a/nipype/interfaces/spm/tests/test_preprocess.py +++ b/nipype/interfaces/spm/tests/test_preprocess.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/spm/tests/test_utils.py b/nipype/interfaces/spm/tests/test_utils.py index c41a0da966..83a9b1e43e 100644 --- a/nipype/interfaces/spm/tests/test_utils.py +++ b/nipype/interfaces/spm/tests/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; 
py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -21,7 +20,7 @@ def test_coreg(): assert not isdefined(coreg.inputs.mat) pth, mov, _ = split_filename(moving) _, tgt, _ = split_filename(target) - mat = os.path.join(pth, "%s_to_%s.mat" % (mov, tgt)) + mat = os.path.join(pth, f"{mov}_to_{tgt}.mat") invmat = fname_presuffix(mat, prefix="inverse_") script = coreg._make_matlab_command(None) assert coreg.inputs.mat == mat diff --git a/nipype/interfaces/spm/utils.py b/nipype/interfaces/spm/utils.py index 1c9e506d57..566e1a2cde 100644 --- a/nipype/interfaces/spm/utils.py +++ b/nipype/interfaces/spm/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -97,7 +96,7 @@ def _make_mat_file(self): """makes name for matfile if doesn exist""" pth, mv, _ = split_filename(self.inputs.moving) _, tgt, _ = split_filename(self.inputs.target) - mat = os.path.join(pth, "%s_to_%s.mat" % (mv, tgt)) + mat = os.path.join(pth, f"{mv}_to_{tgt}.mat") return mat def _make_matlab_command(self, _): @@ -107,16 +106,16 @@ def _make_matlab_command(self, _): if not isdefined(self.inputs.invmat): self.inputs.invmat = self._make_inv_file() script = """ - target = '%s'; - moving = '%s'; + target = '{}'; + moving = '{}'; targetv = spm_vol(target); movingv = spm_vol(moving); x = spm_coreg(targetv, movingv); M = spm_matrix(x); - save('%s' , 'M' ); + save('{}' , 'M' ); M = inv(M); - save('%s','M') - """ % ( + save('{}','M') + """.format( self.inputs.target, self.inputs.moving, self.inputs.mat, @@ -168,9 +167,9 @@ def _make_matlab_command(self, _): outputs = self._list_outputs() self.inputs.out_file = outputs["out_file"] script = """ - infile = '%s'; - outfile = '%s' - transform = load('%s'); + infile = '{}'; + outfile = '{}' + transform = load('{}'); V = spm_vol(infile); X = spm_read_vols(V); @@ -179,7 +178,7 @@ def _make_matlab_command(self, _): V.fname = fullfile(outfile); spm_write_vol(V,X); - """ % ( + """.format( self.inputs.in_file, self.inputs.out_file, self.inputs.mat, @@ -492,13 +491,13 @@ def _format_arg(self, opt, spec, val): if val: return 1 return 0 - return super(DicomImport, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _run_interface(self, runtime): od = os.path.abspath(self.inputs.output_dir) if not os.path.isdir(od): os.mkdir(od) - return super(DicomImport, self)._run_interface(runtime) + return super()._run_interface(runtime) def _list_outputs(self): from glob import glob diff --git a/nipype/interfaces/tests/__init__.py b/nipype/interfaces/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/tests/__init__.py +++ b/nipype/interfaces/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/tests/test_io.py b/nipype/interfaces/tests/test_io.py index f460a203b7..3537c12d78 100644 --- a/nipype/interfaces/tests/test_io.py +++ b/nipype/interfaces/tests/test_io.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -536,7 +535,7 @@ def test_datafinder_depth(tmpdir): df.inputs.min_depth = min_depth df.inputs.max_depth = max_depth result = df.run() - expected = ["{}".format(x) for x in range(min_depth, max_depth + 1)] + expected = [f"{x}" for x in range(min_depth, max_depth + 1)] for path, exp_fname in zip(result.outputs.out_paths, expected): _, fname = 
os.path.split(path) assert fname == exp_fname @@ -595,7 +594,7 @@ def test_jsonsink(tmpdir, inputs_attributes): expected_data[key] = val res = js.run() - with open(res.outputs.out_file, "r") as f: + with open(res.outputs.out_file) as f: data = simplejson.load(f) assert data == expected_data diff --git a/nipype/interfaces/tests/test_matlab.py b/nipype/interfaces/tests/test_matlab.py index 21679a78e2..0150b64eb5 100644 --- a/nipype/interfaces/tests/test_matlab.py +++ b/nipype/interfaces/tests/test_matlab.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/tests/test_nilearn.py b/nipype/interfaces/tests/test_nilearn.py index 29a068552c..1fed076b47 100644 --- a/nipype/interfaces/tests/test_nilearn.py +++ b/nipype/interfaces/tests/test_nilearn.py @@ -156,7 +156,7 @@ def _test_4d_label( self.assert_expected_output(wanted_labels, wanted) def assert_expected_output(self, labels, wanted): - with open(self.filenames["out_file"], "r") as output: + with open(self.filenames["out_file"]) as output: got = [line.split() for line in output] labels_got = got.pop(0) # remove header assert labels_got == labels diff --git a/nipype/interfaces/tests/test_r.py b/nipype/interfaces/tests/test_r.py index 6550a32747..6e980e61cd 100644 --- a/nipype/interfaces/tests/test_r.py +++ b/nipype/interfaces/tests/test_r.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/interfaces/utility/__init__.py b/nipype/interfaces/utility/__init__.py index f5556e7263..b4df1c2afb 100644 --- a/nipype/interfaces/utility/__init__.py +++ b/nipype/interfaces/utility/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/interfaces/utility/base.py b/nipype/interfaces/utility/base.py index 641489ecc4..812eeabd98 100644 --- a/nipype/interfaces/utility/base.py +++ b/nipype/interfaces/utility/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -58,7 +57,7 @@ class IdentityInterface(IOBase): output_spec = DynamicTraitedSpec def __init__(self, fields=None, mandatory_inputs=True, **inputs): - super(IdentityInterface, self).__init__(**inputs) + super().__init__(**inputs) if fields is None or not fields: raise ValueError("Identity Interface fields must be a non-empty list") # Each input must be in the fields. @@ -175,7 +174,7 @@ class Merge(IOBase): output_spec = MergeOutputSpec def __init__(self, numinputs=0, **inputs): - super(Merge, self).__init__(**inputs) + super().__init__(**inputs) self._numinputs = numinputs if numinputs >= 1: input_names = ["in%d" % (i + 1) for i in range(numinputs)] @@ -229,7 +228,7 @@ class RenameOutputSpec(TraitedSpec): class Rename(SimpleInterface, IOBase): - """Change the name of a file based on a mapped format string. + r"""Change the name of a file based on a mapped format string. 
To use additional inputs that will be defined at run-time, the class constructor must be called with the format template, and the fields @@ -275,7 +274,7 @@ class Rename(SimpleInterface, IOBase): output_spec = RenameOutputSpec def __init__(self, format_string=None, **inputs): - super(Rename, self).__init__(**inputs) + super().__init__(**inputs) if format_string is not None: self.inputs.format_string = format_string self.fmt_fields = re.findall(r"%\((.+?)\)", format_string) diff --git a/nipype/interfaces/utility/csv.py b/nipype/interfaces/utility/csv.py index 04cb28438e..3bfc46203d 100644 --- a/nipype/interfaces/utility/csv.py +++ b/nipype/interfaces/utility/csv.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """CSV Handling utilities @@ -59,7 +58,7 @@ def _parse_line(self, line): return entry def _get_outfields(self): - with open(self.inputs.in_file, "r") as fid: + with open(self.inputs.in_file) as fid: entry = self._parse_line(fid.readline()) if self.inputs.header: self._outfields = tuple(entry) @@ -72,7 +71,7 @@ def _run_interface(self, runtime): return runtime def _outputs(self): - return self._add_output_traits(super(CSVReader, self)._outputs()) + return self._add_output_traits(super()._outputs()) def _add_output_traits(self, base): return add_traits(base, self._get_outfields()) @@ -82,7 +81,7 @@ def _list_outputs(self): isHeader = True for key in self._outfields: outputs[key] = [] # initialize outfields - with open(self.inputs.in_file, "r") as fid: + with open(self.inputs.in_file) as fid: for line in fid.readlines(): if self.inputs.header and isHeader: # skip header line isHeader = False diff --git a/nipype/interfaces/utility/tests/__init__.py b/nipype/interfaces/utility/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/utility/tests/__init__.py +++ b/nipype/interfaces/utility/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/utility/tests/test_base.py b/nipype/interfaces/utility/tests/test_base.py index a19cff16b4..4a4e6d8899 100644 --- a/nipype/interfaces/utility/tests/test_base.py +++ b/nipype/interfaces/utility/tests/test_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -72,7 +71,7 @@ def test_merge(tmpdir, args, kwargs, in_lists, expected): numinputs = args[0] if args else 0 if numinputs >= 1: for i in range(1, numinputs + 1): - setattr(node.inputs, "in{:d}".format(i), in_lists[i - 1]) + setattr(node.inputs, f"in{i:d}", in_lists[i - 1]) res = node.run() if numinputs < 1: diff --git a/nipype/interfaces/utility/tests/test_csv.py b/nipype/interfaces/utility/tests/test_csv.py index ffd69f000f..923af9d837 100644 --- a/nipype/interfaces/utility/tests/test_csv.py +++ b/nipype/interfaces/utility/tests/test_csv.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/utility/tests/test_wrappers.py b/nipype/interfaces/utility/tests/test_wrappers.py index 1e2ce8a953..f3e378c2c3 100644 --- a/nipype/interfaces/utility/tests/test_wrappers.py +++ b/nipype/interfaces/utility/tests/test_wrappers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff 
--git a/nipype/interfaces/utility/wrappers.py b/nipype/interfaces/utility/wrappers.py index f638816166..dffe98c862 100644 --- a/nipype/interfaces/utility/wrappers.py +++ b/nipype/interfaces/utility/wrappers.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -71,12 +70,12 @@ def __init__( in an otherwise empty namespace """ - super(Function, self).__init__(**inputs) + super().__init__(**inputs) if function: if hasattr(function, "__call__"): try: self.inputs.function_str = getsource(function) - except IOError: + except OSError: raise Exception( "Interface Function does not accept " "function objects defined interactively " diff --git a/nipype/interfaces/vista/__init__.py b/nipype/interfaces/vista/__init__.py index 928ff19fc2..414a002527 100644 --- a/nipype/interfaces/vista/__init__.py +++ b/nipype/interfaces/vista/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """VistaSoft contains Matlab code to perform a variety of analysis on MRI data.""" diff --git a/nipype/interfaces/vista/tests/__init__.py b/nipype/interfaces/vista/tests/__init__.py index 40a96afc6f..e69de29bb2 100644 --- a/nipype/interfaces/vista/tests/__init__.py +++ b/nipype/interfaces/vista/tests/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/nipype/interfaces/vista/vista.py b/nipype/interfaces/vista/vista.py index 12823b10cc..5e6571639a 100644 --- a/nipype/interfaces/vista/vista.py +++ b/nipype/interfaces/vista/vista.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/interfaces/vtkbase.py b/nipype/interfaces/vtkbase.py index cb4d74e62f..dffcb6e2a4 100644 --- a/nipype/interfaces/vtkbase.py +++ b/nipype/interfaces/vtkbase.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/interfaces/workbench/__init__.py b/nipype/interfaces/workbench/__init__.py index fb68624c88..693ee395a8 100644 --- a/nipype/interfaces/workbench/__init__.py +++ b/nipype/interfaces/workbench/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Connectome Workbench is a visualization for neuroimaging data, esp. 
derived from HCP data.""" diff --git a/nipype/interfaces/workbench/base.py b/nipype/interfaces/workbench/base.py index d91c85d9f6..0cfb8624d7 100644 --- a/nipype/interfaces/workbench/base.py +++ b/nipype/interfaces/workbench/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/interfaces/workbench/cifti.py b/nipype/interfaces/workbench/cifti.py index 272aec1a3e..d8b0c1a721 100644 --- a/nipype/interfaces/workbench/cifti.py +++ b/nipype/interfaces/workbench/cifti.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """This module provides interfaces for workbench CIFTI commands""" diff --git a/nipype/interfaces/workbench/metric.py b/nipype/interfaces/workbench/metric.py index 6bbe7f98cf..21cdfa83e4 100644 --- a/nipype/interfaces/workbench/metric.py +++ b/nipype/interfaces/workbench/metric.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """This module provides interfaces for workbench surface commands""" @@ -167,10 +166,10 @@ def _format_arg(self, opt, spec, val): roi_out = self._gen_filename(self.inputs.in_file, suffix="_roi") iflogger.info("Setting roi output file as", roi_out) spec.argstr += " " + roi_out - return super(MetricResample, self)._format_arg(opt, spec, val) + return super()._format_arg(opt, spec, val) def _list_outputs(self): - outputs = super(MetricResample, self)._list_outputs() + outputs = super()._list_outputs() if self.inputs.valid_roi_out: roi_file = self._gen_filename(self.inputs.in_file, suffix="_roi") outputs["roi_file"] = os.path.abspath(roi_file) diff --git a/nipype/pipeline/__init__.py b/nipype/pipeline/__init__.py index 75b3b17c3a..63c5557f56 100644 --- a/nipype/pipeline/__init__.py +++ b/nipype/pipeline/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/pipeline/engine/__init__.py b/nipype/pipeline/engine/__init__.py index b13ba968ac..20829e63a7 100644 --- a/nipype/pipeline/engine/__init__.py +++ b/nipype/pipeline/engine/__init__.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/pipeline/engine/base.py b/nipype/pipeline/engine/base.py index a041fd12e0..633353943d 100644 --- a/nipype/pipeline/engine/base.py +++ b/nipype/pipeline/engine/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Defines functionality for pipelined execution of interfaces.""" @@ -11,7 +10,7 @@ from ...utils.filemanip import loadpkl, savepkl -class EngineBase(object): +class EngineBase: """ Defines common attributes and functions for workflows and nodes. 
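An illustrative aside before the remaining nipype/pipeline/engine/base.py hunks (not part of the patch; the throwaway names are hypothetical): dropping the explicit object base and converting percent-formatting to f-strings are both purely cosmetic under Python 3, since every class is new-style and the formatting styles yield identical strings.

    class A:            # spelled as in the patched code
        pass

    class B(object):    # spelled as in the removed code
        pass

    assert A.__mro__[-1] is object and B.__mro__[-1] is object

    hierarchy, name = "wf", "node1"
    assert "%s.%s" % (hierarchy, name) == f"{hierarchy}.{name}" == "wf.node1"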
@@ -57,7 +56,7 @@ def name(self, name): def fullname(self): """Build the full name down the hierarchy.""" if self._hierarchy: - return "%s.%s" % (self._hierarchy, self.name) + return f"{self._hierarchy}.{self.name}" return self.name @property @@ -73,7 +72,7 @@ def itername(self): """Get the name of the expanded iterable.""" itername = self._id if self._hierarchy: - itername = "%s.%s" % (self._hierarchy, self._id) + itername = f"{self._hierarchy}.{self._id}" return itername def clone(self, name): diff --git a/nipype/pipeline/engine/nodes.py b/nipype/pipeline/engine/nodes.py index b1f2a662b2..e2010b356b 100644 --- a/nipype/pipeline/engine/nodes.py +++ b/nipype/pipeline/engine/nodes.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Defines functionality for pipelined execution of interfaces @@ -174,11 +173,11 @@ def __init__( """ # Make sure an interface is set, and that it is an Interface if interface is None: - raise IOError("Interface must be provided") + raise OSError("Interface must be provided") if not isinstance(interface, Interface): - raise IOError("interface must be an instance of an Interface") + raise OSError("interface must be an instance of an Interface") - super(Node, self).__init__(name, kwargs.get("base_dir")) + super().__init__(name, kwargs.get("base_dir")) self._interface = interface self._hierarchy = None @@ -872,7 +871,7 @@ def __init__( See Node docstring for additional keyword arguments. """ - super(JoinNode, self).__init__(interface, name, **kwargs) + super().__init__(interface, name, **kwargs) self._joinsource = None # The member should be defined self.joinsource = joinsource # Let the setter do the job @@ -938,9 +937,9 @@ def _add_join_item_fields(self): """ # create the new join item fields idx = self._next_slot_index - newfields = dict( - [(field, self._add_join_item_field(field, idx)) for field in self.joinfield] - ) + newfields = { + field: self._add_join_item_field(field, idx) for field in self.joinfield + } # increment the join slot index logger.debug("Added the %s join item fields %s.", self, newfields) self._next_slot_index += 1 @@ -1001,7 +1000,7 @@ def _override_join_traits(self, basetraits, fields): def _run_command(self, execute, copyfiles=True): """Collates the join inputs prior to delegating to the superclass.""" self._collate_join_field_inputs() - return super(JoinNode, self)._run_command(execute, copyfiles) + return super()._run_command(execute, copyfiles) def _collate_join_field_inputs(self): """ @@ -1113,7 +1112,7 @@ def __init__( See Node docstring for additional keyword arguments. 
""" - super(MapNode, self).__init__(interface, name, **kwargs) + super().__init__(interface, name, **kwargs) if isinstance(iterfield, (str, bytes)): iterfield = [iterfield] self.iterfield = iterfield @@ -1291,7 +1290,7 @@ def _collate_results(self, nodes): msg += ["Subnode %d failed" % i] msg += ["Error: %s" % str(code)] raise NodeExecutionError( - "Subnodes of node: %s failed:\n%s" % (self.name, "\n".join(msg)) + "Subnodes of node: {} failed:\n{}".format(self.name, "\n".join(msg)) ) return finalresult @@ -1319,7 +1318,7 @@ def _get_inputs(self): self._interface.inputs, fields=self.iterfield ) self._inputs.trait_set(**old_inputs) - super(MapNode, self)._get_inputs() + super()._get_inputs() def _check_iterfield(self): """Checks iterfield diff --git a/nipype/pipeline/engine/tests/__init__.py b/nipype/pipeline/engine/tests/__init__.py index 99fb243f19..349937997e 100644 --- a/nipype/pipeline/engine/tests/__init__.py +++ b/nipype/pipeline/engine/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/pipeline/engine/tests/test_base.py b/nipype/pipeline/engine/tests/test_base.py index ab80c2f158..5562a3338c 100644 --- a/nipype/pipeline/engine/tests/test_base.py +++ b/nipype/pipeline/engine/tests/test_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/pipeline/engine/tests/test_engine.py b/nipype/pipeline/engine/tests/test_engine.py index 2df8f0b233..641a4837c4 100644 --- a/nipype/pipeline/engine/tests/test_engine.py +++ b/nipype/pipeline/engine/tests/test_engine.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine module @@ -355,7 +354,7 @@ def func1(in1): assert len(outjson) == 1 # check that multiple json's don't trigger rerun - with open(os.path.join(node.output_dir(), "test.json"), "wt") as fp: + with open(os.path.join(node.output_dir(), "test.json"), "w") as fp: fp.write("dummy file") w1.config["execution"].update(**{"stop_on_first_rerun": True}) @@ -458,7 +457,7 @@ def test_deep_nested_write_graph_runs(tmpdir): pipe = pe.Workflow(name="pipe") parent = pipe for depth in range(10): - sub = pe.Workflow(name="pipe_nest_{}".format(depth)) + sub = pe.Workflow(name=f"pipe_nest_{depth}") parent.add_nodes([sub]) parent = sub mod1 = pe.Node(interface=EngineTestInterface(), name="mod1") diff --git a/nipype/pipeline/engine/tests/test_join.py b/nipype/pipeline/engine/tests/test_join.py index 9606587c13..2fe5f70564 100644 --- a/nipype/pipeline/engine/tests/test_join.py +++ b/nipype/pipeline/engine/tests/test_join.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for join expansion @@ -406,7 +405,7 @@ def test_multifield_join_node(tmpdir): # node and 1 post-join node. assert len(result.nodes()) == 10, "The number of expanded nodes is incorrect." # the product inputs are [2, 4], [2, 5], [3, 4], [3, 5] - assert set(_products) == set([8, 10, 12, 15]), ( + assert set(_products) == {8, 10, 12, 15}, ( "The post-join products is incorrect: %s." 
% _products ) diff --git a/nipype/pipeline/engine/tests/test_nodes.py b/nipype/pipeline/engine/tests/test_nodes.py index e0f55f096a..19ffd714c6 100644 --- a/nipype/pipeline/engine/tests/test_nodes.py +++ b/nipype/pipeline/engine/tests/test_nodes.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -259,9 +258,9 @@ def test_function(arg1): file1 = os.path.join(os.getcwd(), "file1.txt") file2 = os.path.join(os.getcwd(), "file2.txt") - with open(file1, "wt") as fp: + with open(file1, "w") as fp: fp.write("%d" % arg1) - with open(file2, "wt") as fp: + with open(file2, "w") as fp: fp.write("%d" % arg1) return file1, file2 diff --git a/nipype/pipeline/engine/tests/test_utils.py b/nipype/pipeline/engine/tests/test_utils.py index 07b01bd3ba..80865f7eee 100644 --- a/nipype/pipeline/engine/tests/test_utils.py +++ b/nipype/pipeline/engine/tests/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine utils module @@ -179,7 +178,7 @@ def test_mapnode_crash(tmpdir): iterfield=["WRONG"], name="myfunc", ) - node.inputs.WRONG = ["string{}".format(i) for i in range(3)] + node.inputs.WRONG = [f"string{i}" for i in range(3)] node.config = deepcopy(config._sections) node.config["execution"]["stop_on_first_crash"] = True node.base_dir = tmpdir.strpath @@ -198,7 +197,7 @@ def test_mapnode_crash2(tmpdir): iterfield=["WRONG"], name="myfunc", ) - node.inputs.WRONG = ["string{}".format(i) for i in range(3)] + node.inputs.WRONG = [f"string{i}" for i in range(3)] node.base_dir = tmpdir.strpath with pytest.raises(Exception): @@ -216,7 +215,7 @@ def test_mapnode_crash3(tmpdir): iterfield=["WRONG"], name="myfunc", ) - node.inputs.WRONG = ["string{}".format(i) for i in range(3)] + node.inputs.WRONG = [f"string{i}" for i in range(3)] wf = pe.Workflow("testmapnodecrash") wf.add_nodes([node]) wf.base_dir = tmpdir.strpath diff --git a/nipype/pipeline/engine/tests/test_workflows.py b/nipype/pipeline/engine/tests/test_workflows.py index ab6ad663fd..9b06eb56bb 100644 --- a/nipype/pipeline/engine/tests/test_workflows.py +++ b/nipype/pipeline/engine/tests/test_workflows.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine workflows module @@ -134,7 +133,7 @@ def _test_function(arg1): file4 = os.path.join(os.getcwd(), "subdir", "file4.txt") os.mkdir("subdir") for filename in [file1, file2, file3, file4]: - with open(filename, "wt") as fp: + with open(filename, "w") as fp: fp.write("%d" % arg1) return file1, file2, os.path.join(os.getcwd(), "subdir") @@ -142,7 +141,7 @@ def _test_function(arg1): def _test_function2(in_file, arg): import os - with open(in_file, "rt") as fp: + with open(in_file) as fp: in_arg = fp.read() file1 = os.path.join(os.getcwd(), "file1.txt") @@ -150,7 +149,7 @@ def _test_function2(in_file, arg): file3 = os.path.join(os.getcwd(), "file3.txt") files = [file1, file2, file3] for filename in files: - with open(filename, "wt") as fp: + with open(filename, "w") as fp: fp.write("%d" % arg + in_arg) return file1, file2, 1 diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py index 94f9852268..53e2c49242 100644 --- a/nipype/pipeline/engine/utils.py +++ b/nipype/pipeline/engine/utils.py @@ -1,4 +1,3 @@ -# -*- coding: 
utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Utility routines for workflow graphs""" @@ -67,13 +66,13 @@ def save_hashfile(hashfile, hashed_inputs): """Store a hashfile""" try: save_json(hashfile, hashed_inputs) - except (IOError, TypeError): + except (OSError, TypeError): err_type = sys.exc_info()[0] if err_type is TypeError: # XXX - SG current workaround is to just # create the hashed file and not put anything # in it - with open(hashfile, "wt") as fd: + with open(hashfile, "w") as fd: fd.writelines(str(hashed_inputs)) logger.debug("Unable to write a particular type to the json file") @@ -333,7 +332,7 @@ def _write_inputs(node): try: func = create_function_from_source(val) except RuntimeError: - lines.append("%s.inputs.%s = '%s'" % (nodename, key, val)) + lines.append(f"{nodename}.inputs.{key} = '{val}'") else: funcname = [ name for name in func.__globals__ if name != "__builtins__" @@ -345,11 +344,9 @@ def _write_inputs(node): ) funcname = "%s_1" % funcname lines.append("from nipype.utils.functions import getsource") - lines.append( - "%s.inputs.%s = getsource(%s)" % (nodename, key, funcname) - ) + lines.append(f"{nodename}.inputs.{key} = getsource({funcname})") else: - lines.append("%s.inputs.%s = %s" % (nodename, key, val)) + lines.append(f"{nodename}.inputs.{key} = {val}") return lines @@ -361,7 +358,7 @@ def format_node(node, format="python", include_config=False): name = node.fullname.replace(".", "_") if format == "python": klass = node.interface - importline = "from %s import %s" % (klass.__module__, klass.__class__.__name__) + importline = f"from {klass.__module__} import {klass.__class__.__name__}" comment = "# Node: %s" % node.fullname spec = signature(node.interface.__init__) filled_args = [] @@ -372,7 +369,7 @@ def format_node(node, format="python", include_config=False): args = ", ".join(filled_args) klass_name = klass.__class__.__name__ if isinstance(node, MapNode): - nodedef = '%s = MapNode(%s(%s), iterfield=%s, name="%s")' % ( + nodedef = '{} = MapNode({}({}), iterfield={}, name="{}")'.format( name, klass_name, args, @@ -380,7 +377,7 @@ def format_node(node, format="python", include_config=False): name, ) else: - nodedef = '%s = Node(%s(%s), name="%s")' % (name, klass_name, args, name) + nodedef = f'{name} = Node({klass_name}({args}), name="{name}")' lines = [importline, comment, nodedef] if include_config: @@ -390,10 +387,10 @@ def format_node(node, format="python", include_config=False): comment, nodedef, ] - lines.append("%s.config = %s" % (name, node.config)) + lines.append(f"{name}.config = {node.config}") if node.iterables is not None: - lines.append("%s.iterables = %s" % (name, node.iterables)) + lines.append(f"{name}.iterables = {node.iterables}") lines.extend(_write_inputs(node)) return lines @@ -437,11 +434,11 @@ def modify_paths(object, relative=True, basedir=None): else: out = os.path.abspath(os.path.join(basedir, object)) if not os.path.exists(out): - raise IOError("File %s not found" % out) + raise OSError("File %s not found" % out) else: out = object else: - raise TypeError("Object {} is undefined".format(object)) + raise TypeError(f"Object {object} is undefined") return out @@ -536,7 +533,7 @@ def _write_detailed_dot(graph, dotfilename): inports.append(inport) inputstr = ( ["{IN"] - + ["| %s" % (_replacefunk(ip), ip) for ip in sorted(inports)] + + [f"| {ip}" for ip in sorted(inports)] + ["}"] ) outports = [] @@ -550,10 +547,7 @@ def _write_detailed_dot(graph, dotfilename): 
outports.append(outport) outputstr = ( ["{OUT"] - + [ - "| %s" % (_replacefunk(oport), oport) - for oport in sorted(outports) - ] + + [f"| {oport}" for oport in sorted(outports)] + ["}"] ) srcpackage = "" @@ -562,7 +556,7 @@ def _write_detailed_dot(graph, dotfilename): if len(pkglist) > 2: srcpackage = pkglist[2] srchierarchy = ".".join(nodename.split(".")[1:-1]) - nodenamestr = "{ %s | %s | %s }" % ( + nodenamestr = "{{ {} | {} | {} }}".format( nodename.split(".")[-1], srcpackage, srchierarchy, @@ -580,7 +574,7 @@ def _write_detailed_dot(graph, dotfilename): for edge in sorted(edges): text.append(edge) text.append("}") - with open(dotfilename, "wt") as filep: + with open(dotfilename, "w") as filep: filep.write("\n".join(text)) return text @@ -651,8 +645,7 @@ def walk(children, level=0, path=None, usename=True): else: path[level] = child # Recurse into the next level - for child_paths in walk(tail, level + 1, path, usename): - yield child_paths + yield from walk(tail, level + 1, path, usename) def synchronize_iterables(iterables): @@ -758,10 +751,8 @@ def _merge_graphs( # used at the same level. The use of the template below for naming # updates to nodes is the general solution. raise Exception( - ( - "Execution graph does not have a unique set of node " - "names. Please rerun the workflow" - ) + "Execution graph does not have a unique set of node " + "names. Please rerun the workflow" ) edgeinfo = {} for n in list(subgraph.nodes()): @@ -1013,11 +1004,9 @@ def generate_expanded_graph(graph_in): # find the unique iterable source node in the graph try: iter_src = next( - ( - node - for node in graph_in.nodes() - if node.name == src_name and nx.has_path(graph_in, node, inode) - ) + node + for node in graph_in.nodes() + if node.name == src_name and nx.has_path(graph_in, node, inode) ) except StopIteration: raise ValueError( @@ -1039,13 +1028,9 @@ def generate_expanded_graph(graph_in): # The itersource iterables is a {field: lookup} dictionary, where the # lookup is a {source key: iteration list} dictionary. Look up the # current iterable value using the predecessor itersource input values. - iter_dict = dict( - [ - (field, lookup[key]) - for field, lookup in inode.iterables - if key in lookup - ] - ) + iter_dict = { + field: lookup[key] for field, lookup in inode.iterables if key in lookup + } # convert the iterables to the standard {field: function} format @@ -1235,9 +1220,7 @@ def _standardize_iterables(node): if node.synchronize: if len(iterables) == 2: first, last = iterables - if all( - (isinstance(item, (str, bytes)) and item in fields for item in first) - ): + if all(isinstance(item, (str, bytes)) and item in fields for item in first): iterables = _transpose_iterables(first, last) # Convert a tuple to a list @@ -1282,13 +1265,11 @@ def _validate_iterables(node, iterables, fields): "The %s iterables is not a [(field, values)]" " list" % node.name ) except TypeError as e: - raise TypeError( - "A %s iterables member is not iterable: %s" % (node.name, e) - ) + raise TypeError(f"A {node.name} iterables member is not iterable: {e}") field, _ = item if field not in fields: raise ValueError( - "The %s iterables field is unrecognized: %s" % (node.name, field) + f"The {node.name} iterables field is unrecognized: {field}" ) @@ -1303,7 +1284,7 @@ def _transpose_iterables(fields, values): Otherwise, the result is a list of (field: value list) pairs. 
""" if isinstance(values, dict): - transposed = dict([(field, defaultdict(list)) for field in fields]) + transposed = {field: defaultdict(list) for field in fields} for key, tuples in list(values.items()): for kvals in tuples: for idx, val in enumerate(kvals): @@ -1398,9 +1379,9 @@ def format_dot(dotfilename, format="png"): """Dump a directed graph (Linux only; install via `brew` on OSX)""" try: formatted_dot, _ = _run_dot(dotfilename, format_ext=format) - except IOError as ioe: + except OSError as ioe: if "could not be found" in str(ioe): - raise IOError("Cannot draw directed graph; executable 'dot' is unavailable") + raise OSError("Cannot draw directed graph; executable 'dot' is unavailable") else: raise ioe return formatted_dot @@ -1411,8 +1392,8 @@ def _run_dot(dotfilename, format_ext): return dotfilename, None dot_base = os.path.splitext(dotfilename)[0] - formatted_dot = "{}.{}".format(dot_base, format_ext) - cmd = 'dot -T{} -o"{}" "{}"'.format(format_ext, formatted_dot, dotfilename) + formatted_dot = f"{dot_base}.{format_ext}" + cmd = f'dot -T{format_ext} -o"{formatted_dot}" "{dotfilename}"' res = CommandLine(cmd, terminal_output="allatonce", resource_monitor=False).run() return formatted_dot, res @@ -1668,7 +1649,7 @@ def write_workflow_resources(graph, filename=None, append=None): # If we append different runs, then we will see different # "bursts" of timestamps corresponding to those executions. if append and os.path.isfile(filename): - with open(filename, "r") as rsf: + with open(filename) as rsf: big_dict = json.load(rsf) for _, node in enumerate(graph.nodes()): @@ -1677,7 +1658,7 @@ def write_workflow_resources(graph, filename=None, append=None): params = "" if node.parameterization: - params = "_".join(["{}".format(p) for p in node.parameterization]) + params = "_".join([f"{p}" for p in node.parameterization]) try: rt_list = node.result.runtime diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py index 5cff775284..72be5fb71a 100644 --- a/nipype/pipeline/engine/workflows.py +++ b/nipype/pipeline/engine/workflows.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Defines functionality for pipelined execution of interfaces @@ -56,7 +55,7 @@ def __init__(self, name, base_dir=None): """ import networkx as nx - super(Workflow, self).__init__(name, base_dir) + super().__init__(name, base_dir) self._graph = nx.DiGraph() self._nodes_cache = set() @@ -78,7 +77,7 @@ def clone(self, name): unique name for the workflow """ - clone = super(Workflow, self).clone(name) + clone = super().clone(name) clone._reset_hierarchy() return clone @@ -152,7 +151,7 @@ def connect(self, *args, **kwargs): " src[%s] dest[%s] workflow[%s]" ) % (srcnode, destnode, self.name) - raise IOError(msg) + raise OSError(msg) if (srcnode not in newnodes) and not self._has_node(srcnode): newnodes.add(srcnode) if (destnode not in newnodes) and not self._has_node(destnode): @@ -232,9 +231,7 @@ def connect(self, *args, **kwargs): connected_ports[destnode].add(dest) infostr = [] for info in not_found: - infostr += [ - "Module %s has no %sput called %s\n" % (info[1], info[0], info[2]) - ] + infostr += [f"Module {info[1]} has no {info[0]}put called {info[2]}\n"] if not_found: raise Exception("\n".join(["Some connections were not found"] + infostr)) @@ -332,11 +329,11 @@ def add_nodes(self, nodes): all_nodes = self._get_all_nodes() for node in nodes: if node in 
all_nodes: - raise IOError("Node %s already exists in the workflow" % node) + raise OSError("Node %s already exists in the workflow" % node) if isinstance(node, Workflow): for subnode in node._get_all_nodes(): if subnode in all_nodes: - raise IOError( + raise OSError( ("Subnode %s of node %s already exists " "in the workflow") % (subnode, node) ) @@ -493,7 +490,7 @@ def write_hierarchical_dotfile( dotlist.append("}") dotstr = "\n".join(dotlist) if dotfilename: - fp = open(dotfilename, "wt") + fp = open(dotfilename, "w") fp.writelines(dotstr) fp.close() else: @@ -531,10 +528,10 @@ def export( if format == "python": connect_template = '%s.connect(%%s, %%s, %%s, "%%s")' % self.name connect_template2 = '%s.connect(%%s, "%%s", %%s, "%%s")' % self.name - wfdef = '%s = Workflow("%s")' % (self.name, self.name) + wfdef = f'{self.name} = Workflow("{self.name}")' lines.append(wfdef) if include_config: - lines.append("%s.config = %s" % (self.name, self.config)) + lines.append(f"{self.name}.config = {self.config}") for idx, node in enumerate(nodes): nodename = node.fullname.replace(".", "_") # write nodes @@ -587,8 +584,8 @@ def export( all_lines = importlines + functionlines + lines if not filename: - filename = "%s%s.py" % (prefix, self.name) - with open(filename, "wt") as fp: + filename = f"{prefix}{self.name}.py" + with open(filename, "w") as fp: fp.writelines("\n".join(all_lines)) return all_lines @@ -672,7 +669,7 @@ def _write_report_info(self, workingdir, name, graph): report_file = "%s/_report/report.rst" % node.output_dir().replace( report_dir, "" ) - result_file = "%s/result_%s.pklz" % ( + result_file = "{}/result_{}.pklz".format( node.output_dir().replace(report_dir, ""), node.name, ) @@ -763,10 +760,10 @@ def _check_nodes(self, nodes): try: this_node_lineage = node_lineage[idx] except IndexError: - raise IOError('Duplicate node name "%s" found.' % node.name) + raise OSError('Duplicate node name "%s" found.' % node.name) else: if this_node_lineage in [node._hierarchy, self.name]: - raise IOError('Duplicate node name "%s" found.' % node.name) + raise OSError('Duplicate node name "%s" found.' 
% node.name) else: node_names.append(node.name) @@ -1037,7 +1034,7 @@ def _get_dot( if level > len(colorset) - 2: level = 3 # Loop back to blue - dotlist = ['%slabel="%s";' % (prefix, self.name)] + dotlist = [f'{prefix}label="{self.name}";'] for node in nx.topological_sort(self._graph): fullname = ".".join(hierarchy + [node.fullname]) nodename = fullname.replace(".", "_") @@ -1098,7 +1095,7 @@ def _get_dot( nodename = nodefullname.replace(".", "_") subnodename = subnodefullname.replace(".", "_") for _ in self._graph.get_edge_data(node, subnode)["connect"]: - dotlist.append("%s -> %s;" % (nodename, subnodename)) + dotlist.append(f"{nodename} -> {subnodename};") logger.debug("connection: %s", dotlist[-1]) # add between workflow connections for u, v, d in self._graph.edges(data=True): diff --git a/nipype/pipeline/plugins/__init__.py b/nipype/pipeline/plugins/__init__.py index 83f4869a41..0b1ba01637 100644 --- a/nipype/pipeline/plugins/__init__.py +++ b/nipype/pipeline/plugins/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py index 3d600dda55..607ebbe7ba 100644 --- a/nipype/pipeline/plugins/base.py +++ b/nipype/pipeline/plugins/base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Common graph operations for execution.""" @@ -33,7 +32,7 @@ def _graph_to_lil_matrix(graph, nodelist): return ssp.lil_matrix(to_scipy_sparse_array(graph, nodelist=nodelist, format="lil")) -class PluginBase(object): +class PluginBase: """Base class for plugins.""" def __init__(self, plugin_args=None): @@ -103,7 +102,7 @@ def __init__(self, plugin_args=None): Initialize runtime attributes to none """ - super(DistributedPluginBase, self).__init__(plugin_args=plugin_args) + super().__init__(plugin_args=plugin_args) self.procs = None self.depidx = None self.refidx = None @@ -487,7 +486,7 @@ class SGELikeBatchManagerBase(DistributedPluginBase): """Execute workflow with SGE/OGE/PBS like batch system""" def __init__(self, template, plugin_args=None): - super(SGELikeBatchManagerBase, self).__init__(plugin_args=plugin_args) + super().__init__(plugin_args=plugin_args) self._template = template self._qsub_args = None if plugin_args: @@ -535,14 +534,14 @@ def _get_result(self, taskid): results_file = None try: error_message = ( - "Job id ({0}) finished or terminated, but " - "results file does not exist after ({1}) " + "Job id ({}) finished or terminated, but " + "results file does not exist after ({}) " "seconds. 
Batch dir contains crashdump file " "if node raised an exception.\n" - "Node working directory: ({2}) ".format(taskid, timeout, node_dir) + "Node working directory: ({}) ".format(taskid, timeout, node_dir) ) - raise IOError(error_message) - except IOError as e: + raise OSError(error_message) + except OSError as e: result_data["traceback"] = "\n".join(format_exception(*sys.exc_info())) else: results_file = glob(os.path.join(node_dir, "result_*.pklz"))[0] @@ -565,10 +564,10 @@ def _submit_job(self, node, updatehash=False): batch_dir, name = os.path.split(pyscript) name = ".".join(name.split(".")[:-1]) batchscript = "\n".join( - (self._template.rstrip("\n"), "%s %s" % (sys.executable, pyscript)) + (self._template.rstrip("\n"), f"{sys.executable} {pyscript}") ) batchscriptfile = os.path.join(batch_dir, "batchscript_%s.sh" % name) - with open(batchscriptfile, "wt") as fp: + with open(batchscriptfile, "w") as fp: fp.writelines(batchscript) return self._submit_batchtask(batchscriptfile, node) @@ -584,7 +583,7 @@ def __init__(self, plugin_args=None): logger.warning( "status_callback not supported for Graph submission" " plugins" ) - super(GraphPluginBase, self).__init__(plugin_args=plugin_args) + super().__init__(plugin_args=plugin_args) def run(self, graph, config, updatehash=False): import networkx as nx diff --git a/nipype/pipeline/plugins/condor.py b/nipype/pipeline/plugins/condor.py index cd0ad985e2..59971f65f2 100644 --- a/nipype/pipeline/plugins/condor.py +++ b/nipype/pipeline/plugins/condor.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via Condor """ @@ -44,7 +43,7 @@ def __init__(self, **kwargs): self._retry_timeout = kwargs["plugin_args"]["retry_timeout"] if "max_tries" in kwargs["plugin_args"]: self._max_tries = kwargs["plugin_args"]["max_tries"] - super(CondorPlugin, self).__init__(template, **kwargs) + super().__init__(template, **kwargs) def _is_pending(self, taskid): cmd = CommandLine( @@ -79,9 +78,9 @@ def _submit_batchtask(self, scriptfile, node): if self._qsub_args: qsubargs = self._qsub_args if "-o" not in qsubargs: - qsubargs = "%s -o %s" % (qsubargs, path) + qsubargs = f"{qsubargs} -o {path}" if "-e" not in qsubargs: - qsubargs = "%s -e %s" % (qsubargs, path) + qsubargs = f"{qsubargs} -e {path}" if node._hierarchy: jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: @@ -89,7 +88,7 @@ def _submit_batchtask(self, scriptfile, node): jobnameitems = jobname.split(".") jobnameitems.reverse() jobname = ".".join(jobnameitems) - cmd.inputs.args = "%s -N %s %s" % (qsubargs, jobname, scriptfile) + cmd.inputs.args = f"{qsubargs} -N {jobname} {scriptfile}" oldlevel = iflogger.level iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 diff --git a/nipype/pipeline/plugins/dagman.py b/nipype/pipeline/plugins/dagman.py index 98b07eeb10..42a73f0d8d 100644 --- a/nipype/pipeline/plugins/dagman.py +++ b/nipype/pipeline/plugins/dagman.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via Condor DAGMan """ import os @@ -99,14 +98,14 @@ def __init__(self, **kwargs): warn( "the 'submit_specs' argument is deprecated, use 'override_specs' instead" ) - super(CondorDAGManPlugin, self).__init__(**kwargs) + super().__init__(**kwargs) def _submit_graph(self, pyfiles, dependencies, nodes): # location of all scripts, place dagman output in here too batch_dir, _ = os.path.split(pyfiles[0]) # DAG description filename dagfilename = os.path.join(batch_dir, "workflow-%s.dag" % uuid.uuid4()) - with open(dagfilename, "wt") 
as dagfileptr: + with open(dagfilename, "w") as dagfileptr: # loop over all scripts, create submit files, and define them # as jobs in the DAG for idx, pyscript in enumerate(pyfiles): @@ -130,7 +129,7 @@ def _submit_graph(self, pyfiles, dependencies, nodes): ], ) # add required slots to the template - template = "%s\n%s\n%s\nqueue\n" % ( + template = "{}\n{}\n{}\nqueue\n".format( "%(initial_specs)s", template, "%(override_specs)s", @@ -147,7 +146,7 @@ def _submit_graph(self, pyfiles, dependencies, nodes): ) if wrapper_cmd is not None: specs["executable"] = wrapper_cmd - specs["nodescript"] = "%s %s %s" % ( + specs["nodescript"] = "{} {} {}".format( wrapper_args % specs, # give access to variables sys.executable, pyscript, @@ -155,7 +154,7 @@ def _submit_graph(self, pyfiles, dependencies, nodes): submitspec = template % specs # write submit spec for this job submitfile = os.path.join(batch_dir, "%s.submit" % name) - with open(submitfile, "wt") as submitfileprt: + with open(submitfile, "w") as submitfileprt: submitfileprt.writelines(submitspec) submitfileprt.close() # define job in DAG @@ -176,14 +175,14 @@ def _submit_graph(self, pyfiles, dependencies, nodes): terminal_output="allatonce", ) # needs -update_submit or re-running a workflow will fail - cmd.inputs.args = "%s -update_submit %s" % (self._dagman_args, dagfilename) + cmd.inputs.args = f"{self._dagman_args} -update_submit {dagfilename}" cmd.run() logger.info("submitted all jobs to Condor DAGMan") if self._block: # wait for DAGMan to settle down, no time wasted it is already running time.sleep(10) if not os.path.exists("%s.condor.sub" % dagfilename): - raise EnvironmentError( + raise OSError( "DAGMan did not create its submit file, please check the logs" ) # wait for completion diff --git a/nipype/pipeline/plugins/debug.py b/nipype/pipeline/plugins/debug.py index 31ce4e08e5..956d1b0694 100644 --- a/nipype/pipeline/plugins/debug.py +++ b/nipype/pipeline/plugins/debug.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Debug plugin @@ -12,7 +11,7 @@ class DebugPlugin(PluginBase): """Execute workflow in series""" def __init__(self, plugin_args=None): - super(DebugPlugin, self).__init__(plugin_args=plugin_args) + super().__init__(plugin_args=plugin_args) if ( plugin_args and "callable" in plugin_args diff --git a/nipype/pipeline/plugins/ipython.py b/nipype/pipeline/plugins/ipython.py index b22a5ea4e5..0ecabb2438 100644 --- a/nipype/pipeline/plugins/ipython.py +++ b/nipype/pipeline/plugins/ipython.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Parallel workflow execution via IPython controller @@ -49,7 +48,7 @@ class IPythonPlugin(DistributedPluginBase): def __init__(self, plugin_args=None): if IPython_not_loaded: raise ImportError("Please install ipyparallel to use this plugin.") - super(IPythonPlugin, self).__init__(plugin_args=plugin_args) + super().__init__(plugin_args=plugin_args) valid_args = ( "url_file", "profile", @@ -96,7 +95,7 @@ def run(self, graph, config, updatehash=False): raise Exception("Ipython kernel not installed") from e else: raise e - return super(IPythonPlugin, self).run(graph, config, updatehash=updatehash) + return super().run(graph, config, updatehash=updatehash) def _get_result(self, taskid): if taskid not in self.taskmap: diff --git a/nipype/pipeline/plugins/legacymultiproc.py 
b/nipype/pipeline/plugins/legacymultiproc.py index 747ff00c16..9a0e5f7740 100644 --- a/nipype/pipeline/plugins/legacymultiproc.py +++ b/nipype/pipeline/plugins/legacymultiproc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Parallel workflow execution via multiprocessing @@ -76,7 +75,7 @@ def run_node(node, updatehash, taskid): # Pythons 2.7, 3.4-3.7.0, and 3.7.1 have three different implementations of # pool.Pool().Process(), and the type of the result varies based on the default # multiprocessing context, so we need to dynamically patch the daemon property -class NonDaemonMixin(object): +class NonDaemonMixin: @property def daemon(self): return False @@ -135,7 +134,7 @@ def __init__( if context is None: context = mp.get_context() context = _nondaemon_context_mapper[context._name] - super(NonDaemonPool, self).__init__( + super().__init__( processes=processes, initializer=initializer, initargs=initargs, @@ -194,7 +193,7 @@ class LegacyMultiProcPlugin(DistributedPluginBase): def __init__(self, plugin_args=None): # Init variables and instance attributes - super(LegacyMultiProcPlugin, self).__init__(plugin_args=plugin_args) + super().__init__(plugin_args=plugin_args) self._taskresult = {} self._task_obj = {} self._taskid = 0 diff --git a/nipype/pipeline/plugins/linear.py b/nipype/pipeline/plugins/linear.py index 8449e34111..226a25dbd4 100644 --- a/nipype/pipeline/plugins/linear.py +++ b/nipype/pipeline/plugins/linear.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Local serial workflow execution diff --git a/nipype/pipeline/plugins/lsf.py b/nipype/pipeline/plugins/lsf.py index a88fbb6675..b4c50c96cb 100644 --- a/nipype/pipeline/plugins/lsf.py +++ b/nipype/pipeline/plugins/lsf.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via LSF """ @@ -39,7 +38,7 @@ def __init__(self, **kwargs): self._max_tries = kwargs["plugin_args"]["max_tries"] if "bsub_args" in kwargs["plugin_args"]: self._bsub_args = kwargs["plugin_args"]["bsub_args"] - super(LSFPlugin, self).__init__(template, **kwargs) + super().__init__(template, **kwargs) def _is_pending(self, taskid): """LSF lists a status of 'PEND' when a job has been submitted but is @@ -76,10 +75,10 @@ def _submit_batchtask(self, scriptfile, node): else: bsubargs += " " + node.plugin_args["bsub_args"] if "-o" not in bsubargs: # -o outfile - bsubargs = "%s -o %s" % (bsubargs, scriptfile + ".log") + bsubargs = "{} -o {}".format(bsubargs, scriptfile + ".log") if "-e" not in bsubargs: # -e error file - bsubargs = "%s -e %s" % (bsubargs, scriptfile + ".log") + bsubargs = "{} -e {}".format(bsubargs, scriptfile + ".log") if node._hierarchy: jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: @@ -87,7 +86,7 @@ def _submit_batchtask(self, scriptfile, node): jobnameitems = jobname.split(".") jobnameitems.reverse() jobname = ".".join(jobnameitems) - cmd.inputs.args = "%s -J %s sh %s" % ( + cmd.inputs.args = "{} -J {} sh {}".format( bsubargs, jobname, scriptfile, @@ -121,7 +120,7 @@ def _submit_batchtask(self, scriptfile, node): if match: taskid = int(match.groups()[0]) else: - raise IOError( + raise OSError( "Can't parse submission job output id: %s" % result.runtime.stdout ) self._pending[taskid] = node.output_dir() diff --git a/nipype/pipeline/plugins/multiproc.py 
b/nipype/pipeline/plugins/multiproc.py index 7df35cab9d..05d063b3b4 100644 --- a/nipype/pipeline/plugins/multiproc.py +++ b/nipype/pipeline/plugins/multiproc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Parallel workflow execution via multiprocessing @@ -114,7 +113,7 @@ class MultiProcPlugin(DistributedPluginBase): def __init__(self, plugin_args=None): # Init variables and instance attributes - super(MultiProcPlugin, self).__init__(plugin_args=plugin_args) + super().__init__(plugin_args=plugin_args) self._taskresult = {} self._task_obj = {} self._taskid = 0 diff --git a/nipype/pipeline/plugins/oar.py b/nipype/pipeline/plugins/oar.py index bc7d2ba09d..b723dfd308 100644 --- a/nipype/pipeline/plugins/oar.py +++ b/nipype/pipeline/plugins/oar.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via OAR http://oar.imag.fr """ import os @@ -47,7 +46,7 @@ def __init__(self, **kwargs): self._max_tries = kwargs["plugin_args"]["max_tries"] if "max_jobname_len" in kwargs["plugin_args"]: self._max_jobname_len = kwargs["plugin_args"]["max_jobname_len"] - super(OARPlugin, self).__init__(template, **kwargs) + super().__init__(template, **kwargs) def _is_pending(self, taskid): # subprocess.Popen requires taskid to be a string @@ -90,12 +89,12 @@ def _submit_batchtask(self, scriptfile, node): jobname = jobname[0 : self._max_jobname_len] if "-O" not in oarsubargs: - oarsubargs = "%s -O %s" % ( + oarsubargs = "{} -O {}".format( oarsubargs, os.path.join(path, jobname + ".stdout"), ) if "-E" not in oarsubargs: - oarsubargs = "%s -E %s" % ( + oarsubargs = "{} -E {}".format( oarsubargs, os.path.join(path, jobname + ".stderr"), ) @@ -103,7 +102,7 @@ def _submit_batchtask(self, scriptfile, node): oarsubargs = "%s -J" % (oarsubargs) os.chmod(scriptfile, stat.S_IEXEC | stat.S_IREAD | stat.S_IWRITE) - cmd.inputs.args = "%s -n %s -S %s" % (oarsubargs, jobname, scriptfile) + cmd.inputs.args = f"{oarsubargs} -n {jobname} -S {scriptfile}" oldlevel = iflogger.level iflogger.setLevel(logging.getLevelName("CRITICAL")) @@ -142,5 +141,5 @@ def _submit_batchtask(self, scriptfile, node): break taskid = json.loads(o)["job_id"] self._pending[taskid] = node.output_dir() - logger.debug("submitted OAR task: %s for node %s" % (taskid, node._id)) + logger.debug(f"submitted OAR task: {taskid} for node {node._id}") return taskid diff --git a/nipype/pipeline/plugins/pbs.py b/nipype/pipeline/plugins/pbs.py index 4ff172a067..3106cc1122 100644 --- a/nipype/pipeline/plugins/pbs.py +++ b/nipype/pipeline/plugins/pbs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via PBS/Torque """ import os @@ -41,11 +40,11 @@ def __init__(self, **kwargs): self._max_tries = kwargs["plugin_args"]["max_tries"] if "max_jobname_len" in kwargs["plugin_args"]: self._max_jobname_len = kwargs["plugin_args"]["max_jobname_len"] - super(PBSPlugin, self).__init__(template, **kwargs) + super().__init__(template, **kwargs) def _is_pending(self, taskid): result = CommandLine( - "qstat -f {}".format(taskid), + f"qstat -f {taskid}", environ=dict(os.environ), terminal_output="file_split", resource_monitor=False, @@ -78,9 +77,9 @@ def _submit_batchtask(self, scriptfile, node): else: qsubargs += " " + node.plugin_args["qsub_args"] if "-o" not in qsubargs: - qsubargs = "%s -o %s" % (qsubargs, path) + qsubargs = f"{qsubargs} -o {path}" if "-e" not in qsubargs: - qsubargs = "%s -e %s" % (qsubargs, path) + qsubargs = 
f"{qsubargs} -e {path}" if node._hierarchy: jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: @@ -89,7 +88,7 @@ def _submit_batchtask(self, scriptfile, node): jobnameitems.reverse() jobname = ".".join(jobnameitems) jobname = jobname[0 : self._max_jobname_len] - cmd.inputs.args = "%s -N %s %s" % (qsubargs, jobname, scriptfile) + cmd.inputs.args = f"{qsubargs} -N {jobname} {scriptfile}" oldlevel = iflogger.level iflogger.setLevel(logging.getLevelName("CRITICAL")) @@ -105,7 +104,7 @@ def _submit_batchtask(self, scriptfile, node): else: iflogger.setLevel(oldlevel) raise RuntimeError( - "Could not submit pbs task for node {}\n{}".format(node._id, e) + f"Could not submit pbs task for node {node._id}\n{e}" ) else: break @@ -113,6 +112,6 @@ def _submit_batchtask(self, scriptfile, node): # retrieve pbs taskid taskid = result.runtime.stdout.split(".")[0] self._pending[taskid] = node.output_dir() - logger.debug("submitted pbs task: {} for node {}".format(taskid, node._id)) + logger.debug(f"submitted pbs task: {taskid} for node {node._id}") return taskid diff --git a/nipype/pipeline/plugins/pbsgraph.py b/nipype/pipeline/plugins/pbsgraph.py index 6304e715b7..17c6db2964 100644 --- a/nipype/pipeline/plugins/pbsgraph.py +++ b/nipype/pipeline/plugins/pbsgraph.py @@ -27,7 +27,7 @@ class PBSGraphPlugin(SGEGraphPlugin): def _submit_graph(self, pyfiles, dependencies, nodes): batch_dir, _ = os.path.split(pyfiles[0]) submitjobsfile = os.path.join(batch_dir, "submit_jobs.sh") - with open(submitjobsfile, "wt") as fp: + with open(submitjobsfile, "w") as fp: fp.writelines("#!/usr/bin/env sh\n") for idx, pyscript in enumerate(pyfiles): node = nodes[idx] @@ -35,11 +35,9 @@ def _submit_graph(self, pyfiles, dependencies, nodes): batch_dir, name = os.path.split(pyscript) name = ".".join(name.split(".")[:-1]) - batchscript = "\n".join( - (template, "%s %s" % (sys.executable, pyscript)) - ) + batchscript = "\n".join((template, f"{sys.executable} {pyscript}")) batchscriptfile = os.path.join(batch_dir, "batchscript_%s.sh" % name) - with open(batchscriptfile, "wt") as batchfp: + with open(batchscriptfile, "w") as batchfp: batchfp.writelines(batchscript) batchfp.close() deps = "" diff --git a/nipype/pipeline/plugins/semaphore_singleton.py b/nipype/pipeline/plugins/semaphore_singleton.py index 12fa7c7777..1ab42de2fc 100644 --- a/nipype/pipeline/plugins/semaphore_singleton.py +++ b/nipype/pipeline/plugins/semaphore_singleton.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import threading semaphore = threading.Semaphore(0) diff --git a/nipype/pipeline/plugins/sge.py b/nipype/pipeline/plugins/sge.py index d6512b9103..172fc6e852 100644 --- a/nipype/pipeline/plugins/sge.py +++ b/nipype/pipeline/plugins/sge.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via SGE """ import os @@ -25,7 +24,7 @@ def sge_debug_print(message): # print DEBUGGING_PREFIX + " " + "=!" * 3 + " " + message -class QJobInfo(object): +class QJobInfo: """Information about a single job created by OGE/SGE or similar Each job is responsible for knowing it's own refresh state :author Hans J. Johnson @@ -82,7 +81,7 @@ def is_job_state_pending(self): time_diff = time.time() - self._job_info_creation_time if self.is_zombie(): sge_debug_print( - "DONE! QJobInfo.IsPending found in 'zombie' list, returning False so claiming done!\n{0}".format( + "DONE! 
QJobInfo.IsPending found in 'zombie' list, returning False so claiming done!\n{}".format( self ) ) @@ -111,7 +110,7 @@ def set_state(self, new_state): self._job_queue_state = new_state -class QstatSubstitute(object): +class QstatSubstitute: """A wrapper for Qstat to avoid overloading the SGE/OGS server with rapid continuous qstat requests""" @@ -158,7 +157,7 @@ def _qacct_verified_complete(taskid): sge_debug_print( "WARNING: " "CONTACTING qacct for finished jobs, " - "{0}: {1}".format(time.time(), "Verifying Completion") + "{}: {}".format(time.time(), "Verifying Completion") ) this_command = "qacct" @@ -181,7 +180,7 @@ def _qacct_verified_complete(taskid): qacct_result, _ = proc.communicate() if qacct_result.find(str(taskid)): is_complete = True - sge_debug_print("NOTE: qacct for jobs\n{0}".format(qacct_result)) + sge_debug_print(f"NOTE: qacct for jobs\n{qacct_result}") break except: sge_debug_print("NOTE: qacct call failed") @@ -235,9 +234,7 @@ def _parse_qstat_job_list(self, xml_job_list): self._task_dictionary[task_id].update_info( job_queue_state, job_time, job_queue_name, job_slots ) - sge_debug_print( - "Updating job: {0}".format(self._task_dictionary[task_id]) - ) + sge_debug_print(f"Updating job: {self._task_dictionary[task_id]}") current_jobs_parsed.append(task_id) # Changed from job_num as "in" is used to check which does not cast else: @@ -259,7 +256,7 @@ def _parse_qstat_job_list(self, xml_job_list): else: sge_debug_print( "ERROR: Job not in current parselist, " - "and not in done list {0}: {1}".format( + "and not in done list {}: {}".format( dictionary_job, self._task_dictionary[dictionary_job] ) ) @@ -271,7 +268,7 @@ def _parse_qstat_job_list(self, xml_job_list): else: sge_debug_print( "ERROR: Job not in still in initialization mode, " - "and not in done list {0}: {1}".format( + "and not in done list {}: {}".format( dictionary_job, self._task_dictionary[dictionary_job] ) ) @@ -287,7 +284,7 @@ def _run_qstat(self, reason_for_qstat, force_instant=True): """ sge_debug_print( "WARNING: CONTACTING qmaster for jobs, " - "{0}: {1}".format(time.time(), reason_for_qstat) + "{}: {}".format(time.time(), reason_for_qstat) ) if force_instant: this_command = self._qstat_instant_executable @@ -318,7 +315,7 @@ def _run_qstat(self, reason_for_qstat, force_instant=True): self._parse_qstat_job_list(runjobs) break except Exception as inst: - exception_message = "QstatParsingError:\n\t{0}\n\t{1}\n".format( + exception_message = "QstatParsingError:\n\t{}\n\t{}\n".format( type(inst), # the exception instance inst, # __str__ allows args to printed directly ) @@ -339,32 +336,28 @@ def is_job_pending(self, task_id): job_is_pending = self._task_dictionary[task_id].is_job_state_pending() # Double check pending jobs in case of change (since we don't check at the beginning) if job_is_pending: - self._run_qstat( - "checking job pending status {0}".format(task_id), False - ) + self._run_qstat(f"checking job pending status {task_id}", False) job_is_pending = self._task_dictionary[task_id].is_job_state_pending() else: - self._run_qstat("checking job pending status {0}".format(task_id), True) + self._run_qstat(f"checking job pending status {task_id}", True) if task_id in self._task_dictionary: # Trust the cache, only False if state='zombie' job_is_pending = self._task_dictionary[task_id].is_job_state_pending() else: sge_debug_print( - "ERROR: Job {0} not in task list, " + "ERROR: Job {} not in task list, " "even after forced qstat!".format(task_id) ) job_is_pending = False if not job_is_pending: - 
sge_debug_print("DONE! Returning for {0} claiming done!".format(task_id)) + sge_debug_print(f"DONE! Returning for {task_id} claiming done!") if task_id in self._task_dictionary: - sge_debug_print( - "NOTE: Adding {0} to OutOfScopeJobs list!".format(task_id) - ) + sge_debug_print(f"NOTE: Adding {task_id} to OutOfScopeJobs list!") self._out_of_scope_jobs.append(int(task_id)) self._task_dictionary.pop(task_id) else: sge_debug_print( - "ERROR: Job {0} not in task list, " + "ERROR: Job {} not in task list, " "but attempted to be removed!".format(task_id) ) return job_is_pending @@ -419,7 +412,7 @@ def __init__(self, **kwargs): cached_qstat = kwargs["plugin_args"]["qstatCachedProgramPath"] self._refQstatSubstitute = QstatSubstitute(instant_qstat, cached_qstat) - super(SGEPlugin, self).__init__(template, **kwargs) + super().__init__(template, **kwargs) def _is_pending(self, taskid): return self._refQstatSubstitute.is_job_pending(int(taskid)) @@ -441,9 +434,9 @@ def _submit_batchtask(self, scriptfile, node): else: qsubargs += " " + node.plugin_args["qsub_args"] if "-o" not in qsubargs: - qsubargs = "%s -o %s" % (qsubargs, path) + qsubargs = f"{qsubargs} -o {path}" if "-e" not in qsubargs: - qsubargs = "%s -e %s" % (qsubargs, path) + qsubargs = f"{qsubargs} -e {path}" if node._hierarchy: jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: @@ -452,7 +445,7 @@ def _submit_batchtask(self, scriptfile, node): jobnameitems.reverse() jobname = ".".join(jobnameitems) jobname = qsub_sanitize_job_name(jobname) - cmd.inputs.args = "%s -N %s %s" % (qsubargs, jobname, scriptfile) + cmd.inputs.args = f"{qsubargs} -N {jobname} {scriptfile}" oldlevel = iflogger.level iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 diff --git a/nipype/pipeline/plugins/sgegraph.py b/nipype/pipeline/plugins/sgegraph.py index 48fc1ce132..182619bd98 100644 --- a/nipype/pipeline/plugins/sgegraph.py +++ b/nipype/pipeline/plugins/sgegraph.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via SGE """ import os @@ -59,7 +58,7 @@ def __init__(self, **kwargs): self._dont_resubmit_completed_jobs = plugin_args[ "dont_resubmit_completed_jobs" ] - super(SGEGraphPlugin, self).__init__(**kwargs) + super().__init__(**kwargs) def _submit_graph(self, pyfiles, dependencies, nodes): def make_job_name(jobnumber, nodeslist): @@ -68,7 +67,7 @@ def make_job_name(jobnumber, nodeslist): - nodeslist: The name of the node being processed - return: A string representing this job to be displayed by SGE """ - job_name = "j{0}_{1}".format(jobnumber, nodeslist[jobnumber]._id) + job_name = f"j{jobnumber}_{nodeslist[jobnumber]._id}" # Condition job_name to be a valid bash identifier (i.e. 
- is invalid) job_name = job_name.replace("-", "_").replace(".", "_").replace(":", "_") return job_name @@ -96,7 +95,7 @@ def make_job_name(jobnumber, nodeslist): cache_doneness_per_node[idx] = node_status_done - with open(submitjobsfile, "wt") as fp: + with open(submitjobsfile, "w") as fp: fp.writelines("#!/usr/bin/env bash\n") fp.writelines("# Condense format attempted\n") for idx, pyscript in enumerate(pyfiles): @@ -110,9 +109,7 @@ def make_job_name(jobnumber, nodeslist): batch_dir, name = os.path.split(pyscript) name = ".".join(name.split(".")[:-1]) - batchscript = "\n".join( - (template, "%s %s" % (sys.executable, pyscript)) - ) + batchscript = "\n".join((template, f"{sys.executable} {pyscript}")) batchscriptfile = os.path.join( batch_dir, "batchscript_%s.sh" % name ) @@ -120,7 +117,7 @@ def make_job_name(jobnumber, nodeslist): batchscriptoutfile = batchscriptfile + ".o" batchscripterrfile = batchscriptfile + ".e" - with open(batchscriptfile, "wt") as batchfp: + with open(batchscriptfile, "w") as batchfp: batchfp.writelines(batchscript) batchfp.close() deps = "" @@ -144,10 +141,10 @@ def make_job_name(jobnumber, nodeslist): # Do not use default output locations if they are set in self._qsub_args stderrFile = "" if self._qsub_args.count("-e ") == 0: - stderrFile = "-e {errFile}".format(errFile=batchscripterrfile) + stderrFile = f"-e {batchscripterrfile}" stdoutFile = "" if self._qsub_args.count("-o ") == 0: - stdoutFile = "-o {outFile}".format(outFile=batchscriptoutfile) + stdoutFile = f"-o {batchscriptoutfile}" full_line = "{jobNm}=$(qsub {outFileOption} {errFileOption} {extraQSubArgs} {dependantIndex} -N {jobNm} {batchscript} | awk '/^Your job/{{print $3}}')\n".format( jobNm=jobname, outFileOption=stdoutFile, diff --git a/nipype/pipeline/plugins/slurm.py b/nipype/pipeline/plugins/slurm.py index c06444a02e..90cae8d9e2 100644 --- a/nipype/pipeline/plugins/slurm.py +++ b/nipype/pipeline/plugins/slurm.py @@ -54,7 +54,7 @@ def __init__(self, **kwargs): if "sbatch_args" in kwargs["plugin_args"]: self._sbatch_args = kwargs["plugin_args"]["sbatch_args"] self._pending = {} - super(SLURMPlugin, self).__init__(self._template, **kwargs) + super().__init__(self._template, **kwargs) def _is_pending(self, taskid): try: @@ -104,9 +104,13 @@ def _submit_batchtask(self, scriptfile, node): else: sbatch_args += " " + node.plugin_args["sbatch_args"] if "-o" not in sbatch_args: - sbatch_args = "%s -o %s" % (sbatch_args, os.path.join(path, "slurm-%j.out")) + sbatch_args = "{} -o {}".format( + sbatch_args, os.path.join(path, "slurm-%j.out") + ) if "-e" not in sbatch_args: - sbatch_args = "%s -e %s" % (sbatch_args, os.path.join(path, "slurm-%j.out")) + sbatch_args = "{} -e {}".format( + sbatch_args, os.path.join(path, "slurm-%j.out") + ) if node._hierarchy: jobname = ".".join((dict(os.environ)["LOGNAME"], node._hierarchy, node._id)) else: @@ -114,7 +118,7 @@ def _submit_batchtask(self, scriptfile, node): jobnameitems = jobname.split(".") jobnameitems.reverse() jobname = ".".join(jobnameitems) - cmd.inputs.args = "%s -J %s %s" % (sbatch_args, jobname, scriptfile) + cmd.inputs.args = f"{sbatch_args} -J {jobname} {scriptfile}" oldlevel = iflogger.level iflogger.setLevel(logging.getLevelName("CRITICAL")) tries = 0 @@ -139,7 +143,7 @@ def _submit_batchtask(self, scriptfile, node): ) else: break - logger.debug("Ran command ({0})".format(cmd.cmdline)) + logger.debug(f"Ran command ({cmd.cmdline})") iflogger.setLevel(oldlevel) # retrieve taskid lines = [line for line in result.runtime.stdout.split("\n") if line] diff 
--git a/nipype/pipeline/plugins/slurmgraph.py b/nipype/pipeline/plugins/slurmgraph.py index e54c5f4291..901dc02a99 100644 --- a/nipype/pipeline/plugins/slurmgraph.py +++ b/nipype/pipeline/plugins/slurmgraph.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via SLURM """ import os @@ -59,7 +58,7 @@ def __init__(self, **kwargs): ] else: self._dont_resubmit_completed_jobs = False - super(SLURMGraphPlugin, self).__init__(**kwargs) + super().__init__(**kwargs) def _submit_graph(self, pyfiles, dependencies, nodes): def make_job_name(jobnumber, nodeslist): @@ -68,7 +67,7 @@ def make_job_name(jobnumber, nodeslist): - nodeslist: The name of the node being processed - return: A string representing this job to be displayed by SLURM """ - job_name = "j{0}_{1}".format(jobnumber, nodeslist[jobnumber]._id) + job_name = f"j{jobnumber}_{nodeslist[jobnumber]._id}" # Condition job_name to be a valid bash identifier (i.e. - is invalid) job_name = job_name.replace("-", "_").replace(".", "_").replace(":", "_") return job_name @@ -96,7 +95,7 @@ def make_job_name(jobnumber, nodeslist): cache_doneness_per_node[idx] = node_status_done - with open(submitjobsfile, "wt") as fp: + with open(submitjobsfile, "w") as fp: fp.writelines("#!/usr/bin/env bash\n") fp.writelines("# Condense format attempted\n") for idx, pyscript in enumerate(pyfiles): @@ -110,9 +109,7 @@ def make_job_name(jobnumber, nodeslist): batch_dir, name = os.path.split(pyscript) name = ".".join(name.split(".")[:-1]) - batchscript = "\n".join( - (template, "%s %s" % (sys.executable, pyscript)) - ) + batchscript = "\n".join((template, f"{sys.executable} {pyscript}")) batchscriptfile = os.path.join( batch_dir, "batchscript_%s.sh" % name ) @@ -120,7 +117,7 @@ def make_job_name(jobnumber, nodeslist): batchscriptoutfile = batchscriptfile + ".o" batchscripterrfile = batchscriptfile + ".e" - with open(batchscriptfile, "wt") as batchfp: + with open(batchscriptfile, "w") as batchfp: batchfp.writelines(batchscript) batchfp.close() deps = "" @@ -144,10 +141,10 @@ def make_job_name(jobnumber, nodeslist): # Do not use default output locations if they are set in self._sbatch_args stderrFile = "" if self._sbatch_args.count("-e ") == 0: - stderrFile = "-e {errFile}".format(errFile=batchscripterrfile) + stderrFile = f"-e {batchscripterrfile}" stdoutFile = "" if self._sbatch_args.count("-o ") == 0: - stdoutFile = "-o {outFile}".format(outFile=batchscriptoutfile) + stdoutFile = f"-o {batchscriptoutfile}" full_line = "{jobNm}=$(sbatch {outFileOption} {errFileOption} {extraSBatchArgs} {dependantIndex} -J {jobNm} {batchscript} | awk '/^Submitted/ {{print $4}}')\n".format( jobNm=jobname, outFileOption=stdoutFile, diff --git a/nipype/pipeline/plugins/somaflow.py b/nipype/pipeline/plugins/somaflow.py index 62076d9f65..fe2a871a9f 100644 --- a/nipype/pipeline/plugins/somaflow.py +++ b/nipype/pipeline/plugins/somaflow.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """Parallel workflow execution via PBS/Torque """ @@ -20,7 +19,7 @@ class SomaFlowPlugin(GraphPluginBase): def __init__(self, plugin_args=None): if soma_not_loaded: raise ImportError("SomaFlow could not be imported") - super(SomaFlowPlugin, self).__init__(plugin_args=plugin_args) + super().__init__(plugin_args=plugin_args) def _submit_graph(self, pyfiles, dependencies, nodes): jobs = [] diff --git a/nipype/pipeline/plugins/tests/__init__.py b/nipype/pipeline/plugins/tests/__init__.py index 99fb243f19..349937997e 100644 --- a/nipype/pipeline/plugins/tests/__init__.py +++ 
b/nipype/pipeline/plugins/tests/__init__.py @@ -1,3 +1,2 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/pipeline/plugins/tests/test_base.py b/nipype/pipeline/plugins/tests/test_base.py index fddcfa2368..43471a7d64 100644 --- a/nipype/pipeline/plugins/tests/test_base.py +++ b/nipype/pipeline/plugins/tests/test_base.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine module diff --git a/nipype/pipeline/plugins/tests/test_callback.py b/nipype/pipeline/plugins/tests/test_callback.py index 8baa356fdd..f7606708c7 100644 --- a/nipype/pipeline/plugins/tests/test_callback.py +++ b/nipype/pipeline/plugins/tests/test_callback.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for workflow callbacks @@ -17,7 +16,7 @@ def bad_func(): raise Exception -class Status(object): +class Status: def __init__(self): self.statuses = [] diff --git a/nipype/pipeline/plugins/tests/test_debug.py b/nipype/pipeline/plugins/tests/test_debug.py index 82361a0228..fafb6a276d 100644 --- a/nipype/pipeline/plugins/tests/test_debug.py +++ b/nipype/pipeline/plugins/tests/test_debug.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os import nipype.interfaces.base as nib diff --git a/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py b/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py index 4e44c801ec..9d0d490a87 100644 --- a/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py +++ b/nipype/pipeline/plugins/tests/test_legacymultiproc_nondaemon.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Testing module for functions and classes from multiproc.py diff --git a/nipype/pipeline/plugins/tests/test_linear.py b/nipype/pipeline/plugins/tests/test_linear.py index 9ccb5157fc..82ba752a01 100644 --- a/nipype/pipeline/plugins/tests/test_linear.py +++ b/nipype/pipeline/plugins/tests/test_linear.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os import nipype.interfaces.base as nib diff --git a/nipype/pipeline/plugins/tests/test_multiproc.py b/nipype/pipeline/plugins/tests/test_multiproc.py index 142d108ebc..938e1aab9e 100644 --- a/nipype/pipeline/plugins/tests/test_multiproc.py +++ b/nipype/pipeline/plugins/tests/test_multiproc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/pipeline/plugins/tests/test_oar.py b/nipype/pipeline/plugins/tests/test_oar.py index 75cfddaa05..91622d4dd7 100644 --- a/nipype/pipeline/plugins/tests/test_oar.py +++ b/nipype/pipeline/plugins/tests/test_oar.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from shutil import which import nipype.interfaces.base as nib diff --git a/nipype/pipeline/plugins/tests/test_pbs.py b/nipype/pipeline/plugins/tests/test_pbs.py index 65662fd867..1151fb5ad2 100644 --- a/nipype/pipeline/plugins/tests/test_pbs.py +++ b/nipype/pipeline/plugins/tests/test_pbs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from shutil import which import nipype.interfaces.base as nib diff --git a/nipype/pipeline/plugins/tests/test_somaflow.py 
b/nipype/pipeline/plugins/tests/test_somaflow.py index 5fe5935e1d..2eb9a966b6 100644 --- a/nipype/pipeline/plugins/tests/test_somaflow.py +++ b/nipype/pipeline/plugins/tests/test_somaflow.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os from time import sleep diff --git a/nipype/pipeline/plugins/tests/test_tools.py b/nipype/pipeline/plugins/tests/test_tools.py index b1ff7e09ac..454621573e 100644 --- a/nipype/pipeline/plugins/tests/test_tools.py +++ b/nipype/pipeline/plugins/tests/test_tools.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Tests for the engine module diff --git a/nipype/pipeline/plugins/tools.py b/nipype/pipeline/plugins/tools.py index 86fdf67ac6..8c28f36246 100644 --- a/nipype/pipeline/plugins/tools.py +++ b/nipype/pipeline/plugins/tools.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Common graph operations for execution @@ -55,8 +54,8 @@ def report_crash(node, traceback=None, hostname=None): try: login_name = getpass.getuser() except KeyError: - login_name = "UID{:d}".format(os.getuid()) - crashfile = "crash-%s-%s-%s-%s" % (timeofcrash, login_name, name, str(uuid.uuid4())) + login_name = f"UID{os.getuid():d}" + crashfile = f"crash-{timeofcrash}-{login_name}-{name}-{str(uuid.uuid4())}" crashdir = node.config["execution"].get("crashdump_dir", os.getcwd()) os.makedirs(crashdir, exist_ok=True) @@ -99,10 +98,10 @@ def create_pyscript(node, updatehash=False, store_exception=True): # pickle node timestamp = strftime("%Y%m%d_%H%M%S") if node._hierarchy: - suffix = "%s_%s_%s" % (timestamp, node._hierarchy, node._id) + suffix = f"{timestamp}_{node._hierarchy}_{node._id}" batch_dir = os.path.join(node.base_dir, node._hierarchy.split(".")[0], "batch") else: - suffix = "%s_%s" % (timestamp, node._id) + suffix = f"{timestamp}_{node._id}" batch_dir = os.path.join(node.base_dir, "batch") if not os.path.exists(batch_dir): os.makedirs(batch_dir) @@ -176,6 +175,6 @@ def create_pyscript(node, updatehash=False, store_exception=True): """ cmdstr = cmdstr % (mpl_backend, pkl_file, batch_dir, node.config, suffix) pyscript = os.path.join(batch_dir, "pyscript_%s.py" % suffix) - with open(pyscript, "wt") as fp: + with open(pyscript, "w") as fp: fp.writelines(cmdstr) return pyscript diff --git a/nipype/pkg_info.py b/nipype/pkg_info.py index e80fde9d76..4370f495f8 100644 --- a/nipype/pkg_info.py +++ b/nipype/pkg_info.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import configparser import os @@ -41,7 +40,7 @@ def pkg_commit_hash(pkg_path): # Try and get commit from written commit text file pth = os.path.join(pkg_path, COMMIT_INFO_FNAME) if not os.path.isfile(pth): - raise IOError("Missing commit info file %s" % pth) + raise OSError("Missing commit info file %s" % pth) cfg_parser = configparser.RawConfigParser() with open(pth, encoding="utf-8") as fp: cfg_parser.read_file(fp) diff --git a/nipype/scripts/cli.py b/nipype/scripts/cli.py index 284b9fbb96..9645db6a18 100644 --- a/nipype/scripts/cli.py +++ b/nipype/scripts/cli.py @@ -145,12 +145,12 @@ def run(ctx, module, interface, list, help): iface_names = list_interfaces(module) click.echo("Available Interfaces:") for if_name in iface_names: - click.echo(" {}".format(if_name)) + click.echo(f" {if_name}") # check the interface elif module_given and interface: # create the argument parser - description = "Run {}".format(interface) 
+ description = f"Run {interface}" prog = " ".join([ctx.command_path, module.__name__, interface] + ctx.args) iface_parser = argparse.ArgumentParser(description=description, prog=prog) diff --git a/nipype/scripts/instance.py b/nipype/scripts/instance.py index 0d736de796..7bd2d7bf1b 100644 --- a/nipype/scripts/instance.py +++ b/nipype/scripts/instance.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Import lib and class meta programming utilities. """ @@ -29,7 +28,7 @@ def import_module(module_path): try: mod = importlib.import_module(module_path) except: - raise ImportError("Error when importing object {}.".format(module_path)) + raise ImportError(f"Error when importing object {module_path}.") else: return mod diff --git a/nipype/scripts/utils.py b/nipype/scripts/utils.py index 28e11cd8f6..00b0f8970e 100644 --- a/nipype/scripts/utils.py +++ b/nipype/scripts/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Utilities for the CLI functions. """ @@ -25,7 +24,7 @@ # validators def check_not_none(ctx, param, value): if value is None: - raise click.BadParameter("got {}.".format(value)) + raise click.BadParameter(f"got {value}.") return value @@ -102,12 +101,10 @@ def add_args_options(arg_parser, interface): if has_multiple_inner_traits: raise NotImplementedError( - ( - "This interface cannot be used. via the" - " command line as multiple inner traits" - " are currently not supported for mandatory" - " argument: {}.".format(name) - ) + "This interface cannot be used. via the" + " command line as multiple inner traits" + " are currently not supported for mandatory" + " argument: {}.".format(name) ) arg_parser.add_argument(name, help=desc, **args) else: diff --git a/nipype/sphinxext/__init__.py b/nipype/sphinxext/__init__.py index 7f877fb023..c30bc66036 100644 --- a/nipype/sphinxext/__init__.py +++ b/nipype/sphinxext/__init__.py @@ -1,4 +1,3 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/sphinxext/apidoc/docstring.py b/nipype/sphinxext/apidoc/docstring.py index f1f27ef12a..8f56f96942 100644 --- a/nipype/sphinxext/apidoc/docstring.py +++ b/nipype/sphinxext/apidoc/docstring.py @@ -169,9 +169,7 @@ def _parse_spec(inputs, name, spec): desc_lines += ["(Nipype **default** value: ``%s``)" % str(default)] - out_rst = [ - "{name} : {type}".format(name=name, type=spec.full_info(inputs, name, None)) - ] + out_rst = [f"{name} : {spec.full_info(inputs, name, None)}"] out_rst += _indent(desc_lines, 4) return out_rst diff --git a/nipype/sphinxext/documenter.py b/nipype/sphinxext/documenter.py index 5300d81165..500c486927 100644 --- a/nipype/sphinxext/documenter.py +++ b/nipype/sphinxext/documenter.py @@ -46,9 +46,7 @@ def add_directive_header(self, sig: str) -> None: for line in lines.splitlines(): self.add_line(line, sourcename) else: - self.add_line( - ".. %s:%s:: %s%s" % (domain, directive, name, sig), sourcename - ) + self.add_line(f".. 
{domain}:{directive}:: {name}{sig}", sourcename) if self.options.noindex: self.add_line(" :noindex:", sourcename) if self.objpath: @@ -73,9 +71,9 @@ def add_directive_header(self, sig: str) -> None: if b.__module__ in ("__builtin__", "builtins"): bases_links.append(":class:`%s`" % b.__name__) elif based_interface: - bases_links.append(":ref:`%s.%s`" % (b.__module__, b.__name__)) + bases_links.append(f":ref:`{b.__module__}.{b.__name__}`") else: - bases_links.append(":class:`%s.%s`" % (b.__module__, b.__name__)) + bases_links.append(f":class:`{b.__module__}.{b.__name__}`") self.add_line(" " + _("Bases: %s") % ", ".join(bases_links), sourcename) diff --git a/nipype/sphinxext/gh.py b/nipype/sphinxext/gh.py index 3d1f4a9f43..9339ddb48b 100644 --- a/nipype/sphinxext/gh.py +++ b/nipype/sphinxext/gh.py @@ -23,7 +23,7 @@ def get_url(obj): revision = _get_git_revision() if revision is not None: shortfile = os.path.join("nipype", filename.split("nipype/")[-1]) - uri = "http://github.com/nipy/nipype/blob/%s/%s" % (revision, shortfile) + uri = f"http://github.com/nipy/nipype/blob/{revision}/{shortfile}" lines, lstart = inspect.getsourcelines(obj) lend = len(lines) + lstart return "%s#L%d-L%d" % (uri, lstart, lend) diff --git a/nipype/sphinxext/plot_workflow.py b/nipype/sphinxext/plot_workflow.py index a61a29c5ad..69f2ba3aaf 100644 --- a/nipype/sphinxext/plot_workflow.py +++ b/nipype/sphinxext/plot_workflow.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -273,7 +272,7 @@ def run(self): else: function_name = None - with open(source_file_name, "r", encoding="utf-8") as fd: + with open(source_file_name, encoding="utf-8") as fd: code = fd.read() output_base = os.path.basename(source_file_name) else: @@ -388,7 +387,7 @@ def run(self): images = [] opts = [ - ":%s: %s" % (key, val) + f":{key}: {val}" for key, val in list(self.options.items()) if key in ("alt", "height", "width", "scale", "align", "class") ] @@ -585,14 +584,14 @@ def remove_coding(text): wf_context = dict() -class ImageFile(object): +class ImageFile: def __init__(self, basename, dirname): self.basename = basename self.dirname = dirname self.formats = [] def filename(self, fmt): - return os.path.join(self.dirname, "%s.%s" % (self.basename, fmt)) + return os.path.join(self.dirname, f"{self.basename}.{fmt}") def filenames(self): return [self.filename(fmt) for fmt in self.formats] diff --git a/nipype/testing/__init__.py b/nipype/testing/__init__.py index 4a0ab306f6..e3fbd80e6a 100644 --- a/nipype/testing/__init__.py +++ b/nipype/testing/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """The testing directory contains a small set of imaging files to be @@ -27,6 +26,6 @@ def example_data(infile="functional.nii"): basedir = os.path.dirname(filepath) outfile = os.path.join(basedir, "data", infile) if not os.path.exists(outfile): - raise IOError("%s empty data file does NOT exist" % outfile) + raise OSError("%s empty data file does NOT exist" % outfile) return outfile diff --git a/nipype/testing/fixtures.py b/nipype/testing/fixtures.py index 6f5b12495c..b28741b9d8 100644 --- a/nipype/testing/fixtures.py +++ b/nipype/testing/fixtures.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ diff 
--git a/nipype/testing/tests/test_utils.py b/nipype/testing/tests/test_utils.py index b2c8a296d2..0055038392 100644 --- a/nipype/testing/tests/test_utils.py +++ b/nipype/testing/tests/test_utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Test testing utilities @@ -15,7 +14,7 @@ def test_tempfatfs(): try: fatfs = TempFATFS() - except (IOError, OSError): + except OSError: raise SkipTest("Cannot mount FAT filesystems with FUSE") with fatfs as tmp_dir: assert os.path.exists(tmp_dir) @@ -28,7 +27,7 @@ def test_tempfatfs(): def test_tempfatfs_calledprocesserror(): try: TempFATFS() - except IOError as e: + except OSError as e: assert isinstance(e, IOError) assert isinstance(e.__cause__, subprocess.CalledProcessError) else: @@ -40,7 +39,7 @@ def test_tempfatfs_calledprocesserror(): def test_tempfatfs_oserror(): try: TempFATFS() - except IOError as e: + except OSError as e: assert isinstance(e, IOError) assert isinstance(e.__cause__, OSError) else: diff --git a/nipype/testing/utils.py b/nipype/testing/utils.py index e666a7586f..71a75a41c7 100644 --- a/nipype/testing/utils.py +++ b/nipype/testing/utils.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Additional handy utilities for testing @@ -18,7 +17,7 @@ import nibabel as nb -class TempFATFS(object): +class TempFATFS: def __init__(self, size_in_mbytes=8, delay=0.5): """Temporary filesystem for testing non-POSIX filesystems on a POSIX system. @@ -55,19 +54,19 @@ def __init__(self, size_in_mbytes=8, delay=0.5): args=mkfs_args, stdout=self.dev_null, stderr=self.dev_null ) except CalledProcessError as e: - raise IOError("mkfs.vfat failed") from e + raise OSError("mkfs.vfat failed") from e try: self.fusefat = subprocess.Popen( args=mount_args, stdout=self.dev_null, stderr=self.dev_null ) except OSError as e: - raise IOError("fusefat is not installed") from e + raise OSError("fusefat is not installed") from e time.sleep(self.delay) if self.fusefat.poll() is not None: - raise IOError("fusefat terminated too soon") + raise OSError("fusefat terminated too soon") open(self.canary, "wb").close() diff --git a/nipype/utils/__init__.py b/nipype/utils/__init__.py index a8ee27f54d..56d7dfb2c7 100644 --- a/nipype/utils/__init__.py +++ b/nipype/utils/__init__.py @@ -1,4 +1,2 @@ -# -*- coding: utf-8 -*- - from .onetime import OneTimeProperty, setattr_on_read from .tmpdirs import TemporaryDirectory, InTemporaryDirectory diff --git a/nipype/utils/config.py b/nipype/utils/config.py index 98e1be31a9..bf29f81de8 100644 --- a/nipype/utils/config.py +++ b/nipype/utils/config.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -83,7 +82,7 @@ def mkdir_p(path): raise -class NipypeConfig(object): +class NipypeConfig: """Base nipype config class""" def __init__(self, *args, **kwargs): @@ -207,7 +206,7 @@ def get_data(self, key): if not os.path.exists(self.data_file): return None with SoftFileLock("%s.lock" % self.data_file): - with open(self.data_file, "rt") as file: + with open(self.data_file) as file: datadict = load(file) if key in datadict: return datadict[key] @@ -218,14 +217,14 @@ def save_data(self, key, value): datadict = {} if os.path.exists(self.data_file): with SoftFileLock("%s.lock" % self.data_file): - with open(self.data_file, 
"rt") as file: + with open(self.data_file) as file: datadict = load(file) else: dirname = os.path.dirname(self.data_file) if not os.path.exists(dirname): mkdir_p(dirname) with SoftFileLock("%s.lock" % self.data_file): - with open(self.data_file, "wt") as file: + with open(self.data_file, "w") as file: datadict[key] = value dump(datadict, file) diff --git a/nipype/utils/docparse.py b/nipype/utils/docparse.py index 45fbca5df7..992d243956 100644 --- a/nipype/utils/docparse.py +++ b/nipype/utils/docparse.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Utilities to pull in documentation from command-line tools. @@ -40,8 +39,8 @@ def grab_doc(cmd, trap_error=True): stdout, stderr = proc.communicate() if trap_error and proc.returncode: - msg = "Attempting to run %s. Returned Error: %s" % (cmd, stderr) - raise IOError(msg) + msg = f"Attempting to run {cmd}. Returned Error: {stderr}" + raise OSError(msg) if stderr: # A few programs, like fast and fnirt, send their help to diff --git a/nipype/utils/draw_gantt_chart.py b/nipype/utils/draw_gantt_chart.py index a13d596bf7..debce75970 100644 --- a/nipype/utils/draw_gantt_chart.py +++ b/nipype/utils/draw_gantt_chart.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -98,7 +97,7 @@ def log_to_dict(logfile): """ # Init variables - with open(logfile, "r") as content: + with open(logfile) as content: # read file separating each line lines = content.readlines() @@ -527,7 +526,7 @@ def generate_gantt_chart( html_string += ( "
<p>Finish: " + last_node["finish"].strftime("%Y-%m-%d %H:%M:%S") + "</p>" ) - html_string += "<p>Duration: " + "{0:.2f}".format(duration / 60) + " minutes</p>" + html_string += "<p>Duration: " + f"{duration / 60:.2f}" + " minutes</p>" html_string += "<p>Nodes: " + str(len(nodes_list)) + "</p>" html_string += "<p>Cores: " + str(cores) + "</p>
" html_string += close_header diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py index a8947a3d0d..682813df86 100644 --- a/nipype/utils/filemanip.py +++ b/nipype/utils/filemanip.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Miscellaneous file manipulation functions @@ -583,7 +582,7 @@ def load_json(filename): """ - with open(filename, "r") as fp: + with open(filename) as fp: data = json.load(fp) return data @@ -608,15 +607,15 @@ def loadpkl(infile): if infile.exists(): timed_out = False break - fmlogger.debug("'{}' missing; waiting 2s".format(infile)) + fmlogger.debug(f"'{infile}' missing; waiting 2s") sleep(2) if timed_out: error_message = ( - "Result file {0} expected, but " - "does not exist after ({1}) " + "Result file {} expected, but " + "does not exist after ({}) " "seconds.".format(infile, timeout) ) - raise IOError(error_message) + raise OSError(error_message) with pklopen(str(infile), "rb") as pkl_file: pkl_contents = pkl_file.read() @@ -676,10 +675,10 @@ def crash2txt(filename, record): with open(filename, "w") as fp: if "node" in record: node = record["node"] - fp.write("Node: {}\n".format(node.fullname)) - fp.write("Working directory: {}\n".format(node.output_dir())) + fp.write(f"Node: {node.fullname}\n") + fp.write(f"Working directory: {node.output_dir()}\n") fp.write("\n") - fp.write("Node inputs:\n{}\n".format(node.inputs)) + fp.write(f"Node inputs:\n{node.inputs}\n") fp.write("".join(record["traceback"])) @@ -710,7 +709,7 @@ def savepkl(filename, record, versioning=False): if versioning: metadata = json.dumps({"version": version}) f.write(metadata.encode("utf-8")) - f.write("\n".encode("utf-8")) + f.write(b"\n") pickle.dump(record, f) content = f.getvalue() @@ -739,14 +738,14 @@ def write_rst_header(header, level=0): def write_rst_list(items, prefix=""): out = [] for item in ensure_list(items): - out.append("{} {}".format(prefix, str(item))) + out.append(f"{prefix} {str(item)}") return "\n".join(out) + "\n\n" def write_rst_dict(info, prefix=""): out = [] for key, value in sorted(info.items()): - out.append("{}* {} : {}".format(prefix, key, str(value))) + out.append(f"{prefix}* {key} : {str(value)}") return "\n".join(out) + "\n\n" @@ -934,7 +933,7 @@ def relpath(path, start=None): ) else: raise ValueError( - "path is on drive %s, start on drive %s" % (path_list[0], start_list[0]) + f"path is on drive {path_list[0]}, start on drive {start_list[0]}" ) # Work out how much of the filepath is shared by start and path. for i in range(min(len(start_list), len(path_list))): diff --git a/nipype/utils/functions.py b/nipype/utils/functions.py index c53a221d48..1ef35b22b1 100644 --- a/nipype/utils/functions.py +++ b/nipype/utils/functions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ Handles custom functions used in Function interface. Future imports are avoided to keep namespace as clear as possible. @@ -34,7 +33,7 @@ def create_function_from_source(function_source, imports=None): exec(function_source, ns) except Exception as e: - msg = "Error executing function\n{}\n".format(function_source) + msg = f"Error executing function\n{function_source}\n" msg += ( "Functions in connection strings have to be standalone. 
" "They cannot be declared either interactively or inside " diff --git a/nipype/utils/logger.py b/nipype/utils/logger.py index bfa23628a4..6ce7deafad 100644 --- a/nipype/utils/logger.py +++ b/nipype/utils/logger.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: @@ -16,7 +15,7 @@ from logging.handlers import RotatingFileHandler as RFHandler -class Logging(object): +class Logging: """Nipype logging class""" fmt = "%(asctime)s,%(msecs)d %(name)-2s " "%(levelname)-2s:\n\t %(message)s" diff --git a/nipype/utils/matlabtools.py b/nipype/utils/matlabtools.py index 4919c39c2b..fdde853b6e 100644 --- a/nipype/utils/matlabtools.py +++ b/nipype/utils/matlabtools.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ Useful Functions for working with matlab""" diff --git a/nipype/utils/misc.py b/nipype/utils/misc.py index 11aa9ea859..baafbf29d2 100644 --- a/nipype/utils/misc.py +++ b/nipype/utils/misc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Miscellaneous utility functions @@ -159,11 +158,11 @@ def package_check( """ if app: - msg = "%s requires %s" % (app, pkg_name) + msg = f"{app} requires {pkg_name}" else: msg = "Nipype requires %s" % pkg_name if version: - msg += " with version >= %s" % (version,) + msg += f" with version >= {version}" try: mod = __import__(pkg_name) except ImportError as e: @@ -340,7 +339,7 @@ def _uniformize(val): old = _uniformize(dold[k]) if new != old: - diff += [" * %s: %r != %r" % (k, _shorten(new), _shorten(old))] + diff += [f" * {k}: {_shorten(new)!r} != {_shorten(old)!r}"] if len(diff) > diffkeys: diff.insert(diffkeys, "Some dictionary entries had differing values:") diff --git a/nipype/utils/nipype2boutiques.py b/nipype/utils/nipype2boutiques.py index 0a12e59f28..da8f716f41 100644 --- a/nipype/utils/nipype2boutiques.py +++ b/nipype/utils/nipype2boutiques.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # This tool exports a Nipype interface in the Boutiques # (https://github.com/boutiques) JSON format. Boutiques tools # can be imported in CBRAIN (https://github.com/aces/cbrain) diff --git a/nipype/utils/nipype_cmd.py b/nipype/utils/nipype_cmd.py index 76717bf168..2adbd179de 100644 --- a/nipype/utils/nipype_cmd.py +++ b/nipype/utils/nipype_cmd.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import os import argparse import inspect @@ -53,7 +52,7 @@ def run_instance(interface, options): try: setattr(interface.inputs, input_name, value) except ValueError as e: - print("Error when setting the value of %s: '%s'" % (input_name, str(e))) + print(f"Error when setting the value of {input_name}: '{str(e)}'") print(interface.inputs) res = interface.run() diff --git a/nipype/utils/onetime.py b/nipype/utils/onetime.py index bb721dc7e8..188c8f9147 100644 --- a/nipype/utils/onetime.py +++ b/nipype/utils/onetime.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Descriptor support for NIPY. 
@@ -19,7 +18,7 @@ """ -class OneTimeProperty(object): +class OneTimeProperty: """A descriptor to make special properties that become normal attributes.""" def __init__(self, func): diff --git a/nipype/utils/profiler.py b/nipype/utils/profiler.py index d83b745df5..2ca93bf720 100644 --- a/nipype/utils/profiler.py +++ b/nipype/utils/profiler.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """ @@ -132,7 +131,7 @@ def _sample(self, cpu_interval=None): except psutil.NoSuchProcess: pass - print("%f,%f,%f,%f" % (time(), cpu, rss / _MB, vms / _MB), file=self._logfile) + print(f"{time():f},{cpu:f},{rss / _MB:f},{vms / _MB:f}", file=self._logfile) self._logfile.flush() def run(self): @@ -203,7 +202,7 @@ def get_system_total_memory_gb(): # Get memory if "linux" in sys.platform: - with open("/proc/meminfo", "r") as f_in: + with open("/proc/meminfo") as f_in: meminfo_lines = f_in.readlines() mem_total_line = [line for line in meminfo_lines if "MemTotal" in line][0] mem_total = float(mem_total_line.split()[1]) diff --git a/nipype/utils/provenance.py b/nipype/utils/provenance.py index 5493f7c330..4ba3582616 100644 --- a/nipype/utils/provenance.py +++ b/nipype/utils/provenance.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- from collections import OrderedDict from copy import deepcopy @@ -121,7 +120,7 @@ def _get_sorteddict(object, dictwithhash=False): else: out = hash elif isinstance(object, float): - out = "%.10f".format(object) + out = f"%.10f" else: out = object return out @@ -144,7 +143,7 @@ def safe_encode(x, as_literal=True): if os.path.exists(x): if x[0] != os.pathsep: x = os.path.abspath(x) - value = "file://{}{}".format(platform.node().lower(), x) + value = f"file://{platform.node().lower()}{x}" if not as_literal: return value try: @@ -181,7 +180,7 @@ def safe_encode(x, as_literal=True): try: jsonstr = json.dumps(outdict) except UnicodeDecodeError as excp: - jsonstr = "Could not encode dictionary. {}".format(excp) + jsonstr = f"Could not encode dictionary. {excp}" logger.warning("Prov: %s", jsonstr) if not as_literal: @@ -211,7 +210,7 @@ def safe_encode(x, as_literal=True): try: jsonstr = json.dumps(x) except UnicodeDecodeError as excp: - jsonstr = "Could not encode list/tuple. {}".format(excp) + jsonstr = f"Could not encode list/tuple. {excp}" logger.warning("Prov: %s", jsonstr) if not as_literal: @@ -237,7 +236,7 @@ def safe_encode(x, as_literal=True): jsonstr = dumps(x) ltype = nipype_ns["pickle"] except TypeError as excp: - jsonstr = "Could not encode object. {}".format(excp) + jsonstr = f"Could not encode object. 
{excp}" if not as_literal: return jsonstr @@ -310,7 +309,7 @@ def write_provenance(results, filename="provenance", format="all"): return prov -class ProvStore(object): +class ProvStore: def __init__(self): self.g = pm.ProvDocument() self.g.add_namespace(foaf) @@ -327,7 +326,7 @@ def add_results(self, results, keep_provenance=False): inputs = results.inputs outputs = results.outputs classname = interface.__name__ - modulepath = "{0}.{1}".format(interface.__module__, interface.__name__) + modulepath = f"{interface.__module__}.{interface.__name__}" activitytype = "".join([i.capitalize() for i in modulepath.split(".")]) a0_attrs = { @@ -443,7 +442,7 @@ def add_results(self, results, keep_provenance=False): def write_provenance(self, filename="provenance", format="all"): if format in ["provn", "all"]: - with open(filename + ".provn", "wt") as fp: + with open(filename + ".provn", "w") as fp: fp.writelines(self.g.get_provn()) try: if format in ["rdf", "all"]: diff --git a/nipype/utils/spm_docs.py b/nipype/utils/spm_docs.py index 758d1fbb39..3702378d1f 100644 --- a/nipype/utils/spm_docs.py +++ b/nipype/utils/spm_docs.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Grab documentation from spm.""" @@ -28,7 +27,7 @@ def grab_doc(task_name): # We need to tell Matlab where to find our spm_get_doc.m file. cwd = os.path.dirname(__file__) # Build matlab command - mcmd = "addpath('%s');spm_get_doc('%s')" % (cwd, task_name) + mcmd = f"addpath('{cwd}');spm_get_doc('{task_name}')" cmd.inputs.script_lines = mcmd # Run the command and get the documentation out of the result. out = cmd.run() @@ -48,7 +47,7 @@ def _strip_header(doc): try: index = doc.index(hdr) except ValueError as e: - raise IOError("This docstring was not generated by Nipype!\n") from e + raise OSError("This docstring was not generated by Nipype!\n") from e index += len(hdr) index += 1 diff --git a/nipype/utils/subprocess.py b/nipype/utils/subprocess.py index ec00c8b198..16fb438f1f 100644 --- a/nipype/utils/subprocess.py +++ b/nipype/utils/subprocess.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: """Miscellaneous utility functions @@ -19,7 +18,7 @@ iflogger = logging.getLogger("nipype.interface") -class Stream(object): +class Stream: """Function to capture stdout and stderr streams with timestamps stackoverflow.com/questions/4984549/merge-and-sync-stdout-and-stderr/5188359 @@ -64,7 +63,7 @@ def _read(self, drain): self._buf = rest now = datetime.datetime.now().isoformat() rows = tmp.split("\n") - self._rows += [(now, "%s %s:%s" % (self._name, now, r), r) for r in rows] + self._rows += [(now, f"{self._name} {now}:{r}", r) for r in rows] for idx in range(self._lastidx, len(self._rows)): iflogger.info(self._rows[idx][1]) self._lastidx = len(self._rows) @@ -126,7 +125,7 @@ def run_command(runtime, output=None, timeout=0.01, write_cmdline=False): def _process(drain=0): try: res = select.select(streams, [], [], timeout) - except select.error as e: + except OSError as e: iflogger.info(e) if e.errno == errno.EINTR: return diff --git a/nipype/utils/tests/__init__.py b/nipype/utils/tests/__init__.py index 939910d6b6..00d7c65d5a 100644 --- a/nipype/utils/tests/__init__.py +++ b/nipype/utils/tests/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set 
ft=python sts=4 ts=4 sw=4 et: """ diff --git a/nipype/utils/tests/test_config.py b/nipype/utils/tests/test_config.py index 5d9b5d57df..f11908c3dd 100644 --- a/nipype/utils/tests/test_config.py +++ b/nipype/utils/tests/test_config.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/utils/tests/test_docparse.py b/nipype/utils/tests/test_docparse.py index 48812721b7..b6c8bbaaf3 100644 --- a/nipype/utils/tests/test_docparse.py +++ b/nipype/utils/tests/test_docparse.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/nipype/utils/tests/test_filemanip.py b/nipype/utils/tests/test_filemanip.py index 91f0e1f229..146a245841 100644 --- a/nipype/utils/tests/test_filemanip.py +++ b/nipype/utils/tests/test_filemanip.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -238,7 +237,7 @@ def test_copyfallback(_temp_analyze_files): pth, hdrname = os.path.split(orig_hdr) try: fatfs = TempFATFS() - except (IOError, OSError): + except OSError: raise SkipTest("Fuse mount failed. copyfile fallback tests skipped.") with fatfs as fatdir: diff --git a/nipype/utils/tests/test_functions.py b/nipype/utils/tests/test_functions.py index 65d5867915..a55f3f3416 100644 --- a/nipype/utils/tests/test_functions.py +++ b/nipype/utils/tests/test_functions.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import pytest from nipype.utils.functions import getsource, create_function_from_source diff --git a/nipype/utils/tests/test_misc.py b/nipype/utils/tests/test_misc.py index 13ae3740d6..6e71e7c0ca 100644 --- a/nipype/utils/tests/test_misc.py +++ b/nipype/utils/tests/test_misc.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -127,7 +126,7 @@ def test_dict_diff(): diff = dict_diff({"a": complicated_val1}, {"a": complicated_val2}) assert "Some dictionary entries had differing values:" in diff - assert "a: {!r} != {!r}".format(uniformized_val2, uniformized_val1) in diff + assert f"a: {uniformized_val2!r} != {uniformized_val1!r}" in diff # Trigger shortening diff = dict_diff({"a": "b" * 60}, {"a": "c" * 70}) diff --git a/nipype/utils/tests/test_nipype2boutiques.py b/nipype/utils/tests/test_nipype2boutiques.py index 758f621202..2ea0abd687 100644 --- a/nipype/utils/tests/test_nipype2boutiques.py +++ b/nipype/utils/tests/test_nipype2boutiques.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from ..nipype2boutiques import generate_boutiques_descriptor @@ -20,7 +19,7 @@ def test_generate(): author=("Oxford Centre for Functional" " MRI of the Brain (FMRIB)"), ) - with open(example_data("nipype2boutiques_example.json"), "r") as desc_file: + with open(example_data("nipype2boutiques_example.json")) as desc_file: # Make sure that output descriptor matches the expected descriptor. 
output_desc = json.loads(desc) expected_desc = json.load(desc_file) diff --git a/nipype/utils/tests/test_provenance.py b/nipype/utils/tests/test_provenance.py index 8137c083f7..ffdabd6f83 100644 --- a/nipype/utils/tests/test_provenance.py +++ b/nipype/utils/tests/test_provenance.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os diff --git a/nipype/utils/tmpdirs.py b/nipype/utils/tmpdirs.py index 70709ae209..a399650c07 100644 --- a/nipype/utils/tmpdirs.py +++ b/nipype/utils/tmpdirs.py @@ -1,6 +1,3 @@ -# -*- coding: utf-8 -*- - - # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: import os @@ -8,7 +5,7 @@ from tempfile import template, mkdtemp -class TemporaryDirectory(object): +class TemporaryDirectory: """Create and return a temporary directory. This has the same behavior as mkdtemp but can be used as a context manager. For example: @@ -41,8 +38,8 @@ class InTemporaryDirectory(TemporaryDirectory): def __enter__(self): self._pwd = os.getcwd() os.chdir(self.name) - return super(InTemporaryDirectory, self).__enter__() + return super().__enter__() def __exit__(self, exc, value, tb): os.chdir(self._pwd) - return super(InTemporaryDirectory, self).__exit__(exc, value, tb) + return super().__exit__(exc, value, tb) diff --git a/nipype/workflows/__init__.py b/nipype/workflows/__init__.py index 760ee9229a..5a3f04b56e 100644 --- a/nipype/workflows/__init__.py +++ b/nipype/workflows/__init__.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: diff --git a/setup.py b/setup.py index 046124fde8..f5d7787577 100755 --- a/setup.py +++ b/setup.py @@ -74,7 +74,7 @@ def run(self): cfg_parser.read(pjoin("nipype", "COMMIT_INFO.txt")) cfg_parser.set("commit hash", "install_hash", repo_commit.strip()) out_pth = pjoin(self.build_lib, "nipype", "COMMIT_INFO.txt") - cfg_parser.write(open(out_pth, "wt")) + cfg_parser.write(open(out_pth, "w")) def main(): diff --git a/tools/checkspecs.py b/tools/checkspecs.py index 7aaac0d107..5cceb49252 100644 --- a/tools/checkspecs.py +++ b/tools/checkspecs.py @@ -12,7 +12,7 @@ # Functions and classes -class InterfaceChecker(object): +class InterfaceChecker: """Class for checking all interface specifications""" def __init__( @@ -124,7 +124,7 @@ def _parse_module(self, uri): if filename is None: # nothing that we could handle here. 
return ([], []) - f = open(filename, "rt") + f = open(filename) functions, classes = self._parse_lines(f, uri) f.close() return functions, classes @@ -158,7 +158,7 @@ def _normalize_repr(cls, value): return "[{}]".format(", ".join(map(cls._normalize_repr, value))) if isinstance(value, tuple): if len(value) == 1: - return "({},)".format(cls._normalize_repr(value[0])) + return f"({cls._normalize_repr(value[0])},)" return "({})".format(", ".join(map(cls._normalize_repr, value))) if isinstance(value, (str, bytes)): value = repr(value) @@ -247,7 +247,7 @@ def test_specs(self, uri): if not os.path.exists(nonautotest): cmd = [ "# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT", - "from ..%s import %s" % (uri.split(".")[-1], c), + "from ..{} import {}".format(uri.split(".")[-1], c), "", ] cmd.append("\ndef test_%s_inputs():" % c) @@ -259,7 +259,7 @@ def test_specs(self, uri): for key, value in sorted(trait.__dict__.items()): if key in in_built or key == "desc": continue - input_fields += "%s=%s,\n " % ( + input_fields += "{}={},\n ".format( key, self._normalize_repr(value), ) @@ -274,7 +274,7 @@ def test_specs(self, uri): ] fmt_cmd = black.format_str("\n".join(cmd), mode=black.FileMode()) - with open(testfile, "wt") as fp: + with open(testfile, "w") as fp: fp.writelines(fmt_cmd) else: print("%s has nonautotest" % c) @@ -350,7 +350,7 @@ def test_specs(self, uri): for key, value in sorted(trait.__dict__.items()): if key in in_built or key == "desc": continue - input_fields += "%s=%s,\n " % ( + input_fields += "{}={},\n ".format( key, self._normalize_repr(value), ) @@ -364,7 +364,7 @@ def test_specs(self, uri): assert getattr(outputs.traits()[key], metakey) == value""" ] fmt_cmd = black.format_str("\n".join(cmd), mode=black.FileMode()) - with open(testfile, "at") as fp: + with open(testfile, "a") as fp: fp.writelines("\n\n" + fmt_cmd) for traitname, trait in sorted( @@ -484,26 +484,26 @@ def check_modules(self): ic = InterfaceChecker(package) # Packages that should not be included in generated API docs. ic.package_skip_patterns += [ - "\.external$", - "\.fixes$", - "\.utils$", - "\.pipeline", - "\.testing", - "\.caching", - "\.workflows", + r"\.external$", + r"\.fixes$", + r"\.utils$", + r"\.pipeline", + r"\.testing", + r"\.caching", + r"\.workflows", ] """ # Modules that should not be included in generated API docs. 
- ic.module_skip_patterns += ['\.version$', - '\.interfaces\.base$', - '\.interfaces\.matlab$', - '\.interfaces\.rest$', - '\.interfaces\.pymvpa$', - '\.interfaces\.slicer\.generate_classes$', - '\.interfaces\.spm\.base$', - '\.interfaces\.traits', - '\.pipeline\.alloy$', - '\.pipeline\.s3_node_wrapper$', + ic.module_skip_patterns += ['\\.version$', + '\\.interfaces\\.base$', + '\\.interfaces\\.matlab$', + '\\.interfaces\\.rest$', + '\\.interfaces\\.pymvpa$', + '\\.interfaces\\.slicer\\.generate_classes$', + '\\.interfaces\\.spm\\.base$', + '\\.interfaces\\.traits', + '\\.pipeline\\.alloy$', + '\\.pipeline\\.s3_node_wrapper$', '.\testing', ] ic.class_skip_patterns += ['AFNI', diff --git a/tools/gitwash_dumper.py b/tools/gitwash_dumper.py index 2c08547bac..f906d48f5f 100755 --- a/tools/gitwash_dumper.py +++ b/tools/gitwash_dumper.py @@ -18,7 +18,7 @@ def clone_repo(url, branch): cwd = os.getcwd() tmpdir = tempfile.mkdtemp() try: - cmd = "git clone %s %s" % (url, tmpdir) + cmd = f"git clone {url} {tmpdir}" call(cmd, shell=True) os.chdir(tmpdir) cmd = "git checkout %s" % branch @@ -51,16 +51,16 @@ def cp_files(in_path, globs, out_path): def filename_search_replace(sr_pairs, filename, backup=False): """Search and replace for expressions in files""" - in_txt = open(filename, "rt").read(-1) + in_txt = open(filename).read(-1) out_txt = in_txt[:] for in_exp, out_exp in sr_pairs: in_exp = re.compile(in_exp) out_txt = in_exp.sub(out_exp, out_txt) if in_txt == out_txt: return False - open(filename, "wt").write(out_txt) + open(filename, "w").write(out_txt) if backup: - open(filename + ".bak", "wt").write(in_txt) + open(filename + ".bak", "w").write(in_txt) return True @@ -110,7 +110,7 @@ def make_link_targets( .. _`proj_name`: url .. _`proj_name` mailing list: url """ - link_contents = open(known_link_fname, "rt").readlines() + link_contents = open(known_link_fname).readlines() have_url = url is not None have_ml_url = ml_url is not None have_gh_url = None @@ -133,18 +133,18 @@ def make_link_targets( ) lines = [] if url is not None: - lines.append(".. _%s: %s\n" % (proj_name, url)) + lines.append(f".. _{proj_name}: {url}\n") if not have_gh_url: - gh_url = "http://github.com/%s/%s\n" % (user_name, repo_name) - lines.append(".. _`%s github`: %s\n" % (proj_name, gh_url)) + gh_url = f"http://github.com/{user_name}/{repo_name}\n" + lines.append(f".. _`{proj_name} github`: {gh_url}\n") if ml_url is not None: - lines.append(".. _`%s mailing list`: %s\n" % (proj_name, ml_url)) + lines.append(f".. _`{proj_name} mailing list`: {ml_url}\n") if len(lines) == 0: # Nothing to do return # A neat little header line lines = [".. %s\n" % proj_name] + lines - out_links = open(out_link_fname, "wt") + out_links = open(out_link_fname, "w") out_links.writelines(lines) out_links.close() @@ -232,7 +232,7 @@ def main(): out_path, cp_globs=(pjoin("gitwash", "*"),), rep_globs=("*.rst",), - renames=(("\.rst$", options.source_suffix),), + renames=((r"\.rst$", options.source_suffix),), ) make_link_targets( project_name, diff --git a/tools/run_examples.py b/tools/run_examples.py index bd77f1a0a4..b52ba9613e 100644 --- a/tools/run_examples.py +++ b/tools/run_examples.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import sys from textwrap import dedent
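The hunks above repeat the same handful of mechanical Python 3 modernizations: `IOError` is narrowed to `OSError` (the two are the same class in Python 3), `%`-interpolation and `str.format` calls become f-strings, explicit `"rt"`/`"wt"` modes are dropped because `open()` defaults to text mode, and `class Foo(object)` / `super(Foo, self)` lose their redundant arguments since every Python 3 class is new-style. A minimal standalone sketch (not part of the patch, illustrative names only) showing that each rewrite is behavior-preserving:

```python
import tempfile

# IOError has been an alias of OSError since Python 3.3, so replacing
# "except (IOError, OSError)" with "except OSError" catches the same errors.
assert IOError is OSError

name, version = "nipype", "1.8"
# %-interpolation, str.format, and f-strings render identically here.
assert "%s >= %s" % (name, version) == f"{name} >= {version}"
assert "{}{:02d}".format("aCompCor", 3) == f"{'aCompCor'}{3:02d}" == "aCompCor03"

# open() defaults to text mode, so "rt"/"wt" are equivalent to "r"/"w".
with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as tmp:
    tmp.write("hello")
with open(tmp.name) as fp:  # same as open(tmp.name, "rt")
    assert fp.read() == "hello"
```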