From fcbba4f5d9a7259eb7999f3ade5b26230cf67c43 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Fri, 6 Sep 2024 12:16:42 +0200 Subject: [PATCH 1/5] bump action version and python to 3.10, drop build py3.8 and add 3.12 --- .github/workflows/build.yml | 7 +++---- .github/workflows/code-style.yml | 8 ++++---- .github/workflows/doc.yml | 8 ++++---- .github/workflows/publish.yml | 8 ++++---- 4 files changed, 15 insertions(+), 16 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 2d25c7e..c8a15fe 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -15,7 +15,7 @@ jobs: fail-fast: false matrix: os: [ubuntu, macos, windows] - python-version: [3.8, 3.9, "3.10", "3.11"] + python-version: ["3.9", "3.10", "3.11", "3.12"] name: ${{ matrix.os }} - py${{ matrix.python-version }} runs-on: ${{ matrix.os }}-latest defaults: @@ -23,12 +23,11 @@ jobs: shell: bash steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - #architecture: 'x64' - name: Install dependencies run: | python -m pip install --progress-bar off --upgrade pip setuptools wheel diff --git a/.github/workflows/code-style.yml b/.github/workflows/code-style.yml index 2bf1b11..662d0b6 100644 --- a/.github/workflows/code-style.yml +++ b/.github/workflows/code-style.yml @@ -14,11 +14,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v3 - - name: Setup Python 3.9 - uses: actions/setup-python@v4 + uses: actions/checkout@v4 + - name: Setup Python 3.10 + uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install dependencies run: | diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 2547d0d..fefda7e 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -20,10 +20,10 @@ jobs: uses: actions/checkout@v4 with: path: ./main - - name: Setup Python 3.9 - uses: actions/setup-python@v4 + - name: Setup Python 3.10 + uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: '3.10' architecture: 'x64' - name: Install package run: | @@ -56,7 +56,7 @@ jobs: name: doc-dev path: ./doc-dev - name: Deploy dev documentation - uses: peaceiris/actions-gh-pages@v3 + uses: peaceiris/actions-gh-pages@v4 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./doc-dev diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 1f81b4a..a301568 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -10,11 +10,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v3 - - name: Setup Python 3.9 - uses: actions/setup-python@v4 + uses: actions/checkout@v4 + - name: Setup Python 3.10 + uses: actions/setup-python@v5 with: - python-version: '3.9' + python-version: '3.10' architecture: 'x64' - name: Install dependencies run: | From 917d982b4769b39360ea4d6b54aeaccad9907586 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Fri, 6 Sep 2024 12:24:15 +0200 Subject: [PATCH 2/5] add more rules to ruff and drop isort and black --- .github/workflows/code-style.yml | 7 ----- pyproject.toml | 45 ++++++++++++-------------------- 2 files changed, 16 insertions(+), 36 deletions(-) diff --git a/.github/workflows/code-style.yml b/.github/workflows/code-style.yml index 662d0b6..5b2e35c 
100644 --- a/.github/workflows/code-style.yml +++ b/.github/workflows/code-style.yml @@ -26,13 +26,6 @@ jobs: python -m pip install --progress-bar off .[style] - name: Run Ruff run: ruff check . - - name: Run isort - uses: isort/isort-action@master - - name: Run black - uses: psf/black@stable - with: - options: "--check --verbose" - version: "23.10.1" - name: Run codespell uses: codespell-project/actions-codespell@master with: diff --git a/pyproject.toml b/pyproject.toml index 493e34b..f02b54b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ build-backend = 'setuptools.build_meta' name = 'fsqc' description = 'Quality control scripts for FastSurfer and FreeSurfer structural MRI data' license = {file = 'LICENSE'} -requires-python = '>=3.8' +requires-python = '>=3.9' authors = [ {name = 'Kersten Diers', email = 'kersten.diers@dzne.de'}, {name = 'Martin Reuter', email = 'martin.reuter@dzne.de'} @@ -25,10 +25,10 @@ classifiers = [ 'Operating System :: Unix', 'Operating System :: MacOS', 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', 'Natural Language :: English', 'License :: OSI Approved :: MIT License', 'Intended Audience :: Science/Research', @@ -58,9 +58,7 @@ doc = [ ] style = [ 'bibclean', - 'black', 'codespell', - 'isort', 'pydocstyle[toml]', 'ruff', ] @@ -100,29 +98,6 @@ dependencies = {file = 'requirements.txt'} include = ['fsqc', 'fsqc.cli', 'fsqc.commands', 'fsqc.utils'] exclude = ['docker', 'singularity'] -[tool.black] -line-length = 88 -target-version = ['py38'] -include = '\.pyi?$' -extend-exclude = ''' -( - __pycache__ - | \.github - | setup.py -) -''' - -[tool.isort] -profile = 'black' -multi_line_output = 3 -line_length = 88 -py_version = 38 -extend_skip_glob = [ - 'setup.py', - 'data/*', - 'examples/*', -] - [tool.pydocstyle] convention = 'numpy' ignore-decorators = '(copy_doc|property|.*setter|.*getter|pyqtSlot|Slot)' @@ -133,15 +108,27 @@ add_ignore = 'D100,D104,D107' [tool.ruff] line-length = 88 extend-exclude = [ + ".github", "doc", + "docker", "setup.py", + "singularity", +] + +[tool.ruff.lint] +# https://docs.astral.sh/ruff/linter/#rule-selection +select = [ + "E", # pycodestyle + "F", # Pyflakes + "UP", # pyupgrade + "B", # flake8-bugbear + "I", # isort + # "SIM", # flake8-simplify ] -ignore = ["E501"] # line too long (black will do that) [tool.ruff.per-file-ignores] "__init__.py" = ["F401"] - [tool.pytest.ini_options] minversion = '6.0' filterwarnings = [] From 5fc3c0da661f6910aa1792be122cd2e9ff479a6c Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Fri, 6 Sep 2024 13:03:07 +0200 Subject: [PATCH 3/5] fix ruff B errors --- fsqc/checkCCSize.py | 2 +- fsqc/checkContrast.py | 16 +++-- fsqc/checkRotation.py | 14 ++-- fsqc/checkSNR.py | 13 ++-- fsqc/checkTopology.py | 5 +- fsqc/createScreenshots.py | 49 ++++++++------ fsqc/evaluateFornixSegmentation.py | 12 ++-- fsqc/evaluateHippocampalSegmentation.py | 11 ++-- fsqc/evaluateHypothalamicSegmentation.py | 11 ++-- fsqc/fsqcMain.py | 83 +++++++++++++++--------- fsqc/fsqcUtils.py | 6 +- fsqc/utils/_config.py | 6 +- fsqc/utils/_imports.py | 4 +- pyproject.toml | 2 +- 14 files changed, 141 insertions(+), 93 deletions(-) diff --git a/fsqc/checkCCSize.py b/fsqc/checkCCSize.py index 2ed9df3..cfbc94d 100644 --- a/fsqc/checkCCSize.py +++ b/fsqc/checkCCSize.py @@ -70,7 +70,7 @@ def checkCCSize(subjects_dir, 
subject): relative_cc = sum_cc / intracranial_volume logging.info( - "Relative size of the corpus callosum is " + "{:.4}".format(relative_cc) + "Relative size of the corpus callosum is " + f"{relative_cc:.4}" ) # Return diff --git a/fsqc/checkContrast.py b/fsqc/checkContrast.py index 30f917d..f3816dd 100644 --- a/fsqc/checkContrast.py +++ b/fsqc/checkContrast.py @@ -49,12 +49,14 @@ def checkContrast(subjects_dir, subject): # Check if files exist path_pct_lh = os.path.join(subjects_dir, subject, "surf", "lh.w-g.pct.mgh") if not os.path.exists(path_pct_lh): - warnings.warn("WARNING: could not find " + path_pct_lh + ", returning NaNs") + warnings.warn("WARNING: could not find " + path_pct_lh + ", returning NaNs", + stacklevel = 2) return numpy.nan path_pct_rh = os.path.join(subjects_dir, subject, "surf", "rh.w-g.pct.mgh") if not os.path.exists(path_pct_rh): - warnings.warn("WARNING: could not find " + path_pct_rh + ", returning NaNs") + warnings.warn("WARNING: could not find " + path_pct_rh + ", returning NaNs", + stacklevel = 2) return numpy.nan path_label_cortex_lh = os.path.join( @@ -62,7 +64,8 @@ def checkContrast(subjects_dir, subject): ) if not os.path.exists(path_label_cortex_lh): warnings.warn( - "WARNING: could not find " + path_label_cortex_lh + ", returning NaNs" + "WARNING: could not find " + path_label_cortex_lh + ", returning NaNs", + stacklevel = 2 ) return numpy.nan @@ -71,7 +74,8 @@ def checkContrast(subjects_dir, subject): ) if not os.path.exists(path_label_cortex_rh): warnings.warn( - "WARNING: could not find " + path_label_cortex_rh + ", returning NaNs" + "WARNING: could not find " + path_label_cortex_rh + ", returning NaNs", + stacklevel = 2 ) return numpy.nan @@ -91,14 +95,14 @@ def checkContrast(subjects_dir, subject): con_lh_std = numpy.std(con_lh) con_lh_snr = con_lh_mean / con_lh_std logging.info( - "WM/GM contrast SNR for the left hemisphere: " + "{:.4}".format(con_lh_snr) + "WM/GM contrast SNR for the left hemisphere: " + f"{con_lh_snr:.4}" ) con_rh_mean = numpy.mean(con_rh) con_rh_std = numpy.std(con_rh) con_rh_snr = con_rh_mean / con_rh_std logging.info( - "WM/GM contrast SNR for the right hemisphere: " + "{:.4}".format(con_rh_snr) + "WM/GM contrast SNR for the right hemisphere: " + f"{con_rh_snr:.4}" ) # Return diff --git a/fsqc/checkRotation.py b/fsqc/checkRotation.py index a1f807f..9eb7f48 100644 --- a/fsqc/checkRotation.py +++ b/fsqc/checkRotation.py @@ -50,7 +50,8 @@ def checkRotation(subjects_dir, subject): if importlib.util.find_spec("transforms3d") is None: warnings.warn( - "WARNING: 'transforms3d' package required for running this script, returning NaNs." + "WARNING: 'transforms3d' package required for running this script, returning NaNs.", + stacklevel = 2 ) return np.nan, np.nan, np.nan else: @@ -64,12 +65,13 @@ def checkRotation(subjects_dir, subject): warnings.warn( "WARNING: could not open " + os.path.join(subjects_dir, subject, "mri", "transforms", "talairach.lta") - + ", returning NaNs." 
+ + ", returning NaNs.", + stacklevel = 2 ) return np.nan, np.nan, np.nan with open( - os.path.join(subjects_dir, subject, "mri", "transforms", "talairach.lta"), "r" + os.path.join(subjects_dir, subject, "mri", "transforms", "talairach.lta") ) as datafile: lines = datafile.readlines() @@ -102,11 +104,11 @@ def checkRotation(subjects_dir, subject): logging.info( "Found Talairach rotation angles: x = " - + "{:.3}".format(rot_x) + + f"{rot_x:.3}" + ", y = " - + "{:.3}".format(rot_y) + + f"{rot_y:.3}" + ", z = " - + "{:.3}".format(rot_z) + + f"{rot_z:.3}" + " radians.", ) diff --git a/fsqc/checkSNR.py b/fsqc/checkSNR.py index 703d1cd..134f25f 100644 --- a/fsqc/checkSNR.py +++ b/fsqc/checkSNR.py @@ -77,7 +77,8 @@ def checkSNR( norm_data = norm.get_fdata() else: warnings.warn( - "WARNING: could not open " + path_reference_image + ", returning NaNs." + "WARNING: could not open " + path_reference_image + ", returning NaNs.", + stacklevel = 2 ) return np.nan, np.nan @@ -86,7 +87,8 @@ def checkSNR( aseg = nib.load(path_aseg) data_aseg = aseg.get_fdata() else: - warnings.warn("WARNING: could not open " + path_aseg + ", returning NaNs.") + warnings.warn("WARNING: could not open " + path_aseg + ", returning NaNs.", + stacklevel = 2) return np.nan, np.nan path_aparc_aseg = os.path.join(subjects_dir, subject, "mri", aparc_image) @@ -95,7 +97,8 @@ def checkSNR( data_aparc_aseg = inseg.get_fdata() else: warnings.warn( - "WARNING: could not open " + path_aparc_aseg + ", returning NaNs." + "WARNING: could not open " + path_aparc_aseg + ", returning NaNs.", + stacklevel = 2 ) return np.nan, np.nan @@ -122,7 +125,7 @@ def checkSNR( signal_wm_mean = np.mean(signal_wm) signal_wm_std = np.std(signal_wm) wm_snr = signal_wm_mean / signal_wm_std - logging.info("White matter signal to noise ratio: " + "{:.4}".format(wm_snr)) + logging.info("White matter signal to noise ratio: " + f"{wm_snr:.4}") # Process gray matter image @@ -143,7 +146,7 @@ def checkSNR( signal_gm_mean = np.mean(signal_gm) signal_gm_std = np.std(signal_gm) gm_snr = signal_gm_mean / signal_gm_std - logging.info("Gray matter signal to noise ratio: " + "{:.4}".format(gm_snr)) + logging.info("Gray matter signal to noise ratio: " + f"{gm_snr:.4}") # Return return wm_snr, gm_snr diff --git a/fsqc/checkTopology.py b/fsqc/checkTopology.py index ebf715a..b74d8dd 100644 --- a/fsqc/checkTopology.py +++ b/fsqc/checkTopology.py @@ -61,10 +61,11 @@ def checkTopology(subjects_dir, subject): path_log_file = os.path.join(subjects_dir, subject, "scripts", "recon-all.log") if os.path.exists(path_log_file): - with open(path_log_file, "r") as logfile: + with open(path_log_file) as logfile: lines_log_file = logfile.read().splitlines() else: - warnings.warn("WARNING: could not find " + path_log_file + ", returning NaNs.") + warnings.warn("WARNING: could not find " + path_log_file + ", returning NaNs.", + stacklevel = 2) return np.nan, np.nan, np.nan, np.nan, np.nan, np.nan # Initialize diff --git a/fsqc/createScreenshots.py b/fsqc/createScreenshots.py index a338bcc..63b253b 100644 --- a/fsqc/createScreenshots.py +++ b/fsqc/createScreenshots.py @@ -12,16 +12,16 @@ def createScreenshots( OUTFILE, INTERACTIVE=True, LAYOUT=None, - BASE=["default"], - OVERLAY=["default"], + BASE="default", + OVERLAY="default", LABELS=None, - SURF=["default"], - SURFCOLOR=["default"], - VIEWS=["default"], + SURF="default", + SURFCOLOR="default", + VIEWS="default", XLIM=None, YLIM=None, BINARIZE=False, - ORIENTATION=["radiological"], + ORIENTATION=None, ): """ Function to create screenshots. 
@@ -39,17 +39,19 @@ def createScreenshots( LAYOUT : str, optional The layout, default is None. BASE : list, optional - The base, default is ["default"]. + The base, default is "default". + Load norm.mgz as default. OVERLAY : list, optional - The overlay, default is ["default"]. + The overlay, default is "default". + Load aseg.mgz as default. LABELS : None or str, optional The labels, default is None. SURF : list, optional - The surface, default is ["default"]. + The surface, default is "default". SURFCOLOR : list, optional - The surface color, default is ["default"]. + The surface color, default is "default". VIEWS : list, optional - The views, default is ["default"]. + The views, default is "default". XLIM : None or list, optional The x limits, default is None. YLIM : None or list, optional @@ -57,13 +59,14 @@ def createScreenshots( BINARIZE : bool, optional Flag for binarization, default is False. ORIENTATION : list, optional - The orientation, default is ["radiological"]. + The orientation, default is None. + Will use ["radiological"] per default. Notes ----- - BASE, VIEWS must be lists, can be ["default"]. + BASE, VIEWS must be lists, can be "default". - OVERLAY, SURF, SURFCOLOR can be lists or None, can be ["default"]. + OVERLAY, SURF, SURFCOLOR can be lists, None, or "default". XLIM, YLIM can be lists of list two-element numeric lists or None; if given, length must match length of VIEWS. x and y refer to final image dimensions, @@ -92,6 +95,9 @@ def computeLayout(n): import nibabel as nb import numpy as np + if ORIENTATION is None: + ORIENTATION = ["radiological"] + if not INTERACTIVE: matplotlib.use("Agg") @@ -115,14 +121,14 @@ def computeLayout(n): # ----------------------------------------------------------------------------- # import image data - if BASE == ["default"]: + if BASE == "default": norm = nb.load(os.path.join(SUBJECTS_DIR, SUBJECT, "mri", "norm.mgz")) else: norm = nb.load(BASE[0]) if OVERLAY is None: aseg = None - elif OVERLAY == ["default"]: + elif OVERLAY == "default": aseg = nb.load(os.path.join(SUBJECTS_DIR, SUBJECT, "mri", "aseg.mgz")) else: aseg = nb.load(OVERLAY[0]) @@ -130,7 +136,7 @@ def computeLayout(n): # ----------------------------------------------------------------------------- # import surface data - if SURF == ["default"]: + if SURF == "default": surflist = [ os.path.join(SUBJECTS_DIR, SUBJECT, "surf", "lh.white"), os.path.join(SUBJECTS_DIR, SUBJECT, "surf", "rh.white"), @@ -146,9 +152,9 @@ def computeLayout(n): for i in range(len(surflist)): surf.append(nb.freesurfer.io.read_geometry(surflist[i], read_metadata=True)) - if SURFCOLOR == ["default"] and SURF == ["default"]: + if SURFCOLOR == "default" and SURF == "default": surfcolor = ["yellow", "yellow", "red", "red"] - elif SURFCOLOR == ["default"] and SURF != ["default"]: + elif SURFCOLOR == "default" and SURF != "default": surfcolor = ["yellow"] * len(surf) else: surfcolor = SURFCOLOR @@ -211,7 +217,7 @@ def computeLayout(n): # ----------------------------------------------------------------------------- # determine VIEWS - if VIEWS == ["default"]: + if VIEWS == "default": CutsRRAS = [("x", -10), ("x", 10), ("y", 0), ("z", 0)] else: CutsRRAS = VIEWS @@ -678,7 +684,8 @@ def computeLayout(n): sortIdx = np.delete(sortIdx, findIdx[0, 0]) elif findIdx.shape[0] > 1: warnings.warn( - "WARNING: a problem occurred with the surface overlays" + "WARNING: a problem occurred with the surface overlays", + stacklevel = 2 ) # now final plot axs[axsx, axsy].plot( diff --git a/fsqc/evaluateFornixSegmentation.py 
b/fsqc/evaluateFornixSegmentation.py index ce4ca7a..28c0494 100644 --- a/fsqc/evaluateFornixSegmentation.py +++ b/fsqc/evaluateFornixSegmentation.py @@ -10,7 +10,7 @@ def evaluateFornixSegmentation( SUBJECTS_DIR, OUTPUT_DIR, CREATE_SCREENSHOT=True, - SCREENSHOTS_OUTFILE=[], + SCREENSHOTS_OUTFILE=None, RUN_SHAPEDNA=True, N_EIGEN=15, WRITE_EIGEN=True, @@ -41,7 +41,7 @@ def evaluateFornixSegmentation( The output directory. CREATE_SCREENSHOT : bool, optional (default: True) Whether to create screenshots. - SCREENSHOTS_OUTFILE : str or list, optional (default: []) + SCREENSHOTS_OUTFILE : str or list, optional (default: None) File or list of files for screenshots. RUN_SHAPEDNA : bool, optional (default: True) Whether to run shape analysis. @@ -80,7 +80,7 @@ def evaluateFornixSegmentation( warnings.warn( "WARNING: could not find " + os.path.join(SUBJECTS_DIR, SUBJECT, "mri", "transforms", "cc_up.lta") - + ", returning NaNs" + + ", returning NaNs", stacklevel = 2 ) out = np.empty(N_EIGEN) @@ -92,7 +92,7 @@ def evaluateFornixSegmentation( warnings.warn( "WARNING: could not find " + os.path.join(SUBJECTS_DIR, SUBJECT, "mri", "aseg.mgz") - + ", returning NaNs" + + ", returning NaNs", stacklevel = 2 ) out = np.empty(N_EIGEN) @@ -104,7 +104,7 @@ def evaluateFornixSegmentation( warnings.warn( "WARNING: could not find " + os.path.join(SUBJECTS_DIR, SUBJECT, "mri", "norm.mgz") - + ", returning NaNs" + + ", returning NaNs", stacklevel = 2 ) out = np.empty(N_EIGEN) @@ -112,7 +112,7 @@ def evaluateFornixSegmentation( return out - if not SCREENSHOTS_OUTFILE: + if SCREENSHOTS_OUTFILE is None: SCREENSHOTS_OUTFILE = os.path.join(OUTPUT_DIR, "cc.png") # -------------------------------------------------------------------------- diff --git a/fsqc/evaluateHippocampalSegmentation.py b/fsqc/evaluateHippocampalSegmentation.py index c1dc10b..914842b 100644 --- a/fsqc/evaluateHippocampalSegmentation.py +++ b/fsqc/evaluateHippocampalSegmentation.py @@ -10,8 +10,8 @@ def evaluateHippocampalSegmentation( SUBJECTS_DIR, OUTPUT_DIR, CREATE_SCREENSHOT=True, - SCREENSHOTS_OUTFILE=[], - SCREENSHOTS_ORIENTATION=["radiological"], + SCREENSHOTS_OUTFILE=None, + SCREENSHOTS_ORIENTATION=None, HEMI="lh", LABEL="T1.v21", ): @@ -36,7 +36,7 @@ def evaluateHippocampalSegmentation( The output directory. CREATE_SCREENSHOT : bool, optional, default: True Whether to create screenshots. - SCREENSHOTS_OUTFILE : str or list, optional, default: [] + SCREENSHOTS_OUTFILE : str or list, optional, default: None File or list of files for screenshots. SCREENSHOTS_ORIENTATION : str or list, optional, default: ["radiological"] Orientation or list of orientations for screenshots. 
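
The default-argument changes in this file and the previous ones address ruff's B006 rule: a list literal such as ["radiological"] or [] used as a default is created once at function definition time and shared across every call. The patch therefore switches these defaults to None and resolves them inside the function body, as the following hunks show. A minimal, hypothetical sketch of the pattern (the names are illustrative, not fsqc's API):

def evaluate(orientation=None, outfile=None):
    # Resolve defaults per call instead of sharing one mutable list
    # across all calls (ruff/flake8-bugbear B006).
    if orientation is None:
        orientation = ["radiological"]
    if outfile is None:
        outfile = "screenshot.png"
    return orientation, outfile
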
@@ -68,6 +68,9 @@ def evaluateHippocampalSegmentation( from fsqc.createScreenshots import createScreenshots from fsqc.fsqcUtils import binarizeImage + if SCREENSHOTS_ORIENTATION is None: + SCREENSHOTS_ORIENTATION = ["radiological"] + # -------------------------------------------------------------------------- # check files @@ -101,7 +104,7 @@ def evaluateHippocampalSegmentation( raise ValueError("File not found") - if not SCREENSHOTS_OUTFILE: + if SCREENSHOTS_OUTFILE is None: SCREENSHOTS_OUTFILE = os.path.join(OUTPUT_DIR, "hippocampus.png") # -------------------------------------------------------------------------- diff --git a/fsqc/evaluateHypothalamicSegmentation.py b/fsqc/evaluateHypothalamicSegmentation.py index a50b01e..533b6c5 100644 --- a/fsqc/evaluateHypothalamicSegmentation.py +++ b/fsqc/evaluateHypothalamicSegmentation.py @@ -11,8 +11,8 @@ def evaluateHypothalamicSegmentation( SUBJECTS_DIR, OUTPUT_DIR, CREATE_SCREENSHOT=True, - SCREENSHOTS_OUTFILE=[], - SCREENSHOTS_ORIENTATION=["radiological"], + SCREENSHOTS_OUTFILE=None, + SCREENSHOTS_ORIENTATION=None, ): """ Evaluate potential missegmentation of the hypothalamus. @@ -35,7 +35,7 @@ def evaluateHypothalamicSegmentation( The output directory. CREATE_SCREENSHOT : bool, optional, default: True Whether to create screenshots. - SCREENSHOTS_OUTFILE : str or list, optional, default: [] + SCREENSHOTS_OUTFILE : str or list, optional, default: None File or list of files for screenshots. SCREENSHOTS_ORIENTATION : str or list, optional, default: ["radiological"] Orientation or list of orientations for screenshots. @@ -63,6 +63,9 @@ def evaluateHypothalamicSegmentation( from fsqc.createScreenshots import createScreenshots from fsqc.fsqcUtils import binarizeImage + if SCREENSHOTS_ORIENTATION is None: + SCREENSHOTS_ORIENTATION = ["radiological"] + # -------------------------------------------------------------------------- # check files @@ -88,7 +91,7 @@ def evaluateHypothalamicSegmentation( raise ValueError("File not found") - if not SCREENSHOTS_OUTFILE: + if SCREENSHOTS_OUTFILE is None: SCREENSHOTS_OUTFILE = os.path.join(OUTPUT_DIR, "hypothalamus.png") # -------------------------------------------------------------------------- diff --git a/fsqc/fsqcMain.py b/fsqc/fsqcMain.py index 95fe631..ce9c161 100644 --- a/fsqc/fsqcMain.py +++ b/fsqc/fsqcMain.py @@ -1197,7 +1197,8 @@ def _check_arguments(argsDict): argsDict["subjects_dir"], subject, "stats", "aseg.stats" ) if not os.path.isfile(path_check): - warnings.warn("Could not find " + path_check + " for subject " + subject) + warnings.warn("Could not find " + path_check + " for subject " + subject, + stacklevel = 2) subjects_to_remove.extend([subject]) # -files: surf/[lr]h.w-g.pct.mgh, label/[lr]h.cortex.label @@ -1205,28 +1206,32 @@ def _check_arguments(argsDict): argsDict["subjects_dir"], subject, "surf", "lh.w-g.pct.mgh" ) if not os.path.isfile(path_check): - warnings.warn("Could not find " + path_check + " for subject " + subject) + warnings.warn("Could not find " + path_check + " for subject " + subject, + stacklevel = 2) subjects_to_remove.extend([subject]) path_check = os.path.join( argsDict["subjects_dir"], subject, "surf", "rh.w-g.pct.mgh" ) if not os.path.isfile(path_check): - warnings.warn("Could not find " + path_check + " for subject " + subject) + warnings.warn("Could not find " + path_check + " for subject " + subject, + stacklevel = 2) subjects_to_remove.extend([subject]) path_check = os.path.join( argsDict["subjects_dir"], subject, "label", "lh.cortex.label" ) if not 
os.path.isfile(path_check): - warnings.warn("Could not find " + path_check + " for subject " + subject) + warnings.warn("Could not find " + path_check + " for subject " + subject, + stacklevel = 2) subjects_to_remove.extend([subject]) path_check = os.path.join( argsDict["subjects_dir"], subject, "label", "rh.cortex.label" ) if not os.path.isfile(path_check): - warnings.warn("Could not find " + path_check + " for subject " + subject) + warnings.warn("Could not find " + path_check + " for subject " + subject, + stacklevel = 2) subjects_to_remove.extend([subject]) # -files: mri/transforms/talairach.lta @@ -1234,19 +1239,22 @@ def _check_arguments(argsDict): argsDict["subjects_dir"], subject, "mri", "transforms", "talairach.lta" ) if not os.path.isfile(path_check): - warnings.warn("Could not find " + path_check + " for subject " + subject) + warnings.warn("Could not find " + path_check + " for subject " + subject, + stacklevel = 2) subjects_to_remove.extend([subject]) # -files: mri/norm.mgz, mri/aseg.mgz, mri/aparc+aseg.mgz for FreeSurfer # -files: mri/norm.mgz, mri/aseg.mgz, mri/aparc.DKTatlas+aseg.deep.mgz for FastSurfer path_check = os.path.join(argsDict["subjects_dir"], subject, "mri", "norm.mgz") if not os.path.isfile(path_check): - warnings.warn("Could not find " + path_check + " for subject " + subject) + warnings.warn("Could not find " + path_check + " for subject " + subject, + stacklevel = 2) subjects_to_remove.extend([subject]) path_check = os.path.join(argsDict["subjects_dir"], subject, "mri", "aseg.mgz") if not os.path.isfile(path_check): - warnings.warn("Could not find " + path_check + " for subject " + subject) + warnings.warn("Could not find " + path_check + " for subject " + subject, + stacklevel = 2) subjects_to_remove.extend([subject]) if argsDict["fastsurfer"] is True: @@ -1258,7 +1266,8 @@ def _check_arguments(argsDict): argsDict["subjects_dir"], subject, "mri", "aparc+aseg.mgz" ) if not os.path.isfile(path_check): - warnings.warn("Could not find " + path_check + " for subject " + subject) + warnings.warn("Could not find " + path_check + " for subject " + subject, + stacklevel = 2) subjects_to_remove.extend([subject]) # -files: scripts/recon-all.log @@ -1266,7 +1275,8 @@ def _check_arguments(argsDict): argsDict["subjects_dir"], subject, "scripts", "recon-all.log" ) if not os.path.isfile(path_check): - warnings.warn("Could not find " + path_check + " for subject " + subject) + warnings.warn("Could not find " + path_check + " for subject " + subject, + stacklevel = 2) subjects_to_remove.extend([subject]) # check screenshots @@ -1279,7 +1289,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1288,7 +1299,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1297,7 +1309,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1306,7 +1319,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could 
not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1319,7 +1333,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1328,7 +1343,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1337,7 +1353,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1346,7 +1363,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1363,7 +1381,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1380,7 +1399,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1406,7 +1426,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1415,7 +1436,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1427,7 +1449,8 @@ def _check_arguments(argsDict): ) if not os.path.isfile(path_check): warnings.warn( - "Could not find " + path_check + " for subject " + subject + "Could not find " + path_check + " for subject " + subject, + stacklevel = 2 ) subjects_to_remove.extend([subject]) @@ -1453,13 +1476,9 @@ def _check_packages(): """ import importlib.util - import sys import packaging.version - if sys.version_info <= (3, 8): - raise RuntimeError("ERROR: Python version must be 3.8 or greater\n") - if importlib.util.find_spec("skimage") is None: raise ImportError( "ERROR: the 'skimage' package is required for running this script, please install.\n" @@ -3523,10 +3542,10 @@ def run_fsqc( screenshots_surf="default", screenshots_views="default", screenshots_layout=None, - screenshots_orientation=["radiological"], + screenshots_orientation=None, surfaces=False, surfaces_html=False, - surfaces_views=["left", "right", "superior", "inferior"], + surfaces_views=None, skullstrip=False, skullstrip_html=False, fornix=False, @@ -3643,6 +3662,12 @@ def run_fsqc( dict A dictionary of input arguments and 
processing directives. """ + # set defauls here to avoid mutable datastructures for default argument B006 + if screenshots_orientation is None: + screenshots_orientation = ["radiological"] + if surfaces_views is None: + surfaces_views = ["left", "right", "superior", "inferior"] + # create argsDict if argsDict is None and (subjects_dir is None or output_dir is None): raise ValueError( diff --git a/fsqc/fsqcUtils.py b/fsqc/fsqcUtils.py index c8d1ad0..89cca3b 100644 --- a/fsqc/fsqcUtils.py +++ b/fsqc/fsqcUtils.py @@ -36,7 +36,7 @@ def importMGH(filename): logging.captureWarnings(True) if not os.path.exists(filename): - warnings.warn("WARNING: could not find " + filename + ", returning NaNs") + warnings.warn("WARNING: could not find " + filename + ", returning NaNs", stacklevel = 2) return numpy.nan fp = open(filename, "rb") @@ -70,7 +70,7 @@ def importMGH(filename): unused_space_size = unused_space_size - USED_SPACE_SIZE - for i in range(unused_space_size): + for _i in range(unused_space_size): struct.unpack(">b", fp.read(charsize))[0] nv = ndim1 * ndim2 * ndim3 * nframes @@ -220,7 +220,7 @@ def readLTA(file): import numpy as np - with open(file, "r") as f: + with open(file) as f: lta = f.readlines() d = dict() i = 0 diff --git a/fsqc/utils/_config.py b/fsqc/utils/_config.py index d18a31a..3b2b5a3 100644 --- a/fsqc/utils/_config.py +++ b/fsqc/utils/_config.py @@ -3,7 +3,7 @@ import sys from functools import partial from importlib.metadata import requires, version -from typing import IO, Callable, List, Optional +from typing import IO, Callable, Optional import psutil @@ -68,7 +68,7 @@ def sys_info(fid: Optional[IO] = None, developer: bool = False): _list_dependencies_info(out, ljust, dependencies) -def _list_dependencies_info(out: Callable, ljust: int, dependencies: List[str]): +def _list_dependencies_info(out: Callable, ljust: int, dependencies: list[str]): """List dependencies names and versions. Parameters @@ -77,7 +77,7 @@ def _list_dependencies_info(out: Callable, ljust: int, dependencies: List[str]): output function ljust : int length of returned string - dependencies : List[str] + dependencies : list[str] list of dependencies """ diff --git a/fsqc/utils/_imports.py b/fsqc/utils/_imports.py index 4bfd727..60856cc 100644 --- a/fsqc/utils/_imports.py +++ b/fsqc/utils/_imports.py @@ -51,12 +51,12 @@ def import_optional_dependency( try: module = importlib.import_module(name) - except ImportError: + except ImportError as err: if raise_error: raise ImportError( f"Missing optional dependency '{install_name}'. {extra} " f"Use pip or conda to install {install_name}." 
- ) + ) from err else: return None diff --git a/pyproject.toml b/pyproject.toml index f02b54b..589533c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -126,7 +126,7 @@ select = [ # "SIM", # flake8-simplify ] -[tool.ruff.per-file-ignores] +[tool.ruff.lint.per-file-ignores] "__init__.py" = ["F401"] [tool.pytest.ini_options] From 5354516bca8186cdf59aeff8f2ac4ff9d9f7d1f9 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Fri, 6 Sep 2024 13:05:03 +0200 Subject: [PATCH 4/5] ignore line length for now --- pyproject.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 589533c..289aef8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -114,6 +114,8 @@ extend-exclude = [ "setup.py", "singularity", ] +ignore = ["E501"] # line too long (should be enforced soon) + [tool.ruff.lint] # https://docs.astral.sh/ruff/linter/#rule-selection From 7232038be14f838c54d169cecd4d1efda92c6e99 Mon Sep 17 00:00:00 2001 From: Martin Reuter Date: Fri, 6 Sep 2024 13:07:21 +0200 Subject: [PATCH 5/5] fix typo --- fsqc/fsqcMain.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fsqc/fsqcMain.py b/fsqc/fsqcMain.py index ce9c161..74f6c9d 100644 --- a/fsqc/fsqcMain.py +++ b/fsqc/fsqcMain.py @@ -3662,7 +3662,7 @@ def run_fsqc( dict A dictionary of input arguments and processing directives. """ - # set defauls here to avoid mutable datastructures for default argument B006 + # set defaults here to avoid mutable datastructures for default argument B006 if screenshots_orientation is None: screenshots_orientation = ["radiological"] if surfaces_views is None:
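
One pattern from the tail end of patch 3 is worth a closing illustration: in fsqc/utils/_imports.py the re-raise now uses "from err", which keeps the original ImportError attached as __cause__ instead of silently discarding it (ruff's B904 check). A minimal sketch, independent of fsqc's actual helper:

import importlib

def import_optional(name):
    try:
        return importlib.import_module(name)
    except ImportError as err:
        # Chain the new error to the original one so the traceback shows both
        # the "Missing optional dependency" message and the underlying ImportError.
        raise ImportError(f"Missing optional dependency '{name}'.") from err
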