Skip to content

Commit

Permalink
Add a PluginInterface base class for plugins to implement
Browse files Browse the repository at this point in the history
  • Loading branch information
marcelzwiers committed Dec 8, 2024
1 parent f48ef58 commit 9f91039
Show file tree
Hide file tree
Showing 15 changed files with 1,219 additions and 1,378 deletions.
6 changes: 3 additions & 3 deletions bidscoin/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
import subprocess
from pathlib import Path
from importlib import metadata
from typing import Tuple, Union, List
from typing import Union
from logging import getLogger
from .due import due, Doi
try:
Expand Down Expand Up @@ -101,7 +101,7 @@
path='bidscoin', version=__version__, cite_module=True, tags=['reference-implementation'])


def check_version() -> Tuple[str, Union[bool, None], str]:
def check_version() -> tuple[str, Union[bool, None], str]:
"""
Compares the BIDSCOIN version from the local metadata to the remote pypi repository
Expand Down Expand Up @@ -141,7 +141,7 @@ def is_hidden(path: Path):
return hidden


def lsdirs(folder: Path, wildcard: str='*') -> List[Path]:
def lsdirs(folder: Path, wildcard: str='*') -> list[Path]:
"""
Gets all sorted directories in a folder, ignores files. Foldernames starting with a dot are considered hidden and will be skipped
Expand Down
66 changes: 31 additions & 35 deletions bidscoin/bcoin.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
from importlib.metadata import entry_points
from importlib.util import spec_from_file_location, module_from_spec
from pathlib import Path
from typing import Tuple, Union, List
from typing import Union
from ruamel.yaml import YAML
from tqdm import tqdm
from tqdm.contrib.logging import logging_redirect_tqdm
Expand Down Expand Up @@ -241,7 +241,7 @@ def list_executables(show: bool=False) -> list:
return scripts


def list_plugins(show: bool=False) -> Tuple[List[Path], List[Path]]:
def list_plugins(show: bool=False) -> tuple[list[Path], list[Path]]:
"""
:param show: Print the template bidsmaps and installed plugins if True
:return: List of the installed plugins and template bidsmaps
Expand All @@ -264,7 +264,7 @@ def list_plugins(show: bool=False) -> Tuple[List[Path], List[Path]]:
return plugins, templates


def install_plugins(filenames: List[str]=()) -> None:
def install_plugins(filenames: list[str]=()) -> None:
"""
Installs template bidsmaps and plugins and adds the plugin Options and data format section to the default template bidsmap
Expand Down Expand Up @@ -296,7 +296,7 @@ def install_plugins(filenames: List[str]=()) -> None:
continue

# Check if we can import the plugin
module = import_plugin(file, ('bidsmapper_plugin', 'bidscoiner_plugin'))
module = import_plugin(file)
if not module:
LOGGER.error(f"Plugin failure, please re-install a valid version of '{file.name}'")
continue
Expand All @@ -316,7 +316,7 @@ def install_plugins(filenames: List[str]=()) -> None:
LOGGER.success(f"The '{file.name}' plugin was successfully installed")


def uninstall_plugins(filenames: List[str]=(), wipe: bool=True) -> None:
def uninstall_plugins(filenames: list[str]=(), wipe: bool=True) -> None:
"""
Uninstalls template bidsmaps and plugins and removes the plugin Options and data format section from the default template bidsmap
Expand All @@ -338,7 +338,7 @@ def uninstall_plugins(filenames: List[str]=(), wipe: bool=True) -> None:

# First check if we can import the plugin
if file.suffix == '.py':
module = import_plugin(pluginfolder/file.name, ('bidsmapper_plugin', 'bidscoiner_plugin'))
module = import_plugin(pluginfolder/file.name)
else:
module = None

Expand Down Expand Up @@ -373,13 +373,13 @@ def uninstall_plugins(filenames: List[str]=(), wipe: bool=True) -> None:


@lru_cache()
def import_plugin(plugin: Union[Path,str], functions: tuple=()) -> Union[types.ModuleType, None]:
def import_plugin(plugin: Union[Path,str], classes: tuple=('Interface',)) -> Union[types.ModuleType, None]:
"""
Imports the plugin if it contains any of the specified functions
:param plugin: Name of the plugin in the bidscoin "plugins" folder or the fullpath name
:param functions: List of functions of which at least one of them should be present in the plugin
:return: The imported plugin-module
:param plugin: Name of the plugin in the bidscoin "plugins" folder or the fullpath name
:param classes: List of classes of which at least one of them should be present in the plugin
:return: The imported plugin-module
"""

if not plugin: return
Expand All @@ -401,17 +401,17 @@ def import_plugin(plugin: Union[Path,str], functions: tuple=()) -> Union[types.M
module = module_from_spec(spec)
spec.loader.exec_module(module)

functionsfound = []
for function in functions:
if not hasattr(module, function):
LOGGER.verbose(f"Could not find '{function}' in the '{plugin}' plugin")
elif not callable(getattr(module, function)):
LOGGER.error(f"'The {function}' attribute in the '{plugin}' plugin is not callable")
classesfound = []
for klass in classes:
if not hasattr(module, klass):
LOGGER.verbose(f"Could not find '{klass}' in the '{plugin}' plugin")
elif not callable(getattr(module, klass)):
LOGGER.error(f"'The {klass}' attribute in the '{plugin}' plugin is not callable")
else:
functionsfound.append(function)
classesfound.append(klass)

if functions and not functionsfound:
LOGGER.bcdebug(f"Plugin '{plugin}' does not contain {functions} functions")
if classes and not classesfound:
LOGGER.bcdebug(f"Plugin '{plugin}' does not contain {classes} classes")
else:
return module

Expand All @@ -432,26 +432,22 @@ def test_plugin(plugin: Union[Path,str], options: dict) -> int:

LOGGER.info(f"--------- Testing the '{plugin}' plugin ---------")

# First test to see if we can import the core plugin methods
module = import_plugin(plugin, ('bidsmapper_plugin','bidscoiner_plugin'))
# First test to see if we can import the plugin interface
module = import_plugin(plugin)
if module is None:
return 1

# Then run the plugin's own 'test' routine (if implemented)
if hasattr(module, 'test') and callable(getattr(module, 'test')):
try:
returncode = module.test(options)
if returncode == 0:
LOGGER.success(f"The '{plugin}' plugin functioned correctly")
else:
LOGGER.warning(f"The '{plugin}' plugin did not function correctly")
return returncode
except Exception as pluginerror:
LOGGER.error(f"Could not run {plugin}.test(options):\n{pluginerror}")
return 1
else:
LOGGER.info(f"The '{plugin}' did not have a test routine")
return 0
try:
returncode = module.Interface().test(options)
if returncode == 0:
LOGGER.success(f"The '{plugin}' plugin functioned correctly")
else:
LOGGER.warning(f"The '{plugin}' plugin did not function correctly")
return returncode
except Exception as pluginerror:
LOGGER.error(f"Could not run {plugin}.test(options):\n{pluginerror}")
return 1


def test_bidsmap(bidsmapfile: str):
Expand Down
62 changes: 31 additions & 31 deletions bidscoin/bids.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from fnmatch import fnmatch
from functools import lru_cache
from pathlib import Path
from typing import List, Set, Tuple, Union, Dict, Any, Iterable, NewType
from typing import Union, Any, Iterable, NewType
from pydicom import dcmread, fileset, config
from importlib.util import find_spec
if find_spec('bidscoin') is None:
Expand All @@ -32,17 +32,17 @@
from bidscoin.plugins import EventsParser
from ruamel.yaml import YAML
yaml = YAML()
yaml.representer.ignore_aliases = lambda *data: True # Expand aliases (https://stackoverflow.com/questions/58091449/disabling-alias-for-yaml-file-in-python)
yaml.representer.ignore_aliases = lambda *data: True # Expand aliases (https://stackoverflow.com/questions/58091449/disabling-alias-for-yaml-file-in-python)
config.INVALID_KEY_BEHAVIOR = 'IGNORE'

# Define custom data types (replace with proper classes or TypeAlias of Python >= 3.10)
Plugin = NewType('Plugin', Dict[str, Any])
Plugins = NewType('Plugin', Dict[str, Plugin])
Options = NewType('Options', Dict[str, Any])
Properties = NewType('Properties', Dict[str, Any])
Attributes = NewType('Attributes', Dict[str, Any])
Bids = NewType('Bids', Dict[str, Any])
Meta = NewType('Meta', Dict[str, Any])
Plugin = NewType('Plugin', dict[str, Any])
Plugins = NewType('Plugin', dict[str, Plugin])
Options = NewType('Options', dict[str, Any])
Properties = NewType('Properties', dict[str, Any])
Attributes = NewType('Attributes', dict[str, Any])
Bids = NewType('Bids', dict[str, Any])
Meta = NewType('Meta', dict[str, Any])

LOGGER = logging.getLogger(__name__)

Expand Down Expand Up @@ -125,10 +125,10 @@ def has_support(self) -> str:
return ''

for plugin, options in self.plugins.items():
module = bcoin.import_plugin(plugin, ('has_support',))
module = bcoin.import_plugin(plugin)
if module:
try:
supported = module.has_support(self.path, self.dataformat)
supported = module.Interface().has_support(self.path, self.dataformat)
except Exception as moderror:
supported = ''
LOGGER.exception(f"The {plugin} plugin crashed while reading {self.path}\n{moderror}")
Expand Down Expand Up @@ -226,9 +226,9 @@ def attributes(self, attributekey: str, validregexp: bool=False, cache: bool=Tru

elif self.dataformat or self.has_support():
for plugin, options in self.plugins.items():
module = bcoin.import_plugin(plugin, ('get_attribute',))
module = bcoin.import_plugin(plugin)
if module:
attributeval = module.get_attribute(self.dataformat, self.path, attributekey, options)
attributeval = module.Interface().get_attribute(self.dataformat, self.path, attributekey, options)
attributeval = str(attributeval) if attributeval is not None else ''
if attributeval:
break
Expand Down Expand Up @@ -278,7 +278,7 @@ def _extattributes(self) -> Attributes:

return Attributes(attributes)

def subid_sesid(self, subid: str=None, sesid: str=None) -> Tuple[str, str]:
def subid_sesid(self, subid: str=None, sesid: str=None) -> tuple[str, str]:
"""
Extract the cleaned-up subid and sesid from the datasource properties or attributes
Expand Down Expand Up @@ -447,7 +447,7 @@ def __eq__(self, other):
else:
return NotImplemented

def check(self, checks: Tuple[bool, bool, bool]=(False, False, False)) -> Tuple[Union[bool, None], Union[bool, None], Union[bool, None]]:
def check(self, checks: tuple[bool, bool, bool]=(False, False, False)) -> tuple[Union[bool, None], Union[bool, None], Union[bool, None]]:
"""
Check run for required and optional entities using the BIDS schema files
Expand Down Expand Up @@ -607,7 +607,7 @@ def bidsname(self, subid: str='unknown', sesid: str='', validkeys: bool=False, r

return bidsname

def increment_runindex(self, outfolder: Path, bidsname: str, scans_table: pd.DataFrame=None, targets: Set[Path]=()) -> str:
def increment_runindex(self, outfolder: Path, bidsname: str, scans_table: pd.DataFrame=None, targets: set[Path]=()) -> str:
"""
Checks if a file with the same bidsname already exists in the folder and then increments the dynamic runindex
(if any) until no such file is found.
Expand Down Expand Up @@ -721,7 +721,7 @@ def __hash__(self):
return hash(str(self))

@property
def runitems(self) -> List[RunItem]:
def runitems(self) -> list[RunItem]:
"""Returns a list of the RunItem objects for this datatype"""

return [RunItem(self.dataformat, self.datatype, rundata, self.options, self.plugins) for rundata in self._data]
Expand Down Expand Up @@ -836,7 +836,7 @@ def session(self, value: str):
self._data['session'] = value

@property
def datatypes(self) -> List[DataType]:
def datatypes(self) -> list[DataType]:
"""Gets a list of DataType objects for the dataformat"""

return [DataType(self.dataformat, datatype, self._data[datatype], self.options, self.plugins) for datatype in self._data if datatype not in ('subject', 'session')]
Expand Down Expand Up @@ -881,7 +881,7 @@ def delete_runs(self, datatype: Union[str, DataType]=''):
class BidsMap:
"""Reads and writes mapping heuristics from the bidsmap YAML-file"""

def __init__(self, yamlfile: Path, folder: Path=templatefolder, plugins: Iterable[Union[Path,str]]=(), checks: Tuple[bool,bool,bool]=(True,True,True)):
def __init__(self, yamlfile: Path, folder: Path=templatefolder, plugins: Iterable[Union[Path,str]]=(), checks: tuple[bool,bool,bool]=(True,True,True)):
"""
Read and standardize the bidsmap (i.e. add missing information and perform checks). If yamlfile is not fullpath, then 'folder' is first searched before
the default 'heuristics'. If yamfile is empty, then first 'bidsmap.yaml' is searched for, then 'bidsmap_template'. So fullpath
Expand Down Expand Up @@ -1119,7 +1119,7 @@ def validate(self, level: int=1) -> bool:

return valid

def check(self, checks: Tuple[bool, bool, bool]=(True, True, True)) -> Tuple[Union[bool, None], Union[bool, None], Union[bool, None]]:
def check(self, checks: tuple[bool, bool, bool]=(True, True, True)) -> tuple[Union[bool, None], Union[bool, None], Union[bool, None]]:
"""
Check all non-ignored runs in the bidsmap for required and optional entities using the BIDS schema files
Expand Down Expand Up @@ -1207,7 +1207,7 @@ def check_template(self) -> bool:

return valid

def dir(self, dataformat: Union[str, DataFormat]) -> List[Path]:
def dir(self, dataformat: Union[str, DataFormat]) -> list[Path]:
"""
Make a provenance list of all the runs in the bidsmap[dataformat]
Expand Down Expand Up @@ -1268,7 +1268,7 @@ def exist_run(self, runitem: RunItem, datatype: Union[str, DataType]='') -> bool

return False

def get_matching_run(self, sourcefile: Union[str, Path], dataformat, runtime=False) -> Tuple[RunItem, str]:
def get_matching_run(self, sourcefile: Union[str, Path], dataformat, runtime=False) -> tuple[RunItem, str]:
"""
Find the first run in the bidsmap with properties and attributes that match with the data source. Only non-empty
properties and attributes are matched, except when runtime is True, then the empty attributes are also matched.
Expand Down Expand Up @@ -1534,7 +1534,7 @@ def update(self, source_datatype: Union[str, DataType], runitem: RunItem):
LOGGER.error(f"Number of runs in bidsmap['{runitem.dataformat}'] changed unexpectedly: {num_runs_in} -> {num_runs_out}")


def unpack(sesfolder: Path, wildcard: str='', workfolder: Path='', _subprefix: Union[str,None]='') -> Tuple[Set[Path], bool]:
def unpack(sesfolder: Path, wildcard: str='', workfolder: Path='', _subprefix: Union[str,None]='') -> tuple[set[Path], bool]:
"""
Unpacks and sorts DICOM files in sourcefolder to a temporary folder if sourcefolder contains a DICOMDIR file or .tar.gz, .gz or .zip files
Expand Down Expand Up @@ -1572,7 +1572,7 @@ def unpack(sesfolder: Path, wildcard: str='', workfolder: Path='', _subprefix: U
shutil.copytree(sesfolder, worksesfolder, dirs_exist_ok=True)

# Unpack the zip/tarball files in the temporary folder
sessions: Set[Path] = set()
sessions: set[Path] = set()
for tarzipfile in [worksesfolder/tarzipfile.name for tarzipfile in tarzipfiles]:
LOGGER.info(f"Unpacking: {tarzipfile.name} -> {worksesfolder}")
try:
Expand Down Expand Up @@ -1674,7 +1674,7 @@ def get_dicomfile(folder: Path, index: int=0) -> Path:
return Path()


def get_parfiles(folder: Path) -> List[Path]:
def get_parfiles(folder: Path) -> list[Path]:
"""
Gets the Philips PAR-file from the folder
Expand All @@ -1685,7 +1685,7 @@ def get_parfiles(folder: Path) -> List[Path]:
if is_hidden(Path(folder.name)):
return []

parfiles: List[Path] = []
parfiles: list[Path] = []
for file in sorted(folder.iterdir()):
if not is_hidden(file.relative_to(folder)) and is_parfile(file):
parfiles.append(file)
Expand Down Expand Up @@ -2334,7 +2334,7 @@ def check_runindices(session: Path) -> bool:
return True


def limitmatches(fmap: str, matches: List[str], limits: str, niifiles: Set[str], scans_table: pd.DataFrame):
def limitmatches(fmap: str, matches: list[str], limits: str, niifiles: set[str], scans_table: pd.DataFrame):
"""
Helper function for addmetadata() to check if there are multiple fieldmap runs and get the lower- and upperbound from
the AcquisitionTime to bound the grand list of matches to adjacent runs. The resulting list is appended to niifiles
Expand Down Expand Up @@ -2518,7 +2518,7 @@ def addmetadata(bidsses: Path):
json.dump(jsondata, sidecar, indent=4)


def poolmetadata(datasource: DataSource, targetmeta: Path, usermeta: Meta, extensions: Iterable, sourcemeta: Path=Path()) -> Meta:
def poolmetadata(datasource: DataSource, targetmeta: Path, usermeta: Meta, metaext: Iterable, sourcemeta: Path=Path()) -> Meta:
"""
Load the metadata from the target (json sidecar), then add metadata from the source (json sidecar) and finally add
the user metadata (meta table). Source metadata other than json sidecars are copied over to the target folder. Special
Expand All @@ -2529,7 +2529,7 @@ def poolmetadata(datasource: DataSource, targetmeta: Path, usermeta: Meta, exten
:param datasource: The data source from which dynamic values are read
:param targetmeta: The filepath of the target data file with meta-data
:param usermeta: A user metadata dict, e.g. the meta table from a run-item
:param extensions: A list of file extensions of the source metadata files, e.g. as specified in bidsmap.plugins['plugin']['meta']
:param metaext: A list of file extensions of the source metadata files, e.g. as specified in bidsmap.plugins['plugin']['meta']
:param sourcemeta: The filepath of the source data file with associated/equally named meta-data files (name may include wildcards). Leave empty to use datasource.path
:return: The combined target + source + user metadata
"""
Expand All @@ -2544,7 +2544,7 @@ def poolmetadata(datasource: DataSource, targetmeta: Path, usermeta: Meta, exten
metapool = json.load(json_fid)

# Add the source metadata to the metadict or copy it over
for ext in extensions:
for ext in metaext:
for sourcefile in sourcemeta.parent.glob(sourcemeta.with_suffix('').with_suffix(ext).name):
LOGGER.verbose(f"Copying source data from: '{sourcefile}''")

Expand Down Expand Up @@ -2600,7 +2600,7 @@ def poolmetadata(datasource: DataSource, targetmeta: Path, usermeta: Meta, exten
return Meta(metapool)


def addparticipant(participants_tsv: Path, subid: str='', sesid: str='', data: dict=None, dryrun: bool=False) -> Tuple[pd.DataFrame, dict]:
def addparticipant(participants_tsv: Path, subid: str='', sesid: str='', data: dict=None, dryrun: bool=False) -> tuple[pd.DataFrame, dict]:
"""
Read/create and/or add (if it's not there yet) a participant to the participants.tsv/.json file
Expand Down
Loading

0 comments on commit 9f91039

Please sign in to comment.