diff --git a/README.rst b/README.rst index 82747f9c..dcf8e0b2 100644 --- a/README.rst +++ b/README.rst @@ -35,7 +35,7 @@ Features * [x] Multi-echo data\* * [x] Multi-coil data\* * [x] Plug-ins -* [ ] Stimulus/behavioural logfiles +* [x] Stimulus/behavioural logfiles ``* = Only DICOM source data`` diff --git a/bidscoin/bids.py b/bidscoin/bids.py index bf690364..101f6d58 100644 --- a/bidscoin/bids.py +++ b/bidscoin/bids.py @@ -820,9 +820,9 @@ def increment_runindex(self, outfolder: Path, bidsname: str, scans_table: pd.Dat def eventsparser(self) -> EventsParser: """Returns a plugin EventsParser instance to parse the stimulus presentation logfile (if any)""" - plugins = [bcoin.import_plugin(plugin, (f"{self.dataformat}Events",)) for plugin in self.plugins] - if plugins and plugins[0]: - return getattr(plugins[0], f"{self.dataformat}Events")(self.provenance, self.events) + for name in self.plugins: + if plugin := bcoin.import_plugin(name, (f"{self.dataformat}Events",)): + return getattr(plugin, f"{self.dataformat}Events")(self.provenance, self.events) class DataType: diff --git a/bidscoin/bidscoiner.py b/bidscoin/bidscoiner.py index 9c2e56ce..e569c2ec 100755 --- a/bidscoin/bidscoiner.py +++ b/bidscoin/bidscoiner.py @@ -98,8 +98,7 @@ def bidscoiner(sourcefolder: str, bidsfolder: str, participant: list=(), force: return # Load the data conversion plugins - plugins = [bcoin.import_plugin(plugin, ('bidscoiner_plugin',)) for plugin,options in bidsmap.plugins.items()] - plugins = [plugin for plugin in plugins if plugin] # Filter the empty items from the list + plugins = [plugin for name in bidsmap.plugins if (plugin := bcoin.import_plugin(name, ('bidscoiner_plugin',)))] if not plugins: LOGGER.warning(f"The plugins listed in your bidsmap['Options'] did not have a usable `bidscoiner_plugin` function, nothing to do") LOGGER.info('-------------- FINISHED! ------------') diff --git a/bidscoin/bidseditor.py b/bidscoin/bidseditor.py index c6798fa9..07e19a57 100755 --- a/bidscoin/bidseditor.py +++ b/bidscoin/bidseditor.py @@ -1093,15 +1093,18 @@ def __init__(self, runitem: RunItem, bidsmap: BidsMap, template_bidsmap: BidsMap events_time.cellChanged.connect(self.events_time2run) events_time.setToolTip(f"Columns: The number of time units per second + the column names that contain timing information (e.g. [10000, 'Time', 'Duration'])\n" f"Start: The event that marks the beginning of the experiment, i.e. where the clock should be (re)set to 0 (e.g. 'Code=10' if '10' is used to log the pulses)") - events_rows_label = QLabel('Conditions') + events_rows_label = QLabel('Rows') self.events_rows = events_rows = self.setup_table(events_data.get('rows',[]), 'events_rows') events_rows.cellChanged.connect(self.events_rows2run) events_rows.setToolTip(f"The groups of rows that are included in the output table") events_rows.horizontalHeader().setVisible(True) + events_rows.setStyleSheet('QTableView::item {border-right: 1px solid #d6d9dc;}') events_columns_label = QLabel('Columns') self.events_columns = events_columns = self.setup_table(events_data.get('columns',[]), 'events_columns') events_columns.cellChanged.connect(self.events_columns2run) events_columns.setToolTip(f"The mappings of the included output columns. 
To add a new column, enter its mapping in the empty bottom row") + events_columns.horizontalHeader().setVisible(True) + events_columns.setStyleSheet('QTableView::item {border-right: 1px solid #d6d9dc;}') log_table_label = QLabel('Log data') log_table = self.setup_table(events_data.get('log_table',[]), 'log_table', minsize=False) log_table.setShowGrid(True) @@ -1156,10 +1159,10 @@ def __init__(self, runitem: RunItem, bidsmap: BidsMap, template_bidsmap: BidsMap layout2_.setAlignment(arrow_, QtCore.Qt.AlignmentFlag.AlignHCenter) layout2_.addWidget(events_columns_label) layout2_.addWidget(events_columns) - layout2_.addWidget(events_time_label) - layout2_.addWidget(events_time) layout2_.addWidget(events_rows_label) layout2_.addWidget(events_rows) + layout2_.addWidget(events_time_label) + layout2_.addWidget(events_time) layout2_.addStretch() self.events_editbox = events_editbox = QGroupBox(' ') events_editbox.setSizePolicy(sizepolicy) @@ -1243,8 +1246,8 @@ def fill_table(self, table: MyQTable, data: list): # Some ugly hacks to adjust individual tables tablename = table.objectName() - header = tablename in ('log_table', 'events_table', 'events_rows') - extrarow = [[{'value': '', 'editable': True}, {'value': '', 'editable': True}]] if tablename in ('events_columns','meta') else [] + header = tablename in ('log_table', 'events_table', 'events_rows', 'events_columns') + extrarow = [[{'value': '', 'editable': True}, {'value': '', 'editable': True}]] if tablename in ('events_rows','events_columns','meta') else [] ncols = len(data[0]) if data else 2 # Always at least two columns (i.e. key, value) # Populate the blocked/hidden table @@ -1412,20 +1415,15 @@ def run2data(self) -> tuple: [{'value': 'start', 'editable': False}, {'value': runitem.events['time']['start'], 'editable': True}]] # Set up the data for the events conditions / row groups - header = [{'value': '', 'editable': False}] - row_incl = [{'value': 'include', 'editable': False}] - row_cast = [{'value': 'cast', 'editable': False}] - for n, condition in enumerate(runitem.events.get('rows', [])): - header += [{'value': f"{n + 1}", 'editable': False}] - row_incl += [{'value': f"{dict(condition['include'])}", 'editable': True}] - row_cast += [{'value': f"{dict(condition.get('cast') or {})}", 'editable': True}] - header += [{'value': 'new', 'editable': False}] # = Extra column - events_data['rows'] = [header, row_incl, row_cast] + events_data['rows'] = [[{'value': 'condition', 'editable': False}, {'value': 'cast output', 'editable': False}]] + for condition in runitem.events.get('rows') or []: + events_data['rows'].append([{'value': f"{dict(condition['include'])}", 'editable': True}, {'value': f"{dict(condition.get('cast') or {})}", 'editable': True}]) # Set up the data for the events columns - events_data['columns'] = [] + events_data['columns'] = [[{'value': 'input', 'editable': False}, {'value': 'output', 'editable': False}]] for mapping in runitem.events.get('columns') or []: - events_data['columns'].append([{'value': mapping, 'editable': True}]) + for key, value in mapping.items(): + events_data['columns'].append([{'value': value, 'editable': True}, {'value': key, 'editable': True}]) # Set up the data for the events table parser = runitem.eventsparser() @@ -1619,19 +1617,23 @@ def events_rows2run(self, rowindex: int, colindex: int): # row: [[include, {column_in: regex}], # [cast, {column_out: newvalue}]] - mapping = self.events_rows.item(rowindex, colindex).text().strip() + mapping = self.events_rows.item(rowindex, colindex).text().strip() if 
self.events_rows.item(rowindex, colindex) else '' + nrows = self.events_rows.rowCount() - LOGGER.verbose(f"User sets events['rows'][{colindex+1}] to {mapping}' for {self.target_run}") + LOGGER.verbose(f"User sets events['rows'][{rowindex}] to {mapping}' for {self.target_run}") if mapping: - ncols = self.events_rows.columnCount() - if colindex == ncols - 1: - self.target_run.events['rows'].append({'include' if rowindex==0 else 'cast': ast.literal_eval(mapping)}) + try: + mapping = ast.literal_eval(mapping) # Convert stringified dict back to dict + except (ValueError, SyntaxError): + mapping = {} + if rowindex == nrows - 1: + self.target_run.events['rows'].append({'include' if colindex==0 else 'cast': mapping}) else: - self.target_run.events['rows'][colindex-1]['include' if rowindex==0 else 'cast'] = ast.literal_eval(mapping) - elif colindex <= len(self.target_run.events['rows']): # Remove the row - del self.target_run.events['rows'][colindex-1] + self.target_run.events['rows'][rowindex]['include' if colindex==0 else 'cast'] = mapping + elif colindex == 0 and rowindex < nrows - 1: # Remove the row + del self.target_run.events['rows'][rowindex] else: - LOGGER.bcdebug(f"Cannot remove events['rows'][{colindex-1}] for {self.target_run}") + LOGGER.bcdebug(f"Cannot remove events['rows'][{rowindex}] for {self.target_run}") # Refresh the events tables, i.e. delete empty rows or add a new row if a key is defined on the last row _,_,_,_,events_data = self.run2data() @@ -1644,16 +1646,17 @@ def events_columns2run(self, rowindex: int, colindex: int): # events_data['columns'] = [[{'source1': target1}], # [{'source2': target2}], # [..]] - mapping = self.events_columns.item(rowindex, colindex).text().strip() - LOGGER.verbose(f"User sets the column name to: '{mapping}' for {self.target_run}") - if mapping: # Evaluate and store the data - nrows = self.events_columns.rowCount() + input = self.events_columns.item(rowindex, 0).text().strip() if self.events_columns.item(rowindex, 0) else '' + output = self.events_columns.item(rowindex, 1).text().strip() if self.events_columns.item(rowindex, 1) else '' + nrows = self.events_columns.rowCount() + LOGGER.verbose(f"User sets the column {colindex} to: '{input}: {output}' for {self.target_run}") + if input and output: # Evaluate and store the data if rowindex == nrows - 1: - self.target_run.events['columns'].append(ast.literal_eval(mapping)) + self.target_run.events['columns'].append({output: input}) self.events_columns.insertRow(nrows) else: - self.target_run.events['columns'][rowindex] = ast.literal_eval(mapping) - else: # Remove the column + self.target_run.events['columns'][rowindex] = {output: input} + elif rowindex < nrows - 1: # Remove the row del self.target_run.events['columns'][rowindex] self.events_columns.blockSignals(True) # Not sure if this is needed? 
self.events_columns.removeRow(rowindex) @@ -1716,6 +1719,7 @@ def change_run(self, suffix_idx): if val and key in self.target_run.bids and not self.target_run.bids[key]: self.target_run.bids[key] = val self.target_run.meta = template_run.meta.copy() + self.target_run.events = copy.deepcopy(template_run.events) # Reset the edit window with the new target_run self.reset(refresh=True) @@ -1791,10 +1795,11 @@ def reset(self, refresh: bool=False): self.fill_table(self.attributes_table, attributes_data) self.fill_table(self.bids_table, bids_data) self.fill_table(self.meta_table, meta_data) - self.fill_table(self.events_time, events_data['time']) - self.fill_table(self.events_rows, events_data['rows']) - self.fill_table(self.events_columns, events_data['columns']) - self.fill_table(self.events_table, events_data['table']) + if events_data: + self.fill_table(self.events_time, events_data['time']) + self.fill_table(self.events_rows, events_data['rows']) + self.fill_table(self.events_columns, events_data['columns']) + self.fill_table(self.events_table, events_data['table']) # Refresh the BIDS output name self.refresh_bidsname() diff --git a/bidscoin/bidsmapper.py b/bidscoin/bidsmapper.py index ec145ae6..ffae54a6 100755 --- a/bidscoin/bidsmapper.py +++ b/bidscoin/bidsmapper.py @@ -100,8 +100,7 @@ def bidsmapper(sourcefolder: str, bidsfolder: str, bidsmap: str, template: str, bidsmap_old = copy.deepcopy(bidsmap_new) # Import the data scanning plugins - plugins = [bcoin.import_plugin(plugin, ('bidsmapper_plugin',)) for plugin in bidsmap_new.plugins] - plugins = [plugin for plugin in plugins if plugin] # Filter the empty items from the list + plugins = [plugin for name in bidsmap_new.plugins if (plugin := bcoin.import_plugin(name, ('bidsmapper_plugin',)))] if not plugins: LOGGER.warning(f"The plugins listed in your bidsmap['Options'] did not have a usable `bidsmapper_plugin` function, nothing to do") LOGGER.info('-------------- FINISHED! ------------') diff --git a/bidscoin/heuristics/bidsmap_dccn.yaml b/bidscoin/heuristics/bidsmap_dccn.yaml index ef76fe6d..f6ef2b4a 100644 --- a/bidscoin/heuristics/bidsmap_dccn.yaml +++ b/bidscoin/heuristics/bidsmap_dccn.yaml @@ -38,6 +38,8 @@ Options: anon: y # Set this anonymization flag to 'y' to round off age and discard acquisition date from the meta data meta: [.json, .tsv, .tsv.gz] # The file extensions of the equally named metadata sourcefiles that are copied over to the BIDS sidecar files fallback: y # Appends unhandled dcm2niix suffixes to the `acq` label if 'y' (recommended, else the suffix data is discarded) + events2bids: + meta: [.json, .tsv, .tsv.gz] DICOM: @@ -1009,10 +1011,10 @@ Presentation: subject: <> # This filesystem property extracts the subject label from the source directory. NB: Any property or attribute can be used as subject-label, e.g. session: <> # This filesystem property extracts the subject label from the source directory. NB: Any property or attribute can be used as session-label, e.g. 
- beh: # ----------------------- All behavioural runs ------------------- + beh: # ----------------------- All behavioural runs ------------------- - attributes: &presentationent_attr Scenario: - bids: &presentationent_func # See: schema/rules/files/raw/func.yaml + bids: &presentationent_beh # See: schema/rules/files/raw/task.yaml task: acq: run: <<>> @@ -1040,34 +1042,34 @@ Presentation: start: Code: 10 # Code with which the first (or any) pulse is logged - eeg: # ----------------------- All EEG runs --------------------------- + eeg: # ----------------------- All EEG runs --------------------------- - attributes: *presentationent_attr - bids: *presentationent_func + bids: *presentationent_beh meta: *presentation_func_meta events: *presentation_events ieeg: # ----------------------- All iEEG runs -------------------------- - attributes: *presentationent_attr - bids: *presentationent_func + bids: *presentationent_beh meta: *presentation_func_meta events: *presentation_events - meg: # ----------------------- All MEG runs --------------------------- + meg: # ----------------------- All MEG runs --------------------------- - attributes: *presentationent_attr - bids: *presentationent_func + bids: *presentationent_beh meta: *presentation_func_meta events: *presentation_events - nirs: # ----------------------- All nirs runs -------------------------- + nirs: # ----------------------- All NIRS runs -------------------------- - attributes: *presentationent_attr - bids: *presentationent_func + bids: *presentationent_beh meta: *presentation_func_meta events: *presentation_events func: # ----------------------- All functional runs -------------------- - attributes: *presentationent_attr bids: - <<: *presentationent_func + <<: *presentationent_beh ce: dir: rec: @@ -1079,7 +1081,7 @@ Presentation: <<: *presentationent_attr Scenario: .* bids: - <<: *presentationent_func + <<: *presentationent_beh ce: dir: rec: @@ -1088,13 +1090,13 @@ Presentation: extra_data: # ----------------------- All extra data ------------------------- - attributes: *presentationent_attr - bids: *presentationent_func + bids: *presentationent_beh meta: *presentation_func_meta events: *presentation_events exclude: # ----------------------- Data that will be left out ------------- - attributes: *presentationent_attr - bids: *presentationent_func + bids: *presentationent_beh meta: *presentation_func_meta events: *presentation_events diff --git a/bidscoin/plugins/events2bids.py b/bidscoin/plugins/events2bids.py index 4b69b175..7484d515 100644 --- a/bidscoin/plugins/events2bids.py +++ b/bidscoin/plugins/events2bids.py @@ -9,7 +9,6 @@ from bidscoin import bids from bidscoin.bids import BidsMap, DataFormat, EventsParser, is_hidden, Plugin # from convert_eprime.utils import remove_unicode -# from convert_eprime.tests.utils import get_test_data_path LOGGER = logging.getLogger(__name__) diff --git a/docs/plugins.rst b/docs/plugins.rst index ea8895fa..6d853248 100644 --- a/docs/plugins.rst +++ b/docs/plugins.rst @@ -24,6 +24,11 @@ Nibabel2bids: a generic plugin for imaging data The nibabel2bids plugin wraps around the versatile `nibabel `__ tool to convert a wide variety of data formats into NIfTI-files. Currently, the default template bidsmap is tailored to NIfTI source data only (but this can readily be extended), and BIDS sidecar files are not automatically produced by nibabel (but see the note further below). 
 Please cite: `DOI: 10.5281/zenodo.591597 <https://doi.org/10.5281/zenodo.591597>`__
 
+Events2bids: a plugin for NeuroBS Presentation log data
+-------------------------------------------------------
+
+The events2bids plugin parses `NeuroBS <https://www.neurobs.com>`__ stimulus Presentation log files into BIDS task events files.
+
 .. note:: Out of the box, BIDScoin plugins typically produce sidecar files that contain metadata from the source headers. However, when such meta-data is missing (e.g. as for nibabel2bids), or when it needs to be appended or overruled, then users can add sidecar files to the source data (as explained `here <./bidsmap.html>`__) or add that meta-data using the bidseditor (the latter takes precedence).
 
@@ -42,6 +47,8 @@ In short, the purpose of the plugin is to interact with the data, by providing t
 - **bidsmapper_plugin()**: From a given session folder, identify the different runs (source datatypes) and, if they haven't been discovered yet, add them to the study bidsmap
 - **bidscoiner_plugin()**: From a given session folder, identify the different runs (source datatypes) and convert them to BIDS output files using the mapping data specified in the runitem
 
+Optionally, an ``EventsParser()`` class can be defined to convert stimulus presentation log data into task events files. This class inherits from the class of the same name in the ``bids`` module and should implement an initial parsing of the source data into a Pandas DataFrame (table).
+
 The above API is illustrated in more detail in the placeholder Python code below. For real world examples you best first take a look at the nibabel2bids plugin, which exemplifies a clean and fairly minimal implementation of the required functionality. A similar, but somewhat more elaborated implementation (supporting multiple dataformats) can be found in the spec2nii2bids plugin. Finally, the dcm2niix2bids plugin is the more complicated example, due to the logic needed to deal with special output files and various irregularities.
 
 .. code-block:: python3
@@ -49,7 +56,7 @@ The above API is illustrated in more detail in the placeholder Python code below
     import logging
     from pathlib import Path
     from bidscoin.due import due, Doi
-    from bidscoin.bids import BidsMap, is_hidden
+    from bidscoin.bids import BidsMap, EventsParser, is_hidden
 
     LOGGER = logging.getLogger(__name__)
 
@@ -242,4 +249,27 @@ The above API is illustrated in more detail in the placeholder Python code below
             metadata = bids.poolmetadata(run.datasource, sidecar, run.meta, ext_meta)
             save(sidecar, metadata)
+
+    class PresentationEvents(EventsParser):
+        """Parser for stimulus presentation logfiles"""
+
+        def __init__(self, sourcefile: Path, _data):
+            """
+            Reads the event table from a logfile
+
+            :param sourcefile: The full filepath of the logfile
+            :param _data:      The run['events'] data (from a bidsmap)
+            """
+
+            super().__init__(sourcefile, _data)
+
+            # Parse an initial table from the Presentation logfile
+            self.sourcetable = pd.read_csv(self.sourcefile, sep='\t', skiprows=3, skip_blank_lines=True)
+
+        @property
+        def logtable(self) -> pd.DataFrame:
+            """Returns the source logging data"""
+
+            return self.sourcetable
+
 *Plugin placeholder code, illustrating the structure of a plugin with minimal functionality*
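
As a supplement to the placeholder code above, the sketch below illustrates (outside of BIDScoin) how the ``events`` mapping that this patch edits in the bidseditor tables, i.e. a list of ``{output: input}`` column mappings plus ``rows`` conditions of the form ``{'include': {column_in: regex}, 'cast': {column_out: newvalue}}``, could be applied to a parsed Presentation logtable with pandas. This is a minimal sketch and not BIDScoin's actual implementation: the logfile layout, the column names, the regex matching behaviour and the helper functions are assumptions. Note that the placeholder code above also assumes that ``pandas`` has been imported as ``pd``.

.. code-block:: python3

    from pathlib import Path

    import pandas as pd

    # Hypothetical run['events'] mapping, mirroring the structure edited in the bidseditor tables:
    # 'columns' is a list of {output_column: input_column} dicts and 'rows' is a list of
    # {'include': {column_in: regex}, 'cast': {column_out: newvalue}} condition dicts
    events = {'columns': [{'onset': 'Time'}, {'duration': 'Duration'}, {'trial_type': 'Code'}],
              'rows':    [{'include': {'Event Type': 'Picture|Sound'}, 'cast': {'trial_type': 'stimulus'}}]}


    def parse_logfile(sourcefile: Path) -> pd.DataFrame:
        """Read a tab-separated Presentation logfile (assumed layout: 3 header lines, then the log table)"""

        return pd.read_csv(sourcefile, sep='\t', skiprows=3, skip_blank_lines=True)


    def map_events(logtable: pd.DataFrame, events: dict) -> pd.DataFrame:
        """Apply the 'columns' and 'rows' mappings of the (hypothetical) events dict to the parsed logtable"""

        # Select and rename the output columns according to the {output: input} mappings
        table = pd.DataFrame({output: logtable[source] for mapping in events['columns']
                              for output, source in mapping.items()})

        # Keep only the rows matched by an 'include' condition and apply its optional 'cast' values
        included = pd.Series(False, index=logtable.index)
        for condition in events['rows']:
            rowmask = pd.Series(True, index=logtable.index)
            for column, pattern in condition['include'].items():
                rowmask &= logtable[column].astype(str).str.fullmatch(str(pattern))
            for column, newvalue in (condition.get('cast') or {}).items():
                table.loc[rowmask, column] = newvalue
            included |= rowmask

        return table[included]


    if __name__ == '__main__':
        logtable = parse_logfile(Path('sub-01_task-demo.log'))      # Hypothetical Presentation logfile
        print(map_events(logtable, events))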