Skip to content

Commit

Permalink
Add option to select a Presentation source table
Browse files Browse the repository at this point in the history
  • Loading branch information
marcelzwiers committed Nov 1, 2024
1 parent 6dead00 commit 833f75d
Show file tree
Hide file tree
Showing 4 changed files with 45 additions and 23 deletions.
8 changes: 5 additions & 3 deletions bidscoin/bids.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,17 +62,18 @@
class EventsParser(ABC):
"""Parser for stimulus presentation logfiles"""

def __init__(self, sourcefile: Path, eventsdata: dict, options: dict):
    """
    Reads the events table from the events logfile

    :param sourcefile: The full filepath of the raw logfile
    :param eventsdata: The run['events'] data (from a bidsmap)
    :param options:    The plugin options
    """

    self.sourcefile = sourcefile    # Full filepath of the raw stimulus-presentation logfile
    self._data      = eventsdata    # The run['events'] bidsmap data backing this parser
    self.options    = options       # The plugin options (e.g. which source table to parse)

def __repr__(self):

Expand Down Expand Up @@ -121,6 +122,7 @@ def eventstable(self) -> pd.DataFrame:

# Loop over the row groups to filter/edit the rows
rows = pd.Series([len(self.rows) == 0] * len(df)).astype(bool) # Series with True values if no row expressions were specified
rows.index = df.index # Make sure the indices align
for group in self.rows:

for column, regex in group['include'].items():
def eventsparser(self) -> EventsParser:
    """
    Returns the dataformat-specific EventsParser for this run's events data, instantiated with
    the matching plugin's options (or None if no plugin provides a parser for this dataformat)
    """

    for name in self.plugins:
        if plugin := bcoin.import_plugin(name, (f"{self.dataformat}Events",)):
            # Pass the plugin options along so the parser can e.g. select the source table
            return getattr(plugin, f"{self.dataformat}Events")(self.provenance, self.events, self.plugins[name])


class DataType:
Expand Down
10 changes: 5 additions & 5 deletions bidscoin/bidseditor.py
Original file line number Diff line number Diff line change
Expand Up @@ -1281,7 +1281,7 @@ def fill_table(self, table: MyQTable, data: list):

elif header:
if i == 0: # The first/header row of the data has the column names
table.setHorizontalHeaderLabels(item.get('value') for item in row)
table.setHorizontalHeaderLabels(str(item.get('value')) for item in row)
continue
i -= 1 # Account for the header row

Expand Down Expand Up @@ -1310,7 +1310,7 @@ def fill_table(self, table: MyQTable, data: list):
myitem.setToolTip(get_entityhelp(key))
elif tablename == 'meta' and j == 0:
myitem.setToolTip(get_metahelp(key))
elif tablename == 'events_columns' and i == 1:
elif tablename == 'events_columns' and j == 1:
myitem.setToolTip(get_eventshelp(itemvalue))
table.setItem(i, j, myitem)

Expand Down Expand Up @@ -1423,18 +1423,18 @@ def run2data(self) -> tuple:
events_data['columns'] = [[{'value': 'input', 'editable': False}, {'value': 'output', 'editable': False}]]
for mapping in runitem.events.get('columns') or []:
for key, value in mapping.items():
events_data['columns'].append([{'value': value, 'editable': True}, {'value': key, 'editable': True}])
events_data['columns'].append([{'value': value, 'editable': True}, {'value': key, 'editable': key not in ('onset','duration')}])

# Set up the data for the events table
parser = runitem.eventsparser()
if parser:
df = parser.logtable
events_data['log_table'] = [[{'value': name, 'editable': False} for name in df.columns]]
events_data['log_table'] = [[{'value': name, 'editable': False} for name in df.columns]] if len(df) else []
for i in range(len(df)):
events_data['log_table'].append([{'value': value, 'editable': False} for value in df.iloc[i]])

df = parser.eventstable
events_data['table'] = [[{'value': name, 'editable': False} for name in df.columns]]
events_data['table'] = [[{'value': name, 'editable': False} for name in df.columns]] if len(df) else []
for i in range(len(df)):
events_data['table'].append([{'value': value, 'editable': False} for value in df.iloc[i]])
else:
Expand Down
3 changes: 2 additions & 1 deletion bidscoin/heuristics/bidsmap_dccn.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,8 @@ Options:
meta: [.json, .tsv, .tsv.gz] # The file extensions of the equally named metadata sourcefiles that are copied over to the BIDS sidecar files
fallback: y # Appends unhandled dcm2niix suffixes to the `acq` label if 'y' (recommended, else the suffix data is discarded)
events2bids:
meta: [.json, .tsv, .tsv.gz]
table: event # The table that is used to generate the output table (https://www.neurobs.com/pres_docs/html/03_presentation/07_data_reporting/01_logfiles/index.html)
meta: [.json, .tsv]


DICOM:
Expand Down
47 changes: 33 additions & 14 deletions bidscoin/plugins/events2bids.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
LOGGER = logging.getLogger(__name__)

# The default/fallback options that are set when installing/using the plugin:
# - table: the Presentation log-table that is used to generate the output table ('event', 'stimulus' or 'video')
# - meta:  the file extensions of the equally named metadata sourcefiles that are copied over as BIDS sidecar files
OPTIONS = Plugin({'table': 'event', 'meta': ['.json', '.tsv']})


def test(options: Plugin=OPTIONS) -> int:
Expand Down Expand Up @@ -228,28 +228,47 @@ def bidscoiner_plugin(session: Path, bidsmap: BidsMap, bidsses: Path) -> None:
class PresentationEvents(EventsParser):
"""Parser for Presentation (Neurobs) logfiles"""

def __init__(self, sourcefile: Path, _data: dict, options: dict):
    """
    Reads the log-tables from the Presentation logfile

    :param sourcefile: The full filepath of the logfile
    :param _data:      The run['events'] data (from a bidsmap)
    :param options:    The plugin options (including the 'table' source-table selection)
    """

    super().__init__(sourcefile, _data, options)

    # Read the stacked log-tables from the Presentation logfile; the `logtable` property
    # slices out the table selected by the 'table' option
    # (https://www.neurobs.com/pres_docs/html/03_presentation/07_data_reporting/01_logfiles/index.html)
    self._sourcetable = pd.read_csv(self.sourcefile, sep='\t', skiprows=3, skip_blank_lines=True)

@property
def logtable(self) -> pd.DataFrame:
    """
    Returns the Presentation log-table selected by the 'table' plugin option.

    A Presentation logfile contains several stacked tables (event, optionally followed by
    stimulus, video and survey tables), each introduced by a distinctive first-column value
    ('Event Type', 'filename' and 'Time', respectively). This slices out the selected table

    :return: The selected log-table (empty if the requested table is not present)
    """

    nrows    = len(self._sourcetable)
    firstcol = self._sourcetable.iloc[:, 0] if nrows else pd.Series(dtype=object)

    def first_match(marker: str) -> int:
        """Row position of the first `marker` occurrence, or nrows if absent"""
        # NB: `(firstcol == marker).idxmax() or nrows` would misclassify a match at row 0
        # (idxmax -> 0 -> falsy) and breaks on non-default indices, hence the explicit search
        hits = (firstcol == marker).to_numpy().nonzero()[0]
        return int(hits[0]) if len(hits) else nrows

    stimulus_start = first_match('Event Type')
    video_start    = first_match('filename')
    survey_start   = first_match('Time')

    # Determine the header row (None = use the parsed column names) and the data slice
    table = self.options.get('table')
    if table == 'event':
        header, begin, end = None, 0, min(stimulus_start, video_start, survey_start)
    elif table == 'stimulus':
        header, begin, end = stimulus_start, stimulus_start + 1, min(video_start, survey_start)
    elif table == 'video':
        header, begin, end = video_start, video_start + 1, survey_start
    else:
        header, begin, end = None, 0, nrows
        LOGGER.error(f"NOT IMPLEMENTED TABLE: {table}")

    LOGGER.bcdebug(f"Slicing '{table}' sourcetable[{begin}:{end}]")

    logtable = self._sourcetable.iloc[begin:end]
    if header is not None and header < nrows:        # Guard against a missing table (header == nrows)
        logtable = logtable.copy()                   # Don't mutate the cached source table from a property
        logtable.columns = self._sourcetable.iloc[header]

    return logtable

0 comments on commit 833f75d

Please sign in to comment.