diff --git a/.gitignore b/.gitignore index cc6934a..eeb908e 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,6 @@ __pycache__* docs/.DS_Store dist/ openhdemg.egg-info/ - +openhdemg/.DS_Store prove.py prove_storage.py diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 6c6dec4..986bf5c 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -2,17 +2,11 @@ ## Our Pledge -In the interest of fostering an open and welcoming environment, we as -contributors and maintainers pledge to make participation in our project and -our community a harassment-free experience for everyone, regardless of age, body -size, disability, ethnicity, sex characteristics, gender identity and expression, -level of experience, education, socio-economic status, nationality, personal -appearance, race, religion, or sexual identity and orientation. +In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. ## Our Standards -Examples of behavior that contributes to creating a positive environment -include: +Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences @@ -22,55 +16,32 @@ include: Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery and unwelcome sexual attention or - advances +* The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment -* Publishing others' private information, such as a physical or electronic - address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities -Project maintainers are responsible for clarifying the standards of acceptable -behavior and are expected to take appropriate and fair corrective action in -response to any instances of unacceptable behavior. +Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. -Project maintainers have the right and responsibility to remove, edit, or -reject comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct, or to ban temporarily or -permanently any contributor for other behaviors that they deem inappropriate, -threatening, offensive, or harmful. +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 
## Scope -This Code of Conduct applies within all project spaces, and it also applies when -an individual is representing the project or its community in public spaces. -Examples of representing a project or community include using an official -project e-mail address, posting via an official social media account, or acting -as an appointed representative at an online or offline event. Representation of -a project may be further defined and clarified by project maintainers. +This Code of Conduct applies within all project spaces, and it also applies when an individual is representing the project or its community in public spaces. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported by contacting the project team at giacomo.valli@phd.unipd.it. All -complaints will be reviewed and investigated and will result in a response that -is deemed necessary and appropriate to the circumstances. The project team is -obligated to maintain confidentiality with regard to the reporter of an incident. -Further details of specific enforcement policies may be posted separately. +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at openhdemg@gmail.com. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. -Project maintainers who do not follow or enforce the Code of Conduct in good -faith may face temporary or permanent repercussions as determined by other -members of the project's leadership. +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. ## Attribution -This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, -available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html +This Code of Conduct is adapted from the Contributor Covenant homepage, version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html [homepage]: https://www.contributor-covenant.org -For answers to common questions about this code of conduct, see -https://www.contributor-covenant.org/faq +For answers to common questions about this code of conduct, see https://www.contributor-covenant.org/faq diff --git a/README.md b/README.md index a8232b0..cc579f3 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,15 @@ # Welcome to openhdemg +

+ [PyPI badges]

+

@@ -14,7 +25,7 @@ *openhdemg* is an open-source framework written in Python 3 with many functionalities specifically designed for the analysis of High-Density Electromyography (HD-EMG) recordings. Some of its main features are listed below, but there is much more to discover! For a full list of available functions, please refer to the **API reference** section at [www.giacomovalli.com/openhdemg](https://www.giacomovalli.com/openhdemg/). -1. **Load** decomposed HD-EMG files from various sources, such as .mat and .csv files. This allows to interface *openhdemg* with the commonly used softwares like OTBioLab+ or DEMUSE and potentially with any other software. +1. **Load** decomposed HD-EMG files from various sources, such as .mat and .csv files. This allows to interface *openhdemg* with the commonly used softwares like OTBioLab+, DEMUSE, Delsys NeuroMap and potentially with any other software. 2. **Visualise** your EMG or force/reference signal, as well as the motor units' firing times and their action potentials shape. 3. **Edit** your file changing the reference signal offset, filtering noise, calculating differential derivations and removing unwanted motor units. 4. **Analyse** motor units' recruitment/derecruitment thresholds, discharge rate, conduction velocity, action potentials amplitude and more... diff --git a/docs/about-us.md b/docs/about-us.md index 2f437c2..96acd90 100644 --- a/docs/about-us.md +++ b/docs/about-us.md @@ -50,11 +50,11 @@ graph TB; Giacomo Valli: -- giacomo.valli@phd.unipd.it +- giacomo.valli@unibs.it -- The creator of the project and the developer of the library. +- The creator/maintainer of the project and developer of the library. -- Giacomo Valli obtained a master degree in Sports Science and a research fellowship in molecular biology of exercise at the University of Urbino (IT). He is currently a PhD student at the University of Padova (IT) in neuromuscular physiology. He is investigating the electrophysiological modifications happening during disuse, disease and aging and linking this information to the molecular alterations of the muscle. +- Giacomo Valli obtained a master degree in Sports Science and a research fellowship in molecular biology of exercise. He completed the PhD in neuromuscular physiology at the University of Padova (IT) and he is currently a PostDoc fellow at the University of Brescia (IT). His main focus is on investigating electrophysiological modifications that occur during periods of disuse, disease, and aging, and in linking this information to the molecular alterations of the muscle. Paul Ritsche: @@ -64,6 +64,15 @@ Paul Ritsche: - Paul Ritsche obtained a master degree in Sports Science at the University of Basel (CH). He is currently a research associate at the University of Basel (CH) focusing on muscle ultrasonography. He is investigating automatic ultrasonography image analysis methods to evaluate muscle morphological as well architectural parameters. + +Drew-James Beauchamp: + +- jbeaucha@andrew.cmu.edu + +- Developer of the library. + +- James (Drew) Beauchamp completed his doctoral studies in Engineering at Northwestern University, where his work focused on characterizing the deficits in human motor function that are introduced by neuromodulatory inputs to spinal motoneurons. He is interested in providing creative ways to decouple the structure of descending motor commands and is currently a post-doctoral researcher at Carnegie Mellon University. 
+ ## Meet the contributors Francesco Negro: @@ -74,6 +83,14 @@ Francesco Negro: - Francesco Negro is a Full Professor at the Department of Clinical and Experimental Sciences at Universita’ degli Studi di Brescia (IT). His research interests include applied physiology of the human motor system, signal processing of intramuscular and surface electromyography, and modeling of spinal neural networks. +Gregory EP Pearcey: + +- gpearcey@northwestern.edu + +- Contribution:   :fontawesome-solid-brain: Knowledge sharing   :fontawesome-solid-file-code: Code sharing   :octicons-codescan-checkmark-24: Accuracy check + +- Gregory Pearcey is an Assistant Professor in the School of Human Kinetics and Recreation at Memorial University of Newfoundland and holds a cross-appointment in BioMedical Sciences (Faculty of Medicine, Memorial), as well as an Adjunct Faculty position in Physical Therapy & Human Movement Sciences (Northwestern University). He is interested in decoding the neural control of human movement via recording myoelectric signals from the surface and within human muscle with a goal of understanding and enhancing neuroplasticity and the recovery of motor function after neurological impairment. + Andrea Casolo: - andrea.casolo@unipd.it @@ -89,4 +106,3 @@ Giuseppe De Vito: - Contribution:   :fontawesome-solid-brain: Knowledge sharing - Giuseppe De Vito is a full Professor of Human Physiology in the Department of Biomedical Sciences at University of Padova (IT). He was, from 2007 until 2019, Professor and Dean in the School of Public Health, Physiotherapy & Sports Science at University College Dublin (IE) (Head of School between 2014 and 2019). Giuseppe does research in Human and Exercise Physiology. - diff --git a/docs/api_analysis.md b/docs/api_analysis.md index 76a82e9..efe84d9 100644 --- a/docs/api_analysis.md +++ b/docs/api_analysis.md @@ -1,7 +1,6 @@ Description ----------- -This module contains all the functions used to analyse the MUs properties when -not involving the MUs action potential shape. +This module contains all the functions used to analyse the MUs properties when not involving the MUs action potential shape.
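As a quick orientation, a minimal sketch of a typical call is shown below (assuming the sample file shipped with the library; the exact arguments of `basic_mus_properties` are described in this module's reference and, depending on the defaults, the function may ask for additional input such as the MVC value):

```Python
import openhdemg.library as emg

# Load the sample decomposition shipped with the library
emgfile = emg.emg_from_samplefile()

# Summarise MU properties (recruitment/derecruitment thresholds, discharge
# rate, ...). Keyword arguments are omitted here; check the reference below.
results = emg.basic_mus_properties(emgfile)
print(results)
```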
diff --git a/docs/api_compatibility.md b/docs/api_compatibility.md new file mode 100644 index 0000000..13dcde7 --- /dev/null +++ b/docs/api_compatibility.md @@ -0,0 +1,14 @@ +Description +----------- +This module contains the functions used to ensure the compatibility of file formats saved with previous versions of *openhdemg* with the most recent versions of the library. + +
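A minimal usage sketch (the same pattern is shown step by step in the dedicated tutorial on converting old .json files):

```Python
from openhdemg.compatibility import convert_json_output

# Pick one or more old .json files via a graphical file picker and save the
# converted copies, appending "converted" to their file name.
convert_json_output(gui=True, append_name="converted")
```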
+ +::: openhdemg.compatibility.conversions.convert_json_output + options: + show_root_full_path: False + show_root_heading: True + merge_init_into_class: True + show_if_no_docstring: False + +
\ No newline at end of file diff --git a/docs/api_muap.md index cfa3fae..f8c25c7 100644 --- a/docs/api_muap.md +++ b/docs/api_muap.md @@ -19,6 +19,13 @@ This module contains functions to produce and analyse MUs action potentials
+::: openhdemg.library.muap.extract_delsys_muaps + options: + show_root_full_path: False + show_root_heading: True + +
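As a quick sketch of how the Delsys MUAPs can be accessed (the loader argument names below are assumptions for illustration only; check `emg_from_delsys` in the openfiles module for the exact signature):

```Python
import openhdemg.library as emg

# Load a Delsys recording; the argument names here are placeholders,
# see the emg_from_delsys documentation for the exact parameters.
emgfile = emg.emg_from_delsys(
    rawemg_filepath="path/to/raw_emg.mat",
    mus_directory="path/to/decomposition_txt_folder",
)

# Retrieve the MUAPs computed by the Delsys software (stored under "EXTRAS")
muaps = emg.extract_delsys_muaps(emgfile)
```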
+ ::: openhdemg.library.muap.sta options: show_root_full_path: False diff --git a/docs/api_openfiles.md b/docs/api_openfiles.md index a7a6788..dec6bc7 100644 --- a/docs/api_openfiles.md +++ b/docs/api_openfiles.md @@ -1,6 +1,6 @@ Description ----------- -This module contains all the functions that are necessary to open or save MATLAB (.mat), JSON (.json) or custom (.csv) files. .mat files are currently used to store data from the DEMUSE and the OTBiolab+ software, while .csv files are used to store custom data. Instead, .json files are used to save and load files from this library.
+This module contains all the functions that are necessary to open or save MATLAB (.mat), text (.txt), JSON (.json) or custom (.csv) files. MATLAB files are used to store data from the DEMUSE, OTBiolab+ and Delsys software while JSON files are used to save and load files from this library.
The choice of saving files in the open standard JSON file format was preferred over the MATLAB file format since it has a better integration with Python and has a very high cross-platform compatibility. Function's scope @@ -9,10 +9,12 @@ Function's scope Used to load the sample file provided with the library. - **emg_from_otb** and **emg_from_demuse**:
Used to load .mat files coming from the DEMUSE or the OTBiolab+ software. Demuse has a fixed file structure while the OTB file, in order to be compatible with this library should be exported with a strict structure as described in the function emg_from_otb. In both cases, the input file is a .mat file. +- **emg_from_delsys**:
+ Used to load a combination of .mat and .txt files exported by the Delsys Neuromap and Neuromap explorer software containing the raw EMG signal and the decomposition outcome. - **emg_from_customcsv**:
Used to load custom file formats contained in .csv files. -- **refsig_from_otb** and **refsig_from_customcsv**:
- Used to load files from the OTBiolab+ software or from a custom .csv file that contain only the REF_SIGNAL. +- **refsig_from_otb**, **refsig_from_delsys** and **refsig_from_customcsv**:
+ Used to load files from the OTBiolab+ (.mat) and the Delsys Neuromap software (.mat) or from a custom .csv file that contain only the reference signal. - **save_json_emgfile**, **emg_from_json**:
Used to save the working file to a .json file or to load the .json file. - **askopenfile**, **asksavefile**:
@@ -20,14 +22,14 @@ Function's scope Notes ----- -Once opened, the file is returned as a dictionary with keys:
+Once opened, the file is returned as a dictionary with key-value pairs:
-"SOURCE" : source of the file (i.e., "CUSTOMCSV", "DEMUSE", "OTB")
+"SOURCE" : source of the file (i.e., "CUSTOMCSV", "DEMUSE", "OTB", "DELSYS")
"FILENAME" : the name of the opened file
"RAW_SIGNAL" : the raw EMG signal
"REF_SIGNAL" : the reference signal
"ACCURACY" : accuracy score (depending on source file type)
-"IPTS" : pulse train (decomposed source)
+"IPTS" : pulse train (decomposed source, depending on source file type)
"MUPULSES" : instants of firing
"FSAMP" : sampling frequency
"IED" : interelectrode distance
@@ -38,7 +40,7 @@ Once opened, the file is returned as a dictionary with keys:
The only exception is when files are loaded with just the reference signal: -"SOURCE" : source of the file (i.e., "CUSTOMCSV_REFSIG", "OTB_REFSIG")
+"SOURCE" : source of the file (i.e., "CUSTOMCSV_REFSIG", "OTB_REFSIG", "DELSYS_REFSIG")
"FILENAME" : the name of the opened file
"FSAMP" : sampling frequency
"REF_SIGNAL" : the reference signal
@@ -66,14 +68,14 @@ Furthermore, all the users are encouraged to read the dedicated tutorial [Struct
-::: openhdemg.library.openfiles.refsig_from_otb +::: openhdemg.library.openfiles.emg_from_demuse options: show_root_full_path: False show_root_heading: True
-::: openhdemg.library.openfiles.emg_from_demuse +::: openhdemg.library.openfiles.emg_from_delsys options: show_root_full_path: False show_root_heading: True @@ -87,6 +89,20 @@ Furthermore, all the users are encouraged to read the dedicated tutorial [Struct
+::: openhdemg.library.openfiles.refsig_from_otb + options: + show_root_full_path: False + show_root_heading: True + +
+ +::: openhdemg.library.openfiles.refsig_from_delsys + options: + show_root_full_path: False + show_root_heading: True + +
+ ::: openhdemg.library.openfiles.refsig_from_customcsv options: show_root_full_path: False diff --git a/docs/cite-us.md b/docs/cite-us.md index 1fae751..5416566 100644 --- a/docs/cite-us.md +++ b/docs/cite-us.md @@ -1,3 +1,5 @@ -If you use *openhdemg* for your reaserch, please cite the project as: +If you use *openhdemg* for your reaserch, please cite our [tutorial article](/isek_jek_tutorials#jek-tutorial-article). Any citation will help us to continue our work. -*We are working to publish soon..* \ No newline at end of file +Cite us as: + +*Valli G, Ritsche P, Casolo A, Negro F, De Vito G.* **Tutorial: analysis of central and peripheral motor unit properties from decomposed High-Density surface EMG signals with openhdemg.** Journal of Electromyography and Kinesiology, 2023, 102850, ISSN 1050-6411, [https://doi.org/10.1016/j.jelekin.2023.102850](https://doi.org/10.1016/j.jelekin.2023.102850){:target="_blank"}. \ No newline at end of file diff --git a/docs/gui_advanced.md b/docs/gui_advanced.md index 286d27d..4c84d3d 100644 --- a/docs/gui_advanced.md +++ b/docs/gui_advanced.md @@ -2,6 +2,8 @@ This is the toturial for the `Advanced Tools` in the *openhdemg* GUI. Great that you made it this far! In the next few sections we will take a look at the more advanced functions implemented in the GUI. But first of all, you need to click the `Advanced Tools`button in the main window of the GUI to get to the respective adavanced analysis. The `Advanced Tools Window` will open. +Please note, the `Advanced Tools` might not be available for all the files, as some of them might not have a sufficient number of electrodes to directly perform the advanced analyses. If you want to use the advanced tools anyway, you can still do so from the library. + ![advanced_analysis](md_graphics/gui/advanced_analysis_window.png) So far, we have included three advanced analyses in the *openhdemg* GUI. diff --git a/docs/gui_basics.md b/docs/gui_basics.md index ac9625f..ad6ed17 100644 --- a/docs/gui_basics.md +++ b/docs/gui_basics.md @@ -29,7 +29,9 @@ To remove MUs included in your analysis file, you can click the `Remove MUs` but ``` will result in the second MU to be deleted (Python is base 0). -4. Click the `Remove MU` button to remove the MU. +4. Click the `Remove MU` button to remove the MU. + +Alternatively, you can click the `Remove empty MUs` button to delete all the MUs without discharge times. These can be present in the emgfile as the result of decomposed duplicates that have not been fully removed. ## Reference Signal Editing The *openhdemg* GUI also allows you to edit and filter reference signals corresponding to your analysis file (this can be either a file containing both the MUs and the reference signal or a file containing only the reference signal). diff --git a/docs/gui_intro.md b/docs/gui_intro.md index 0542f12..83467cd 100644 --- a/docs/gui_intro.md +++ b/docs/gui_intro.md @@ -24,10 +24,12 @@ This is your starting point for every analysis. On the left hand side you can fi 1. In order to load file into the GUI, you first need to select something in the **Type of file** dropdown box at the top left corner. 
The available filetypes are: - - `OTB` (.mat file exportable by OTBiolab+) + - `OPENHDEMG` (emgfile or reference signal stored in .json format) - `DEMUSE` (.mat file used in DEMUSE) + - `OTB` (.mat file exportable by OTBiolab+) - `OTB_REFSIG` (Reference signal in the .mat file exportable by OTBiolab+) - - `OPENHDEMG` (emgfile or reference signal stored in .json format) + - `DELSYS` (.mat and .txt files exportable by Delsys software) + - `DELSYS_REFSIG` (.mat file exportable by Delsys software) - `CUSTOMCSV` (custom data from a .csv file) - `CUSTOMCSV_REFSIG` (Reference signal in a custom .csv file) diff --git a/docs/index.md b/docs/index.md index 2f6171e..f9fbb8c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,4 +1,15 @@ # Welcome to openhdemg +

+ [PyPI badges]

+
![banner logo](md_graphics/index/banner_logo.png) @@ -11,7 +22,7 @@ *openhdemg* is an open-source framework written in Python 3 with many functionalities specifically designed for the analysis of High-Density Electromyography (HD-EMG) recordings. Some of its main features are listed below, but there is much more to discover! For a full list of available functions, please refer to the **API reference** section. -1. **Load** decomposed HD-EMG files from various sources, such as .mat and .csv files. This allows to interface *openhdemg* with the commonly used softwares like OTBioLab+ or DEMUSE and potentially with any other software. +1. **Load** decomposed HD-EMG files from various sources, such as .mat and .csv files. This allows to interface *openhdemg* with the commonly used softwares like OTBioLab+, DEMUSE, Delsys NeuroMap and potentially with any other software. 2. **Visualise** your EMG or force/reference signal, as well as the motor units' firing times and their action potentials shape. 3. **Edit** your file changing the reference signal offset, filtering noise, calculating differential derivations and removing unwanted motor units. 4. **Analyse** motor units' recruitment/derecruitment thresholds, discharge rate, conduction velocity, action potentials amplitude and more... diff --git a/docs/isek_jek_tutorials.md b/docs/isek_jek_tutorials.md new file mode 100644 index 0000000..ea17874 --- /dev/null +++ b/docs/isek_jek_tutorials.md @@ -0,0 +1,40 @@ +# ISEK-JEK Tutorials + +Welcome to the page dedicated to our dissemination activity with the International Society of Electrophysiology and Kinesiology (ISEK) and the Journal of Electromyography and Kinesiology (JEK). + +
+ +## JEK Tutorial article + +**Tutorial: analysis of central and peripheral motor unit properties from decomposed High-Density surface EMG signals with *openhdemg*.** + +*[https://doi.org/10.1016/j.jelekin.2023.102850](https://doi.org/10.1016/j.jelekin.2023.102850){:target="_blank"}* + +Here you can find the manuscript, published open access and freely available to anybody, along with all the up-to-date material necessary to follow the tutorial article using *openhdemg*. + +You can download the sample files and the sample scripts [here](https://drive.google.com/drive/folders/1lxXSVTDg7eOntkmapIbGQwAvxrQY96oM?usp=sharing){:target="_blank"}. We hope you find this work useful. If that's the case, please cite it in your research; it will help us continue the development of *openhdemg*. + +[Download files   :fontawesome-solid-download:](https://drive.google.com/drive/folders/1lxXSVTDg7eOntkmapIbGQwAvxrQY96oM?usp=sharing){:target="_blank", .md-button .md-button--primary } +
+ + + +
+ +## 2023 ISEK Workshop + +**Workshop: Simplified analysis of motor unit properties with *openhdemg*.** + +On the 26th of June 2024, we will present the *openhdemg* framework at the ISEK congress, which will be held in Nagoya, Japan. + +Below, you can find a preliminary design of the workshop and the fantastic speakers who will participate. We look forward to seeing you there! + +More info will come soon. + +
+ +*Click on the image for full-screen mode* + +[![preliminary_workshop_design](md_graphics/isek_jek_tutorials/preliminary_workshop_design.png)](md_graphics/isek_jek_tutorials/preliminary_workshop_design.png){:target="_blank"} + diff --git a/docs/md_graphics/gui/remove_mu_window.png b/docs/md_graphics/gui/remove_mu_window.png index fe79a04..a1b0f2a 100644 Binary files a/docs/md_graphics/gui/remove_mu_window.png and b/docs/md_graphics/gui/remove_mu_window.png differ diff --git a/docs/md_graphics/isek_jek_tutorials/preliminary_workshop_design.png b/docs/md_graphics/isek_jek_tutorials/preliminary_workshop_design.png new file mode 100644 index 0000000..d3ca100 Binary files /dev/null and b/docs/md_graphics/isek_jek_tutorials/preliminary_workshop_design.png differ diff --git a/docs/online_pdfs/pre-proof_valli_et_al_2023_jek.pdf b/docs/online_pdfs/pre-proof_valli_et_al_2023_jek.pdf new file mode 100644 index 0000000..2b3595f Binary files /dev/null and b/docs/online_pdfs/pre-proof_valli_et_al_2023_jek.pdf differ diff --git a/docs/overrides/custom.css b/docs/overrides/custom.css new file mode 100644 index 0000000..e69de29 diff --git a/docs/overrides/main.html b/docs/overrides/main.html new file mode 100644 index 0000000..c88118d --- /dev/null +++ b/docs/overrides/main.html @@ -0,0 +1,15 @@ +{% extends "base.html" %} + + + +{% block announce %} +
+
+

+ 🎉🎉 Exciting News!
+ We are thrilled to announce the publication of our Tutorial article!
+ Read it Now
+
+{% endblock %} \ No newline at end of file diff --git a/docs/tutorials/convert_old_json_files.md b/docs/tutorials/convert_old_json_files.md new file mode 100644 index 0000000..7c69d6e --- /dev/null +++ b/docs/tutorials/convert_old_json_files.md @@ -0,0 +1,61 @@ +As *openhdemg* evolves and introduces new features, migrating to the latest version becomes essential for leveraging optimized functions and improved capabilities. However, it is not always possible to implement new or optimized functionalities without altering the utilized data structure. + +When the data structure is changed, the users of previous versions of *openhdemg* will not be able to access the newer functionalities with the files *they* saved in the older data structure. To overcome this limitation, we created the `convert_json_output` class which facilitates a seamless transition by converting older .json files to the format compatible with the latest *openhdemg* version. + +This tutorial guides you through the process, ensuring a smooth upgrade while maintaining data integrity. + +Why should you convert your files to the newer *openhdemg* versions? + +- Optimized Functionality: Newer *openhdemg* versions come with optimized functions, enhancing performance and providing a more efficient user experience. +- Compatibility: Ensure your data remains compatible with the latest features and improvements introduced in *openhdemg*. + +## From 0.1.0-b2 to 0.1.0-b3 + +The *openhdemg* version 0.1.0-beta.3 introduced noticeable [changes and improvements](../what's-new.md#010-beta3), particularly regarding the speed of saving and loading of .json files. Furthermore, these files are efficiently compressed, so that they occupy less space in your storage. However, to achieve this goal, it was necessary to optimise the default data structure used by *openhdemg* and, as a consequence, the newer *openhdemg* version is not compatible with the files saved from previous *openhdemg* versions. + +In this section of the tutorial we explain how to easily convert the files you saved from *openhdemg* version 0.1.0-beta.2 to make them compatible with *openhdemg* version 0.1.0-beta.3. + +The class necessary to perform this conversion is stored in the [conversions module](../api_compatibility.md) inside the compatibility subpackage and can be imported as + +```Python +# Import the necessary libraries +from openhdemg.compatibility import convert_json_output +``` + +With this class, we can select different methods for converting our files. + +Let's start from the easiest one. Indeed, we can convert one file (or perform a batch conversion of multiple files) with a simple graphical user interface (GUI). If you want to convert one file, select the desired file, if you want to convert more files, select more of them. + +```Python +# Import the necessary libraries +from openhdemg.compatibility import convert_json_output + +# Convert file/s appending "converted" to the name of the converted file. +convert_json_output(gui=True, append_name="converted") +``` + +Alternatively, you can perform the tasks without GUI. In the following example you will convert all the files in a folder and save them in the same location with a different name. + +```Python +# Import the necessary libraries +from openhdemg.compatibility import convert_json_output + +# Specify the path to the folder where the original files are and where the +# converted ones should be. 
+old = "C:/Users/.../test conversions/" +new = "C:/Users/.../test conversions/" + +# Convert them +convert_json_output( + old=old, + new=new, + append_name="converted", + gui=False, +) +``` + +For more options, please refer to the documentation of the [convert_json_output](../api_compatibility.md#openhdemg.compatibility.conversions.convert_json_output) class. + +## More questions? + +We hope that this tutorial was useful. If you need any additional information, do not hesitate to read the answers or ask a question in the [*openhdemg* discussion section](https://github.com/GiacomoValliPhD/openhdemg/discussions){:target="_blank"}. If you are not familiar with GitHub discussions, please read this [post](https://github.com/GiacomoValliPhD/openhdemg/discussions/42){:target="_blank"}. This will allow the *openhdemg* community to answer your questions. \ No newline at end of file diff --git a/docs/tutorials/emgfile_structure.md b/docs/tutorials/emgfile_structure.md index c1389c8..392b4a9 100644 --- a/docs/tutorials/emgfile_structure.md +++ b/docs/tutorials/emgfile_structure.md @@ -36,13 +36,14 @@ print(type(emgfile)) print(emgfile.keys()) """Output -dict_keys(['SOURCE', 'FILENAME', 'RAW_SIGNAL', 'REF_SIGNAL', 'ACCURACY', 'IPTS', 'MUPULSES', 'FSAMP', 'IED', 'EMG_LENGTH', 'NUMBER_OF_MUS', 'BINARY_MUS_FIRING']) +dict_keys(['SOURCE', 'FILENAME', 'RAW_SIGNAL', 'REF_SIGNAL', 'ACCURACY', 'IPTS', 'MUPULSES', 'FSAMP', 'IED', 'EMG_LENGTH', 'NUMBER_OF_MUS', 'BINARY_MUS_FIRING', 'EXTRAS']) """ ``` That means that the `emgfile` contains the following keys (or variables, in simpler terms): -- "SOURCE" : source of the file (i.e., "CUSTOMCSV", "DEMUSE", "OTB") +- "SOURCE" : source of the file (i.e., "CUSTOMCSV", "DEMUSE", "OTB", "DELSYS") +- "FILENAME" : the name of the original file - "RAW_SIGNAL" : the raw EMG signal - "REF_SIGNAL" : the reference signal - "ACCURACY" : accuracy score (depending on source file type) @@ -207,7 +208,7 @@ At the moment, the only alternative to the basic `emgfile` structure is reserved In this case, the `emg_refsig` is a Python dictionary with the following keys: -- "SOURCE": source of the file (i.e., "CUSTOMCSV_REFSIG", "OTB_REFSIG") +- "SOURCE": source of the file (i.e., "CUSTOMCSV_REFSIG", "OTB_REFSIG", "DELSYS_REFSIG") - "FSAMP": sampling frequency - "REF_SIGNAL": the reference signal - "EXTRAS" : additional custom values @@ -235,7 +236,7 @@ emgfile = emg.emg_from_samplefile() print(emgfile.keys()) """Output -dict_keys(['SOURCE', 'FILENAME', 'RAW_SIGNAL', 'REF_SIGNAL', 'ACCURACY', 'IPTS', 'MUPULSES', 'FSAMP', 'IED', 'EMG_LENGTH', 'NUMBER_OF_MUS', 'BINARY_MUS_FIRING']) +dict_keys(['SOURCE', 'FILENAME', 'RAW_SIGNAL', 'REF_SIGNAL', 'ACCURACY', 'IPTS', 'MUPULSES', 'FSAMP', 'IED', 'EMG_LENGTH', 'NUMBER_OF_MUS', 'BINARY_MUS_FIRING', 'EXTRAS']) """ # Visualise the original data structure contained in the 'REF_SIGNAL' key diff --git a/docs/tutorials/import_from_other_software.md b/docs/tutorials/import_from_other_software.md new file mode 100644 index 0000000..17fd8a6 --- /dev/null +++ b/docs/tutorials/import_from_other_software.md @@ -0,0 +1,180 @@ +*openhdemg* is designed to seamlessly interface with various software applications, enabling a smooth integration of data and workflows. + +In this article, we will explore how you can import EMG data from other software into *openhdemg*. + +The *openhdemg* team is committed to simplifying the data import process. 
Through continuous efforts, they regularly introduce new functions designed to automatically load files exported from third-party software. These efforts prioritize compatibility with the most commonly used software applications; however, it's important to note that the provided functions may not always fulfill every user's unique needs. If you cannot find a solution that fits your needs, you can always implement your own function to load any file with the *openhdemg* data structure. More info about the *openhdemg* data structure can be found in the tutorial [Structure of the emgfile](emgfile_structure.md). + +## From .csv files + +Since it is not possible to predict any data format, we created dedicated functions to load any possible dataset from .csv files. Several considerations guided us in choosing the .csv format for loading custom files: + +- Ubiquitous Data Structure: .csv (Comma-Separated Values) stands out as one of the most widely used data structures in data analyses. + +- Universal Export Compatibility: It can be exported from virtually any software, making it a versatile choice for compatibility with various data sources. + +- Language and Software Agnosticism: .csv files can be read by any programming language, ensuring flexibility and ease of integration across diverse software environments, including popular tools like Microsoft Excel. + +- Structured Layout: With its clear organization into columns and rows, .csv simplifies the storage of labeled information. This structured format not only enhances data interpretation but also facilitates seamless integration into analytical workflows. + +Therefore, anybody developing scripts and algorithms for data analysis is familar with the .csv format. If you are not, just think that any table that you usually populate in Microsoft Excel, LibreOffice Calc, Google Sheets and many other tools, can be easily exported in .csv format. + +*openhdemg* currently has two functions dedicated to loading data from .csv files. Please refer to the [API](../api_openfiles.md) of the specific function to see what data your .csv file should contain to be opened in *openhdemg*: + +- [emg_from_customcsv()](../api_openfiles.md#openhdemg.library.openfiles.emg_from_customcsv): This function is used to import the decomposition outcome from a custom .csv file. +- [refsig_from_customcsv()](../api_openfiles.md#openhdemg.library.openfiles.refsig_from_customcsv): This function is used to import the reference signal from a custom .csv file. + +### Export the decomposition outcome + +The decomposition outcome can differ between decomposition algorithms and, therefore, the function [emg_from_customcsv()](../api_openfiles.md#openhdemg.library.openfiles.emg_from_customcsv) is quite flexible, so that it can adapt to different user needs. + +However, some variables must always be present in the exported file. These include: + +- The raw EMG signal. +- At least one of "MUPULSES" (instants of firing) or "BINARY_MUS_FIRING" (binary representation of MUs firings). + +If "MUPULSES" is absent, it will be calculated from "BINARY_MUS_FIRING" and viceversa. + +Other default variables that can be exported include: + +- The reference signal. +- The decomposed source. + +All these variables should be stored in different columns because the import function detects the content of the .csv by parsing the .csv columns. 
When assigning a name to the columns, you can decide to simply use the *openhdemg* standard names or custom ones, as follows: + +**Using the *openhdemg* standard names** + +If you use *openhdemg* standard names for column labels, you won't need to specify these in the import function. Standard names include: + +- Reference Signal: Label the column containing the reference signal "REF_SIGNAL". +- Raw EMG Signal: Label the columns containing the raw EMG signal with "RAW_SIGNAL" + channel number. For example, "RAW_SIGNAL_0", "RAW_SIGNAL_1", "RAW_SIGNAL_2", "RAW_SIGNAL_n-1". +- Pulse Train (decomposed source): Label the column(s) containing the decomposed source with "IPTS" + MU number. For example, "IPTS_0", "IPTS_1", "IPTS_2", "IPTS_n-1". +- Times of Firing (mupulses): Label the column(s) containing the times of firing with "MUPULSES' + MU number. For example, "MUPULSES_0", "MUPULSES_1", "MUPULSES_2", "MUPULSES_n-1". +- Binary MUs Firing: Label the column(s) containing the binary representation of the MUs firings with "BINARY_MUS_FIRING" + MU number. For example, "BINARY_MUS_FIRING_0", "BINARY_MUS_FIRING_1", "BINARY_MUS_FIRING_2", "BINARY_MUS_FIRING_n-1". +- Accuracy Score: Label the column(s) containing the accuracy score of the MUs firings with "ACCURACY" + MU number. For example, "ACCURACY_0", "ACCURACY_1", "ACCURACY_2", "ACCURACY_n-1". + +Interestingly, this function allows also to import additional signals compared to what you saw before. Therefore, you could also export other signals together with what previously mentioned. Please note that files saved with "extra" information can only be loaded from the library (the use of "EXTRAS" is not supported in the graphical user interface) Please read the [specific API](../api_openfiles.md#openhdemg.library.openfiles.emg_from_customcsv) for more information. + +- Extras: Label the column containing custom values with "EXTRAS" or, if more columns are needed, with "EXTRAS" + n. For example, "EXTRAS_0", "EXTRAS_1", "EXTRAS_2", "EXTRAS_n-1". + +If some of the variables are not present, simply don't specify them in your .csv file. + +Once you structured your table in Excel or any other tool, you can easily save it as type: "CSV (Comma delimited)", and that's all it takes. + +**Using custom names** + +Obviously, you have the flexibility to decide any name you want for your columns. However, in this case, you should specify the label you used to describe each variable when calling the function [emg_from_customcsv()](../api_openfiles.md#openhdemg.library.openfiles.emg_from_customcsv). + +Please remember that different representations of the same variable (e.g., the EMG signal from different channels or the accuracy of different MUs) must have the same basic label. For example, the column(s) containing the times of firing should be labeled with "MYLABEL' + MU number (i.e., "MYLABEL_0", "MYLABEL_1", "MYLABEL_2", "MYLABEL_n-1"). + +Although custom column labels can be used, the information that can be exported is the same described above in "**Using the *openhdemg* standard names**". + +If some of the possible variables are not present, simply don't specify them in your .csv file. + +Once you structured your table in Excel or any other tool, you can easily save it as type: "CSV (Comma delimited)", and that's all it takes. + +### Export the reference signal + +In some cases, like for example for MVC trials, you might want to export only the reference signal. Exporting the reference signal is as simple as exporting a table with only one column. 
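As a minimal sketch (assuming the reference signal is already available as a 1D array and using the standard column label), such a file could be produced with pandas:

```Python
import numpy as np
import pandas as pd

# Example data: replace with your own reference (e.g., force) signal
force_values = np.random.rand(2048)

# A single column labelled with the openhdemg standard name
ref = pd.DataFrame({"REF_SIGNAL": force_values})

# Save as "CSV (Comma delimited)"; the resulting file can then be loaded
# with refsig_from_customcsv()
ref.to_csv("my_refsig.csv", index=False)
```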
Also in this case, you can decide to label the column containing the reference signal with *openhdemg* standard names or with custom names. Please refer to the previous section for further details. + +The function [refsig_from_customcsv()](../api_openfiles.md#openhdemg.library.openfiles.refsig_from_customcsv) also allows you to import additional signals. Therefore, you could also export other signals together with the reference signal. Please note that files saved with "extra" information can only be loaded from the library (the use of "EXTRAS" is not supported in the graphical user interface) and the parameter "extras=" must be specified when calling the function. Please read the [specific API](../api_openfiles.md#openhdemg.library.openfiles.refsig_from_customcsv) for more information and read the previous section of this tutorial. + +Once you structured your table in Excel or any other tool, you can easily save it as type: "CSV (Comma delimited)", and that's all it takes.
+ +## From DEMUSE + +For years, DEMUSE has been the only commercially available tool for MUs decomposition not tied to a specific acquisition device. DEMUSE is essentially a MATLAB-based application and it allows you to export files in .mat format. Also in this case, it is quite easy to save files, as the only thing the user needs to do after decomposition and manual editing of the spike train is to click "save results". + +Once the results are saved, these can be loaded in *openhdemg* with the function: + +- [emg_from_demuse()](../api_openfiles.md#openhdemg.library.openfiles.emg_from_demuse): This function is used to import the decomposition outcome saved from the DEMUSE tool.
+ +## From OTBioLab+ + +OTBioLab+ is the software used to record EMG signals from the OTB "Quattrocento", "Sessantaquattro+" and other devices. However, it also allows you to perform MUs decomposition and editing. Once the decomposition is done, the user can export the decomposition outcome in different file formats. Among them, the option to export .mat files is the one that provides the most convenient and easy way to export all the needed information. + +*openhdemg* currently has two functions dedicated to loading data from .mat files exported from OTBioLab+. Please refer to the [API](../api_openfiles.md) of the specific function for more information: + +- [emg_from_otb()](../api_openfiles.md#openhdemg.library.openfiles.emg_from_otb): This function is used to import the decomposition outcome from a .mat file exported from the OTBioLab+ software. +- [refsig_from_otb()](../api_openfiles.md#openhdemg.library.openfiles.refsig_from_otb): This function is used to import the reference signal from a .mat file exported from the OTBioLab+ software. + +Depending on whether you want to export the whole decomposition outcome or only the reference signal, follow these steps:
+ +### Export the decomposition outcome + +Once the decomposition and manual editing are completed, the software saves the decomposition outcome in a new tab. The decomposition outcome includes: + +- Binary firings: the binary representation of the MUs discharge times. This is usually named "Decomposition of ...". +- Decomposed source: the source signal used to detect the discharge times. This is usually named "Source for decomposition ...". + +Before exporting, also copy the following into the new tab containing the decomposition outcome: + +- The EMG signal with all channels, without exception. +- The reference signal. This is usually named "acquired data". + +At this point: + +1. Select all the signals in the new tab (or a portion of them if that's all you need). +2. Click on "Export". +3. Click on "As .mat file". +4. During the export process, you will be asked if you want to save the signal in different files. Click "No". + +Now that the .mat file has been created, it can be easily loaded in *openhdemg* with the function [emg_from_otb()](../api_openfiles.md#openhdemg.library.openfiles.emg_from_otb). + +Interestingly, this function also allows you to import additional signals beyond those listed above. Therefore, you could also export other signals together with those previously mentioned. Please note that files saved with "extra" information can only be loaded from the library (the use of "EXTRAS" is not supported in the graphical user interface) and the parameter "extras=" must be specified when calling the function. Please read the [specific API](../api_openfiles.md#openhdemg.library.openfiles.emg_from_otb) for more information.
+ +### Export the reference signal + +In some cases, like for example for MVC trials, you might want to export only the reference signal. This can be simply done by: + +1. Select the reference signal, usually named "acquired data" (or a portion of it if that's all you need). +2. Click on "Export". +3. Click on "As .mat file". + +Now that the .mat file has been created, it can be easily loaded in *openhdemg* with the function [refsig_from_otb()](../api_openfiles.md#openhdemg.library.openfiles.refsig_from_otb). + +Interestingly, this function also allows you to import additional signals. Therefore, you could also export other signals together with the reference signal. Please note that files saved with "extra" information can only be loaded from the library (the use of "EXTRAS" is not supported in the graphical user interface) and the parameter "extras=" must be specified when calling the function. Please read the [specific API](../api_openfiles.md#openhdemg.library.openfiles.refsig_from_otb) for more information.
+ +## From Delsys + +!!! Please note, the following two functions underwent limited testing. If they don't work, [report the issue to us](../contacts.md) and we will try to help you load your files !!! + +Delsys provides a number of software applications that can be used to record EMG signals from their acquisition systems. Additionally, some of them also allow automatic MUs decomposition and some analyses. + +*openhdemg* currently has two functions dedicated to loading data from Delsys software. Please refer to the [API](../api_openfiles.md) of the specific function for more information: + +- [emg_from_delsys()](../api_openfiles.md#openhdemg.library.openfiles.emg_from_delsys): This function is used to import the decomposition outcome from the EMGworks and NeuroMap software. +- [refsig_from_delsys()](../api_openfiles.md#openhdemg.library.openfiles.refsig_from_delsys): This function is used to import the reference signal from the EMGworks software. + +Depending on whether you want to export the whole decomposition outcome or only the reference signal, follow these steps:
+ +### Export the decomposition outcome + +For the raw EMG signal: + +- Collect the data in EMGworks Analysis with the correct sensor settings to allow for decomposition in the NeuroMap software. (Correct sensor settings will be indicated by the NeuroMap symbol next to compatible options in the sampling rate, range, and bandwidth settings). +- The data will save as a .hpf file. +- Open the Delsys File Utility and convert the .hpf file to a .mat file. +- Ensure that you still have access to the original .hpf file for the decomposition process. + +For the decomposition outcome: + +- Open the NeuroMap software. +- Import the .hpf file that was collected previously. +- Press decompose. +- This will produce a .dhpf file. +- Open the .dhpf file in NeuroMap Explorer. +- Export the files as .txt from the export options. + +At this point, you will have a .mat file containing the raw EMG signal and the reference signal, and a folder containing different .txt files. The .txt files contain the decomposition outcome, the MUAPs and some MUs statistics, including their accuracy score. Please do not rename the .txt files or, if you rename them, do not alter the ending identifier (e.g., _MUAPs). + +Now that the .mat and .txt files have been created, they can be easily loaded in *openhdemg* with the function [emg_from_delsys()](../api_openfiles.md#openhdemg.library.openfiles.emg_from_delsys). + +Interestingly, this function also allows you to import the MUAPs computed by Delsys during the decomposition. The computed MUAPs will be stored under the "EXTRAS" key and will be easily accessible with the function [extract_delsys_muaps()](../api_muap.md#openhdemg.library.muap.extract_delsys_muaps).
+ +### Export the reference signal + +You can export the reference signal in a .mat file from the NeuroMap software by following the steps proposed in the previous section. Once the .mat file has been created, it can be easily loaded in *openhdemg* with the function [refsig_from_delsys()](../api_openfiles.md#openhdemg.library.openfiles.refsig_from_delsys). + +## More questions? + +We hope that this tutorial was useful. If you need any additional information, do not hesitate to read the answers or ask a question in the [*openhdemg* discussion section](https://github.com/GiacomoValliPhD/openhdemg/discussions){:target="_blank"}. If you are not familiar with GitHub discussions, please read this [post](https://github.com/GiacomoValliPhD/openhdemg/discussions/42){:target="_blank"}. This will allow the *openhdemg* community to answer your questions. \ No newline at end of file diff --git a/docs/tutorials/setup_working_env.md index 2541353..d344c31 100644 --- a/docs/tutorials/setup_working_env.md +++ b/docs/tutorials/setup_working_env.md @@ -26,8 +26,8 @@ To install Python 3 on Windows: 1. Visit the official Python website: [https://www.python.org/downloads/](https://www.python.org/downloads/){:target="_blank"} 2. Click on the "Downloads" tab. -3. Scroll down to the section titled "Python Releases for Windows." -4. Click on the "Download" button for the latest version of Python (e.g., Python 3.11.4). +3. Scroll down to the section titled "Windows". +4. Click on "Windows installer" under the latest compatible version of Python (i.e., Python 3.11.6). *openhdemg* is currently working with Python 3.11 or earlier versions (tested from Python 3.8.x to 3.11.x). 5. Run the downloaded installer. 6. On the installer, make sure to check the box "Add Python to PATH" and then click "Install Now." 7. Python will be installed to your system (optionally, you can verify the installation by opening a command prompt and typing python --version). @@ -47,7 +47,7 @@ To install Python 3 on Mac: 1. Visit the official Python website: [https://www.python.org/downloads/](https://www.python.org/downloads/){:target="_blank"} 2. Click on the "Downloads" tab. 3. Scroll down to the section titled "Python Releases for macOS." -4. 
Click on the "Download" button for the latest version of Python (e.g., Python 3.11.4). +4. Click on "macOS 64-bit universal2 installer" under the latest compatible version of Python (i.e., Python 3.11.6). *openhdemg* is currently working with Python 3.11 or earlier versions (tested from Python 3.8.x to 3.11.x). 5. Run the downloaded installer package. 6. Follow the instructions on the installer to complete the installation. 7. Python will be installed to your system. (optionally, you can verify the installation by opening a terminal and typing python3 --version). @@ -187,6 +187,18 @@ Set-ExecutionPolicy Unrestricted -Force This should solve the issue and you should now be able to activate your Virtual environment. For additional information on this topic visit the [stackoverflow thread](https://stackoverflow.com/questions/18713086/virtualenv-wont-activate-on-windows){:target="_blank"}. +### *openhdemg* Installation Issues + +Windows: + +If trying to install *openhdemg* via pip you see this (or a similar) message in the terminal: + +``` +ImportError: DLL load failed while importing _cext: The specified module could not be found. +``` + +the problem might be that you do not have the necessary Visual C++ Redistributable. This can be simply solved by visitng the [Microsoft website](https://learn.microsoft.com/en-US/cpp/windows/latest-supported-vc-redist?view=msvc-170){:target="_blank"} and dowloading and installing the latest Visual Studio 2015, 2017, 2019 and 2022 redistributable. Please note, this is a single redistributable, you don't need to perform multiple dowloads. This should solve the issue and you should now be able to pip install *openhdemg*. If that's not the case, continue reading. + ## More questions? We hope that this tutorial was useful. If you need any additional information, do not hesitate to read the answers or ask a question in the [*openhdemg* discussion section](https://github.com/GiacomoValliPhD/openhdemg/discussions){:target="_blank"}. If you are not familiar with GitHub discussions, please read this [post](https://github.com/GiacomoValliPhD/openhdemg/discussions/42){:target="_blank"}. This will allow the *openhdemg* community to answer your questions. diff --git a/docs/what's-new.md b/docs/what's-new.md index 4e321d9..582c6fa 100644 --- a/docs/what's-new.md +++ b/docs/what's-new.md @@ -1,9 +1,63 @@ +## :octicons-tag-24: 0.1.0-beta.3 +:octicons-clock-24: November 2023 + +This release is focused on expanding the range of supported input files (decomposition outcomes) and to increase the speed and efficiency of the code. Furthermore, the introduction of a new backward compatibility module brings *openhdemg* a step closer to exiting the beta phase. + +### Major Achievements + +- **Extended input file compatibility**: now supporting a wider range of input files, making it easily accessible to anybody. +- **Much faster**: Spend more time on research and less time with tools. +- **Backward compatibility**: enjoy a smooth transition without concerns about compatibility. + +### Major Changes + +- **Support for Delsys decomposition outcome**: users of the 4 pin Galileo sensors can now directly open and analyze their motor units in *openhdemg*. This will expand the user base of the *openhdemg* framework. + + - New function `emg_from_delsys` to load the Delsys decomposition outcome. + - New function `refsig_from_delsys` to load the reference signal from Delsys. 
+ - New function `extract_delsys_muaps` to use the MUAPs computed during Delsys decomposition wherever the MUAPs are necessary. + +- **Awesome performance improvements**: + + - The `save_json_emgfile` function has been modified, and it is now possible to adjust the level of compression of the output file. It is now up to 90-95% faster than the previous implementation, with the output file 50% smaller than before and 50% smaller than the corresponding file in .mat format. This will facilitate and promote the adoption of the *openhdemg* file format. + - The `emg_from_json` function has been modified, and you can now load files 55% faster than the previous implementation. This will facilitate and promote the adoption of the *openhdemg* file format. + - The function `emg_from_demuse` can now load files 25 to 50% faster than the previous implementation. This will promote the use of other file sources in the *openhdemg* framework. + - The functions performing spike-triggered averaging (`sta` and `st_muap`) have been optimized and are now 95% faster. This allows for a faster execution of all the functions requiring the MU action potential shape. + +- **Backward compatibility**: we introduced a dedicated [module](api_compatibility.md) to ensure backward compatibility starting from this version and going forward. This will permit any user to easily migrate to newer versions of the library. + +### Other Changes + +- 90% Faster execution of the function `create_binary_firings`. +- Support for arrays: The various functions using MUAPs now support also grids with only one column (arrays of electrodes). This is predisposing *openhdemg* to interface also with arrays and not only with grids. +- MUs conduction velocity estimation now allows to set the size of the figure to make it as large and easy to see as you wish. It also returns which column and rows have been used to estimate conduction velocity. The functions used to estimate conduction velocity will undergo a major optimisation in the next releases to make them faster and more flexible. +- When calculating recruitment and derecruitment thresholds with the functions `compute_thresholds` and `basic_mus_properties`, it is now possible to calculate the thresholds as the average value of n firings. + +### Bug Fixes + +- Fixed a bug in the `emg.info().data()` function that crashed when called with CUSTOMCSV file sources. +- Class docstrings can now be accessed directly from Visual Studio Code. + +### Tutorials + +Added new tutorials explaining: + +- How to export your decomposed files to directly load them in *openhdemg*. +- How to use the backward compatibility module to easily migrate to the newer *openhdemg* releases. +- In the tutorial “Setup working environment,” we specified that *openhdemg* is currently working with Python up to the 3.11.x version. We are working to make it compatible with Python 3.12. + +### Backward Compatibility + +By default, the .json files saved from *openhdemg* version 0.1.0-beta.2 (released in September 2023) cannot be opened in *openhdemg* version 0.1.0-beta.3. However, these files can be easily converted to the newer file format thanks to the new backward compatibility module. We also created a [tutorials section](tutorials/convert_old_json_files.md) where the users are guided to the migration towards newer versions of the library. This will ensure easy migration to the latest *openhdemg* release. + +
+ ## :octicons-tag-24: 0.1.0-beta.2 :octicons-clock-24: September 2023 -This release introduces important changes. It is mainly addressing the necessity of maximum flexibility and easy integration with any custom or proprietary file source. This release is not backward compatible. +This release is mainly addressing the necessity of maximum flexibility and easy integration with any custom or proprietary file source. This release is not backward compatible. -MAJOR CHANGES: +### Major changes - **Accuracy Measurement:** Replaced the double accuracy measures in the `emgfile` (i.e., “SIL” and “PNR”) with a single accuracy measure named “ACCURACY.” For files containing the decomposed source (also named “IPTS”), the “ACCURACY” variable will contain the silhouette score (Negro et al. 2016). For files that do not contain the decomposed source, the accuracy will be the original (often proprietary) accuracy estimate. This allows for maximum flexibility and is fundamental to interface the *openhdemg* library with any proprietary and custom implementation of the different decomposition algorithms currently available. @@ -18,25 +72,26 @@ MAJOR CHANGES: - **File Import Restriction:** Restricted flexibility in the import of files. To import decomposed HD-EMG files, these must contain at least the raw EMG signal and one of the times of discharge of each MU ("MUPULSES") or their binary representation. This change ensures consistency and avoids compatibility issues with other functions. -**OTHER CHANGES:** +### Other changes -- **Sampling Frequency** and **Interelectrode Distance:** Sampling frequency and interelectrode distance are now represented by float point values to accommodate different source files. +- Sampling Frequency and Interelectrode Distance: Sampling frequency and interelectrode distance are now represented by float point values to accommodate different source files. -- **emg_from_customcsv** and **emg_from_otb:** Improved robustness and flexibility, with the possibility to load custom information in “EXTRAS.” +- `emg_from_customcsv` and `emg_from_otb`: Improved robustness and flexibility, with the possibility to load custom information in “EXTRAS.” -- **emg_from_demuse:** Improved robustness and flexibility. +- `emg_from_demuse`: Improved robustness and flexibility. -- **New Functions:** +- New Functions: - `refsig_from_customcsv` to load the reference signal from a custom .csv file. - `delete_empty_mus` to delete all the MUs without firings. -- **Exposed Function:** Exposed `mupulses_from_binary` to extract the times of firing from the binary representation of MUs firings. +- Exposed Function: Exposed `mupulses_from_binary` to extract the times of firing from the binary representation of MUs firings. -- **Dependency Management:** Addressed reported functioning issues related to external dependencies invoked by *openhdemg*. Stricter rules have been adopted in the setup.py file for automatically installing the correct version of these dependencies. +- Dependency Management: Addressed reported functioning issues related to external dependencies invoked by *openhdemg*. Stricter rules have been adopted in the setup.py file for automatically installing the correct version of these dependencies. -- **Bug Fixes:** - - Fixed a BUG in the GUI when saving results in Excel files. The bug was due to changes in newer pandas versions. - - Fixed a BUG in the function “sort_mus” when empty MUs were present. +### Bug Fixes + +- Fixed a BUG in the GUI when saving results in Excel files. 
The bug was due to changes in newer pandas versions. +- Fixed a BUG in the function `sort_mus` when empty MUs were present.
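+
+For quick reference, a minimal sketch of the loading helpers mentioned in these notes is shown below. The file names and the sampling frequency are hypothetical placeholders, and the functions are assumed to be exposed at the `openhdemg.library` level, as the GUI code elsewhere in this changeset suggests:
+
+```python
+# Sketch only: load a decomposed file and a reference signal from custom .csv
+# files, then drop the MUs without firings. Paths and fsamp are placeholders.
+from openhdemg.library import (
+    emg_from_customcsv,
+    refsig_from_customcsv,
+    delete_empty_mus,
+)
+
+emgfile = emg_from_customcsv(filepath="decomposed_mus.csv", fsamp=2048)
+refsig = refsig_from_customcsv(filepath="force_signal.csv", fsamp=2048)
+emgfile = delete_empty_mus(emgfile)
+```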
diff --git a/mkdocs.yml b/mkdocs.yml index ce6d35f..8d818aa 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -8,14 +8,17 @@ site_description: >- repo_name: openhdemg repo_url: https://github.com/GiacomoValliPhD/openhdemg -copyright: Copyright © 2022 - 2023 Giacomo Valli & Paul Ritsche +copyright: Copyright © 2022 - 2023. The openhdemg community theme: - name: "material" + name: material + custom_dir: docs/overrides logo: yml_graphics/transp_icon.png favicon: yml_graphics/transp_icon.png features: - content.code.copy + - announce.dismiss + - navigation.top palette: # Palette toggle for light mode - media: "(prefers-color-scheme: light)" @@ -29,6 +32,7 @@ theme: toggle: icon: material/brightness-4 name: Switch to light mode +extra_css: [docs/custom.css] # Select a plugin to get the docs from docstrings and setup the numpy stile as # the default style. @@ -62,6 +66,9 @@ nav: - Setup working environment: tutorials/setup_working_env.md - Basics of openhdemg: - Structure of the emgfile: tutorials/emgfile_structure.md + - Import from other software: tutorials/import_from_other_software.md + - Move to newer versions: + - Convert old .json files: tutorials/convert_old_json_files.md - Graphical Interface: - Intro: gui_intro.md - Basics: gui_basics.md @@ -70,6 +77,7 @@ nav: - Contacts: contacts.md - Cite us: cite-us.md - Contribute: contribute.md + - ISEK-JEK Tutorials: isek_jek_tutorials.md - API Reference: - openfiles: api_openfiles.md - plotemg: api_plotemg.md @@ -79,6 +87,7 @@ nav: - muap: api_muap.md - electrodes: api_electrodes.md - info: api_info.md + - compatibility: api_compatibility.md # Extensions markdown_extensions: @@ -132,8 +141,10 @@ extra: link: https://github.com/GiacomoValliPhD/openhdemg - icon: fontawesome/brands/twitter link: https://twitter.com/openhdemg - version: - provider: mike # TODO versioning + - icon: fontawesome/brands/youtube + link: https://www.youtube.com/@openhdemg +# version: +# provider: mike # TODO versioning # Need to have or pip install: # mkdocs diff --git a/openhdemg/__init__.py b/openhdemg/__init__.py index 8a5e814..3f54c02 100644 --- a/openhdemg/__init__.py +++ b/openhdemg/__init__.py @@ -1,3 +1,3 @@ __all__ = ["__version__"] -__version__ = "0.1.0-beta.2" +__version__ = "0.1.0-beta.3" diff --git a/openhdemg/compatibility/__init__.py b/openhdemg/compatibility/__init__.py new file mode 100644 index 0000000..bbcf064 --- /dev/null +++ b/openhdemg/compatibility/__init__.py @@ -0,0 +1,3 @@ +__all__ = ["conversions"] + +from openhdemg.compatibility.conversions import convert_json_output diff --git a/openhdemg/compatibility/conversions.py b/openhdemg/compatibility/conversions.py new file mode 100644 index 0000000..267780d --- /dev/null +++ b/openhdemg/compatibility/conversions.py @@ -0,0 +1,534 @@ +import pandas as pd +import numpy as np +from tkinter import * +from tkinter import filedialog +from tkinter import messagebox +import json +import gzip +import os + + +class convert_json_output(): + """ + Convert .json files saved from previous openhdemg versions to the desired + format (target openhdemg version). + + Parameters + ---------- + old : str, default "" + A path pointing to a .json file, or to a folder containing multiple + .json files, saved from the openhdemg version specified in + ``old_version``. + The path can be a simple string, the use of Path() is not necessary. + If ``old`` points to a folder, all the .json files contained in that + folder will be converted. 
Make sure that the folder contains only .json + files from the openhdemg version specified in ``old_version``. + new : str, default "" + A path pointing to the folder where the converted .json file/files + will be saved. The path can be a simple string, the use of Path() is + not necessary. + old_version : str {0.1.0-beta.2}, default 0.1.0-beta.2 + The openhdemg version used to save the ``old`` files. + Only "0.1.0-beta.2" is currently supported. + new_version : str {0.1.0-beta.3}, default 0.1.0-beta.3 + The target openhdemg version for which you want to convert the files. + Only "0.1.0-beta.3" is currently supported. + append_name : str, default "converted" + String to append to the name of the converted file. Use append_name="" + to don't append any name. + compresslevel : int, default 4 + An int from 0 to 9, where 0 is no compression and nine maximum + compression. Compressed files will take less space, but will require + more computation. The relationship between compression level and time + required for the compression is not linear. For optimised performance, + we suggest values between 2 and 6, with 4 providing the best balance. + gui : bool, default True + If true, the user will be able to select one or multiple .json files + to convert and the output folder with a convenient graphical interface. + If true, ``old`` and ``new`` can be ignored. + ignore_safety_checks : bool, default False + Safety checks are performed to avoid overwriting the original file. If + ``ignore_safety_checks=True``, the original file could be overwritten + without asking user permission. The risk of overwriting files happens + when converted files are saved in their original directory and with + ``append_name=""``. + + Examples + -------- + Convert the file/s with a practical GUI. + + >>> from openhdemg.compatibility import convert_json_output + >>> convert_json_output(gui=True, append_name="converted") + + Convert all the files in a folder without GUI. Save them in the same + location with a different name. + + >>> from openhdemg.compatibility import convert_json_output + >>> old = "C:/Users/.../test conversions/" + >>> new = "C:/Users/.../test conversions/" + >>> convert_json_output( + ... old=old, + ... new=new, + ... append_name="converted", + ... gui=False, + ... ) + + Convert a file in a folder without GUI and overwrite it. + + >>> from openhdemg.compatibility import convert_json_output + >>> old = "C:/Users/.../test conversions/old_testfile.json" + >>> new = "C:/Users/.../test conversions/" + >>> convert_json_output( + ... old=old, + ... new=new, + ... append_name="", + ... gui=False, + ... ignore_safety_checks=True, + ... ) + """ + + def __init__( + self, + old="", + new="", + old_version="0.1.0-beta.2", + new_version="0.1.0-beta.3", + append_name="converted", + compresslevel=4, + gui=True, + ignore_safety_checks=False, + ): + + # Check if correct versions have been passed + accepted_old_versions = [ + "0.1.0-beta.2", + ] + accepted_new_versions = [ + "0.1.0-beta.3", + ] + if old_version not in accepted_old_versions: + raise ValueError("Unsupported old_version") + if new_version not in accepted_new_versions: + raise ValueError("Unsupported new_version") + + if gui: + # Create and hide the tkinter root window necessary for the GUI + # based load file/directory function. 
+ root = Tk() + root.withdraw() + + # Get a tuple of files to open + files_to_open = filedialog.askopenfilenames( + title="Select one or multiple .json files to convert", + filetypes=[("JSON files", "*.json")], + ) + if len(files_to_open) < 1: + # End class execution if cancel button has been pressed. + return + + # Get the directory to save converted files + save_directory = filedialog.askdirectory( + title="Select the folder where to save the converted files", + ) + if len(save_directory) < 1: + # End class execution if cancel button has been pressed. + return + + # Destroy the root since it is no longer necessary + root.destroy() + + # Safety check to avoid overwriting the original file + if not ignore_safety_checks and len(append_name) == 0: + if save_directory == os.path.dirname(files_to_open[0]): + root = Tk() + root.withdraw() + # Ask for user confirmation + user_response = messagebox.askyesno( + "Confirmation", + "You are going to overwrite the original file/s." + + "Do you want to continue?", + ) + # Destroy the main window + root.destroy() + if not user_response: + print("Conversion interrupted") + return + + # Iterate all the elements in files_toOpen. Convert and save all + # the selected files. + for pos, fin in enumerate(files_to_open): + print(f"Converting {fin}") + print(f"Converting n°{pos+1} out of {len(files_to_open)} files") + # Load file + emgfile = self.load_0_1_0_b2(filepath=fin) + + # Get the appropriate name and filepath + base_name = os.path.basename(fin) + base_name, _ = os.path.splitext(base_name) + if len(append_name) > 0: + base_name = base_name + "_" + append_name + filepath = ( + save_directory + "/" + base_name + ".json" + ) + + # Save file + self.save_0_1_0_b3( + emgfile, + filepath=filepath, + compresslevel=compresslevel, + ) + print("Conversion completed") + + else: + # Check if the path is a file + if os.path.isfile(old): + files_to_open = (old,) + + # Check if the path is a directory + elif os.path.isdir(old): + json_files = [ + os.path.join(old, file) + for file in os.listdir(old) + if file.endswith(".json") + ] + files_to_open = tuple(json_files) + + else: + raise FileNotFoundError("The path 'old' does not exist.") + + # Check if the new path is a directory + if not os.path.isdir(new): + raise ValueError("'new' is not pointing to a directory") + + # Safety check to avoid overwriting the original file + if not ignore_safety_checks and len(append_name) == 0: + res = input( + "You are going to overwrite the original file. " + "Continue? Y/n ->" + ) + if res not in ["Y", "y"]: + print("Conversion interrupted") + return + + # Iterate all the elements in files_toOpen. Convert and save all + # the selected files. + for pos, fin in enumerate(files_to_open): + print(f"Converting {fin}") + print(f"Converting n°{pos+1} out of {len(files_to_open)} files") + # Load file + emgfile = self.load_0_1_0_b2(filepath=fin) + + # Get the appropriate name and filepath + base_name = os.path.basename(fin) + base_name, _ = os.path.splitext(base_name) + if len(append_name) > 0: + base_name = base_name + "_" + append_name + filepath = ( + new + "/" + base_name + ".json" + ) + + # Save file + self.save_0_1_0_b3( + emgfile, + filepath=filepath, + compresslevel=compresslevel, + ) + + print("Conversion completed") + + def load_0_1_0_b2(self, filepath): + """ """ + """ + Load the version 0.1.0-beta.2 emgfile and emg_refsig. + + Parameters + ---------- + filepath : str or Path + The directory and the name of the file to load (including file + extension .json). 
+ This can be a simple string, the use of Path is not necessary. + + Returns + ------- + emgfile : dict + The dictionary containing the emgfile. + """ + + # Read and decompress json file + with gzip.open(filepath, "r") as fin: + json_bytes = fin.read() + # Decode json file + json_str = json_bytes.decode("utf-8") + jsonemgfile = json.loads(json_str) + + # Access the dictionaries and extract the data + # jsonemgfile[0] contains the SOURCE in a dictionary + source_dict = json.loads(jsonemgfile[0]) + source = source_dict["SOURCE"] + # jsonemgfile[1] contains the FILENAME in all the sources + filename_dict = json.loads(jsonemgfile[1]) + filename = filename_dict["FILENAME"] + + if source in ["DEMUSE", "OTB", "CUSTOMCSV"]: + # jsonemgfile[2] contains the RAW_SIGNAL in a dictionary, it can be + # extracted in a new dictionary and converted into a pd.DataFrame. + # index and columns are imported as str, we need to convert it to + # int. + raw_signal_dict = json.loads(jsonemgfile[2]) + raw_signal_dict = json.loads(raw_signal_dict["RAW_SIGNAL"]) + raw_signal = pd.DataFrame(raw_signal_dict) + raw_signal.columns = raw_signal.columns.astype(int) + raw_signal.index = raw_signal.index.astype(int) + raw_signal.sort_index(inplace=True) + # jsonemgfile[3] contains the REF_SIGNAL to be treated as + # jsonemgfile[2] + ref_signal_dict = json.loads(jsonemgfile[3]) + ref_signal_dict = json.loads(ref_signal_dict["REF_SIGNAL"]) + ref_signal = pd.DataFrame(ref_signal_dict) + ref_signal.columns = ref_signal.columns.astype(int) + ref_signal.index = ref_signal.index.astype(int) + ref_signal.sort_index(inplace=True) + # jsonemgfile[4] contains the ACCURACY to be treated as + # jsonemgfile[2] + accuracy_dict = json.loads(jsonemgfile[4]) + accuracy_dict = json.loads(accuracy_dict["ACCURACY"]) + accuracy = pd.DataFrame(accuracy_dict) + accuracy.columns = accuracy.columns.astype(int) + accuracy.index = accuracy.index.astype(int) + accuracy.sort_index(inplace=True) + # jsonemgfile[5] contains the IPTS to be treated as jsonemgfile[2] + ipts_dict = json.loads(jsonemgfile[5]) + ipts_dict = json.loads(ipts_dict["IPTS"]) + ipts = pd.DataFrame(ipts_dict) + ipts.columns = ipts.columns.astype(int) + ipts.index = ipts.index.astype(int) + ipts.sort_index(inplace=True) + # jsonemgfile[6] contains the MUPULSES which is a list of lists but + # has to be converted in a list of ndarrays. 
+ mupulses = json.loads(jsonemgfile[6]) + for num, element in enumerate(mupulses): + mupulses[num] = np.array(element) + # jsonemgfile[7] contains the FSAMP to be treated as jsonemgfile[0] + fsamp_dict = json.loads(jsonemgfile[7]) + fsamp = float(fsamp_dict["FSAMP"]) + # jsonemgfile[8] contains the IED to be treated as jsonemgfile[0] + ied_dict = json.loads(jsonemgfile[8]) + ied = float(ied_dict["IED"]) + # jsonemgfile[9] contains the EMG_LENGTH to be treated as + # jsonemgfile[0] + emg_length_dict = json.loads(jsonemgfile[9]) + emg_length = int(emg_length_dict["EMG_LENGTH"]) + # jsonemgfile[10] contains the NUMBER_OF_MUS to be treated as + # jsonemgfile[0] + number_of_mus_dict = json.loads(jsonemgfile[10]) + number_of_mus = int(number_of_mus_dict["NUMBER_OF_MUS"]) + # jsonemgfile[11] contains the BINARY_MUS_FIRING to be treated as + # jsonemgfile[2] + binary_mus_firing_dict = json.loads(jsonemgfile[11]) + binary_mus_firing_dict = json.loads( + binary_mus_firing_dict["BINARY_MUS_FIRING"] + ) + binary_mus_firing = pd.DataFrame(binary_mus_firing_dict) + binary_mus_firing.columns = binary_mus_firing.columns.astype(int) + binary_mus_firing.index = binary_mus_firing.index.astype(int) + # jsonemgfile[12] contains the EXTRAS to be treated as + # jsonemgfile[2] + extras_dict = json.loads(jsonemgfile[12]) + extras_dict = json.loads(extras_dict["EXTRAS"]) + extras = pd.DataFrame(extras_dict) + # extras.columns = extras.columns.astype(int) + # extras.index = extras.index.astype(int) + # extras.sort_index(inplace=True) + # Don't alter extras, leave that to the user for maximum control + + emgfile = { + "SOURCE": source, + "FILENAME": filename, + "RAW_SIGNAL": raw_signal, + "REF_SIGNAL": ref_signal, + "ACCURACY": accuracy, + "IPTS": ipts, + "MUPULSES": mupulses, + "FSAMP": fsamp, + "IED": ied, + "EMG_LENGTH": emg_length, + "NUMBER_OF_MUS": number_of_mus, + "BINARY_MUS_FIRING": binary_mus_firing, + "EXTRAS": extras, + } + + elif source in ["OTB_REFSIG", "CUSTOMCSV_REFSIG"]: + # jsonemgfile[2] contains the fsamp + fsamp_dict = json.loads(jsonemgfile[2]) + fsamp = float(fsamp_dict["FSAMP"]) + # jsonemgfile[3] contains the REF_SIGNAL + ref_signal_dict = json.loads(jsonemgfile[3]) + ref_signal_dict = json.loads(ref_signal_dict["REF_SIGNAL"]) + ref_signal = pd.DataFrame(ref_signal_dict) + ref_signal.columns = ref_signal.columns.astype(int) + ref_signal.index = ref_signal.index.astype(int) + ref_signal.sort_index(inplace=True) + # jsonemgfile[4] contains the EXTRAS + extras_dict = json.loads(jsonemgfile[4]) + extras_dict = json.loads(extras_dict["EXTRAS"]) + extras = pd.DataFrame(extras_dict) + + emgfile = { + "SOURCE": source, + "FILENAME": filename, + "FSAMP": fsamp, + "REF_SIGNAL": ref_signal, + "EXTRAS": extras, + } + + else: + raise Exception("\nFile source not recognised\n") + + return emgfile + + def save_0_1_0_b3(self, emgfile, filepath, compresslevel): + """ """ + """ + Save the emgfile or emg_refsig compatible with openhdemg version + 0.1.0-beta.3. + + Parameters + ---------- + emgfile : dict + The dictionary containing the emgfile. + filepath : str or Path + The directory and the name of the file to save (including file + extension .json). + This can be a simple string; The use of Path is not necessary. + compresslevel : int + An int from 0 to 9, where 0 is no compression and nine maximum + compression. Compressed files will take less space, but will + require more computation. The relationship between compression + level and time required for the compression is not linear. 
For + optimised performance, we suggest values between 2 and 6, with 4 + providing the best balance. + """ + + if emgfile["SOURCE"] in ["DEMUSE", "OTB", "CUSTOMCSV", "DELSYS"]: + """ + We need to convert all the components of emgfile to a dictionary + and then to json object. + pd.DataFrame cannot be converted with json.dumps. + Once all the elements are converted to json objects, we create a + dict of json objects and dump/save it into a single json file. + emgfile = { + "SOURCE": SOURCE, + "FILENAME": FILENAME, + "RAW_SIGNAL": RAW_SIGNAL, + "REF_SIGNAL": REF_SIGNAL, + "ACCURACY": ACCURACY, + "IPTS": IPTS, + "MUPULSES": MUPULSES, + "FSAMP": FSAMP, + "IED": IED, + "EMG_LENGTH": EMG_LENGTH, + "NUMBER_OF_MUS": NUMBER_OF_MUS, + "BINARY_MUS_FIRING": BINARY_MUS_FIRING, + "EXTRAS": EXTRAS, + } + """ + + # str or float + # Directly convert str or float to a json format. + source = json.dumps(emgfile["SOURCE"]) + filename = json.dumps(emgfile["FILENAME"]) + fsamp = json.dumps(emgfile["FSAMP"]) + ied = json.dumps(emgfile["IED"]) + emg_length = json.dumps(emgfile["EMG_LENGTH"]) + number_of_mus = json.dumps(emgfile["NUMBER_OF_MUS"]) + + # df + # Access and convert the df to a json object. + # orient='split' is fundamental for performance. + raw_signal = emgfile["RAW_SIGNAL"].to_json(orient='split') + ref_signal = emgfile["REF_SIGNAL"].to_json(orient='split') + accuracy = emgfile["ACCURACY"].to_json(orient='split') + ipts = emgfile["IPTS"].to_json(orient='split') + binary_mus_firing = emgfile["BINARY_MUS_FIRING"].to_json(orient='split') + extras = emgfile["EXTRAS"].to_json(orient='split') + + # list of ndarray. + # Every array has to be converted in a list; then, the list of + # lists can be converted to json. + mupulses = [] + for ind, array in enumerate(emgfile["MUPULSES"]): + mupulses.insert(ind, array.tolist()) + mupulses = json.dumps(mupulses) + + # Convert a dict of json objects to json. The result of the + # conversion will be saved as the final json file. + emgfile = { + "SOURCE": source, + "FILENAME": filename, + "RAW_SIGNAL": raw_signal, + "REF_SIGNAL": ref_signal, + "ACCURACY": accuracy, + "IPTS": ipts, + "MUPULSES": mupulses, + "FSAMP": fsamp, + "IED": ied, + "EMG_LENGTH": emg_length, + "NUMBER_OF_MUS": number_of_mus, + "BINARY_MUS_FIRING": binary_mus_firing, + "EXTRAS": extras, + } + + # Compress and write the json file + with gzip.open( + filepath, + "wt", + encoding="utf-8", + compresslevel=compresslevel, + ) as f: + json.dump(emgfile, f) + + elif emgfile["SOURCE"] in [ + "OTB_REFSIG", + "CUSTOMCSV_REFSIG", + "DELSYS_REFSIG" + ]: + """ + refsig = { + "SOURCE": SOURCE, + "FILENAME": FILENAME, + "FSAMP": FSAMP, + "REF_SIGNAL": REF_SIGNAL, + "EXTRAS": EXTRAS, + } + """ + # str or float + # Directly convert str or float to a json format. + source = json.dumps(emgfile["SOURCE"]) + filename = json.dumps(emgfile["FILENAME"]) + fsamp = json.dumps(emgfile["FSAMP"]) + + # df + # Access and convert the df to a json object. 
+ ref_signal = emgfile["REF_SIGNAL"].to_json(orient='split') + extras = emgfile["EXTRAS"].to_json(orient='split') + + # Merge all the objects in one dict + refsig = { + "SOURCE": source, + "FILENAME": filename, + "FSAMP": fsamp, + "REF_SIGNAL": ref_signal, + "EXTRAS": extras, + } + + # Compress and save + with gzip.open( + filepath, + "wt", + encoding="utf-8", + compresslevel=compresslevel, + ) as f: + json.dump(refsig, f) + + else: + raise ValueError("\nFile source not recognised\n") diff --git a/openhdemg/gui/openhdemg_gui.py b/openhdemg/gui/openhdemg_gui.py index 5652783..c416d3e 100644 --- a/openhdemg/gui/openhdemg_gui.py +++ b/openhdemg/gui/openhdemg_gui.py @@ -5,6 +5,7 @@ import os import tkinter as tk import customtkinter +import threading import webbrowser from tkinter import ttk, filedialog, Canvas from tkinter import StringVar, Tk, N, S, W, E, DoubleVar @@ -244,6 +245,8 @@ class emgGUI: Executed when button "Remove MUs" in master GUI window pressed. remove() Method used to remove single motor units. + remove_empty() + Method that removes all empty MUs. edit_refsig() Opens seperate window to edit emg reference signal. Executed when button "RefSig Editing" in master GUI window pressed. @@ -334,13 +337,35 @@ def __init__(self, master): master_path = os.path.dirname(os.path.abspath(__file__)) iconpath = master_path + "/gui_files/Icon.ico" self.master.iconbitmap(iconpath) + self.master.columnconfigure(0, weight=1) + self.master.rowconfigure(0, weight=1) # Create left side framing for functionalities self.left = ttk.Frame(self.master, padding="10 10 12 12") - self.left.grid(column=0, row=0, sticky=(N, S, W)) + self.left.grid(column=0, row=0, sticky="nsew") self.left.columnconfigure(0, weight=1) self.left.columnconfigure(1, weight=1) self.left.columnconfigure(2, weight=1) + self.left.columnconfigure(3, weight=1) + self.left.rowconfigure(0, weight=1) + self.left.rowconfigure(1, weight=1) + self.left.rowconfigure(2, weight=1) + self.left.rowconfigure(3, weight=1) + self.left.rowconfigure(4, weight=1) + self.left.rowconfigure(5, weight=1) + self.left.rowconfigure(6, weight=1) + self.left.rowconfigure(7, weight=1) + self.left.rowconfigure(8, weight=1) + self.left.rowconfigure(9, weight=1) + self.left.rowconfigure(10, weight=1) + self.left.rowconfigure(11, weight=1) + self.left.rowconfigure(12, weight=1) + self.left.rowconfigure(13, weight=1) + self.left.rowconfigure(14, weight=1) + self.left.rowconfigure(15, weight=1) + self.left.rowconfigure(16, weight=1) + self.left.rowconfigure(17, weight=1) + self.left.rowconfigure(18, weight=1) # Style style = ttk.Style() @@ -357,10 +382,10 @@ def __init__(self, master): style.configure("TEntry", font=("Lucida Sans", 12), foreground="black") style.configure("TCombobox", background="LightBlue4", foreground="black") style.configure("TLabelFrame", foreground="black", font=("Lucida Sans", 16)) - + style.configure("TProgressbar", foreground="#FFBF00", background="#FFBF00") # Specify Signal self.filetype = StringVar() - signal_value = ("OPENHDEMG", "OTB", "DEMUSE", "OTB_REFSIG", "CUSTOMCSV", "CUSTOMCSV_REFSIG") + signal_value = ("OPENHDEMG", "DEMUSE","OTB", "OTB_REFSIG", "DELSYS", "DELSYS_REFSIG", "CUSTOMCSV", "CUSTOMCSV_REFSIG") signal_entry = ttk.Combobox( self.left, text="Signal", width=10, textvariable=self.filetype ) @@ -483,7 +508,14 @@ def __init__(self, master): # Create right side framing for functionalities self.right = ttk.Frame(self.master, padding="10 10 12 12") - self.right.grid(column=1, row=0, sticky=(N, S, E)) + self.right.grid(column=1, 
row=0, sticky=(N, S, E, W)) + self.right.columnconfigure(0, weight=1) + self.right.columnconfigure(1, weight=1) + self.right.rowconfigure(0, weight=1) + self.right.rowconfigure(1, weight=1) + self.right.rowconfigure(2, weight=1) + self.right.rowconfigure(3, weight=1) + self.right.rowconfigure(4, weight=1) # Create empty figure self.first_fig = Figure(figsize=(20 / 2.54, 15 / 2.54)) @@ -617,124 +649,250 @@ def get_file_input(self): -------- emg_from_demuse, emg_from_otb, refsig_from_otb and emg_from_json in library. """ - try: - if self.filetype.get() in ["OTB", "DEMUSE", "OPENHDEMG", "CUSTOMCSV"]: - # Check filetype for processing - if self.filetype.get() == "OTB": - # Ask user to select the decomposed file - file_path = filedialog.askopenfilename( - title="Open OTB file", filetypes=[("MATLAB files", "*.mat")] - ) - self.file_path = file_path - # Load file - self.resdict = openhdemg.emg_from_otb( - filepath=self.file_path, - ext_factor=int(self.extension_factor.get()), - ) + def load_file(): + try: + if self.filetype.get() in ["OTB", "DEMUSE", "OPENHDEMG", "CUSTOMCSV", "DELSYS"]: + # Check filetype for processing + if self.filetype.get() == "OTB": + # Ask user to select the decomposed file + file_path = filedialog.askopenfilename( + title="Open OTB file to load", filetypes=[("MATLAB files", "*.mat")] + ) + self.file_path = file_path + # Load file + self.resdict = openhdemg.emg_from_otb( + filepath=self.file_path, + ext_factor=int(self.extension_factor.get()), + ) + # Add filespecs + ttk.Label( + self.left, text=str(len(self.resdict["RAW_SIGNAL"].columns)) + ).grid(column=2, row=2, sticky=(W, E)) + ttk.Label(self.left, text=str(self.resdict["NUMBER_OF_MUS"])).grid( + column=2, row=3, sticky=(W, E) + ) + ttk.Label(self.left, text=str(self.resdict["EMG_LENGTH"])).grid( + column=2, row=4, sticky=(W, E) + ) - elif self.filetype.get() == "DEMUSE": - # Ask user to select the file - file_path = filedialog.askopenfilename( - title="Open DEMUSE file", filetypes=[("MATLAB files", "*.mat")] - ) - self.file_path = file_path + elif self.filetype.get() == "DEMUSE": + # Ask user to select the file + file_path = filedialog.askopenfilename( + title="Open DEMUSE file to load", filetypes=[("MATLAB files", "*.mat")] + ) + self.file_path = file_path + # load file + self.resdict = openhdemg.emg_from_demuse(filepath=self.file_path) + # Add filespecs + ttk.Label( + self.left, text=str(len(self.resdict["RAW_SIGNAL"].columns)) + ).grid(column=2, row=2, sticky=(W, E)) + ttk.Label(self.left, text=str(self.resdict["NUMBER_OF_MUS"])).grid( + column=2, row=3, sticky=(W, E) + ) + ttk.Label(self.left, text=str(self.resdict["EMG_LENGTH"])).grid( + column=2, row=4, sticky=(W, E) + ) + elif self.filetype.get() == "DELSYS": + # Ask user to select the file + file_path = filedialog.askopenfilename( + title="Select a DELSYS file with raw EMG to load", + filetypes=[("MATLAB files", "*.mat")] + ) + # Ask user to open the Delsys decompostition + self.mus_path = filedialog.askdirectory( + title="Select the folder containing the DELSYS decomposition", + ) + self.file_path = file_path + + # load DELSYS + self.resdict = openhdemg.emg_from_delsys(rawemg_filepath=self.file_path, + mus_directory=self.mus_path) + # Add filespecs + ttk.Label(self.left, text=str(len(self.resdict["RAW_SIGNAL"].columns))).grid(column=2, row=2, sticky=(W, E)) + ttk.Label(self.left, text=str(self.resdict["NUMBER_OF_MUS"])).grid( + column=2, row=3, sticky=(W, E) + ) + ttk.Label(self.left, text=str(self.resdict["EMG_LENGTH"])).grid( + column=2, row=4, sticky=(W, E) 
+ ) - # load file - self.resdict = openhdemg.emg_from_demuse(filepath=self.file_path) + elif self.filetype.get() == "OPENHDEMG": + # Ask user to select the file + file_path = filedialog.askopenfilename( + title="Open JSON file to load", filetypes=[("JSON files", "*.json")] + ) + self.file_path = file_path + # load OPENHDEMG (.json) + self.resdict = openhdemg.emg_from_json(filepath=self.file_path) + # Add filespecs + # NOTE this is not correct because when the user asks to load OPENHDEMG (.json) + # files, these could contain also the reference signal only. Therefore line 2 + # and 3 will crash. I temporarily fixed it, please review it for next release. + if self.resdict["SOURCE"] in ["DEMUSE", "OTB", "CUSTOMCSV", "DELSYS"]: + ttk.Label(self.left, text=str(len(self.resdict["RAW_SIGNAL"].columns))).grid(column=2, row=2, sticky=(W, E)) + ttk.Label(self.left, text=str(self.resdict["NUMBER_OF_MUS"])).grid(column=2, row=3, sticky=(W, E)) + ttk.Label(self.left, text=str(self.resdict["EMG_LENGTH"])).grid(column=2, row=4, sticky=(W, E)) + else: + # Reconfigure labels for refsig + ttk.Label( + self.left, text=str(len(self.resdict["REF_SIGNAL"].columns)) + ).grid(column=2, row=2, sticky=(W, E)) + ttk.Label(self.left, text="NA").grid(column=2, row=3, sticky=(W, E)) + ttk.Label(self.left, text=" ").grid( + column=2, row=4, sticky=(W, E) + ) + else: + # Ask user to select the file + file_path = filedialog.askopenfilename( + title="Open CUSTOMCSV file to load", + filetypes=[("CSV files", "*.csv")], + ) + self.file_path = file_path + # load file + self.resdict = openhdemg.emg_from_customcsv( + filepath=self.file_path, + fsamp=float(self.fsamp.get()), + ) + # Add filespecs + ttk.Label(self.left, text="Custom CSV").grid(column=2, row=2, sticky=(W, E)) + ttk.Label(self.left, text="").grid(column=2, row=3, sticky=(W, E)) + ttk.Label(self.left, text="").grid(column=2, row=4, sticky=(W, E)) - elif self.filetype.get() == "OPENHDEMG": - # Ask user to select the file - file_path = filedialog.askopenfilename( - title="Open JSON file", filetypes=[("JSON files", "*.json")] - ) - self.file_path = file_path + # Get filename + filename = os.path.splitext(os.path.basename(file_path))[0] + self.filename = filename - # load OPENHDEMG (.json) - self.resdict = openhdemg.emg_from_json(filepath=self.file_path) + # Add filename to label + self.master.title(self.filename) + # This sections is used for refsig loading as they required not the + # the filespecs to be loaded. 
else: - # Ask user to select the file - file_path = filedialog.askopenfilename( - title="Open CUSTOMCSV file", - filetypes=[("CSV files", "*.csv")], - ) - self.file_path = file_path - - # load file - self.resdict = openhdemg.emg_from_customcsv(filepath=self.file_path) + if self.filetype.get() == "OTB_REFSIG": + file_path = filedialog.askopenfilename( + title="Open OTB_REFSIG file to load", + filetypes=[("MATLAB files", "*.mat")], + ) + self.file_path = file_path + # load refsig + self.resdict = openhdemg.refsig_from_otb(filepath=self.file_path) - # Get filename - filename = os.path.splitext(os.path.basename(file_path))[0] - self.filename = filename + elif self.filetype.get() == "DELSYS_REFSIG": - # Add filename to label - self.master.title(self.filename) + # Ask user to select the file + file_path = filedialog.askopenfilename( + title="Select a DELSYS_REFSIG file with raw EMG to load", + filetypes=[("MATLAB files", "*.mat")] + ) + self.file_path = file_path + # load DELSYS + self.resdict = openhdemg.refsig_from_delsys(filepath=self.file_path) + + elif self.filetype.get() == "CUSTOMCSV_REFSIG": + file_path = filedialog.askopenfilename( + title="Open CUSTOMCSV_REFSIG file to load", + filetypes=[("CSV files", "*.csv")], + ) + self.file_path = file_path + # load refsig + self.resdict = openhdemg.refsig_from_customcsv( + filepath=self.file_path, + fsamp=float(self.fsamp.get()), + ) # NOTE please check that I used correctly self.fsamp.get() here and above. - # Add filespecs - ttk.Label( - self.left, text=str(len(self.resdict["RAW_SIGNAL"].columns)) - ).grid(column=2, row=2, sticky=(W, E)) - ttk.Label(self.left, text=str(self.resdict["NUMBER_OF_MUS"])).grid( - column=2, row=3, sticky=(W, E) - ) - ttk.Label(self.left, text=str(self.resdict["EMG_LENGTH"])).grid( - column=2, row=4, sticky=(W, E) - ) - """ - # BUG with "OPENHDEMG" type we identify all files saved from openhdemg, - regardless of the content. This will result in an error for ttk.Label - self.resdict["NUMBER_OF_MUS"] and self.resdict["EMG_LENGTH"]. - """ + # Get filename + filename = os.path.splitext(os.path.basename(file_path))[0] + self.filename = filename - else: - # Ask user to select the refsig file - if self.filetype.get() == "OTB_REFSIG": - file_path = filedialog.askopenfilename( - title="Open OTB_REFSIG file", - filetypes=[("MATLAB files", "*.mat")], - ) - self.file_path = file_path - # load refsig - self.resdict = openhdemg.refsig_from_otb(filepath=self.file_path) + # Add filename to label + self.master.title(self.filename) - else: # CUSTOMCSV_REFSIG - file_path = filedialog.askopenfilename( - title="Open CUSTOMCSV_REFSIG file", - filetypes=[("CSV files", "*.csv")], + # Reconfigure labels for refsig + ttk.Label( + self.left, text=str(len(self.resdict["REF_SIGNAL"].columns)) + ).grid(column=2, row=2, sticky=(W, E)) + ttk.Label(self.left, text="NA").grid(column=2, row=3, sticky=(W, E)) + ttk.Label(self.left, text=" ").grid( + column=2, row=4, sticky=(W, E) ) - self.file_path = file_path - # load refsig - self.resdict = openhdemg.refsig_from_customcsv(filepath=self.file_path) + + # End progress + progress.stop() + progress.grid_remove() - # Get filename - filename = os.path.splitext(os.path.basename(file_path))[0] - self.filename = filename + except ValueError: + tk.messagebox.showerror( + "Information", + "When an OTB file is loaded, make sure to " + + "\nspecify an extension factor (number) first." + + "\n" + + "When a DELSYS file is loaded, make sure to " + + "\nspecify the correct folder." 
+ ) + # End progress + progress.stop() + progress.grid_remove() - # Add filename to label - self.master.title(self.filename) + except FileNotFoundError: + # End progress + progress.stop() + progress.grid_remove() - # Reconfigure labels for refsig - ttk.Label( - self.left, text=str(len(self.resdict["REF_SIGNAL"].columns)) - ).grid(column=2, row=2, sticky=(W, E)) - ttk.Label(self.left, text="NA").grid(column=2, row=3, sticky=(W, E)) - ttk.Label(self.left, text=" ").grid( - column=2, row=4, sticky=(W, E) + except TypeError: + tk.messagebox.showerror( + "Information", + "Make sure to load correct file" + + "\naccording to your specification.", ) + # End progress + progress.stop() + progress.grid_remove() - except ValueError: - tk.messagebox.showerror( - "Information", - "When an OTB file is loaded, make sure to " - + "\nspecify an extension factor (number) first.", - ) + except KeyError: + tk.messagebox.showerror( + "Information", + "Make sure to load correct file" + + "\naccording to your specification.", + ) + # End progress + progress.stop() + progress.grid_remove() + except: + # End progress + progress.stop() + progress.grid_remove() + + # Indicate Progress + progress = ttk.Progressbar(self.left, mode="indeterminate") + progress.grid(row=4, column=0) + progress.start(1) + + # Create a thread to run the load_file function + save_thread = threading.Thread(target=load_file) + save_thread.start() def on_filetype_change(self, *args): """ This function is called when the value of the filetype variable is changed. - When the filetype is set to "OTB" it will create a second combobox on the grid at column 0 and row 2, - and when the filetype is set to something else it will remove the second combobox from the grid. - """ + When the filetype is set to "OTB", "CUSTOMCSV", "CUSTOMCSV_REFSIG" it will + create a second combobox on the grid at column 0 and row 2 and when the filetype + is set to something else it will remove the second combobox from the grid. + """ + # Forget previous widget when filetype is changes + # NOTE I had to separate them and put them on top of the function to + # ensure that changing file type consecutively would not miss the + # previous entry or combobox. + if self.filetype.get() not in ["OTB"]: + if hasattr(self, "otb_combobox"): + self.otb_combobox.grid_forget() + if self.filetype.get() not in ["CUSTOMCSV"]: + if hasattr(self, "csv_entry"): + self.csv_entry.grid_forget() + if self.filetype.get() not in ["CUSTOMCSV_REFSIG"]: + if hasattr(self, "csv_entry"): + self.csv_entry.grid_forget() + # Add a combobox containing the OTB extension factors # in case an OTB file is loaded if self.filetype.get() == "OTB": @@ -759,44 +917,71 @@ def on_filetype_change(self, *args): self.otb_combobox.grid(column=0, row=2, sticky=(W, E), padx=5) self.otb_combobox.set("Extension Factor") - # Forget widget when filetype is changes - else: - if hasattr(self, "otb_combobox"): - self.otb_combobox.grid_forget() + # NOTE I forgot to mention, but people should be able to select fsamp for .csv files. + # Please check if this can be done better. + elif self.filetype.get() in ["CUSTOMCSV", "CUSTOMCSV_REFSIG"]: + self.fsamp = StringVar(value="Fsamp") + self.csv_entry = ttk.Entry( + self.left, + width=8, + textvariable=self.fsamp, + ) + self.csv_entry.grid(column=0, row=2, sticky=(W, E), padx=5) + # TODO remove this and any reference to it def decompose_file(self): pass def save_emgfile(self): """ - Instance method to save the edited emgfile. Results are saves in .json file. 
+ Instance method to save the edited emgfile. Results are saved in a .json file. - Executed when the button "Save File" in master GUI window is pressed. + Executed when the "Save File" button in the master GUI window is pressed. Raises ------ AttributeError - When file was not loaded in the GUI. + When a file was not loaded in the GUI. See Also -------- save_json_emgfile in library. """ - try: - # Ask user to select the directory and file name - save_filepath = filedialog.asksaveasfilename( - defaultextension=".*", - filetypes=(("JSON files", "*.json"), ("all files", "*.*")), - ) + def save_file(): + try: + # Ask user to select the directory and file name + save_filepath = filedialog.asksaveasfilename( + defaultextension=".json", + filetypes=(("JSON files", "*.json"), ("all files", "*.*")), + ) - # Get emgfile - save_emg = self.resdict + if not save_filepath: + # End progress + progress.stop() + progress.grid_remove() + return # User canceled the file dialog - # Save json file - openhdemg.save_json_emgfile(emgfile=save_emg, filepath=save_filepath) + # Get emgfile + save_emg = self.resdict - except AttributeError: - tk.messagebox.showerror("Information", "Make sure a file is loaded.") + # Save json file + openhdemg.save_json_emgfile(emgfile=save_emg, filepath=save_filepath) + + # End progress + progress.stop() + progress.grid_remove() + + except AttributeError: + tk.messagebox.showerror("Information", "Make sure a file is loaded.") + + # Indicate Progress + progress = ttk.Progressbar(self.left, mode="indeterminate") + progress.grid(row=4, column=0) + progress.start(1) + + # Create a thread to run the save_file function + save_thread = threading.Thread(target=save_file) + save_thread.start() def export_to_excel(self): """ @@ -874,7 +1059,7 @@ def reset_analysis(self): # user decided to rest analysis try: # reload original file - if self.filetype.get() in ["OTB", "DEMUSE", "OPENHDEMG", "CUSTOMCSV"]: + if self.filetype.get() in ["OTB", "DEMUSE", "OPENHDEMG", "CUSTOMCSV", "DELSYS"]: if self.filetype.get() == "OTB": self.resdict = openhdemg.emg_from_otb( filepath=self.file_path, @@ -893,7 +1078,9 @@ def reset_analysis(self): self.resdict = openhdemg.emg_from_customcsv( filepath=self.file_path ) - + elif self.filetype.get() == "DELSYS": + self.resdict = openhdemg.emg_from_delsys(rawemg_filepath=self.file_path, + mus_directory=self.mus_path) # Update Filespecs ttk.Label( self.left, text=str(len(self.resdict["RAW_SIGNAL"].columns)) @@ -949,6 +1136,15 @@ def open_advanced_tools(self): """ Open a window for advanced analysis tools. """ + # Disable config for DELSYS files + if self.resdict["SOURCE"] == "DELSYS": + tk.messagebox.showerror( + "Information", + "Advanced Tools for Delsys are only accessible from the library.", + ) + # NOTE I would show an error message + return + # Open window self.a_window = tk.Toplevel(bg="LightBlue4", height=200) self.a_window.title("Advanced Tools Window") @@ -1001,7 +1197,7 @@ def open_advanced_tools(self): matrix_code = ttk.Combobox( self.a_window, width=10, textvariable=self.mat_code_adv ) - matrix_code["values"] = ("GR08MM1305", "GR04MM1305", "GR10MM0808", "None") + matrix_code["values"] = ("GR08MM1305", "GR04MM1305", "GR10MM0808", "Trigno Galileo Sensor", "None") matrix_code["state"] = "readonly" matrix_code.grid(row=4, column=1, sticky=(W, E)) self.mat_code_adv.set("GR08MM1305") @@ -1066,8 +1262,8 @@ def in_gui_plotting(self, plot="idr"): -------- plot_refsig, plot_idr in the library. 
""" - try: - if self.filetype.get() in ["OTB_REFSIG", "CUSTOMCSV_REFSIG"]: + try: # NOTE as I previously said, OPENHDEMG (.json) files can contain anything. better check SOURCE. + if self.resdict["SOURCE"] in ["OTB_REFSIG", "CUSTOMCSV_REFSIG", "DELSYS_REFSIG"]: self.fig = openhdemg.plot_refsig( emgfile=self.resdict, showimmediately=False, tight_layout=True ) @@ -1123,6 +1319,9 @@ def sort_mus(self): except AttributeError: tk.messagebox.showerror("Information", "Make sure a file is loaded.") + except KeyError: + tk.messagebox.showerror("Information", "Sorting not possible when ≤ 1" + + "\nMU is present in the File (i.e. Refsigs)") # ----------------------------------------------------------------------------------------------- # Removal of single motor units @@ -1149,7 +1348,7 @@ def remove_mus(self): # Select Motor Unit ttk.Label(self.head, text="Select MU:").grid( - column=0, row=0, padx=5, pady=5 + column=1, row=0, padx=5, pady=5, sticky=W ) self.mu_to_remove = StringVar() @@ -1159,18 +1358,21 @@ def remove_mus(self): ) removed_mu["values"] = removed_mu_value removed_mu["state"] = "readonly" - removed_mu.grid(column=1, row=0, sticky=(W, E), padx=5, pady=5) + removed_mu.grid(column=1, row=1, columnspan=2, sticky=(W, E), padx=5, pady=5) # Remove Motor unit remove = ttk.Button(self.head, text="Remove MU", command=self.remove) - remove.grid(column=1, row=1, sticky=(W, E), padx=5, pady=5) + remove.grid(column=1, row=2, sticky=(W, E), padx=5, pady=5) + # Remove empty MUs + remove_empty = ttk.Button(self.head, text="Remove empty MUs", command=self.remove_empty) + remove_empty.grid(column=2, row=2, padx=5, pady=5) else: tk.messagebox.showerror("Information", "Make sure a file is loaded.") def remove(self): """ - Instance methof that actually removes a selected motor unit based on user specification. + Instance method that actually removes a selected motor unit based on user specification. Executed when button "Remove MU" in Motor Unit Removal Window is pressed. The emgfile and the plot are subsequently updated. @@ -1197,7 +1399,44 @@ def remove(self): ) removed_mu["values"] = removed_mu_value removed_mu["state"] = "readonly" - removed_mu.grid(column=1, row=0, sticky=(W, E), padx=5, pady=5) + removed_mu.grid(column=1, row=1, columnspan=2, sticky=(W, E), padx=5, pady=5) + + # Update plot + if hasattr(self, "fig"): + self.in_gui_plotting() + + except AttributeError: + tk.messagebox.showerror("Information", "Make sure a file is loaded.") + + def remove_empty(self): + """ + Instance method that removes all empty MUs. + + Executed when button "Remove empty MUs" in Motor Unit Removal Window is pressed. + The emgfile and the plot are subsequently updated. + + See Also + -------- + delete_empty_mus in library. 
+ """ + try: + # Get resdict with MU removed + self.resdict = openhdemg.delete_empty_mus(self.resdict) + + # Upate MU number + ttk.Label(self.left, text=str(self.resdict["NUMBER_OF_MUS"])).grid( + column=2, row=3, sticky=(W, E) + ) + + # Update selection field + self.mu_to_remove = StringVar() + removed_mu_value = [*range(0, self.resdict["NUMBER_OF_MUS"])] + removed_mu = ttk.Combobox( + self.head, width=10, textvariable=self.mu_to_remove + ) + removed_mu["values"] = removed_mu_value + removed_mu["state"] = "readonly" + removed_mu.grid(column=1, row=1, columnspan=2, sticky=(W, E), padx=5, pady=5) # Update plot if hasattr(self, "fig"): @@ -1511,12 +1750,24 @@ def resize_file(self): titlesize=10, ) start, end = points[0], points[1] - self.resdict, _, _ = openhdemg.resize_emgfile( - emgfile=self.resdict, area=[start, end] - ) + + # Delsys requires different handling for resize + if self.resdict["SOURCE"] == "DELSYS": + self.resdict, _, _ = openhdemg.resize_emgfile( + emgfile=self.resdict, area=[start, end], accuracy="maintain" + ) + else: + self.resdict, _, _ = openhdemg.resize_emgfile( + emgfile=self.resdict, area=[start, end] + ) # Update Plot self.in_gui_plotting() + # Update filelength + ttk.Label(self.left, text=str(self.resdict["EMG_LENGTH"])).grid( + column=2, row=4, sticky=(W, E) + ) + except AttributeError: tk.messagebox.showerror("Information", "Make sure a file is loaded.") @@ -1990,7 +2241,7 @@ def plot_emg(self): ttk.Label(self.head, text="Matrix Code").grid(row=0, column=3, sticky=(W)) self.mat_code = StringVar() matrix_code = ttk.Combobox(self.head, width=15, textvariable=self.mat_code) - matrix_code["values"] = ("GR08MM1305", "GR04MM1305", "GR10MM0808", "None") + matrix_code["values"] = ("GR08MM1305", "GR04MM1305", "GR10MM0808", "Trigno Galileo Sensor", "None") matrix_code["state"] = "readonly" matrix_code.grid(row=0, column=4, sticky=(W, E)) self.mat_code.set("GR08MM1305") @@ -2008,6 +2259,8 @@ def plot_emg(self): orientation["state"] = "readonly" orientation.grid(row=1, column=4, sticky=(W, E)) self.mat_orientation.set("180") + if self.resdict["SOURCE"] == "DELSYS": + orientation.config(state="disabled") # Plot derivation # Button @@ -2056,6 +2309,9 @@ def plot_emg(self): config_muap["state"] = "readonly" config_muap.grid(row=4, column=4, sticky=(W, E)) self.muap_config.set("Configuration") + # Disable config for DELSYS files + if self.resdict["SOURCE"] == "DELSYS": + config_muap.config(state="disabled") # Combobox MU Number self.muap_munum = StringVar() @@ -2072,6 +2328,8 @@ def plot_emg(self): timewindow["values"] = ("25", "50", "100", "200") timewindow.grid(row=4, column=6, sticky=(W, E)) self.muap_time.set("Timewindow (ms)") + if self.resdict["SOURCE"] == "DELSYS": + timewindow.config(state="disabled") # Matrix Illustration Graphic matrix_canvas = Canvas(self.head, height=150, width=600, bg="white") @@ -2215,7 +2473,9 @@ def plt_refsignal(self): # Plot reference signal openhdemg.plot_refsig( - emgfile=self.resdict, timeinseconds=self.time_sec.get(), figsize=figsize + emgfile=self.resdict, + timeinseconds=eval(self.time_sec.get()), + figsize=figsize, ) def plt_mupulses(self): @@ -2451,76 +2711,83 @@ def plot_derivation(self): ) except UnboundLocalError: tk.messagebox.showerror( - "Information", "Enter valid Configuration and Matrx Column." + "Information", "Enter valid Configuration and Matrix Column." 
) except KeyError: - tk.messagebox.showerror("Information", "Enter valid Matrx Column.") + tk.messagebox.showerror("Information", "Enter valid Matrix Column.") def plot_muaps(self): """ Instance methos to plot motor unit action potenital obtained from STA from one or - multiple MUs. + multiple MUs. Except for DELSYS files, where the STA is not comupted. There is no limit to the number of MUs and STA files that can be overplotted. ``Remember: the different STAs should be matched`` with same number of electrode, processing (i.e., differential) and computed on the same timewindow. """ try: - if self.mat_code.get() == "None": - # Get rows and columns and turn into list - list_rcs = [int(i) for i in self.matrix_rc.get().split(",")] + # DELSYS requires different MUAPS plot + if self.resdict["SOURCE"] == "DELSYS": + figsize = [int(i) for i in self.size_fig.get().split(",")] + muaps_dict = openhdemg.extract_delsys_muaps(self.resdict) + openhdemg.plot_muaps(muaps_dict[int(self.muap_munum.get())], figsize=figsize) + + else: + if self.mat_code.get() == "None": + # Get rows and columns and turn into list + list_rcs = [int(i) for i in self.matrix_rc.get().split(",")] - try: + try: + # Sort emg file + sorted_file = openhdemg.sort_rawemg( + emgfile=self.resdict, + code=self.mat_code.get(), + orientation=int(self.mat_orientation.get()), + n_rows=list_rcs[0], + n_cols=list_rcs[1] + ) + + except ValueError: + tk.messagebox.showerror( + "Information", + "Number of specified rows and columns must match" + + "\nnumber of channels." + ) + return + + else: # Sort emg file sorted_file = openhdemg.sort_rawemg( emgfile=self.resdict, code=self.mat_code.get(), orientation=int(self.mat_orientation.get()), - n_rows=list_rcs[0], - n_cols=list_rcs[1] ) - except ValueError: - tk.messagebox.showerror( - "Information", - "Number of specified rows and columns must match" - + "\nnumber of channels." 
- ) - return - - else: - # Sort emg file - sorted_file = openhdemg.sort_rawemg( - emgfile=self.resdict, - code=self.mat_code.get(), - orientation=int(self.mat_orientation.get()), - ) - - # calcualte derivation - if self.muap_config.get() == "Single differential": - diff_file = openhdemg.diff(sorted_rawemg=sorted_file) + # calcualte derivation + if self.muap_config.get() == "Single differential": + diff_file = openhdemg.diff(sorted_rawemg=sorted_file) - elif self.muap_config.get() == "Double differential": - diff_file = openhdemg.double_diff(sorted_rawemg=sorted_file) + elif self.muap_config.get() == "Double differential": + diff_file = openhdemg.double_diff(sorted_rawemg=sorted_file) - elif self.muap_config.get() == "Monopolar": - diff_file = sorted_file + elif self.muap_config.get() == "Monopolar": + diff_file = sorted_file - # Calculate STA dictionary - # Plot deviation - sta_dict = openhdemg.sta( - emgfile=self.resdict, - sorted_rawemg=diff_file, - firings="all", - timewindow=int(self.muap_time.get()), - ) + # Calculate STA dictionary + # Plot deviation + sta_dict = openhdemg.sta( + emgfile=self.resdict, + sorted_rawemg=diff_file, + firings="all", + timewindow=int(self.muap_time.get()), + ) - # Create list of figsize - figsize = [int(i) for i in self.size_fig.get().split(",")] + # Create list of figsize + figsize = [int(i) for i in self.size_fig.get().split(",")] - # Plot MUAPS - openhdemg.plot_muaps(sta_dict[int(self.muap_munum.get())], figsize=figsize) + # Plot MUAPS + openhdemg.plot_muaps(sta_dict[int(self.muap_munum.get())], figsize=figsize) except ValueError: tk.messagebox.showerror( @@ -2539,7 +2806,7 @@ def plot_muaps(self): tk.messagebox.showerror("Information", "Enter valid Configuration.") except KeyError: - tk.messagebox.showerror("Information", "Enter valid Matrx Column.") + tk.messagebox.showerror("Information", "Enter valid Matrix Column.") # ----------------------------------------------------------------------------------------------- # Advanced Analysis @@ -2548,13 +2815,15 @@ def advanced_analysis(self): """ Open top-level windows based on the selected advanced method. """ + if self.advanced_method.get() == "Motor Unit Tracking": head_title = "MUs Tracking Window" - elif self.advanced_method.get() == "Duplicate Removal": - head_title = "Duplicate Removal Window" - else: + elif self.advanced_method.get() == "Conduction Velocity": head_title = "Conduction Velocity Window" + else: + head_title = "Duplicate Removal Window" + self.head = tk.Toplevel(bg="LightBlue4") self.head.title(head_title) @@ -2661,9 +2930,7 @@ def advanced_analysis(self): # Add Which widget and update the track button # to match functionalities required for duplicate removal if self.advanced_method.get() == "Duplicate Removal": - - # Update title - + # Add Which label ttk.Label(self.head, text="Which").grid(column=0, row=14) # Combobox for Which option @@ -2710,7 +2977,14 @@ def advanced_analysis(self): + "\nnumber of channels." ) return - + # # DELSYS conduction velocity not available + # elif self.mat_code_adv.get() == "Trigno Galileo Sensor": + # tk.messagebox.showerror( + # "Information", + # "MUs conduction velocity estimation is not available for this matrix." 
+ # ) + # return + else: # Sort emg file sorted_rawemg = openhdemg.sort_rawemg( diff --git a/openhdemg/library/__init__.py b/openhdemg/library/__init__.py index 8352b61..1df624e 100644 --- a/openhdemg/library/__init__.py +++ b/openhdemg/library/__init__.py @@ -12,8 +12,10 @@ from openhdemg.library.openfiles import ( emg_from_otb, emg_from_demuse, - refsig_from_otb, + emg_from_delsys, emg_from_customcsv, + refsig_from_otb, + refsig_from_delsys, refsig_from_customcsv, save_json_emgfile, emg_from_json, diff --git a/openhdemg/library/analysis.py b/openhdemg/library/analysis.py index 86ca7f2..dd6edd9 100644 --- a/openhdemg/library/analysis.py +++ b/openhdemg/library/analysis.py @@ -11,7 +11,13 @@ import math -def compute_thresholds(emgfile, event_="rt_dert", type_="abs_rel", mvc=0): +def compute_thresholds( + emgfile, + event_="rt_dert", + type_="abs_rel", + n_firings=1, + mvc=0, +): """ Calculates recruitment/derecruitment thresholds. @@ -39,6 +45,13 @@ def compute_thresholds(emgfile, event_="rt_dert", type_="abs_rel", mvc=0): Only relative tresholds will be calculated. ``abs`` Only absolute tresholds will be calculated. + n_firings : int, default 1 + The number of firings used to calculate recruitment/derecruitment + thresholds. If n_firings = 1, the threshold is the value of the + reference signal at the instant in which the firing happens. + If n_firings > 1, the threshold is the average value of the + reference signal at the instants in which the n consecutive firings + happen. mvc : float, default 0 The maximum voluntary contraction (MVC). if mvc is 0, the user is asked to input MVC; otherwise, the value @@ -126,14 +139,14 @@ def compute_thresholds(emgfile, event_="rt_dert", type_="abs_rel", mvc=0): for mu in range(NUMBER_OF_MUS): # Manage the exception of empty MUs if len(MUPULSES[mu]) > 0: - # Detect the first and last firing of the MU and - mup_rec = MUPULSES[mu][0] - mup_derec = MUPULSES[mu][-1] + # Detect the first and last firing of the MU + mup_rec = MUPULSES[mu][0:n_firings] + mup_derec = MUPULSES[mu][-n_firings:] # Calculate absolute and relative RT and DERT if requested - abs_RT = ((float(REF_SIGNAL.at[mup_rec, 0]) * mvc) / 100) - abs_DERT = ((float(REF_SIGNAL.at[mup_derec, 0]) * mvc) / 100) - rel_RT = float(REF_SIGNAL.at[mup_rec, 0]) - rel_DERT = float(REF_SIGNAL.at[mup_derec, 0]) + abs_RT = ((float(REF_SIGNAL.iloc[mup_rec, 0].mean()) * mvc) / 100) + abs_DERT = ((float(REF_SIGNAL.iloc[mup_derec, 0].mean()) * mvc) / 100) + rel_RT = float(REF_SIGNAL.iloc[mup_rec, 0].mean()) + rel_DERT = float(REF_SIGNAL.iloc[mup_derec, 0].mean()) else: abs_RT = np.nan @@ -362,10 +375,9 @@ def compute_dr( c2 = math.isnan(index_endsteady) if not c1 and not c2: - # # DR drstartsteady - # Use +1 because to work only on the steady state (here and after) - # because the idr is calculated on the previous firing (on the - # ramp). + # DR drstartsteady + # Use +1 to work only on the steady state (here and after) + # because the idr is calculated on the previous firing. selected_idr = idr[mu]["idr"].loc[ index_startsteady + 1: index_startsteady + n_firings_steady ] @@ -427,6 +439,7 @@ def compute_dr( def basic_mus_properties( emgfile, + n_firings_rt_dert=1, n_firings_RecDerec=4, n_firings_steady=10, start_steady=-1, @@ -451,6 +464,13 @@ def basic_mus_properties( ---------- emgfile : dict The dictionary containing the emgfile. + n_firings_rt_dert : int, default 1 + The number of firings used to calculate recruitment/derecruitment + thresholds. 
If n_firings_rt_dert = 1, the threshold is the value of the + reference signal at the instant in which the firing happens. + If n_firings_rt_dert > 1, the threshold is the average value of the + reference signal at the instants in which the n consecutive firings + happen. n_firings_RecDerec : int, default 4 The number of firings at recruitment and derecruitment to consider for the calculation of the DR. @@ -536,6 +556,12 @@ def basic_mus_properties( ) start_steady, end_steady = points[0], points[1] + # Double check if the user selected before or after the start of the signal + if start_steady < 0: + start_steady = 0 + if end_steady < 0: + end_steady = 0 + # Collect the information to export # First: create a dataframe that contains all the output exportable_df = [] @@ -650,7 +676,11 @@ def basic_mus_properties( ) # Calculate RT and DERT - mus_thresholds = compute_thresholds(emgfile=emgfile, mvc=mvc) + mus_thresholds = compute_thresholds( + emgfile=emgfile, + n_firings=n_firings_rt_dert, + mvc=mvc, + ) exportable_df = pd.concat([exportable_df, mus_thresholds], axis=1) # Calculate DR at recruitment, derecruitment, all, start, end of the diff --git a/openhdemg/library/electrodes.py b/openhdemg/library/electrodes.py index 97eddc7..899b8c6 100644 --- a/openhdemg/library/electrodes.py +++ b/openhdemg/library/electrodes.py @@ -93,6 +93,44 @@ } """ +DELSYSelectrodes_tuple = ( + "Trigno Galileo Sensor", +) +""" +Tuple containing the names of different recording electrodes. + +>>> DELSYSelectrodes_tuple +( + 'Trigno Galileo Sensor', +) +""" + +DELSYSelectrodes_ied = { + "Trigno Galileo Sensor": 5, +} +""" +A dict containing information about the interelectrode distance for each +matrix in DELSYSelectrodes_tuple. + +>>> DELSYelectrodes_ied +{ + 'Trigno Galileo Sensor': 5, +} +""" + +DELSYSelectrodes_Nelectrodes = { + "Trigno Galileo Sensor": 4, +} +""" +A dict containing information about the number of electrodes for each +matrix in DELSYSelectrodes_tuple. + +>>> DELSYSelectrodes_Nelectrodes +{ + 'Trigno Galileo Sensor': 4, +} +""" + # --------------------------------------------------------------------- # Sort the electrodes of different matrices. @@ -111,23 +149,31 @@ def sort_rawemg( To date, built-in sorting functions have been implemented for the matrices: - Code (Orientation) - GR08MM1305 (0, 180), - GR04MM1305 (0, 180), - GR10MM0808 (0, 180). + Code (Orientation) + GR08MM1305 (0, 180), + GR04MM1305 (0, 180), + GR10MM0808 (0, 180), + Trigno Galileo Sensor (na). Parameters ---------- emgfile : dict The dictionary containing the emgfile. - code : str {"GR08MM1305", "GR04MM1305", "GR10MM0808", "None"}, default "GR08MM1305" - The code of the matrix used. + code : str, default "GR08MM1305" + The code of the matrix used. It can be one of: + + ``GR08MM1305`` + ``GR04MM1305`` + ``GR10MM0808`` + ``Trigno Galileo Sensor`` If "None", the electodes are not sorted but n_rows and n_cols must be specified when dividebycolumn == True. orientation : int {0, 180}, default 180 Orientation in degree of the matrix. E.g. 180 corresponds to the matrix connection toward the researcher or the ground (depending on the limb). + Ignore if using the "Trigno Galileo Sensor". In this case, channels + will be oriented as in the Delsys Neuromap Explorer software. dividebycolumn = bool, default True Whether to return the sorted channels classified by matrix column. 
n_rows : None or int, default None @@ -142,11 +188,12 @@ def sort_rawemg( Returns ------- sorted_rawemg : dict or pd.DataFrame - If dividebycolumn == True a dict containing the sorted electrodes. - Every key of the dictionary represents a different column of the - matrix. Rows are stored in the dict as a pd.DataFrame. + If dividebycolumn == True a dict containing the sorted electrodes is + returned. Every key of the dictionary represents a different column of + the matrix. Rows are stored in the dict as a pd.DataFrame. If dividebycolumn == False a pd.DataFrame containing the sorted - electrodes. The matrix channels are stored in the pd.DataFrame columns. + electrodes is returned. The matrix channels are stored in the + pd.DataFrame columns. Notes ----- @@ -220,7 +267,14 @@ def sort_rawemg( 62463 0.020854 0.028992 0.017802 ... 0.013733 0.037638 NaN """ - if code not in ["GR08MM1305", "GR04MM1305", "GR10MM0808", "None"]: + valid_codes = [ + "GR08MM1305", + "GR04MM1305", + "GR10MM0808", + "Trigno Galileo Sensor", + "None", + ] + if code not in valid_codes: return ValueError("Unsupported code in sort_rawemg()") # Work on a copy of the RAW_SIGNAL @@ -339,6 +393,23 @@ def sort_rawemg( 63, 62, 61, 60, 59, 58, 57, 56, ] + elif code == "Trigno Galileo Sensor": + """ + Channel Order Trigno Galileo Sensor + + 1 + 4 2 + 3 + + Will be represented as: + 0 + 0 1 + 1 2 + 2 3 + 3 4 + """ + base0_sorting_order = [0, 1, 2, 3] + else: pass @@ -364,6 +435,10 @@ def sort_rawemg( n_rows = 8 n_cols = 8 + elif code == "Trigno Galileo Sensor": + n_rows = 4 + n_cols = 1 + else: # Check if n_rows and n_cols have been passed if not isinstance(n_rows, int): diff --git a/openhdemg/library/info.py b/openhdemg/library/info.py index f4528c8..f7afc01 100644 --- a/openhdemg/library/info.py +++ b/openhdemg/library/info.py @@ -69,7 +69,7 @@ def data(self, emgfile): . """ - if emgfile["SOURCE"] in ["DEMUSE", "OTB", "CUSTOM"]: + if emgfile["SOURCE"] in ["DEMUSE", "OTB", "CUSTOMCSV", "DELSYS"]: print("\nData structure of the emgfile") print("-----------------------------\n") print(f"emgfile type is:\n{type(emgfile)}\n") @@ -93,7 +93,7 @@ def data(self, emgfile): print(f"emgfile['BINARY_MUS_FIRING'] is a {type(emgfile['BINARY_MUS_FIRING'])} of value:\n{emgfile['BINARY_MUS_FIRING']}\n") print(f"emgfile['EXTRAS'] is a {type(emgfile['EXTRAS'])} of value:\n{emgfile['EXTRAS']}\n") - elif emgfile["SOURCE"] in ["OTB_REFSIG", "CUSTOMCSV_REFSIG"]: + elif emgfile["SOURCE"] in ["OTB_REFSIG", "CUSTOMCSV_REFSIG", "DELSYS_REFSIG"]: print("\nData structure of the emgfile") print("-----------------------------\n") print(f"emgfile type is:\n{type(emgfile)}\n") @@ -218,33 +218,7 @@ def aboutus(self): Us -- - The developers of this project are: - - Mr. Giacomo Valli: - The creator of the project and the developer of the library. - \x1B[3m - Mr. Giacomo Valli obtained a master degree in Sports Science and a - research fellowship in molecular biology of exercise at the - University of Urbino (IT). - He is currently a PhD student at the University of Padova (IT) in - neuromuscular physiology. - He is investigating the electrophysiological modifications - happening during disuse, disease and aging and linking this - information to the molecular alterations of the muscle. - \x1B[0m - Mr. Paul Ritsche: - The developer of the GUI. - \x1B[3m - Mr. Paul Ritsche obtained a master degree in Sports Science at the - University of Basel (CH). - He is currently a research associate at the University of Basel - (CH) focusing on muscle ultrasonography. 
- He is investigating automatic ultrasonography image analysis - methods to evaluate muscle morphological as well architectural - parameters. - \x1B[0m - - For the full list of contributors visit: + For the full list of contributors and developers visit: https://www.giacomovalli.com/openhdemg/about-us/ """ @@ -271,14 +245,14 @@ def contacts(self): "Primary contact": "openhdemg@gmail.com", "Twitter": "@openhdemg", "Maintainer": "Giacomo Valli", - "Maintainer Email": "giacomo.valli@phd.unipd.it", + "Maintainer Email": "giacomo.valli@unibs.it", """ contact = { "Primary contact": "openhdemg@gmail.com", "Twitter": "@openhdemg", "Maintainer": "Giacomo Valli", - "Maintainer Email": "giacomo.valli@phd.unipd.it", + "Maintainer Email": "giacomo.valli@unibs.it", } # Pretty dict printing diff --git a/openhdemg/library/muap.py b/openhdemg/library/muap.py index aedd7e2..6b29502 100644 --- a/openhdemg/library/muap.py +++ b/openhdemg/library/muap.py @@ -210,7 +210,59 @@ def double_diff(sorted_rawemg): return dd -# This function exploits parallel processing to compute the STA +def extract_delsys_muaps(emgfile): + """ + Extract MUAPs obtained from Delsys decomposition. + + The extracted MUAPs will be stored in the same structure of the MUAPs + obtained with the ``sta`` funtion. + + Parameters + ---------- + emgfile : dict + The dictionary containing the emgfile. + + Returns + ------- + muaps_dict : dict + dict containing a dict of MUAPs (pd.DataFrame) for every MUs. + + See also + -------- + - sta : Computes the spike-triggered average (STA) of every MUs. + + Notes + ----- + The returned file can be used wherever MUAPs from spike triggered + averaging are required. + + Examples + -------- + Visualise the MUAPs of the first MU. + + >>> import openhdemg.library as emg + >>> emgfile = emg.askopenfile(filesource="DELSYS") + >>> muaps = emg.extract_delsys_muaps(emgfile) + >>> emg.plot_muaps(muaps[0]) + + Visualise the MUAPs of the first 3 MUs. 
+ + >>> import openhdemg.library as emg + >>> emgfile = emg.askopenfile(filesource="DELSYS") + >>> muaps = emg.extract_delsys_muaps(emgfile) + >>> emg.plot_muaps([muaps[0], muaps[1], muaps[2]]) + """ + + all_muaps = emgfile["EXTRAS"] + muaps_dict = {mu: None for mu in range(emgfile["NUMBER_OF_MUS"])} + for mu in range(emgfile["NUMBER_OF_MUS"]): + df = pd.DataFrame(all_muaps.filter(regex=f"MU_{mu}_CH_")) + df.columns = range(len(df.columns)) + muaps_dict[mu] = {"col0": df} + + return muaps_dict + + def sta( emgfile, sorted_rawemg, firings=[0, 50], timewindow=50 ): @@ -325,72 +377,31 @@ def sta( sta_dict = {mu: {} for mu in range(emgfile["NUMBER_OF_MUS"])} # Calculate STA on sorted_rawemg for every mu and put it into sta_dict[mu] - # Loop all the MUs to fill sta_dict for mu in sta_dict.keys(): - pass - - # STA function to run in parallel - def parallel(mu): # Set firings if firings="all" if firings == "all": firings_ = [0, len(emgfile["MUPULSES"][mu])] else: firings_ = firings - # Loop the matrix columns + # Calculate STA for each column in sorted_rawemg sorted_rawemg_sta = {} for col in sorted_rawemg.keys(): - # Container of STA for matrix rows row_dict = {} - # Loop the matrix rows for row in sorted_rawemg[col].columns: - - # Find the mupulses thismups = emgfile["MUPULSES"][mu][firings_[0]: firings_[1]] - - # Container of ST area for averaging - df = {} - for pos, pulse in enumerate(thismups): - df[pos] = ( - sorted_rawemg[col][row] - .iloc[pulse - halftime: pulse + halftime] - .reset_index(drop=True) - ) - - # Average df columns and fill df - df = pd.DataFrame(df) - df = df.mean(axis="columns") - - row_dict[row] = df - + df = sorted_rawemg[col][row].to_numpy() + # Calculate STA using NumPy vectorized operations + sta_values = [] + for pulse in thismups: + sta_values.append(df[pulse - halftime: pulse + halftime]) + row_dict[row] = np.mean(sta_values, axis=0) sorted_rawemg_sta[col] = pd.DataFrame(row_dict) - - # Add a reference to the MU number to sort the values returned by - # parallel processing - sorted_rawemg_sta["munumber"] = mu - - return sorted_rawemg_sta - # TODO verify built-in options to return from joblib.Parallel - - # Start parallel execution - # Meausere running time - t0 = time.time() - - res = Parallel(n_jobs=-1)(delayed(parallel)(mu) for mu in sta_dict.keys()) - - t1 = time.time() - print(f"\nTime of sta parallel processing: {round(t1-t0, 2)} Sec\n") - - # Sort output of the parallel processing according to MU number - for i in res: - mu = i["munumber"] - del i["munumber"] - sta_dict[mu] = i + sta_dict[mu] = sorted_rawemg_sta return sta_dict -# This function exploits parallel processing to compute the MUAPs def st_muap(emgfile, sorted_rawemg, timewindow=50): """ Generate spike triggered MUAPs of every MUs. @@ -414,7 +425,7 @@ def st_muap(emgfile, sorted_rawemg, timewindow=50): stmuap : dict dict containing a dict of ST MUAPs (pd.DataFrame) for every MUs. The pd.DataFrames containing the ST MUAPs are organised based on matrix - rows (dict) and matrix channel. + rows (dict) and matrix channels. For example, the ST MUAPs of the first MU (0), in the second electrode of the first matrix column can be accessed as stmuap[0]["col0"][1]. 
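The serial NumPy rewrite of `sta()` / `st_muap()` above boils down to slicing a window of ± half the time window around each firing and, for the STA, averaging the windows along axis 0. A self-contained sketch with synthetic data (the channel signal, firing samples and sampling frequency are all made up):

```python
# Illustrative sketch of the window extraction used by the rewritten functions.
import numpy as np
import pandas as pd

FSAMP = 2048
timewindow = 50  # ms
timewindow_samples = round((timewindow / 1000) * FSAMP)
halftime = round(timewindow_samples / 2)

emg_channel = np.random.randn(10 * FSAMP)        # hypothetical single channel
mupulses = np.array([4000, 6100, 8300, 12000])   # hypothetical firing samples

# One window per firing (st_muap-like), then their average (sta-like)
windows = [emg_channel[p - halftime: p + halftime] for p in mupulses]
st = pd.DataFrame({pos: w for pos, w in enumerate(windows)})
sta = np.mean(windows, axis=0)

print(st.shape, sta.shape)  # (window samples, n firings), (window samples,)
```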
@@ -460,64 +471,33 @@ def st_muap(emgfile, sorted_rawemg, timewindow=50): timewindow_samples = round((timewindow / 1000) * emgfile["FSAMP"]) halftime = round(timewindow_samples / 2) - # Container of the STA for every MUs - # {0: {}, 1: {}, 2: {}, 3: {}} - stmuap = {mu: {} for mu in range(emgfile["NUMBER_OF_MUS"])} - - # Calculate ST MUAPs on sorted_rawemg for every mu and put it into - # sta_dict[mu]. Loop all the MUs to fill sta_dict. - # ST MUAPS function to run in parallel - def parallel(mu): - # Loop the matrix columns - sorted_rawemg_st = {} - for col in sorted_rawemg.keys(): + # Container of the ST for every MUs + # {0: {}, 1: {}, 2: {}, 3: {} ...} + sta_dict = {mu: {} for mu in range(emgfile["NUMBER_OF_MUS"])} - # Container of ST MUAPs for matrix rows - row_dict = {} - # Loop the matrix rows + # Calculate ST on sorted_rawemg for every mu and put it into sta_dict[mu] + for mu in sta_dict.keys(): + # Container for the st of each MUs' matrix column. + sta_dict_cols = {} + # Get MUPULSES for this MU + thismups = emgfile["MUPULSES"][mu] + # Calculate ST for each channel in each column in sorted_rawemg + for col in sorted_rawemg.keys(): + # Container for the st of each channel (row) in that matrix column. + sta_dict_crows = {} for row in sorted_rawemg[col].columns: - - # Find the mupulses - thismups = emgfile["MUPULSES"][mu] - - # Container of ST area for averaging - df = {} + this_emgsig = sorted_rawemg[col][row].to_numpy() + # Container for the pd.DataFrame with MUAPs of each channel. + crow_muaps = {} + # Calculate ST using NumPy vectorized operations for pos, pulse in enumerate(thismups): - df[pos] = ( - sorted_rawemg[col][row] - .iloc[pulse - halftime: pulse + halftime] - .reset_index(drop=True) - ) - - # Fill df with ST MUAPs - df = pd.DataFrame(df) - - row_dict[row] = df - - sorted_rawemg_st[col] = row_dict - - # Add a reference to the MU number to sort the values returned by - # parallel processing - sorted_rawemg_st["munumber"] = mu - - return sorted_rawemg_st - - # Start parallel execution - # Meausere running time - t0 = time.time() + muap = this_emgsig[pulse - halftime: pulse + halftime] + crow_muaps[pos] = muap + sta_dict_crows[row] = pd.DataFrame(crow_muaps) + sta_dict_cols[col] = sta_dict_crows + sta_dict[mu] = sta_dict_cols - res = Parallel(n_jobs=1)(delayed(parallel)(mu) for mu in stmuap.keys()) - - t1 = time.time() - print(f"\nTime of st_muap parallel processing: {round(t1-t0, 2)} Sec\n") - - # Sort output of the parallel processing according to MU number - for i in res: - mu = i["munumber"] - del i["munumber"] - stmuap[mu] = i - - return stmuap + return sta_dict def unpack_sta(sta_mu): @@ -736,7 +716,6 @@ def align_by_xcorr(sta_mu1, sta_mu2, finalduration=0.5): # TODO update examples for code="None" # This function exploits parallel processing: -# - sta: calls the emg.sta function which is executed in parallel # - align and xcorr are processed in parallel # - plotting is processed in parallel def tracking( @@ -750,12 +729,13 @@ def tracking( orientation=180, n_rows=None, n_cols=None, + custom_muaps=None, exclude_belowthreshold=True, filter=True, show=False, ): """ - Track MUs across two different files. + Track MUs across two files comparing the MUAPs' shape and distribution. Parameters ---------- @@ -779,8 +759,13 @@ def tracking( threshold : float, default 0.8 The 2-dimensional cross-correlation minimum value to consider two MUs to be the same. Ranges 0-1. 
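For intuition on the 0-1 `threshold` used by `tracking()`: the XCC reported for each MU pair is a normalised 2-dimensional cross-correlation of the two MUAP maps. The snippet below is only a conceptual illustration on random data (a zero-lag, Pearson-style coefficient), not the library's internal cross-correlation routine.

```python
# Conceptual sketch of a normalised similarity score between two MUAP maps.
import numpy as np

rng = np.random.default_rng(0)
muap_a = rng.standard_normal((102, 13))                   # samples x channels
muap_b = muap_a + 0.2 * rng.standard_normal((102, 13))    # similar MU + noise

a = (muap_a - muap_a.mean()).ravel()
b = (muap_b - muap_b.mean()).ravel()
xcc = float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b)))
print(round(xcc, 3))  # close to 1 for matching MUs, near 0 for unrelated ones
```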
- matrixcode : str {"GR08MM1305", "GR04MM1305", "GR10MM0808", "None"}, default "GR08MM1305" - The code of the matrix used. This is necessary to sort the channels in + matrixcode : str, default "GR08MM1305" + The code of the matrix used. It can be one of: + + ``GR08MM1305`` + ``GR04MM1305`` + ``GR10MM0808`` + This is necessary to sort the channels in the correct order. If matrixcode="None", the electrodes are not sorted. In this case, n_rows and n_cols must be specified. orientation : int {0, 180}, default 180 @@ -794,6 +779,18 @@ def tracking( The number of columns of the matrix. This parameter is used to divide the channels based on the matrix shape. These are normally inferred by the matrix code and must be specified only if code == None. + custom_muaps : None or list, default None + With this parameter, it is possible to perform MUs tracking on MUAPs + computed with custom techniques. If this parameter is None (default), + MUs tracking is performed on the MUAPs computed via spike triggered + averaging. Otherwise, it is possible to pass a list of 2 dictionaries + containing the MUAPs of the MUs from 2 different files. These + dictionaries should be structured as the output of the ``sta`` + function. If custom MUAPs are passed, all the previous parameters + (except for ``emgfile1`` and ``emgfile2`` can be ignored). + If custom MUAPs are provided, these are not aligned by the algorithm, + contrary to what is done for MUAPs obtained via spike triggered + averaging. exclude_belowthreshold : bool, default True Whether to exclude results with XCC below threshold. filter : bool, default True @@ -825,8 +822,8 @@ def tracking( ----- Parallel processing can improve performances by 5-10 times compared to serial processing. In this function, parallel processing has been - implemented for the tasks involving 2-dimensional cross-correlation, sta - and plotting. + implemented for the tasks involving 2-dimensional cross-correlation, and + plotting. This might change in future releases. Examples -------- @@ -839,7 +836,7 @@ def tracking( ... emgfile1=emgfile1, ... emgfile2=emgfile2, ... firings="all", - ... derivation="mono", + ... derivation="sd", ... timewindow=50, ... threshold=0.8, ... matrixcode="GR08MM1305", @@ -864,43 +861,63 @@ def tracking( 10 22 16 0.836356 """ - # Sort the rawemg for the STAs - emgfile1_sorted = sort_rawemg( - emgfile1, - code=matrixcode, - orientation=orientation, - n_rows=n_rows, - n_cols=n_cols, - ) - emgfile2_sorted = sort_rawemg( - emgfile2, - code=matrixcode, - orientation=orientation, - n_rows=n_rows, - n_cols=n_cols, - ) + # Obtain STAs + if not isinstance(custom_muaps, list): + # Sort the rawemg for the STAs + emgfile1_sorted = sort_rawemg( + emgfile1, + code=matrixcode, + orientation=orientation, + n_rows=n_rows, + n_cols=n_cols, + ) + emgfile2_sorted = sort_rawemg( + emgfile2, + code=matrixcode, + orientation=orientation, + n_rows=n_rows, + n_cols=n_cols, + ) - # Calculate the derivation if needed - if derivation == "mono": - pass - elif derivation == "sd": - emgfile1_sorted = diff(sorted_rawemg=emgfile1_sorted) - emgfile2_sorted = diff(sorted_rawemg=emgfile2_sorted) - elif derivation == "dd": - emgfile1_sorted = double_diff(sorted_rawemg=emgfile1_sorted) - emgfile2_sorted = double_diff(sorted_rawemg=emgfile2_sorted) - else: - raise ValueError( - f"derivation can be one of 'mono', 'sd', dd. 
{derivation} was passed instead" - ) + # Calculate the derivation if needed + if derivation == "mono": + pass + elif derivation == "sd": + emgfile1_sorted = diff(sorted_rawemg=emgfile1_sorted) + emgfile2_sorted = diff(sorted_rawemg=emgfile2_sorted) + elif derivation == "dd": + emgfile1_sorted = double_diff(sorted_rawemg=emgfile1_sorted) + emgfile2_sorted = double_diff(sorted_rawemg=emgfile2_sorted) + else: + raise ValueError( + f"derivation can be one of 'mono', 'sd', 'dd'. {derivation} was passed instead" + ) - # Get the STAs - sta_emgfile1 = sta( - emgfile1, emgfile1_sorted, firings=firings, timewindow=timewindow * 2, - ) - sta_emgfile2 = sta( - emgfile2, emgfile2_sorted, firings=firings, timewindow=timewindow * 2, - ) + # Get the STAs + sta_emgfile1 = sta( + emgfile1, + emgfile1_sorted, + firings=firings, + timewindow=timewindow * 2, + ) + sta_emgfile2 = sta( + emgfile2, + emgfile2_sorted, + firings=firings, + timewindow=timewindow * 2, + ) + + # Obtain custom MUAPs + else: + if len(custom_muaps) == 2: + sta_emgfile1 = custom_muaps[0] + sta_emgfile2 = custom_muaps[1] + if not isinstance(sta_emgfile1, dict): + raise ValueError("custom_muaps[0] is not a dictionary") + if not isinstance(sta_emgfile2, dict): + raise ValueError("custom_muaps[1] is not a dictionary") + else: + raise ValueError("custom_muaps is not a list of two dictionaries") print("\nTracking started") @@ -912,12 +929,15 @@ def parallel(mu_file1): # Loop all the MUs of file 1 # Compare mu_file1 against all the MUs in file2 for mu_file2 in range(emgfile2["NUMBER_OF_MUS"]): # Firs, align the STAs - aligned_sta1, aligned_sta2 = align_by_xcorr( - sta_emgfile1[mu_file1], - sta_emgfile2[mu_file2], - finalduration=0.5 - ) - #aligned_sta1, aligned_sta2 = sta_emgfile1[mu_file1], sta_emgfile2[mu_file2] + if not isinstance(custom_muaps, list): + aligned_sta1, aligned_sta2 = align_by_xcorr( + sta_emgfile1[mu_file1], + sta_emgfile2[mu_file2], + finalduration=0.5 + ) + else: + aligned_sta1 = sta_emgfile1[mu_file1] + aligned_sta2 = sta_emgfile2[mu_file2] # Second, compute 2d cross-correlation df1, _ = unpack_sta(aligned_sta1) @@ -945,7 +965,7 @@ def parallel(mu_file1): # Loop all the MUs of file 1 # Measure running time t0 = time.time() - res = Parallel(n_jobs=8)( + res = Parallel(n_jobs=-1)( delayed(parallel)(mu_file1) for mu_file1 in range(emgfile1["NUMBER_OF_MUS"]) ) @@ -1004,11 +1024,16 @@ def parallel(mu_file1): # Loop all the MUs of file 1 def parallel(ind): # Function for the parallel execution of plotting if tracking_res["XCC"].loc[ind] >= threshold: # Align STA - aligned_sta1, aligned_sta2 = align_by_xcorr( - sta_emgfile1[tracking_res["MU_file1"].loc[ind]], - sta_emgfile2[tracking_res["MU_file2"].loc[ind]], - finalduration=0.5, - ) + if not isinstance(custom_muaps, list): + aligned_sta1, aligned_sta2 = align_by_xcorr( + sta_emgfile1[tracking_res["MU_file1"].loc[ind]], + sta_emgfile2[tracking_res["MU_file2"].loc[ind]], + finalduration=0.5, + ) + else: + aligned_sta1 = sta_emgfile1[tracking_res["MU_file1"].loc[ind]] + aligned_sta2 = sta_emgfile2[tracking_res["MU_file2"].loc[ind]] + title = "MUAPs from MU '{}' in file 1 and MU '{}' in file 2, XCC = {}".format( tracking_res["MU_file1"].loc[ind], tracking_res["MU_file2"].loc[ind], @@ -1045,6 +1070,7 @@ def remove_duplicates_between( orientation=180, n_rows=None, n_cols=None, + custom_muaps=None, filter=True, show=False, which="munumber", @@ -1089,12 +1115,24 @@ def remove_duplicates_between( The number of columns of the matrix. 
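A hedged usage sketch of the new `custom_muaps` route, combining it with the `extract_delsys_muaps()` function introduced earlier in this patch. File paths are placeholders, and `extract_delsys_muaps` is shown via the top-level namespace on the assumption that it is exported like the other MUAP helpers.

```python
# Track Delsys-decomposed MUs using the exported MUAPs instead of computing
# spike-triggered averages (paths below are hypothetical).
import openhdemg.library as emg

emgfile1 = emg.emg_from_delsys(
    rawemg_filepath="path/trial1_raw.mat",
    mus_directory="path/trial1_decomposition",
)
emgfile2 = emg.emg_from_delsys(
    rawemg_filepath="path/trial2_raw.mat",
    mus_directory="path/trial2_decomposition",
)

muaps1 = emg.extract_delsys_muaps(emgfile1)
muaps2 = emg.extract_delsys_muaps(emgfile2)

tracking_res = emg.tracking(
    emgfile1=emgfile1,
    emgfile2=emgfile2,
    threshold=0.8,
    custom_muaps=[muaps1, muaps2],  # skips sorting, derivation, STA and alignment
)
```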
This parameter is used to divide the channels based on the matrix shape. These are normally inferred by the matrix code and must be specified only if code == None. + custom_muaps : None or list, default None + With this parameter, it is possible to perform MUs tracking on MUAPs + computed with custom techniques. If this parameter is None (default), + MUs tracking is performed on the MUAPs computed via spike triggered + averaging. Otherwise, it is possible to pass a list of 2 dictionaries + containing the MUAPs of the MUs from 2 different files. These + dictionaries should be structured as the output of the ``sta`` + function. If custom MUAPs are passed, all the previous parameters + (except for ``emgfile1`` and ``emgfile2`` can be ignored). + If custom MUAPs are provided, these are not aligned by the algorithm, + contrary to what is done for MUAPs obtained via spike triggered + averaging. filter : bool, default True If true, when the same MU has a match of XCC > threshold with multiple MUs, only the match with the highest XCC is returned. show : bool, default False Whether to plot the STA of pairs of MUs with XCC above threshold. - which : str {"munumber", "accuracy"} + which : str {"munumber", "accuracy"}, default "munumber" How to remove the duplicated MUs. ``munumber`` @@ -1161,6 +1199,7 @@ def remove_duplicates_between( orientation=orientation, n_rows=n_rows, n_cols=n_cols, + custom_muaps=custom_muaps, exclude_belowthreshold=True, filter=filter, show=show, @@ -1290,7 +1329,7 @@ def xcc_sta(sta): return xcc_sta -class MUcv_gui: +class MUcv_gui(): """ Graphical user interface for the estimation of MUs conduction velocity. @@ -1340,12 +1379,8 @@ def __init__( sorted_rawemg, n_firings=[0, 50], muaps_timewindow=50, + figsize=[20, 15], ): - """ - Initialization of the master GUI window and of the necessary - attributes. - """ - # On start, compute the necessary information self.emgfile = emgfile self.dd = double_diff(sorted_rawemg) @@ -1356,6 +1391,7 @@ def __init__( timewindow=muaps_timewindow, ) self.sta_xcc = xcc_sta(self.st) + self.figsize = figsize # After that, set up the GUI self.root = tk.Tk() @@ -1470,7 +1506,7 @@ def __init__( self.res_df = pd.DataFrame( data=0, index=self.all_mus, - columns=["CV", "RMS", "XCC"], + columns=["CV", "RMS", "XCC", "Column", "From_Row", "To_Row"], ) self.textbox = tk.Text(self.frm, width=20) self.textbox.grid(row=2, column=8, sticky="ns") @@ -1509,6 +1545,7 @@ def gui_plot(self): sta_dict=self.st[mu], xcc_sta_dict=self.sta_xcc[mu], showimmediately=False, + figsize=self.figsize, ) # Place the figure in the GUI @@ -1581,6 +1618,10 @@ def compute_cv(self): xcc = self.sta_xcc[mu][self.col_cb.get()].iloc[:, xcc_col_list].mean().mean() self.res_df.loc[mu, "XCC"] = xcc + self.res_df.loc[mu, "Column"] = self.col_cb.get() + self.res_df.loc[mu, "From_Row"] = self.start_cb.get() + self.res_df.loc[mu, "To_Row"] = self.stop_cb.get() + self.textbox.replace( '1.0', 'end', diff --git a/openhdemg/library/openfiles.py b/openhdemg/library/openfiles.py index f5e124c..36089b3 100644 --- a/openhdemg/library/openfiles.py +++ b/openhdemg/library/openfiles.py @@ -2,8 +2,8 @@ Description ----------- This module contains all the functions that are necessary to open or save -MATLAB (.mat), JSON (.json) or custom (.csv) files. -MATLAB files are used to store data from the DEMUSE and the OTBiolab+ +MATLAB (.mat), text (.txt), JSON (.json) or custom (.csv) files. 
+MATLAB files are used to store data from the DEMUSE, OTBiolab+ and Delsys software while JSON files are used to save and load files from this library. The choice of saving files in the open standard JSON file format was @@ -20,11 +20,16 @@ order to be compatible with this library should be exported with a strict structure as described in the function emg_from_otb. In both cases, the input file is a .mat file. +emg_from_delsys : + Used to load a combination of .mat and .txt files exported by the Delsys + Neuromap and Neuromap explorer software containing the raw EMG signal and + the decomposition outcome. emg_from_customcsv : Used to load custom file formats contained in .csv files. -refsig_from_otb and refsig_from_customcsv: - Used to load files from the OTBiolab+ software or from a custom .csv file - that contain only the REF_SIGNAL. +refsig_from_otb, refsig_from_delsys and refsig_from_customcsv: + Used to load files from the OTBiolab+ (.mat) and the Delsys Neuromap + software (.mat) or from a custom .csv file that contain only the + reference signal. save_json_emgfile, emg_from_json : Used to save the working file to a .json file or to load the .json file. @@ -35,12 +40,12 @@ Notes ----- Once opened, the file is returned as a dict with keys: - "SOURCE" : source of the file (i.e., "CUSTOMCSV", "DEMUSE", "OTB") + "SOURCE" : source of the file (i.e., "CUSTOMCSV", "DEMUSE", "OTB", "DELSYS") "FILENAME" : the name of the opened file "RAW_SIGNAL" : the raw EMG signal "REF_SIGNAL" : the reference signal "ACCURACY" : accuracy score (depending on source file type) - "IPTS" : pulse train (decomposed source) + "IPTS" : pulse train (decomposed source, depending on source file type) "MUPULSES" : instants of firing "FSAMP" : sampling frequency "IED" : interelectrode distance @@ -50,7 +55,7 @@ "EXTRAS" : additional custom values The only exception is when files are loaded with just the reference signal: - "SOURCE": source of the file (i.e., "CUSTOMCSV_REFSIG", "OTB_REFSIG") + "SOURCE": source of the file (i.e., "CUSTOMCSV_REFSIG", "OTB_REFSIG", "DELSYS_REFSIG") "FILENAME" : the name of the opened file "FSAMP": sampling frequency "REF_SIGNAL": the reference signal @@ -63,18 +68,20 @@ # Some functions contained in this file are called internally and should not # be exposed to the final user. # Functions should be exposed in the __init__ file as: -# from openhdemg.openfiles import ( -# emg_from_otb, -# emg_from_demuse, -# refsig_from_otb, -# emg_from_customcsv, -# refsig_from_customcsv -# save_json_emgfile, -# emg_from_json, -# askopenfile, -# asksavefile, -# emg_from_samplefile, -# ) # TODO add emg_from_delsys here, in init, in upper description and in docs description +# from openhdemg.library.openfiles import ( +# emg_from_otb, +# emg_from_demuse, +# emg_from_delsys, +# emg_from_customcsv, +# refsig_from_otb, +# refsig_from_delsys, +# refsig_from_customcsv, +# save_json_emgfile, +# emg_from_json, +# askopenfile, +# asksavefile, +# emg_from_samplefile, +# ) from scipy.io import loadmat @@ -89,10 +96,11 @@ import gzip import warnings import os +import fnmatch # --------------------------------------------------------------------- # -# Main function to open decomposed files coming from DEMUSE. +# Function to open decomposed files coming from DEMUSE. def emg_from_demuse(filepath): """ @@ -116,6 +124,7 @@ def emg_from_demuse(filepath): - refsig_from_otb : import REF_SIGNAL in the .mat file exportable by OTBiolab+. - emg_from_customcsv : Import custom data from a .csv file. 
+ - askopenfile : Select and open files with a GUI. Notes ----- @@ -531,7 +540,7 @@ def emg_from_otb( extras=None, ): """ - Import the .mat file exportable by OTBiolab+. + Import the .mat file exportable from OTBiolab+. This function is used to import the .mat file exportable by the OTBiolab+ software as a dictionary of Python objects (mainly pandas dataframes). @@ -578,6 +587,7 @@ def emg_from_otb( OTBiolab+. - emg_from_demuse : import the .mat file used in DEMUSE. - emg_from_customcsv : Import custom data from a .csv file. + - askopenfile : Select and open files with a GUI. Raises ------ @@ -795,6 +805,7 @@ def refsig_from_otb( - emg_from_demuse : import the .mat file used in DEMUSE. - emg_from_customcsv : Import custom data from a .csv file. - refsig_from_customcsv : Import the reference signal from a custom .csv. + - askopenfile : Select and open files with a GUI. Notes --------- @@ -897,6 +908,345 @@ def refsig_from_otb( return emg_refsig +# --------------------------------------------------------------------- # +# Function to open decomposed files coming from Delsys. +def emg_from_delsys( + rawemg_filepath, + mus_directory, + emg_sensor_name="Galileo sensor", + refsig_sensor_name="Trigno Load Cell", + filename_from="mus_directory", +): + """ + Import the .mat and .txt files exportable from Delsys softwares. + + This function is used to load .mat files from the Delsys Neuromap software + (containing the RAW EMG signal and the reference signal) and .txt files + from the Delsys Neuromap Explorer software (containing the decomposition + outcome, accuracy measure and MUAPs). + + We currenlty support only recordings performed with the "Galileo sensor" + (4-pin). Support for the 5-pin sensor will be provided in the next + releases. + + Parameters + ---------- + rawemg_filepath : str or Path + The directory and the name of the file containing the raw EMG data to + load (including file extension .mat). + This can be a simple string, the use of Path is not necessary. + mus_directory : str or Path + The directory (path to the folder) containing .txt files with firing + times, MUAPs, and accuracy data. + This can be a simple string, the use of Path is not necessary. + The .txt files should all be contained in the same folder and should + follow the standard Deslys naming convention (e.g., the file + containing accuracy data will have the string "Stats" in its name). + emg_sensor_name : str, default "Galileo sensor" + The name of the EMG sensor used to collect the data. We currently + support only the "Galileo sensor" (4-pin). + refsig_sensor_name : str, default "Trigno Load Cell" + The name of the sensor used to record the reference signal. This is by + default "Trigno Load Cell". However, since this can have any name (and + can also be renamed by the user), here you should pass the effective + name (or regex pattern) by which you identify the sensor. + Ignore if no reference signal was recorded. + filename_from : str {"rawemg_file", "mus_directory"}, default "mus_directory" + The source by which the imported file will be named. This can either be + the same name of the file containing the raw EMG signal or of the + folder containing the decomposition outcome. + + Returns + -------- + emgfile : dict + A dictionary containing all the useful variables. + + See also + -------- + - refsig_from_delsys : Import the reference signal exportable from Delsys. + - askopenfile : Select and open files with a GUI. + + Notes + --------- + The returned file is called ``emgfile`` for convention. 
+ + Structure of the returned emgfile: + + emgfile = { + "SOURCE": SOURCE, + "FILENAME": FILENAME, + "RAW_SIGNAL": RAW_SIGNAL, + "REF_SIGNAL": REF_SIGNAL, + "ACCURACY": PROPRIETARY ACCURACY MEASURE, + "IPTS": IPTS, + "MUPULSES": MUPULSES, + "FSAMP": FSAMP, + "IED": IED, + "EMG_LENGTH": EMG_LENGTH, + "NUMBER_OF_MUS": NUMBER_OF_MUS, + "BINARY_MUS_FIRING": BINARY_MUS_FIRING, + "EXTRAS": EXTRAS, + } + + For Delsys files, the accuracy is the one provided after the decomposition + and it is not computed internally, being this a proprietary measure. + + We collect the raw EMG and the reference signal from the .mat file because + the .csv doesn't contain the information about sampling frequency. + Similarly, we collect the firing times, MUAPs and accuracy from the .txt + files because in the .mat file, the accuracy is contained in a table, + which is not compatible with Python. + + Examples + -------- + For an extended explanation of the imported emgfile use: + + >>> import openhdemg.library as emg + >>> emgfile = emg.emg_from_delsys( + ... rawemg_filepath="path/filename.mat", + ... mus_directory="/directory", + ... ) + >>> info = emg.info() + >>> info.data(emgfile) + """ + + # From the rawemg_filepath: + # Parse the .mat obtained from Delsys to see the available variables. + # We start from the file containing the raw EMG as this also contains the + # sampling frequency. + rawemg_file = loadmat(rawemg_filepath, simplify_cells=True) + """ print( + "\n--------------------------------\nAvailable dict keys are:\n\n{}\n".format( + rawemg_file.keys() + ) + ) """ + + # Use this to know the data source and name of the file + SOURCE = "DELSYS" + if filename_from == "rawemg_file": + FILENAME = os.path.basename(rawemg_filepath) + elif filename_from == "mus_directory": + FILENAME = os.path.basename(mus_directory) + else: + raise ValueError( + "\nfilename_from not valid, it must be one of 'rawemg_file', 'mus_directory'\n" + ) + FSAMP = float(rawemg_file["Fs"][0]) + IED = float(5) + + # Extract the data contained in the Data variable of the rawemg_file. + # This contains the raw EMG and the reference signal. + df = pd.DataFrame(rawemg_file["Data"].T, columns=rawemg_file["Channels"]) + + # Get RAW_SIGNAL + # Create a list of indexes where emg_sensor_name is found + RAW_SIGNAL = df.filter(regex=emg_sensor_name) + RAW_SIGNAL.columns = np.arange(len(RAW_SIGNAL.columns)) + # Verify to have the IPTS + if RAW_SIGNAL.empty: + raise ValueError( + "\nRaw EMG signal not found in the .mat file\n" + ) + + # Get REF_SIGNAL + REF_SIGNAL = df.filter(regex=refsig_sensor_name) + REF_SIGNAL.columns = np.arange(len(REF_SIGNAL.columns)) + if REF_SIGNAL.empty: + warnings.warn( + "\nReference signal not found, it might be necessary for some analyses\n" + ) + REF_SIGNAL = pd.DataFrame(columns=[0]) + + # From the mus_directory: + # Obtain the name (and path) of the files containing MUPULSES, ACCURACY + # and EXTRAS. Automate this because it will be too boring manually. 
+ # Get all file names in the directory + files = os.listdir(mus_directory) + # Define the keywords to match + keywords = ["Firings", "Stats", "MUAPs"] + # Initialize a dictionary to store the keyword-path mapping + keyword_paths = {} + # Iterate over the files and match keywords + for keyword in keywords: + for file in files: + if fnmatch.fnmatch(file, f"*{keyword}*"): + keyword_paths[keyword] = os.path.join(mus_directory, file) + # Check if we have found paths for all three keywords + if not all(keyword in keyword_paths for keyword in keywords): + missing_keywords = [ + keyword for keyword in keywords if keyword not in keyword_paths + ] + raise ValueError( + f"Missing paths for: {', '.join(missing_keywords)}" + ) + # Now, 'keyword_paths' contains the mapping of keywords to file paths: + # For example, keyword_paths["Firings"] contains the path to the "Firings", + # file and so on for the other keywords. + + # Get MUPULSES + MUPULSES = np.genfromtxt( + keyword_paths["Firings"], + delimiter='\t', + skip_header=True, + ).T + # Store MUPULSES as a list of np.arrays + to_append = [] + for pulse in MUPULSES: + # Drop nan and convert from seconds to samples + pulse = pulse[~np.isnan(pulse)] * FSAMP + # Store int samples + to_append.append(np.round(pulse).astype(int)) + MUPULSES = to_append + + # Get EMG_LENGTH and NUMBER_OF_MUS + EMG_LENGTH = len(RAW_SIGNAL) + NUMBER_OF_MUS = len(MUPULSES) + + # Get BINARY_MUS_FIRING + BINARY_MUS_FIRING = create_binary_firings( + emg_length=EMG_LENGTH, + number_of_mus=NUMBER_OF_MUS, + mupulses=MUPULSES, + ) + + # Get IPTS + # Empty pd.DataFrame as we don't have this from Delsys decomposition. + IPTS = pd.DataFrame(columns=np.arange(NUMBER_OF_MUS)) + + # Get ACCURACY + ACCURACY = pd.read_csv(keyword_paths["Stats"], sep='\t') + ACCURACY = ACCURACY[["Accuracy"]] + ACCURACY.columns = [0] + + # Get EXTRAS (MUAPs for Delsys) + # MUAPs from Delsys for all the MUs are all stored in the same table. + # We want them divided in different columns based on MU and channel. + EXTRAS = pd.read_csv(keyword_paths["MUAPs"], sep='\t') + df = {} + for mu in range(1, NUMBER_OF_MUS + 1): # Named in base 1 from Delsys + this_mu_all_ch = EXTRAS.loc[EXTRAS["MU_Num"] == mu] + for ch in range(1, 5): # Galileo has 4 recording pins, in base 1 + col_name = f"MU_{mu-1}_CH_{ch-1}" + arr = this_mu_all_ch.filter(regex=str(ch)).to_numpy() + df[col_name] = arr[:, 0] + EXTRAS = pd.DataFrame(df) + + emgfile = { + "SOURCE": SOURCE, + "FILENAME": FILENAME, + "RAW_SIGNAL": RAW_SIGNAL, + "REF_SIGNAL": REF_SIGNAL, + "ACCURACY": ACCURACY, + "IPTS": IPTS, + "MUPULSES": MUPULSES, + "FSAMP": FSAMP, + "IED": IED, + "EMG_LENGTH": EMG_LENGTH, + "NUMBER_OF_MUS": NUMBER_OF_MUS, + "BINARY_MUS_FIRING": BINARY_MUS_FIRING, + "EXTRAS": EXTRAS, + } + + return emgfile + + +# --------------------------------------------------------------------- # +# Function to open the reference signal from Delsys. +def refsig_from_delsys(filepath, refsig_sensor_name="Trigno Load Cell",): + """ + Import the reference signal in the .mat file exportable by Delsys Neuromap. + + This function is used to import the .mat file exportable by the Delsys + Neuromap software as a dictionary of Python objects (mainly pandas + dataframes). Compared to the function emg_from_delsys, this function only + imports the REF_SIGNAL and, therefore, it can be used for special cases + where only the REF_SIGNAL is necessary. This will allow for a faster + execution of the script and to avoid exceptions for missing data. 
+ + Parameters + ---------- + filepath : str or Path + The directory and the name of the file to load (including file + extension .mat). This can be a simple string, the use of Path is not + necessary. + refsig_sensor_name : str, default "Trigno Load Cell" + The name of the sensor used to record the reference signal. This is by + default "Trigno Load Cell". However, since this can have any name (and + can also be renamed by the user), here you should pass the effective + name (or regex pattern) by which you identify the sensor. + + Returns + ------- + emg_refsig : dict + A dictionary containing all the useful variables. + + See also + -------- + - emg_from_delsys : Import the Delsys decomposition outcome. + - askopenfile : Select and open files with a GUI. + + Notes + --------- + The returned file is called ``emg_refsig`` for convention. + + Structure of the returned emg_refsig: + + emg_refsig = { + "SOURCE": SOURCE, + "FILENAME": FILENAME, + "FSAMP": FSAMP, + "REF_SIGNAL": REF_SIGNAL, + "EXTRAS": EXTRAS, + } + + Examples + -------- + For an extended explanation of the imported emgfile use: + + >>> import openhdemg.library as emg + >>> emgfile = emg.refsig_from_delsys(filepath="path/filename.mat") + >>> info = emg.info() + >>> info.data(emgfile) + """ + # TODO add extras option + + # Parse the .mat obtained from Delsys to see the available variables. + # The .mat file should containing the reference signal and the sampling + # frequency. + refsig_file = loadmat(filepath, simplify_cells=True) + """ print( + "\n--------------------------------\nAvailable dict keys are:\n\n{}\n".format( + rawemg_file.keys() + ) + ) """ + + # Use this to know the data source and name of the file + SOURCE = "DELSYS_REFSIG" + FILENAME = os.path.basename(filepath) + FSAMP = float(refsig_file["Fs"][0]) + + # Extract the data contained in the Data variable of the .mat file. + # This contains the reference signal. + df = pd.DataFrame(refsig_file["Data"].T, columns=refsig_file["Channels"]) + # Get REF_SIGNAL + REF_SIGNAL = df.filter(regex=refsig_sensor_name) + REF_SIGNAL.columns = np.arange(len(REF_SIGNAL.columns)) + if REF_SIGNAL.empty: + raise ValueError( + "\nReference signal not found\n" + ) + + emg_refsig = { + "SOURCE": SOURCE, + "FILENAME": FILENAME, + "FSAMP": FSAMP, + "REF_SIGNAL": REF_SIGNAL, + "EXTRAS": pd.DataFrame(columns=[0]), + } + + return emg_refsig + + # --------------------------------------------------------------------- # Function to load custom CSV documents. def emg_from_customcsv( @@ -971,6 +1321,7 @@ def emg_from_customcsv( - refsig_from_otb : import reference signal in the .mat file exportable by OTBiolab+. - refsig_from_customcsv : Import the reference signal from a custom .csv. + - askopenfile : Select and open files with a GUI. Notes ----- @@ -1179,6 +1530,11 @@ def refsig_from_customcsv( emg_refsig : dict A dictionary containing all the useful variables. + See also + -------- + - emg_from_customcsv : Import the emgfile from a custom .csv file. + - askopenfile : Select and open files with a GUI. + Notes --------- The returned file is called ``emg_refsig`` for convention. @@ -1254,7 +1610,7 @@ def refsig_from_customcsv( # --------------------------------------------------------------------- # Functions to convert and save the emgfile to JSON. -def save_json_emgfile(emgfile, filepath): +def save_json_emgfile(emgfile, filepath, compresslevel=4): """ Save the emgfile or emg_refsig as a JSON file. 
@@ -1266,21 +1622,27 @@ def save_json_emgfile(emgfile, filepath): The directory and the name of the file to save (including file extension .json). This can be a simple string; The use of Path is not necessary. + compresslevel : int, default 4 + An int from 0 to 9, where 0 is no compression and nine maximum + compression. Compressed files will take less space, but will require + more computation. The relationship between compression level and time + required for the compression is not linear. For optimised performance, + we suggest values between 2 and 6, with 4 providing the best balance. """ - if emgfile["SOURCE"] in ["DEMUSE", "OTB", "CUSTOMCSV"]: + if emgfile["SOURCE"] in ["DEMUSE", "OTB", "CUSTOMCSV", "DELSYS"]: """ We need to convert all the components of emgfile to a dictionary and then to json object. pd.DataFrame cannot be converted with json.dumps. - Once all the elements are converted to json objects, we create a list + Once all the elements are converted to json objects, we create a dict of json objects and dump/save it into a single json file. emgfile = { "SOURCE": SOURCE, "FILENAME": FILENAME, "RAW_SIGNAL": RAW_SIGNAL, "REF_SIGNAL": REF_SIGNAL, - "ACCURACY": ACCURACY + "ACCURACY": ACCURACY, "IPTS": IPTS, "MUPULSES": MUPULSES, "FSAMP": FSAMP, @@ -1291,53 +1653,25 @@ def save_json_emgfile(emgfile, filepath): "EXTRAS": EXTRAS, } """ - # str or int - # Directly convert the ditionary to a json format - source = {"SOURCE": emgfile["SOURCE"]} - filename = {"FILENAME": emgfile["FILENAME"]} - fsamp = {"FSAMP": emgfile["FSAMP"]} - ied = {"IED": emgfile["IED"]} - emg_length = {"EMG_LENGTH": emgfile["EMG_LENGTH"]} - number_of_mus = {"NUMBER_OF_MUS": emgfile["NUMBER_OF_MUS"]} - source = json.dumps(source) - filename = json.dumps(filename) - fsamp = json.dumps(fsamp) - ied = json.dumps(ied) - emg_length = json.dumps(emg_length) - number_of_mus = json.dumps(number_of_mus) + + # str or float + # Directly convert str or float to a json format. + source = json.dumps(emgfile["SOURCE"]) + filename = json.dumps(emgfile["FILENAME"]) + fsamp = json.dumps(emgfile["FSAMP"]) + ied = json.dumps(emgfile["IED"]) + emg_length = json.dumps(emgfile["EMG_LENGTH"]) + number_of_mus = json.dumps(emgfile["NUMBER_OF_MUS"]) # df - # Extract the df from the dict, convert the df to a json, put the - # json in a dict, convert the dict to a json. - # We use dict converted to json to locate better the objects while - # re-importing them in python. - raw_signal = emgfile["RAW_SIGNAL"] - ref_signal = emgfile["REF_SIGNAL"] - accuracy = emgfile["ACCURACY"] - ipts = emgfile["IPTS"] - binary_mus_firing = emgfile["BINARY_MUS_FIRING"] - extras = emgfile["EXTRAS"] - - raw_signal = raw_signal.to_json() - ref_signal = ref_signal.to_json() - accuracy = accuracy.to_json() - ipts = ipts.to_json() - binary_mus_firing = binary_mus_firing.to_json() - extras = extras.to_json() - - raw_signal = {"RAW_SIGNAL": raw_signal} - ref_signal = {"REF_SIGNAL": ref_signal} - accuracy = {"ACCURACY": accuracy} - ipts = {"IPTS": ipts} - binary_mus_firing = {"BINARY_MUS_FIRING": binary_mus_firing} - extras = {"EXTRAS": extras} - - raw_signal = json.dumps(raw_signal) - ref_signal = json.dumps(ref_signal) - accuracy = json.dumps(accuracy) - ipts = json.dumps(ipts) - binary_mus_firing = json.dumps(binary_mus_firing) - extras = json.dumps(extras) + # Access and convert the df to a json object. + # orient='split' is fundamental for performance. 
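A quick, standalone check of the `orient='split'` round trip that the new save/load code relies on: 'split' stores columns, index and data separately, so axis order is preserved and only the dtype casts remain necessary on re-import.

```python
# Minimal round-trip demonstration of pandas' orient='split' JSON format.
import pandas as pd
from io import StringIO

df = pd.DataFrame({0: [0.1, 0.2], 1: [0.3, 0.4]})
as_json = df.to_json(orient='split')
back = pd.read_json(StringIO(as_json), orient='split')

print(back.equals(df))  # expected True: values and axis order survive the trip
```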
+ raw_signal = emgfile["RAW_SIGNAL"].to_json(orient='split') + ref_signal = emgfile["REF_SIGNAL"].to_json(orient='split') + accuracy = emgfile["ACCURACY"].to_json(orient='split') + ipts = emgfile["IPTS"].to_json(orient='split') + binary_mus_firing = emgfile["BINARY_MUS_FIRING"].to_json(orient='split') + extras = emgfile["EXTRAS"].to_json(orient='split') # list of ndarray. # Every array has to be converted in a list; then, the list of lists @@ -1347,70 +1681,79 @@ def save_json_emgfile(emgfile, filepath): mupulses.insert(ind, array.tolist()) mupulses = json.dumps(mupulses) - # Convert a list of json objects to json. The result of the conversion + # Convert a dict of json objects to json. The result of the conversion # will be saved as the final json file. - # Don't alter this order unless you modify also the emg_from_json - # function. - list_to_save = [ - source, - filename, - raw_signal, - ref_signal, - accuracy, - ipts, - mupulses, - fsamp, - ied, - emg_length, - number_of_mus, - binary_mus_firing, - extras, - ] - json_to_save = json.dumps(list_to_save) + emgfile = { + "SOURCE": source, + "FILENAME": filename, + "RAW_SIGNAL": raw_signal, + "REF_SIGNAL": ref_signal, + "ACCURACY": accuracy, + "IPTS": ipts, + "MUPULSES": mupulses, + "FSAMP": fsamp, + "IED": ied, + "EMG_LENGTH": emg_length, + "NUMBER_OF_MUS": number_of_mus, + "BINARY_MUS_FIRING": binary_mus_firing, + "EXTRAS": extras, + } # Compress and write the json file - # From: https://stackoverflow.com/questions/39450065/python-3-read-write-compressed-json-objects-from-to-gzip-file - with gzip.open(filepath, "w") as f: + with gzip.open( + filepath, + "wt", + encoding="utf-8", + compresslevel=compresslevel + ) as f: + json.dump(emgfile, f) + + # Adapted from: + # https://stackoverflow.com/questions/39450065/python-3-read-write-compressed-json-objects-from-to-gzip-file + """ with gzip.open(filepath, "w", compresslevel=compresslevel) as f: # Encode json json_bytes = json_to_save.encode("utf-8") # Write to a file - f.write(json_bytes) - # To improve writing time, f.write is the bottleneck but it is - # hard to improve. + f.write(json_bytes) """ - elif emgfile["SOURCE"] in ["OTB_REFSIG", "CUSTOMCSV_REFSIG"]: + elif emgfile["SOURCE"] in ["OTB_REFSIG", "CUSTOMCSV_REFSIG", "DELSYS_REFSIG"]: """ - refsig = { - "SOURCE" : SOURCE, - "FILENAME": FILENAME, - "FSAMP" : FSAMP, - "REF_SIGNAL" : REF_SIGNAL, - "EXTRAS": EXTRAS, - } + refsig = { + "SOURCE": SOURCE, + "FILENAME": FILENAME, + "FSAMP": FSAMP, + "REF_SIGNAL": REF_SIGNAL, + "EXTRAS": EXTRAS, + } """ - # str or int - source = {"SOURCE": emgfile["SOURCE"]} - filename = {"FILENAME": emgfile["FILENAME"]} - fsamp = {"FSAMP": emgfile["FSAMP"]} - source = json.dumps(source) - filename = json.dumps(filename) - fsamp = json.dumps(fsamp) + # str or float + # Directly convert str or float to a json format. + source = json.dumps(emgfile["SOURCE"]) + filename = json.dumps(emgfile["FILENAME"]) + fsamp = json.dumps(emgfile["FSAMP"]) + # df - ref_signal = emgfile["REF_SIGNAL"] - ref_signal = ref_signal.to_json() - ref_signal = {"REF_SIGNAL": ref_signal} - ref_signal = json.dumps(ref_signal) - extras = emgfile["EXTRAS"] - extras = extras.to_json() - extras = {"EXTRAS": extras} - extras = json.dumps(extras) - # Merge all the objects in one - list_to_save = [source, filename, fsamp, ref_signal, extras] - json_to_save = json.dumps(list_to_save) + # Access and convert the df to a json object. 
+ ref_signal = emgfile["REF_SIGNAL"].to_json(orient='split') + extras = emgfile["EXTRAS"].to_json(orient='split') + + # Merge all the objects in one dict + refsig = { + "SOURCE": source, + "FILENAME": filename, + "FSAMP": fsamp, + "REF_SIGNAL": ref_signal, + "EXTRAS": extras, + } + # Compress and save - with gzip.open(filepath, "w") as f: - json_bytes = json_to_save.encode("utf-8") - f.write(json_bytes) + with gzip.open( + filepath, + "wt", + encoding="utf-8", + compresslevel=compresslevel + ) as f: + json.dump(refsig, f) else: raise ValueError("\nFile source not recognised\n") @@ -1434,16 +1777,13 @@ def emg_from_json(filepath): See also -------- - - emg_from_demuse : import the .mat file used in DEMUSE. - - emg_from_otb : import the .mat file exportable by OTBiolab+. - - refsig_from_otb : import REF_SIGNAL in the .mat file exportable by - OTBiolab+. - - emg_from_customcsv : import custom data from a .csv file. + - save_json_emgfile : Save the emgfile or emg_refsig as a JSON file. + - askopenfile : Select and open files with a GUI. Notes ----- The returned file is called ``emgfile`` for convention - (or ``emg_refsig`` if SOURCE in ["OTB_REFSIG", "CUSTOMCSV_REFSIG"]). + (or ``emg_refsig`` if SOURCE in ["OTB_REFSIG", "CUSTOMCSV_REFSIG", "DELSYS_REFSIG"]). Examples -------- @@ -1456,94 +1796,69 @@ def emg_from_json(filepath): """ # Read and decompress json file - with gzip.open(filepath, "r") as fin: - json_bytes = fin.read() - # Decode json file - json_str = json_bytes.decode("utf-8") - jsonemgfile = json.loads(json_str) + with gzip.open(filepath, "rt", encoding="utf-8") as f: + jsonemgfile = json.load(f) """ print(type(jsonemgfile)) - - print(len(jsonemgfile)) - 13 + """ - # Access the dictionaries and extract the data - # jsonemgfile[0] contains the SOURCE in a dictionary - source_dict = json.loads(jsonemgfile[0]) - source = source_dict["SOURCE"] - # jsonemgfile[1] contains the FILENAME in all the sources - filename_dict = json.loads(jsonemgfile[1]) - filename = filename_dict["FILENAME"] - - if source in ["DEMUSE", "OTB", "CUSTOMCSV"]: - # jsonemgfile[2] contains the RAW_SIGNAL in a dictionary, it can be - # extracted in a new dictionary and converted into a pd.DataFrame. + + # Access the dictionaries and extract the data. + source = json.loads(jsonemgfile["SOURCE"]) + filename = json.loads(jsonemgfile["FILENAME"]) + + if source in ["DEMUSE", "OTB", "CUSTOMCSV", "DELSYS"]: + # RAW_SIGNAL + # df are stored in json as a dictionary, it can be directly extracted + # and converted into a pd.DataFrame. # index and columns are imported as str, we need to convert it to int. 
-        raw_signal_dict = json.loads(jsonemgfile[2])
-        raw_signal_dict = json.loads(raw_signal_dict["RAW_SIGNAL"])
-        raw_signal = pd.DataFrame(raw_signal_dict)
+        raw_signal = pd.read_json(jsonemgfile["RAW_SIGNAL"], orient='split')
+        # Check dtypes for safety, little computational cost
         raw_signal.columns = raw_signal.columns.astype(int)
         raw_signal.index = raw_signal.index.astype(int)
         raw_signal.sort_index(inplace=True)
-        # jsonemgfile[3] contains the REF_SIGNAL to be treated as jsonemgfile[2]
-        ref_signal_dict = json.loads(jsonemgfile[3])
-        ref_signal_dict = json.loads(ref_signal_dict["REF_SIGNAL"])
-        ref_signal = pd.DataFrame(ref_signal_dict)
+        # REF_SIGNAL
+        ref_signal = pd.read_json(jsonemgfile["REF_SIGNAL"], orient='split')
         ref_signal.columns = ref_signal.columns.astype(int)
         ref_signal.index = ref_signal.index.astype(int)
         ref_signal.sort_index(inplace=True)
-        # jsonemgfile[4] contains the ACCURACY to be treated as jsonemgfile[2]
-        accuracy_dict = json.loads(jsonemgfile[4])
-        accuracy_dict = json.loads(accuracy_dict["ACCURACY"])
-        accuracy = pd.DataFrame(accuracy_dict)
+        # ACCURACY
+        accuracy = pd.read_json(jsonemgfile["ACCURACY"], orient='split')
         accuracy.columns = accuracy.columns.astype(int)
         accuracy.index = accuracy.index.astype(int)
         accuracy.sort_index(inplace=True)
-        # jsonemgfile[5] contains the IPTS to be treated as jsonemgfile[2]
-        ipts_dict = json.loads(jsonemgfile[5])
-        ipts_dict = json.loads(ipts_dict["IPTS"])
-        ipts = pd.DataFrame(ipts_dict)
+        # IPTS
+        ipts = pd.read_json(jsonemgfile["IPTS"], orient='split')
         ipts.columns = ipts.columns.astype(int)
         ipts.index = ipts.index.astype(int)
         ipts.sort_index(inplace=True)
-        # jsonemgfile[6] contains the MUPULSES which is a list of lists but
-        # has to be converted in a list of ndarrays.
-        mupulses = json.loads(jsonemgfile[6])
+        # MUPULSES
+        # It is a list of lists that has to be converted into a list of ndarrays.
+ mupulses = json.loads(jsonemgfile["MUPULSES"]) for num, element in enumerate(mupulses): mupulses[num] = np.array(element) - # jsonemgfile[7] contains the FSAMP to be treated as jsonemgfile[0] - fsamp_dict = json.loads(jsonemgfile[7]) - fsamp = float(fsamp_dict["FSAMP"]) - # jsonemgfile[8] contains the IED to be treated as jsonemgfile[0] - ied_dict = json.loads(jsonemgfile[8]) - ied = float(ied_dict["IED"]) - # jsonemgfile[9] contains the EMG_LENGTH to be treated as - # jsonemgfile[0] - emg_length_dict = json.loads(jsonemgfile[9]) - emg_length = int(emg_length_dict["EMG_LENGTH"]) - # jsonemgfile[10] contains the NUMBER_OF_MUS to be treated as - # jsonemgfile[0] - number_of_mus_dict = json.loads(jsonemgfile[10]) - number_of_mus = int(number_of_mus_dict["NUMBER_OF_MUS"]) - # jsonemgfile[11] contains the BINARY_MUS_FIRING to be treated as - # jsonemgfile[2] - binary_mus_firing_dict = json.loads(jsonemgfile[11]) - binary_mus_firing_dict = json.loads( - binary_mus_firing_dict["BINARY_MUS_FIRING"] + # FSAMP + # Make sure to convert it to float + fsamp = float(json.loads(jsonemgfile["FSAMP"])) + # IED + ied = float(json.loads(jsonemgfile["IED"])) + # EMG_LENGTH + # Make sure to convert it to int + emg_length = int(json.loads(jsonemgfile["EMG_LENGTH"])) + # NUMBER_OF_MUS + number_of_mus = int(json.loads(jsonemgfile["NUMBER_OF_MUS"])) + # BINARY_MUS_FIRING + binary_mus_firing = pd.read_json( + jsonemgfile["BINARY_MUS_FIRING"], + orient='split', ) - binary_mus_firing = pd.DataFrame(binary_mus_firing_dict) binary_mus_firing.columns = binary_mus_firing.columns.astype(int) binary_mus_firing.index = binary_mus_firing.index.astype(int) - # jsonemgfile[12] contains the EXTRAS to be treated as - # jsonemgfile[2] - extras_dict = json.loads(jsonemgfile[12]) - extras_dict = json.loads(extras_dict["EXTRAS"]) - extras = pd.DataFrame(extras_dict) - # extras.columns = extras.columns.astype(int) - # extras.index = extras.index.astype(int) - # extras.sort_index(inplace=True) - # Don't alter extras, leave that to the user for maximum control + binary_mus_firing.sort_index(inplace=True) + # EXTRAS + # Don't alter index and columns as these could contain anything. + extras = pd.read_json(jsonemgfile["EXTRAS"], orient='split') emgfile = { "SOURCE": source, @@ -1561,21 +1876,16 @@ def emg_from_json(filepath): "EXTRAS": extras, } - elif source in ["OTB_REFSIG", "CUSTOMCSV_REFSIG"]: - # jsonemgfile[2] contains the fsamp - fsamp_dict = json.loads(jsonemgfile[2]) - fsamp = float(fsamp_dict["FSAMP"]) - # jsonemgfile[3] contains the REF_SIGNAL - ref_signal_dict = json.loads(jsonemgfile[3]) - ref_signal_dict = json.loads(ref_signal_dict["REF_SIGNAL"]) - ref_signal = pd.DataFrame(ref_signal_dict) + elif source in ["OTB_REFSIG", "CUSTOMCSV_REFSIG", "DELSYS_REFSIG"]: + # FSAMP + fsamp = float(json.loads(jsonemgfile["FSAMP"])) + # REF_SIGNAL + ref_signal = pd.read_json(jsonemgfile["REF_SIGNAL"], orient='split') ref_signal.columns = ref_signal.columns.astype(int) ref_signal.index = ref_signal.index.astype(int) ref_signal.sort_index(inplace=True) - # jsonemgfile[4] contains the EXTRAS - extras_dict = json.loads(jsonemgfile[4]) - extras_dict = json.loads(extras_dict["EXTRAS"]) - extras = pd.DataFrame(extras_dict) + # EXTRAS + extras = pd.read_json(jsonemgfile["EXTRAS"], orient='split') emgfile = { "SOURCE": source, @@ -1603,21 +1913,27 @@ def askopenfile(initialdir="/", filesource="OPENHDEMG", **kwargs): initialdir : str or Path, default "/" The directory of the file to load (excluding file name). 
This can be a simple string, the use of Path is not necessary. - filesource : str {"OPENHDEMG", "DEMUSE", "OTB", "OTB_REFSIG", "CUSTOMCSV", CUSTOMCSV_REFSIG}, default "OPENHDEMG" + filesource : str {"OPENHDEMG", "DEMUSE", "OTB", "DELSYS", "CUSTOMCSV", "OTB_REFSIG", "DELSYS_REFSIG", CUSTOMCSV_REFSIG}, default "OPENHDEMG" The source of the file. See notes for how files should be exported - from OTB. + from other softwares or platforms. ``OPENHDEMG`` File saved from openhdemg (.json). ``DEMUSE`` File saved from DEMUSE (.mat). ``OTB`` - File exported from OTB with decomposition and reference signal + File exported from OTB with decomposition and EMG signal. (.mat). + ``DELSYS`` + Files exported from Delsys Neuromap and Neuromap explorer with + decomposition and EMG signal (.mat + .txt). + ``CUSTOMCSV`` + Custom file format (.csv) with decomposition and EMG signal. ``OTB_REFSIG`` File exported from OTB with only the reference signal (.mat). - ``CUSTOMCSV`` - Custom file format (.csv). + ``DELSYS_REFSIG`` + File exported from DELSYS Neuromap with the reference signal + (.mat). ``CUSTOMCSV_REFSIG`` Custom file format (.csv) containing only the reference signal. otb_ext_factor : int, default 8 @@ -1643,6 +1959,21 @@ def askopenfile(initialdir="/", filesource="OPENHDEMG", **kwargs): If your specific version is not available in the tested versions, trying with the closer one usually works, but please double check the results. Ignore if loading other files. + delsys_emg_sensor_name : str, default "Galileo sensor" + The name of the EMG sensor used to collect the data. We currently + support only the "Galileo sensor". + Ignore if loading other files or only the reference signal. + delsys_refsig_sensor_name : str, default "Trigno Load Cell" + The name of the sensor used to record the reference signal. This is by + default "Trigno Load Cell". However, since this can have any name (and + can also be renamed by the user), here you should pass the effective + name (or regex pattern) by which you identify the sensor. + Ignore if loading other files or if no reference signal was recorded. + delsys_filename_from : str {"rawemg_file", "mus_directory"}, default "mus_directory" + The source by which the imported file will be named. This can either be + the same name of the file containing the raw EMG signal or of the + folder containing the decomposition outcome. + Ignore if loading other files or only the reference signal. custom_ref_signal : str, default 'REF_SIGNAL' Label of the column(s) containing the reference signal of the custom file. @@ -1689,7 +2020,7 @@ def askopenfile(initialdir="/", filesource="OPENHDEMG", **kwargs): Notes ----- The returned file is called ``emgfile`` for convention (or ``emg_refsig`` - if SOURCE in ["OTB_REFSIG", CUSTOMCSV_REFSIG]). + if SOURCE in ["OTB_REFSIG", "CUSTOMCSV_REFSIG", "DELSYS_REFSIG"]). The input .mat file exported from the OTBiolab+ software should have a specific content: @@ -1708,6 +2039,15 @@ def askopenfile(initialdir="/", filesource="OPENHDEMG", **kwargs): - NO OTHER ELEMENTS SHOULD BE PRESENT! unless an appropriate regex pattern is passed to 'extras='! + For Delsys files: + We collect the raw EMG and the reference signal from the .mat file + exported from the Delsys Neuromap software because the .csv doesn't + contain the information about sampling frequency. 
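Because the DELSYS-specific keyword arguments are only described in prose above, here is a hedged usage sketch (assuming the usual `import openhdemg.library as emg` alias; the file and folder are picked interactively by `askopenfile`, and the sensor names are simply the documented defaults):

```python
import openhdemg.library as emg  # assumed import alias

# Load a Delsys decomposition: a .mat file with the raw EMG (and, if present,
# the reference signal) plus the folder of .txt files with the decomposition
# outcome, both selected via the GUI dialogs.
emgfile = emg.askopenfile(
    filesource="DELSYS",
    delsys_emg_sensor_name="Galileo sensor",
    delsys_refsig_sensor_name="Trigno Load Cell",
    delsys_filename_from="mus_directory",
)

# Load only the reference signal exported from Delsys Neuromap.
emg_refsig = emg.askopenfile(filesource="DELSYS_REFSIG")
```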
+ Similarly, we collect the firing times, MUAPs and accuracy from the .txt + files exported from the Delsys Neuromap Explorer software because in the + .mat file, the accuracy is contained in a table, which is not compatible + with Python. + For custom .csv files: The variables of interest should be contained in columns. The name of the columns containing each variable can be specified by the user if different @@ -1733,7 +2073,7 @@ def askopenfile(initialdir="/", filesource="OPENHDEMG", **kwargs): "RAW_SIGNAL": RAW_SIGNAL, "REF_SIGNAL": REF_SIGNAL, "ACCURACY": accuracy score (depending on source file type), - "IPTS": IPTS, + "IPTS": IPTS (depending on source file type), "MUPULSES": MUPULSES, "FSAMP": FSAMP, "IED": IED, @@ -1774,12 +2114,22 @@ def askopenfile(initialdir="/", filesource="OPENHDEMG", **kwargs): root = Tk() root.withdraw() - if filesource in ["DEMUSE", "OTB", "OTB_REFSIG"]: + if filesource in ["DEMUSE", "OTB", "OTB_REFSIG", "DELSYS_REFSIG"]: file_toOpen = filedialog.askopenfilename( initialdir=initialdir, title=f"Select a {filesource} file to load", filetypes=[("MATLAB files", "*.mat")], ) + elif filesource == "DELSYS": + emg_file_toOpen = filedialog.askopenfilename( + initialdir=initialdir, + title="Select a DELSYS file with raw EMG to load", + filetypes=[("MATLAB files", "*.mat")], + ) + mus_file_toOpen = filedialog.askdirectory( + initialdir=initialdir, + title="Select the folder containing the DELSYS decomposition", + ) elif filesource == "OPENHDEMG": file_toOpen = filedialog.askopenfilename( initialdir=initialdir, @@ -1789,12 +2139,14 @@ def askopenfile(initialdir="/", filesource="OPENHDEMG", **kwargs): elif filesource in ["CUSTOMCSV", "CUSTOMCSV_REFSIG"]: file_toOpen = filedialog.askopenfilename( initialdir=initialdir, - title="Select a custom file to load", + title=f"Select a {filesource} file to load", filetypes=[("CSV files", "*.csv")], ) else: - raise Exception( - "\nfilesource not valid, it must be one of 'DEMUSE', 'OTB', 'OTB_REFSIG', 'OPENHDEMG', 'CUSTOMCSV', 'CUSTOMCSV_REFSIG'\n" + raise ValueError( + "\nfilesource not valid, it must be one of " + + "'DEMUSE', 'OTB', 'DELSYS', 'OTB_REFSIG', 'DELSYS_REFSIG', " + + "'OPENHDEMG', 'CUSTOMCSV', 'CUSTOMCSV_REFSIG'\n" ) # Destroy the root since it is no longer necessary @@ -1817,6 +2169,27 @@ def askopenfile(initialdir="/", filesource="OPENHDEMG", **kwargs): refsig=ref[1], version=kwargs.get("otb_version", "1.5.9.3"), ) + elif filesource == "DELSYS": + emgfile = emg_from_delsys( + rawemg_filepath=emg_file_toOpen, + mus_directory=mus_file_toOpen, + emg_sensor_name=kwargs.get( + "delsys_emg_sensor_name", "Galileo sensor" + ), + refsig_sensor_name=kwargs.get( + "delsys_refsig_sensor_name", "Trigno Load Cell" + ), + filename_from=kwargs.get( + "delsys_filename_from", "mus_directory" + ), + ) + elif filesource == "DELSYS_REFSIG": + emgfile = refsig_from_delsys( + filepath=file_toOpen, + refsig_sensor_name=kwargs.get( + "delsys_refsig_sensor_name", "Trigno Load Cell" + ), + ) elif filesource == "OPENHDEMG": emgfile = emg_from_json(filepath=file_toOpen) elif filesource == "CUSTOMCSV": @@ -1848,7 +2221,7 @@ def askopenfile(initialdir="/", filesource="OPENHDEMG", **kwargs): return emgfile -def asksavefile(emgfile): +def asksavefile(emgfile, compresslevel=4): """ Select where to save files with a GUI. @@ -1856,6 +2229,12 @@ def asksavefile(emgfile): ---------- emgfile : dict The dictionary containing the emgfile to save. 
+ compresslevel : int, default 4 + An int from 0 to 9, where 0 is no compression and nine maximum + compression. Compressed files will take less space, but will require + more computation. The relationship between compression level and time + required for the compression is not linear. For optimised performance, + we suggest values between 2 and 6, with 4 providing the best balance. See also -------- @@ -1878,7 +2257,7 @@ def asksavefile(emgfile): print("\n-----------\nSaving file\n") - save_json_emgfile(emgfile, filepath) + save_json_emgfile(emgfile, filepath, compresslevel) print("File saved\n-----------\n") diff --git a/openhdemg/library/plotemg.py b/openhdemg/library/plotemg.py index 2f880ad..1c88a8d 100644 --- a/openhdemg/library/plotemg.py +++ b/openhdemg/library/plotemg.py @@ -1030,8 +1030,7 @@ def plot_muaps( if min_ < xmin: xmin = min_ - # Obtain number of columns and rows, this changes if we use - # differential derivations + # Obtain number of columns and rows cols = len(sta_dict[0]) rows = len(sta_dict[0]["col0"].columns) fig, axs = plt.subplots( @@ -1042,16 +1041,44 @@ def plot_muaps( sharex=True, ) - for thisdict in sta_dict: - # Plot all the MUAPs, c means matrix columns, r rows - for r in range(rows): - for pos, c in enumerate(thisdict.keys()): - axs[r, pos].plot(thisdict[c].iloc[:, r]) + # Manage exception of arrays instead of matrices and check that they + # are correctly oriented. + if cols > 1 and rows > 1: + # Matrices + for thisdict in sta_dict: + # Plot all the MUAPs, c means matrix columns, r rows + for r in range(rows): + for pos, c in enumerate(thisdict.keys()): + axs[r, pos].plot(thisdict[c].iloc[:, r]) + + axs[r, pos].set_ylim(xmin, xmax) + axs[r, pos].xaxis.set_visible(False) + axs[r, pos].set(yticklabels=[]) + axs[r, pos].tick_params(left=False) + + elif cols == 1 and rows > 1: + # Arrays + for thisdict in sta_dict: + # Plot all the MUAPs, c means matrix columns, r rows + for r in range(rows): + for pos, c in enumerate(thisdict.keys()): + axs[r].plot(thisdict[c].iloc[:, r]) + + axs[r].set_ylim(xmin, xmax) + axs[r].xaxis.set_visible(False) + axs[r].set(yticklabels=[]) + axs[r].tick_params(left=False) + + elif cols > 1 and rows == 1: + raise ValueError( + "Arrays should be organised as 1 column, multiple rows. " + + "Not as 1 row, multiple columns." + ) - axs[r, pos].set_ylim(xmin, xmax) - axs[r, pos].xaxis.set_visible(False) - axs[r, pos].set(yticklabels=[]) - axs[r, pos].tick_params(left=False) + else: + raise ValueError( + "Unacceptable number of rows and columns to plot" + ) showgoodlayout(tight_layout=False, despined=True) if showimmediately: @@ -1345,7 +1372,7 @@ def plot_muaps_for_cv( >>> fig = emg.plot_muaps_for_cv( ... sta_dict=sta[0], ... xcc_sta_dict=xcc_sta[0], - ... showimmediately=False, + ... showimmediately=True, ... ) """ @@ -1378,27 +1405,64 @@ def plot_muaps_for_cv( # Plot all the MUAPs, c means matrix columns, r rows keys = list(sta_dict.keys()) - for r in range(rows): - for pos, c in enumerate(keys): - axs[r, pos].plot(sta_dict[c].iloc[:, r]) - - axs[r, pos].set_ylim(ymin, ymax) - axs[r, pos].xaxis.set_visible(False) - axs[r, pos].set(yticklabels=[]) - axs[r, pos].tick_params(left=False) - - if r != 0: - xcc = round(xcc_sta_dict[c].iloc[:, r].iloc[0], 2) - title = xcc - color = "k" if xcc >= 0.8 else "r" - axs[r, pos].set_title( - title, fontsize=8, color=color, loc="left", pad=3 - ) - - else: - axs[r, pos].set_title(c, fontsize=12, pad=20) + # Manage exception of arrays instead of matrices and check that they + # are correctly oriented. 
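The branching that follows (here and in `plot_muaps()` above) is needed because `plt.subplots()` squeezes the returned axes container. A minimal sketch of the behaviour being handled, not library code:

```python
import matplotlib.pyplot as plt

# With the default squeeze=True, plt.subplots() returns a 2-D array of Axes
# only when both nrows and ncols are > 1 ...
fig_mat, axs_mat = plt.subplots(nrows=4, ncols=2)
print(axs_mat.shape)  # (4, 2) -> index as axs_mat[r, pos]

# ... but a 1-D array when there is a single column (electrode arrays),
# so the indexing must change to axs_arr[r].
fig_arr, axs_arr = plt.subplots(nrows=4, ncols=1)
print(axs_arr.shape)  # (4,)

plt.close(fig_mat)
plt.close(fig_arr)
```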
+ if cols > 1 and rows > 1: + for r in range(rows): + for pos, c in enumerate(keys): + axs[r, pos].plot(sta_dict[c].iloc[:, r]) + + axs[r, pos].set_ylim(ymin, ymax) + axs[r, pos].xaxis.set_visible(False) + axs[r, pos].set(yticklabels=[]) + axs[r, pos].tick_params(left=False) + + if r != 0: + xcc = round(xcc_sta_dict[c].iloc[:, r].iloc[0], 2) + title = xcc + color = "k" if xcc >= 0.8 else "r" + axs[r, pos].set_title( + title, fontsize=8, color=color, loc="left", pad=3 + ) + + else: + axs[r, pos].set_title(c, fontsize=12, pad=20) + + axs[r, pos].set_ylabel(r, fontsize=6, rotation=0, labelpad=0) + + elif cols == 1 and rows > 1: + for r in range(rows): + for pos, c in enumerate(keys): + axs[r].plot(sta_dict[c].iloc[:, r]) + + axs[r].set_ylim(ymin, ymax) + axs[r].xaxis.set_visible(False) + axs[r].set(yticklabels=[]) + axs[r].tick_params(left=False) + + if r != 0: + xcc = round(xcc_sta_dict[c].iloc[:, r].iloc[0], 2) + title = xcc + color = "k" if xcc >= 0.8 else "r" + axs[r].set_title( + title, fontsize=8, color=color, loc="left", pad=3 + ) + + else: + axs[r].set_title(c, fontsize=12, pad=20) + + axs[r].set_ylabel(r, fontsize=6, rotation=0, labelpad=0) + + elif cols > 1 and rows == 1: + raise ValueError( + "Arrays should be organised as 1 column, multiple rows. " + + "Not as 1 row, multiple columns." + ) - axs[r, pos].set_ylabel(r, fontsize=6, rotation=0, labelpad=0) + else: + raise ValueError( + "Unacceptable number of rows and columns to plot" + ) showgoodlayout(tight_layout=False, despined=True) if showimmediately: diff --git a/openhdemg/library/tools.py b/openhdemg/library/tools.py index 95bd094..712b5af 100644 --- a/openhdemg/library/tools.py +++ b/openhdemg/library/tools.py @@ -94,7 +94,8 @@ def create_binary_firings(emg_length, number_of_mus, mupulses): number_of_mus : int Number of MUs in the emg file. mupulses : list of ndarrays - Each ndarray should contain the times of firing of each MU. + Each ndarray should contain the times of firing (in samples) of each + MU. Returns ------- @@ -102,27 +103,21 @@ def create_binary_firings(emg_length, number_of_mus, mupulses): A pd.DataFrame containing the binary representation of MUs firing. 
""" - # skip the step if I don't have the mupulses (is nan) - if isinstance(mupulses, list): - # create an empty pd.DataFrame containing zeros - binary_MUs_firing = pd.DataFrame(np.zeros((emg_length, number_of_mus))) - # Loop through the columns (MUs) and isolate the data of interest - for i in range(number_of_mus): - this_mu_binary_firing = binary_MUs_firing[i] - this_mu_pulses = pd.DataFrame(mupulses[i]) - - # Loop through the rows (time) and assign 1 if the MU is firing - for position in range(len(this_mu_pulses)): - firing_point = int(this_mu_pulses.iat[position, 0]) - this_mu_binary_firing.iloc[firing_point] = 1 + # Skip the step if I don't have the mupulses (is nan) + if not isinstance(mupulses, list): + raise ValueError("mupulses is not a list of ndarrays") - # Merge the work done with the original pd.DataFrame of zeros - binary_MUs_firing[i] = this_mu_binary_firing + # Initialize a pd.DataFrame with zeros + binary_MUs_firing = pd.DataFrame( + np.zeros((emg_length, number_of_mus), dtype=int) + ) - return binary_MUs_firing + for mu in range(number_of_mus): + if len(mupulses[mu]) > 0: + firing_points = mupulses[mu].astype(int) + binary_MUs_firing.iloc[firing_points, mu] = 1 - else: - raise ValueError("mupulses is not a list of ndarrays") + return binary_MUs_firing def mupulses_from_binary(binarymusfiring): @@ -137,7 +132,7 @@ def mupulses_from_binary(binarymusfiring): Returns ------- MUPULSES : list - A list of ndarrays containing the firing time of each MU. + A list of ndarrays containing the firing time (in samples) of each MU. """ # Create empty list of lists to fill with ndarrays containing the MUPULSES @@ -145,15 +140,14 @@ def mupulses_from_binary(binarymusfiring): numberofMUs = len(binarymusfiring.columns) MUPULSES = [[] for _ in range(numberofMUs)] - for i in binarymusfiring: # Loop all the MUs + for mu in binarymusfiring: # Loop all the MUs my_ndarray = [] - for idx, x in binarymusfiring[i].items(): # Loop the MU firing times + for idx, x in binarymusfiring[mu].items(): # Loop the MU firing times if x > 0: my_ndarray.append(idx) # Take the firing time and add it to the ndarray - my_ndarray = np.array(my_ndarray) - MUPULSES[i] = my_ndarray + MUPULSES[mu] = np.array(my_ndarray) return MUPULSES @@ -207,70 +201,98 @@ def resize_emgfile(emgfile, area=None, accuracy="recalculate"): ) start_, end_ = points[0], points[1] + # Double check that start_, end_ are within the real range. + if start_ < 0: + start_ = 0 + if end_ > emgfile["REF_SIGNAL"].shape[0]: + end_ = emgfile["REF_SIGNAL"].shape[0] + # Create the object to store the resized emgfile. rs_emgfile = copy.deepcopy(emgfile) - """ - ACCURACY should be re-computed on the new portion of the file if possible. - Need to be resized: ==> - emgfile = { - "SOURCE": SOURCE, - ==> "RAW_SIGNAL": RAW_SIGNAL, - ==> "REF_SIGNAL": REF_SIGNAL, - ==> "ACCURACY": ACCURACY, - ==> "IPTS": IPTS, - ==> "MUPULSES": MUPULSES, - "FSAMP": FSAMP, - "IED": IED, - ==> "EMG_LENGTH": EMG_LENGTH, - "NUMBER_OF_MUS": NUMBER_OF_MUS, - ==> "BINARY_MUS_FIRING": BINARY_MUS_FIRING, - } - """ - - # Resize the reference signal and identify the first value of the index to - # resize the mupulses. Then, reset the index. 
- rs_emgfile["REF_SIGNAL"] = rs_emgfile["REF_SIGNAL"].loc[start_:end_] - first_idx = rs_emgfile["REF_SIGNAL"].index[0] - rs_emgfile["REF_SIGNAL"] = rs_emgfile["REF_SIGNAL"].reset_index(drop=True) - rs_emgfile["RAW_SIGNAL"] = ( - rs_emgfile["RAW_SIGNAL"].loc[start_:end_].reset_index(drop=True) - ) - rs_emgfile["IPTS"] = rs_emgfile["IPTS"].loc[start_:end_].reset_index(drop=True) - rs_emgfile["EMG_LENGTH"] = int(len(rs_emgfile["IPTS"].index)) - rs_emgfile["BINARY_MUS_FIRING"] = ( - rs_emgfile["BINARY_MUS_FIRING"].loc[start_:end_].reset_index(drop=True) - ) - for mu in range(rs_emgfile["NUMBER_OF_MUS"]): - # Mask the array based on a filter and return the values in an array - rs_emgfile["MUPULSES"][mu] = ( - rs_emgfile["MUPULSES"][mu][ - (rs_emgfile["MUPULSES"][mu] >= start_) - & (rs_emgfile["MUPULSES"][mu] < end_) - ] - - first_idx + if emgfile["SOURCE"] in ["DEMUSE", "OTB", "CUSTOMCSV", "DELSYS"]: + """ + ACCURACY should be re-computed on the new portion of the file if + possible. Need to be resized: ==> + emgfile = { + "SOURCE": SOURCE, + ==> "RAW_SIGNAL": RAW_SIGNAL, + ==> "REF_SIGNAL": REF_SIGNAL, + ==> "ACCURACY": ACCURACY, + ==> "IPTS": IPTS, + ==> "MUPULSES": MUPULSES, + "FSAMP": FSAMP, + "IED": IED, + ==> "EMG_LENGTH": EMG_LENGTH, + "NUMBER_OF_MUS": NUMBER_OF_MUS, + ==> "BINARY_MUS_FIRING": BINARY_MUS_FIRING, + } + """ + + # Resize the reference signal and identify the first value of the + # index to resize the mupulses. Then, reset the index. + rs_emgfile["REF_SIGNAL"] = rs_emgfile["REF_SIGNAL"].loc[start_:end_] + first_idx = rs_emgfile["REF_SIGNAL"].index[0] + rs_emgfile["REF_SIGNAL"] = rs_emgfile["REF_SIGNAL"].reset_index(drop=True) + rs_emgfile["RAW_SIGNAL"] = ( + rs_emgfile["RAW_SIGNAL"].loc[start_:end_].reset_index(drop=True) + ) + rs_emgfile["IPTS"] = rs_emgfile["IPTS"].loc[start_:end_].reset_index(drop=True) + rs_emgfile["EMG_LENGTH"] = int(len(rs_emgfile["RAW_SIGNAL"].index)) + rs_emgfile["BINARY_MUS_FIRING"] = ( + rs_emgfile["BINARY_MUS_FIRING"].loc[start_:end_].reset_index(drop=True) ) - # Compute SIL or leave original ACCURACY - if accuracy == "recalculate": - if rs_emgfile["NUMBER_OF_MUS"] > 0: - if not rs_emgfile["IPTS"].empty: - # Calculate SIL - to_append = [] - for mu in range(rs_emgfile["NUMBER_OF_MUS"]): - res = compute_sil( - ipts=rs_emgfile["IPTS"][mu], - mupulses=rs_emgfile["MUPULSES"][mu], + for mu in range(rs_emgfile["NUMBER_OF_MUS"]): + # Mask the array based on a filter and return the values in an array + rs_emgfile["MUPULSES"][mu] = ( + rs_emgfile["MUPULSES"][mu][ + (rs_emgfile["MUPULSES"][mu] >= start_) + & (rs_emgfile["MUPULSES"][mu] < end_) + ] + - first_idx + ) + + # Compute SIL or leave original ACCURACY + if accuracy == "recalculate": + if rs_emgfile["NUMBER_OF_MUS"] > 0: + if not rs_emgfile["IPTS"].empty: + # Calculate SIL + to_append = [] + for mu in range(rs_emgfile["NUMBER_OF_MUS"]): + res = compute_sil( + ipts=rs_emgfile["IPTS"][mu], + mupulses=rs_emgfile["MUPULSES"][mu], + ) + to_append.append(res) + rs_emgfile["ACCURACY"] = pd.DataFrame(to_append) + + else: + raise ValueError( + "Impossible to calculate ACCURACY (SIL). IPTS not found." + + " If IPTS is not present or empty, set accuracy='maintain'" ) - to_append.append(res) - rs_emgfile["ACCURACY"] = pd.DataFrame(to_append) - else: - raise ValueError( - "Impossible to calculate ACCURACY (SIL). IPTS not found" - ) + elif accuracy == "maintain": + # rs_emgfile["ACCURACY"] = rs_emgfile["ACCURACY"] + pass + + else: + raise ValueError( + f"Accuracy can only be 'recalculate' or 'maintain'. 
{accuracy} was passed instead." + ) + + return rs_emgfile, start_, end_ - return rs_emgfile, start_, end_ + elif emgfile["SOURCE"] in ["OTB_REFSIG", "CUSTOMCSV_REFSIG", "DELSYS_REFSIG"]: + rs_emgfile["REF_SIGNAL"] = rs_emgfile["REF_SIGNAL"].loc[start_:end_] + first_idx = rs_emgfile["REF_SIGNAL"].index[0] + rs_emgfile["REF_SIGNAL"] = rs_emgfile["REF_SIGNAL"].reset_index(drop=True) + + return rs_emgfile, start_, end_ + + else: + raise ValueError("\nFile source not recognised\n") def compute_idr(emgfile): @@ -680,6 +702,10 @@ def filter_rawemg(emgfile, order=2, lowcut=20, highcut=500): ------- filteredrawsig : dict The dictionary containing the emgfile with a filtered RAW_SIGNAL. + Currently, the returned filteredrawsig cannot be accurately compressed + when using the functions ``save_json_emgfile()`` and ``asksavefile()``. + We therefore suggest you to save the unfiltered emgfile if you want to + obtain maximum compression. See also -------- diff --git a/pyproject.toml b/pyproject.toml index 037585e..f54900d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,2 +1,2 @@ -[tool.black] -line-length = 100 \ No newline at end of file +[build-system] +requires = ["setuptools>=68.0"] diff --git a/reqs_for_devs.txt b/reqs_for_devs.txt index c59578a..2caca46 100644 Binary files a/reqs_for_devs.txt and b/reqs_for_devs.txt differ diff --git a/setup.py b/setup.py index af9026c..352d436 100644 --- a/setup.py +++ b/setup.py @@ -4,26 +4,29 @@ import openhdemg as emg # To read the content of the README or description file from pathlib import Path +# To install required dependencies +from setuptools import setup INSTALL_REQUIRES = [ - "customtkinter==5.2.0", - "matplotlib==3.7.1", - "numpy==1.25.0", + "customtkinter==5.2.1", + "matplotlib==3.8.1", + "numpy==1.26.1", "openpyxl==3.1.2", "pandas==2.0.3", "pandastable==0.13.1", "pyperclip==1.8.2", - "scipy==1.11.1", - "seaborn==0.12.2", - "joblib==1.3.1", + "scipy==1.11.3", + "seaborn==0.13.0", + "joblib==1.3.2", ] PACKAGES = [ "openhdemg", - "openhdemg.gui", - "openhdemg.gui.gui_files", "openhdemg.library", "openhdemg.library.decomposed_test_files", + "openhdemg.compatibility", + "openhdemg.gui", + "openhdemg.gui.gui_files", ] CLASSIFIERS = [ @@ -39,11 +42,6 @@ "Operating System :: MacOS", ] -try: - from setuptools import setup -except ImportError: - from distutils.core import setup - this_directory = Path(__file__).parent long_descr = (this_directory / "README.md").read_text() @@ -51,7 +49,7 @@ setup( name="openhdemg", maintainer="Giacomo Valli", - maintainer_email="giacomo.valli@phd.unipd.it", + maintainer_email="giacomo.valli@unibs.it", description="Open-source analysis of High-Density EMG data", long_description=long_descr, long_description_content_type='text/markdown',