@article{Bohnslav2021-deepethogram,
article_type = {journal},
title = {DeepEthogram, a machine learning pipeline for supervised behavior classification from raw pixels},
author = {Bohnslav, James P and Wimalasena, Nivanthika K and Clausing, Kelsey J and Dai, Yu Y and Yarmolinsky, David A and Cruz, Tomás and Kashlan, Adam D and Chiappe, M Eugenia and Orefice, Lauren L and Woolf, Clifford J and Harvey, Christopher D},
editor = {Mathis, Mackenzie W and Behrens, Timothy E and Bohacek, Johannes},
volume = 10,
year = 2021,
month = {sep},
pub_date = {2021-09-02},
pages = {e63377},
citation = {eLife 2021;10:e63377},
doi = {10.7554/eLife.63377},
url = {https://doi.org/10.7554/eLife.63377},
abstract = {Videos of animal behavior are used to quantify researcher-defined behaviors of interest to study neural function, gene mutations, and pharmacological therapies. Behaviors of interest are often scored manually, which is time-consuming, limited to few behaviors, and variable across researchers. We created DeepEthogram: software that uses supervised machine learning to convert raw video pixels into an ethogram, the behaviors of interest present in each video frame. DeepEthogram is designed to be general-purpose and applicable across species, behaviors, and video-recording hardware. It uses convolutional neural networks to compute motion, extract features from motion and images, and classify features into behaviors. Behaviors are classified with above 90\% accuracy on single frames in videos of mice and flies, matching expert-level human performance. DeepEthogram accurately predicts rare behaviors, requires little training data, and generalizes across subjects. A graphical interface allows beginning-to-end analysis without end-user programming. DeepEthogram’s rapid, automatic, and reproducible labeling of researcher-defined behaviors of interest may accelerate and enhance supervised behavior analysis. Code is available at: https://github.com/jbohnslav/deepethogram.},
keywords = {behavior analysis, deep learning, computer vision},
journal = {eLife},
issn = {2050-084X},
publisher = {eLife Sciences Publications, Ltd},
}
@misc{Sun2020-TREBA,
doi = {10.48550/ARXIV.2011.13917},
url = {https://arxiv.org/abs/2011.13917},
author = {Sun, Jennifer J. and Kennedy, Ann and Zhan, Eric and Anderson, David J. and Yue, Yisong and Perona, Pietro},
keywords = {Computer Vision and Pattern Recognition (cs.CV), Machine Learning (cs.LG), FOS: Computer and information sciences},
title = {Task Programming: Learning Data Efficient Behavior Representations},
publisher = {arXiv},
year = {2020},
copyright = {arXiv.org perpetual, non-exclusive license}
}
@article{Luxem2022vame,
title={Identifying behavioral structure from deep variational embeddings of animal motion},
author={Luxem, Kevin and Mocellin, Petra and Fuhrmann, Falko and K{\"u}rsch, Johannes and Miller, Stephanie R and Palop, Jorge J and Remy, Stefan and Bauer, Pavol},
journal={Communications Biology},
volume={5},
number={1},
pages={1267},
doi={10.1038/s42003-022-04080-7},
year={2022},
publisher={Nature Publishing Group UK London}
}
@article{Nilsson2020-simba,
author = {Nilsson, Simon RO and Goodwin, Nastacia L. and Choong, Jia Jie and Hwang, Sophia and Wright, Hayden R and Norville, Zane C and Tong, Xiaoyu and Lin, Dayu and Bentzley, Brandon S. and Eshel, Neir and McLaughlin, Ryan J and Golden, Sam A.},
title = {Simple Behavioral Analysis (SimBA) {\textendash} an open source toolkit for computer classification of complex social behaviors in experimental animals},
elocation-id = {2020.04.19.049452},
year = {2020},
doi = {10.1101/2020.04.19.049452},
publisher = {Cold Spring Harbor Laboratory},
abstract = {Aberrant social behavior is a core feature of many neuropsychiatric disorders, yet the study of complex social behavior in freely moving rodents is relatively infrequently incorporated into preclinical models. This likely contributes to limited translational impact. A major bottleneck for the adoption of socially complex, ethology-rich, preclinical procedures is the technical limitation of consistently annotating detailed behavioral repertoires of rodent social behavior. Manual annotation is subjective, prone to observer drift, and extremely time-intensive. Commercial approaches are expensive and inferior to manual annotation. Open-source alternatives often require significant investments in specialized hardware and significant computational and programming knowledge. By combining recent computational advances in convolutional neural networks and pose-estimation with further machine learning analysis, complex rodent social behavior is primed for inclusion under the umbrella of computational neuroethology. Here we present an open-source package with graphical interface and workflow (Simple Behavioral Analysis, SimBA) that uses pose-estimation to create supervised machine learning predictive classifiers of rodent social behavior, with millisecond resolution and accuracies that can out-perform human observers. SimBA does not require specialized video acquisition hardware nor extensive computational background. Standard descriptive statistical analysis, along with graphical region of interest annotation, are provided in addition to predictive classifier generation. To increase ease-of-use for behavioural neuroscientists, we designed SimBA with accessible menus for pre-processing videos, annotating behavioural training datasets, selecting advanced machine learning options, robust classifier validation functions, and flexible visualization tools. This allows for predictive classifier transparency, explainability, and tunability prior to, and during, experimental use. We demonstrate that this approach is flexible and robust in both mice and rats by classifying social behaviors that are commonly central to the study of brain function and social motivation. Finally, we provide a library of pose-estimation weights and behavioral predictive classifiers for resident-intruder behaviors in mice and rats. All code and data, together with detailed tutorials and documentation, are available on the SimBA GitHub repository.},
URL = {https://www.biorxiv.org/content/early/2020/04/21/2020.04.19.049452},
eprint = {https://www.biorxiv.org/content/early/2020/04/21/2020.04.19.049452.full.pdf},
journal = {bioRxiv}
}
@software{Mathis2020-DLC2Kinematics,
author = {Mathis, Mackenzie and
Lauer, Jessy and
Nath, Tanmay and
Beauzile, Michael and
Hausmann, Sébastien and
Schneider, Steffen and
Mathis, Alexander},
title = {{DLC2Kinematics: a post-deeplabcut module for kinematic analysis}},
month = feb,
year = 2020,
publisher = {Zenodo},
version = {v0.0.4},
doi = {10.5281/zenodo.6669074},
url = {https://doi.org/10.5281/zenodo.6669074}
}
@article{Sturman2020-dlcanalyzer,
author = {Sturman, Oliver and Ziegler, Lukas and Schläppi, Christa and Akyol, Furkan and Privitera, Mattia and Slominski, Daria and Grimm, Christina and Thieren, Laetitia and Zerbi, Valerio and Grewe, Benjamin and Bohacek, Johannes},
year = {2020},
month = jul,
title = {Deep learning-based behavioral analysis reaches human accuracy and is capable of outperforming commercial solutions},
volume = {45},
journal = {Neuropsychopharmacology},
doi = {10.1038/s41386-020-0776-y}
}
@article{Mathis2020DeepLT,
title={Deep learning tools for the measurement of animal behavior in neuroscience},
author={Mathis, Mackenzie W. and Mathis, Alexander},
journal={Current Opinion in Neurobiology},
year={2020},
doi={10.1016/j.conb.2019.10.008},
volume={60},
pages={1--11}
}
@article{Lauer2022MultianimalPE,
title={Multi-animal pose estimation, identification and tracking with DeepLabCut},
author={Lauer, Jessy and Zhou, Mu and Ye, Shaokai and Menegas, William and Schneider, Steffen and Nath, Tanmay and Rahman, Mohammed Mostafizur and Di Santo, Valentina and Soberanes, Daniel and Feng, Guoping and Murthy, Venkatesh N. and Lauder, George and Dulac, Catherine and Mathis, Mackenzie W. and Mathis, Alexander},
journal={Nature Methods},
year={2022},
volume={19},
pages={496--504},
doi={10.1038/s41592-022-01443-0}
}
@article{Segalin2021-bento,
article_type = {journal},
title = {The Mouse Action Recognition System (MARS) software pipeline for automated analysis of social behaviors in mice},
author = {Segalin, Cristina and Williams, Jalani and Karigo, Tomomi and Hui, May and Zelikowsky, Moriel and Sun, Jennifer J and Perona, Pietro and Anderson, David J and Kennedy, Ann},
editor = {Berman, Gordon J and Wassum, Kate M and Gal, Asaf},
volume = 10,
year = 2021,
month = {nov},
pub_date = {2021-11-30},
pages = {e63720},
citation = {eLife 2021;10:e63720},
doi = {10.7554/eLife.63720},
url = {https://doi.org/10.7554/eLife.63720},
abstract = {The study of naturalistic social behavior requires quantification of animals’ interactions. This is generally done through manual annotation—a highly time-consuming and tedious process. Recent advances in computer vision enable tracking the pose (posture) of freely behaving animals. However, automatically and accurately classifying complex social behaviors remains technically challenging. We introduce the Mouse Action Recognition System (MARS), an automated pipeline for pose estimation and behavior quantification in pairs of freely interacting mice. We compare MARS’s annotations to human annotations and find that MARS’s pose estimation and behavior classification achieve human-level performance. We also release the pose and annotation datasets used to train MARS to serve as community benchmarks and resources. Finally, we introduce the Behavior Ensemble and Neural Trajectory Observatory (BENTO), a graphical user interface for analysis of multimodal neuroscience datasets. Together, MARS and BENTO provide an end-to-end pipeline for behavior data extraction and analysis in a package that is user-friendly and easily modifiable.},
keywords = {social behavior, pose estimation, machine learning, computer vision, microendoscopic imaging, software},
journal = {eLife},
issn = {2050-084X},
publisher = {eLife Sciences Publications, Ltd},
}
@article{Hsu2021bsoid,
title={B-SOiD, an open-source unsupervised algorithm for identification and fast prediction of behaviors},
author={Hsu, Alexander I and Yttri, Eric A},
journal={Nature Communications},
volume={12},
number={1},
doi={10.1038/s41467-021-25420-x},
pages={5188},
year={2021},
publisher={Nature Publishing Group UK London}
}
@article{Schweihoff2022asoid,
author = {Schweihoff, Jens F. and Hsu, Alexander I. and Schwarz, Martin K. and Yttri, Eric A.},
title = {A-SOiD, an active learning platform for expert-guided, data efficient discovery of behavior},
elocation-id = {2022.11.04.515138},
year = {2022},
doi = {10.1101/2022.11.04.515138},
publisher = {Cold Spring Harbor Laboratory},
abstract = {Behavior identification and quantification techniques have undergone rapid development. To this end, supervised or unsupervised methods are chosen based upon their intrinsic strengths and weaknesses (e.g. user bias, training cost, complexity, action discovery). Here, a new active learning platform, A-SOiD, blends these strengths and in doing so, overcomes several of their inherent drawbacks. A-SOiD iteratively learns user-defined groups with a fraction of the usual training data while attaining expansive classification through directed unsupervised classification. In socially interacting mice, A-SOiD outperformed standard methods despite requiring 85\% less training data. Additionally, it isolated two additional ethologically distinct mouse interactions via unsupervised classification. Similar performance and efficiency were observed using non-human primate 3D pose data. In both cases, the transparency in A-SOiD{\textquoteright}s cluster definitions revealed the defining features of the supervised classification through a game-theoretic approach. To facilitate use, A-SOiD comes as an intuitive, open-source interface for efficient segmentation of user-defined behaviors and discovered subactions.},
URL = {https://www.biorxiv.org/content/early/2022/11/04/2022.11.04.515138},
eprint = {https://www.biorxiv.org/content/early/2022/11/04/2022.11.04.515138.full.pdf},
journal = {bioRxiv}
}
@article{Pereira2022sleap,
title={SLEAP: A deep learning system for multi-animal pose tracking},
author={Pereira, Talmo D and
Tabris, Nathaniel and
Matsliah, Arie and
Turner, David M and
Li, Junyu and
Ravindranath, Shruthi and
Papadoyannis, Eleni S and
Normand, Edna and
Deutsch, David S and
Wang, Z. Yan and
McKenzie-Smith, Grace C and
Mitelut, Catalin C and
Castro, Marielisa Diez and
D'Uva, John and
Kislin, Mikhail and
Sanes, Dan H and
Kocher, Sarah D and
Wang, Samuel S.-H. and
Falkner, Annegret L and
Shaevitz, Joshua W and
Murthy, Mala},
journal={Nature Methods},
volume={19},
number={4},
year={2022},
publisher={Nature Publishing Group},
doi={10.1038/s41592-022-01426-1}
}