Skip to content

Commit

Permalink
add icmpc paper
Browse files Browse the repository at this point in the history
  • Loading branch information
lkfink authored Sep 2, 2023
1 parent 8634aaa commit c1820d0
Showing 1 changed file with 11 additions and 0 deletions.
11 changes: 11 additions & 0 deletions assets/beatLab_website_publications.bib
Original file line number Diff line number Diff line change
@@ -1,3 +1,14 @@
@inproceedings{fink2023mobile,
  title     = {Eye movement patterns when playing from memory: Examining consistency across repeated performances and the relationship between eyes and audio},
  author    = {Fink, Lauren K},
  booktitle = {Proceedings of the International Conference on Music Perception and Cognition, {ICMPC17-APSCOM7}, Tokyo, August 24--28, 2023},
  year      = {2023},
  doi       = {10.31234/osf.io/tecdv},
  abstract  = {While the eyes serve an obvious function in the context of music reading, their role during memorized music performance (i.e., when there is no score) is currently unknown. Given previous work showing relationships between eye movements and body movements and eye movements and memory retrieval, here I ask 1) whether eye movements become a stable aspect of the memorized music (motor) performance, and 2) whether the structure of the music is reflected in eye movement patterns. In this case study, three pianists chose two pieces to play from memory. They came into the lab on four different days, separated by at least 12hrs, and played their two pieces three times each. To answer 1), I compared dynamic time warping cost within vs. between pieces, and found significantly lower warping costs within piece, for both horizontal and vertical eye movement time series, providing a first proof-of-concept that eye movement patterns are conserved across repeated memorized music performances. To answer 2), I used the Matrix Profiles of the eye movement time series to automatically detect motifs (repeated patterns). By then analyzing participants' recorded audio at moments of detected ocular motifs, repeated sections of music could be identified (confirmed auditorily and with inspection of the extracted pitch and amplitude envelopes of the indexed audio snippets). Overall, the current methods provide a promising approach for future studies of music performance, enabling exploration of the relationship between body movements, eye movements, and musical processing.}
}

@article{saxena2023deep,
title={Deep learning models for webcam eye-tracking in online experiments},
author={Saxena, Shreshth and Fink, Lauren K and Lange, Elke B},
Expand Down

0 comments on commit c1820d0

Please sign in to comment.