@inproceedings{be19a830b750414090c3647a04a65b45,
title = "EEG-based measures of auditory saliency in a complex context",
abstract = "Auditory saliency is an important mechanism that helps humans extract relevant information from their environments. Audio notifications of mobile devices with high saliency can increase users' receptivity, yet overly high saliency can cause annoyance. Accurately measuring the auditory saliency of a notification is therefore critical for evaluating its usability. Previous studies adopted behavioral methods; however, their results may not accurately reflect auditory saliency, as humans' perception of auditory saliency often involves complicated cognitive processes. Thus, we propose an electroencephalography (EEG)-based approach that can complement behavioral studies to provide a more nuanced analysis of auditory saliency. We evaluated our method by conducting an EEG experiment that measured the mismatch negativity and P3a of sounds in realistic scenarios. We also conducted a behavioral experiment to link the EEG-based method with the behavioral method. The results suggested that EEG can provide detailed information about how humans perceive auditory saliency and can complement behavioral measures.",
keywords = "Auditory saliency, Brain-computer interface, Notification",
author = "Huang, {Xun-Yi} and Cherng, {Fu-Yin} and King, {Jung-Tai} and Lin, {Wen-Chieh}",
year = "2019",
month = oct,
day = "1",
doi = "10.1145/3338286.3340139",
language = "English",
publisher = "Association for Computing Machinery, Inc",
booktitle = "Proceedings of the 21st International Conference on Human-Computer Interaction with Mobile Devices and Services, MobileHCI 2019",
note = "21st International Conference on Human-Computer Interaction with Mobile Devices and Services, MobileHCI 2019; Conference date: 01-10-2019 through 04-10-2019",
}