@inproceedings{e27d101d575b46b2af6db24ee86159c5,
  title     = {Classifying emotions via analysis of facial physiological response without relying on expressions},
  author    = {Yitzhaky, Yitzhak and Shvimmer, Shaul and Talala, Shlomi and Simhon, Rotem and Gilad, Michael},
  editor    = {Bouma, Henri and Prabhu, Radhakrishna and Yitzhaky, Yitzhak and Kuijf, {Hugo J.}},
  booktitle = {Artificial Intelligence for Security and Defence Applications II},
  series    = {Proceedings of SPIE - The International Society for Optical Engineering},
  publisher = {SPIE},
  address   = {United States},
  year      = {2024},
  month     = jan,
  day       = {1},
  doi       = {10.1117/12.3034867},
  language  = {English},
  keywords  = {Emotion classification, camera-based PPG, deep learning, pulsatile image, remote emotion recognition},
  abstract  = {Assessing a person{\textquoteright}s emotional state may be relevant to security in situations where it may be beneficial to assess one{\textquoteright}s intentions or mental state. In various situations, facial expressions that often indicate emotions, may not be communicated or may not necessarily correspond to the actual emotional state. Here we review our study, in which we classify emotional states from very short facial video signals. The emotion classification process does not rely on stereotypical facial expressions or contact-based methods. Our raw data are short facial videos obtained at some different known emotional states. A facial video includes a component of diffused light from the facial skin, affected by the cardiovascular activity that might be influenced by the emotional state. From the short facial videos, we extracted unique spatiotemporal physiological-affected features employed as input features into a deep-learning model. Results show average emotion classification accuracy of about 47.36\%, compared to 20\% chance accuracy given 5 emotion classes, which can be considered high for the cases where expressions are hardly observed.},
  note      = {Publisher Copyright: {\textcopyright} 2024 SPIE.; Artificial Intelligence for Security and Defence Applications II 2024 ; Conference date: 17-09-2024 Through 19-09-2024},
}