{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2024,9,19]],"date-time":"2024-09-19T15:26:43Z","timestamp":1726759603140},"reference-count":205,"publisher":"Elsevier BV","issue":"2","license":[{"start":{"date-parts":[[2013,2,1]],"date-time":"2013-02-01T00:00:00Z","timestamp":1359676800000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.elsevier.com\/tdm\/userlicense\/1.0\/"}],"content-domain":{"domain":[],"crossmark-restriction":false},"short-container-title":["Image and Vision Computing"],"published-print":{"date-parts":[[2013,2]]},"DOI":"10.1016\/j.imavis.2012.06.016","type":"journal-article","created":{"date-parts":[[2012,7,20]],"date-time":"2012-07-20T07:02:15Z","timestamp":1342767735000},"page":"120-136","source":"Crossref","is-referenced-by-count":264,"title":["Categorical and dimensional affect analysis in continuous input: Current trends and future directions"],"prefix":"10.1016","volume":"31","author":[{"given":"Hatice","family":"Gunes","sequence":"first","affiliation":[]},{"given":"Bj\u00f6rn","family":"Schuller","sequence":"additional","affiliation":[]}],"member":"78","reference":[{"issue":"12","key":"10.1016\/j.imavis.2012.06.016_bb0005","doi-asserted-by":"crossref","first-page":"1788","DOI":"10.1016\/j.imavis.2009.05.007","article-title":"The painful face \u2014 pain expression recognition using active appearance models","volume":"27","author":"Ashraf","year":"2009","journal-title":"Image Vision Comput."},{"issue":"3","key":"10.1016\/j.imavis.2012.06.016_bb0010","doi-asserted-by":"crossref","first-page":"664","DOI":"10.1109\/TSMCB.2010.2082525","article-title":"Automatically detecting pain in video through facial action units","volume":"41","author":"Lucey","year":"2011","journal-title":"IEEE Trans. Syst. Man Cybern. Part B Cybern."},{"key":"10.1016\/j.imavis.2012.06.016_bb0015","series-title":"Proc. Int. Conf. 
on Affective Computing and Intelligent Interaction","first-page":"690","article-title":"The hinterland of emotions: facing the open-microphone challenge","author":"Steidl","year":"2009"},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0020","doi-asserted-by":"crossref","first-page":"168","DOI":"10.1504\/IJAACS.2008.019799","article-title":"Human-centred intelligent human\u2013computer interaction (hci2): how far are we from attaining it?","author":"Pantic","year":"2008","journal-title":"Int. J. Auton. Adapt. Commun. Syst."},{"key":"10.1016\/j.imavis.2012.06.016_bb0025","first-page":"1","article-title":"Building autonomous sensitive artificial listeners","author":"Schr\u00f6der","year":"2012","journal-title":"IEEE Trans. Affective Comput."},{"key":"10.1016\/j.imavis.2012.06.016_bb0030","series-title":"Proc. of IEEE Int. Conf. on Automatic Face and Gesture Recognition","first-page":"646","article-title":"Have an emotional workout with sensitive artificial listeners!","author":"Schr\u00f6der","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0035","first-page":"263","article-title":"A demonstration of audiovisual sensitive artificial listeners","volume":"vol. 1","author":"Schr\u00f6der","year":"2009"},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0040","doi-asserted-by":"crossref","first-page":"68","DOI":"10.4018\/jse.2010101605","article-title":"Automatic, dimensional and continuous emotion recognition","volume":"1","author":"Gunes","year":"2010","journal-title":"Int. J. Synth. Emotions"},{"key":"10.1016\/j.imavis.2012.06.016_bb0045","series-title":"Proc. of Measuring Behavior","first-page":"122","article-title":"Automatic measurement of affect in dimensional and continuous spaces: why, what, and how?","author":"Gunes","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0050","series-title":"Proc. of IEEE Int. Conf. 
on Automatic Face and Gesture Recognition","first-page":"827","article-title":"Emotion representation, analysis and synthesis in continuous space: a survey","author":"Gunes","year":"2011"},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0055","doi-asserted-by":"crossref","first-page":"484","DOI":"10.1016\/j.concog.2008.03.019","article-title":"Conscious emotional experience emerges as a function of multilevel, appraisal-driven response synchronization","volume":"17","author":"Grandjean","year":"2008","journal-title":"Conscious. Cogn."},{"key":"10.1016\/j.imavis.2012.06.016_bb0060","series-title":"Unmasking the Face: A Guide to Recognizing Emotions from Facial Clues","author":"Ekman","year":"1975"},{"key":"10.1016\/j.imavis.2012.06.016_bb0065","series-title":"Mind Reading: The Interactive Guide to Emotion","author":"Baron-Cohen","year":"2003"},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0070","doi-asserted-by":"crossref","first-page":"66","DOI":"10.1109\/T-AFFC.2011.8","article-title":"Real-time recognition of affective states from nonverbal features of speech and its application for public speaking skill analysis","volume":"2","author":"Pfister","year":"2011","journal-title":"IEEE Trans. Affective Comput."},{"key":"10.1016\/j.imavis.2012.06.016_bb0075","doi-asserted-by":"crossref","first-page":"1161","DOI":"10.1037\/h0077714","article-title":"A circumplex model of affect","volume":"39","author":"Russell","year":"1980","journal-title":"J. Pers. Soc. Psychol."},{"key":"10.1016\/j.imavis.2012.06.016_bb0080","doi-asserted-by":"crossref","first-page":"261","DOI":"10.1007\/BF02686918","article-title":"Pleasure\u2013arousal\u2013dominance: a general framework for describing and measuring individual differences in temperament","volume":"14","author":"Mehrabian","year":"1996","journal-title":"Curr. 
Psychol."},{"key":"10.1016\/j.imavis.2012.06.016_bb0085","series-title":"Appraisal Processes in Emotion: Theory, Methods, Research","author":"Scherer","year":"2001"},{"key":"10.1016\/j.imavis.2012.06.016_bb0090","series-title":"Proc. of 8th Int. Conf. on Spoken Language Processing","first-page":"1329","article-title":"Detecting user engagement in everyday conversations","author":"Yu","year":"2004"},{"key":"10.1016\/j.imavis.2012.06.016_bb0095","series-title":"Proc. of IEEE Int. Conf. on Multimedia and Expo","first-page":"208","article-title":"Music Emotion Classification: A Regression Approach","author":"Yang","year":"2007"},{"issue":"3","key":"10.1016\/j.imavis.2012.06.016_bb0100","doi-asserted-by":"crossref","first-page":"570","DOI":"10.1109\/TASL.2010.2052246","article-title":"Emotional audio-visual speech synthesis based on pad","volume":"19","author":"Jia","year":"2010","journal-title":"IEEE Trans. Audio Speech Lang. Process."},{"key":"10.1016\/j.imavis.2012.06.016_bb0105","series-title":"Proc. of IEEE Int. Conf. on Acoustics Speech and Signal Processing","first-page":"5138","article-title":"Features selection for primitives estimation on emotional speech","author":"Espinosa","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0110","doi-asserted-by":"crossref","first-page":"1050","DOI":"10.1111\/j.1467-9280.2007.02024.x","article-title":"The world of emotion is not two-dimensional","volume":"18","author":"Fontaine","year":"2007","journal-title":"Psychol. Sci."},{"key":"10.1016\/j.imavis.2012.06.016_bb0115","series-title":"Proc. of IEEE Int. Conf. Multimedia and Expo","first-page":"1079","article-title":"The semaine corpus of emotionally coloured character interactions","author":"McKeown","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0120","series-title":"Proc. 
of Cognitive Technology","article-title":"Affective agents: effects of agent affect on arousal, attention, liking and learning","author":"Dietz","year":"1999"},{"key":"10.1016\/j.imavis.2012.06.016_bb0125","series-title":"Proc. of IEEE Int. Conf. on Automatic Face and Gesture Recognition","first-page":"792","article-title":"On dimensions in emotion psychology","author":"Kaernbach","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0130","series-title":"The Emotions","author":"Frijda","year":"1986"},{"issue":"4","key":"10.1016\/j.imavis.2012.06.016_bb0135","doi-asserted-by":"crossref","first-page":"317","DOI":"10.1016\/j.neunet.2005.03.001","article-title":"A systems approach to appraisal mechanisms in emotion","volume":"18","author":"Sander","year":"2005","journal-title":"Neural Networks"},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0140","doi-asserted-by":"crossref","first-page":"18","DOI":"10.4018\/jse.2012010102","article-title":"Advocating a componential appraisal model to guide emotion recognition","volume":"3","author":"Mortillaro","year":"2012","journal-title":"J. Synth. Emotions"},{"issue":"12","key":"10.1016\/j.imavis.2012.06.016_bb0145","doi-asserted-by":"crossref","first-page":"1743","DOI":"10.1016\/j.imavis.2008.11.007","article-title":"Social signal processing: survey of an emerging domain","volume":"27","author":"Vinciarelli","year":"2009","journal-title":"Image Vision Comput."},{"key":"10.1016\/j.imavis.2012.06.016_bb0150","series-title":"Proc. of IEEE Int. Conf. 
on Automatic Face and Gesture Recognition","first-page":"841","article-title":"Benchmarking classification models for emotion recognition in natural speech: a multi-corporal study","author":"Tarasov","year":"2011"},{"issue":"9\u201310","key":"10.1016\/j.imavis.2012.06.016_bb0155","doi-asserted-by":"crossref","first-page":"1062","DOI":"10.1016\/j.specom.2011.01.011","article-title":"Recognising realistic emotions and affect in speech: state of the art and lessons learnt from the first challenge","volume":"53","author":"Schuller","year":"2011","journal-title":"J. Speech Commun."},{"issue":"8","key":"10.1016\/j.imavis.2012.06.016_bb0160","doi-asserted-by":"crossref","first-page":"607","DOI":"10.1016\/j.ijhcs.2009.03.005","article-title":"Short-term emotion assessment in a recall paradigm","volume":"67","author":"Chanel","year":"2009","journal-title":"Int. J. Hum. Comput. Stud."},{"issue":"6","key":"10.1016\/j.imavis.2012.06.016_bb0165","doi-asserted-by":"crossref","first-page":"623","DOI":"10.1111\/j.1469-8986.1997.tb02140.x","article-title":"Heart rate variability:origins, methods, and interpretive caveats","volume":"34","author":"Berntson","year":"1997","journal-title":"Psychophysiology"},{"key":"10.1016\/j.imavis.2012.06.016_bb0170","series-title":"Proc. of the IEEE Int. Conf. of Eng. Med. Biol. Soc.","first-page":"39","article-title":"Ultra short term analysis of heart rate variability for monitoring mental stress in mobile settings","author":"Salahuddin","year":"2007"},{"key":"10.1016\/j.imavis.2012.06.016_bb0175","series-title":"Proc. of the 5th Int. 
Workshop on Biosignal Interpretation","first-page":"219","article-title":"Emotion recognition from electromyography and skin conductance","author":"Nakasone","year":"2005"},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0180","doi-asserted-by":"crossref","first-page":"92","DOI":"10.1109\/T-AFFC.2011.9","article-title":"Continuous prediction of spontaneous affect from multiple cues and modalities in valence\u2013arousal space","volume":"2","author":"Nicolaou","year":"2011","journal-title":"IEEE Trans. Affective Comput."},{"key":"10.1016\/j.imavis.2012.06.016_bb0185","series-title":"Proc. of ACM Int. Conf. on Multimodal Interfaces","first-page":"23","article-title":"Static vs. dynamic modeling of human nonverbal behavior from multiple cues and modalities","author":"Petridis","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0190","series-title":"Proc. of INTERSPEECH","first-page":"801","article-title":"Incremental acoustic valence recognition: an inter-corpus perspective on features, matching, and performance in a gating paradigm","author":"Schuller","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0195","series-title":"Ch. From the Lab to the Real World: Affect Recognition using Multiple Cues and Modalities","first-page":"185","article-title":"Affective computing: focus on emotion expression, synthesis, and recognition","author":"Gunes","year":"2008"},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0200","doi-asserted-by":"crossref","first-page":"39","DOI":"10.1109\/TPAMI.2008.52","article-title":"A survey of affect recognition methods: audio, visual, and spontaneous expressions","volume":"31","author":"Zeng","year":"2009","journal-title":"IEEE Trans. Pattern Anal. Mach. Intell."},{"key":"10.1016\/j.imavis.2012.06.016_bb0205","series-title":"Proc. of IEEE Int. Conf. 
on Systems, Man and Cybernetics","first-page":"2662","article-title":"Valence\u2013arousal evaluation using physiological signals in an emotion recall paradigm","author":"Chanel","year":"2007"},{"key":"10.1016\/j.imavis.2012.06.016_bb0210","first-page":"36","article-title":"Emotion recognition using bio-sensors: first steps towards an automatic system","volume":"3068","author":"Haag","year":"2004"},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0215","doi-asserted-by":"crossref","first-page":"210","DOI":"10.1109\/TNSRE.2006.875544","article-title":"Brain\u2013computer interaction research at the computer vision and multimedia laboratory, University of Geneva","volume":"14","author":"Pun","year":"2006","journal-title":"IEEE Trans. Neural Syst. Rehabil. Eng."},{"key":"10.1016\/j.imavis.2012.06.016_bb0220","doi-asserted-by":"crossref","first-page":"1235","DOI":"10.1126\/science.7146906","article-title":"Asymmetrical brain activity discriminates between positive and negative affective stimuli in human infants","volume":"218","author":"Davidson","year":"1982","journal-title":"Science"},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0225","doi-asserted-by":"crossref","first-page":"197","DOI":"10.1007\/s11263-006-6106-y","article-title":"Imaging facial physiology for the detection of deceit","volume":"71","author":"Tsiamyrtzis","year":"2007","journal-title":"Int. J. Comput. Vision"},{"key":"10.1016\/j.imavis.2012.06.016_bb0230","series-title":"Proc. of Int. Conf. on Body Area Networks","first-page":"1","article-title":"Bodyant: miniature wireless sensors for naturalistic monitoring of daily activity","author":"Kusserow","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0240","series-title":"Proc. 
of ISCA Workshop on Speech and Emotion","first-page":"19","article-title":"Feeltrace: an instrument for recording perceived emotion in real time","author":"Cowie","year":"2000"},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0245","doi-asserted-by":"crossref","first-page":"33","DOI":"10.1109\/79.911197","article-title":"Emotion recognition in human\u2013computer interaction","volume":"18","author":"Cowie","year":"2001","journal-title":"IEEE Signal Process. Mag."},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0250","doi-asserted-by":"crossref","first-page":"18","DOI":"10.1109\/T-AFFC.2010.1","article-title":"Affect detection: an interdisciplinary review of models, methods, and their applications","volume":"1","author":"Calvo","year":"2010","journal-title":"IEEE Trans. Affective Comput."},{"key":"10.1016\/j.imavis.2012.06.016_bb0255","doi-asserted-by":"crossref","first-page":"481","DOI":"10.1044\/jshr.1103.481","article-title":"Relations between prosodic variables and emotions in normal american english utterances","volume":"11","author":"Huttar","year":"1968","journal-title":"J. Speech Hear. Res."},{"key":"10.1016\/j.imavis.2012.06.016_bb0260","series-title":"Ch. Auditory Correlates of Vocal Expression of Emotional Feeling","first-page":"101","article-title":"The communication of emotional meaning","author":"Davitz","year":"1964"},{"key":"10.1016\/j.imavis.2012.06.016_bb0265","doi-asserted-by":"crossref","first-page":"331","DOI":"10.1007\/BF00992539","article-title":"Cue utilization in emotion attribution from auditory stimuli","volume":"1","author":"Scherer","year":"1977","journal-title":"Motiv. Emot."},{"key":"10.1016\/j.imavis.2012.06.016_bb0270","series-title":"Proc. of ISCA Workshop Speech Emotion","first-page":"86","article-title":"The prosody of excitement in horse race commentaries","author":"Trouvain","year":"2000"},{"key":"10.1016\/j.imavis.2012.06.016_bb0275","unstructured":"M. 
Schr\u00f6der, Speech and emotion research: An overview of research frameworks and a dimensional approach to emotional speech synthesis, Ph.D. dissertation, Univ. of Saarland, Germany, 2003."},{"key":"10.1016\/j.imavis.2012.06.016_bb0280","series-title":"Speech Prosody","first-page":"1","article-title":"Perception of non-verbal emotional listener feedback","author":"Schr\u00f6der","year":"2006"},{"issue":"5","key":"10.1016\/j.imavis.2012.06.016_bb0285","doi-asserted-by":"crossref","first-page":"867","DOI":"10.1109\/JSTSP.2010.2057200","article-title":"Combining long short-term memory and dynamic bayesian networks for incremental emotion-sensitive artificial listening","volume":"4","author":"W\u00f6llmer","year":"2010","journal-title":"IEEE J. Sel. Top. Sign. Proces."},{"issue":"12","key":"10.1016\/j.imavis.2012.06.016_bb0290","doi-asserted-by":"crossref","first-page":"1760","DOI":"10.1016\/j.imavis.2009.02.013","article-title":"Being bored? Recognising natural interest by extensive audiovisual integration for real-life application","volume":"27","author":"Schuller","year":"2009","journal-title":"Image Vision Comput. J."},{"key":"10.1016\/j.imavis.2012.06.016_bb0295","series-title":"The Phonetics of Laughing","article-title":"On laughter and speech laugh, based on observations of child-robot interaction","author":"Batliner","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0300","series-title":"Proc. of the ACL 2010 Conference","first-page":"331","article-title":"Last but definitely not least: on the role of the last sentence in automatic polarity-classification","author":"Becker","year":"2010"},{"issue":"5","key":"10.1016\/j.imavis.2012.06.016_bb0305","doi-asserted-by":"crossref","first-page":"1553","DOI":"10.1016\/j.chb.2010.10.028","article-title":"Estimation of word emotions based on part of speech and positional information","volume":"27","author":"Matsumoto","year":"2011","journal-title":"Comput. Hum. 
Behav."},{"key":"10.1016\/j.imavis.2012.06.016_bb0310","series-title":"Proc. Int. Conf. on Weblogs and Social Media","first-page":"1","article-title":"Sentiment analysis: adjectives and adverbs are better than adjectives alone","author":"Benamara","year":"2007"},{"issue":"4","key":"10.1016\/j.imavis.2012.06.016_bb0315","doi-asserted-by":"crossref","first-page":"43","DOI":"10.1109\/MIS.2008.57","article-title":"AVA: adjective\u2013verb\u2013adverb combinations for sentiment analysis","volume":"23","author":"Subrahmanian","year":"2008","journal-title":"Intell. Syst."},{"key":"10.1016\/j.imavis.2012.06.016_bb0320","series-title":"Face Recognition","first-page":"377","article-title":"Machine analysis of facial expressions","author":"Pantic","year":"2007"},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0325","doi-asserted-by":"crossref","first-page":"97","DOI":"10.1007\/s10919-012-0130-0","article-title":"The body action and posture coding system (bap): Development and reliability","volume":"36","author":"Dael","year":"2012","journal-title":"J. Nonverbal Behav."},{"key":"10.1016\/j.imavis.2012.06.016_bb0330","series-title":"Ch. Bodily Expression for Automatic Affect Recognition","first-page":"1","article-title":"Advances in emotion recognition","author":"Gunes","year":"2012"},{"key":"10.1016\/j.imavis.2012.06.016_bb0335","series-title":"The Expression of the Emotions in Man and Animals","author":"Darwin","year":"1872"},{"key":"10.1016\/j.imavis.2012.06.016_bb0340","doi-asserted-by":"crossref","first-page":"879","DOI":"10.1002\/(SICI)1099-0992(1998110)28:6<879::AID-EJSP901>3.0.CO;2-W","article-title":"Bodily expression of emotion","volume":"28","author":"Wallbott","year":"1998","journal-title":"Eur. J. Soc. 
Psychol."},{"key":"10.1016\/j.imavis.2012.06.016_bb0345","doi-asserted-by":"crossref","first-page":"51","DOI":"10.1016\/S0010-0277(01)00147-0","article-title":"Perceiving affect from arm movement","volume":"82","author":"Pollick","year":"2001","journal-title":"Cognition"},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0350","doi-asserted-by":"crossref","first-page":"117","DOI":"10.1023\/B:JONB.0000023655.25550.be","article-title":"Attributing emotion to static body postures: recognition accuracy, confusions, and viewpoint dependence","volume":"28","author":"Coulson","year":"2004","journal-title":"Nonverbal Behav."},{"key":"10.1016\/j.imavis.2012.06.016_bb0355","series-title":"Proc. of the Int. Conf. on Affective Computing and Intelligent Interaction","first-page":"48","article-title":"Recognizing affective dimensions from body posture","author":"Kleinsmith","year":"2007"},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0360","doi-asserted-by":"crossref","first-page":"64","DOI":"10.1109\/TSMCB.2008.927269","article-title":"Automatic temporal segment detection and affect recognition from face and body display","volume":"39","author":"Gunes","year":"2009","journal-title":"IEEE Trans. Syst. Man Cybern. B Cybern."},{"key":"10.1016\/j.imavis.2012.06.016_bb0365","series-title":"Proc. of the IEEE Int. Conf. on Automatic Face and Gesture Recognition","first-page":"129","article-title":"Multimodal coordination of facial action, head rotation, and eye motion during spontaneous smiles","author":"Cohn","year":"2004"},{"key":"10.1016\/j.imavis.2012.06.016_bb0370","series-title":"Proc. of LREC Int. Workshop on Emotion","first-page":"42","article-title":"The emotional and communicative significance of head nods and shakes in a naturalistic database","author":"Cowie","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0375","series-title":"Proc. of Int. Conf. 
on Intelligent Virtual Agents","first-page":"371","article-title":"Dimensional emotion prediction from spontaneous head gestures for interaction with sensitive artificial listeners","author":"Gunes","year":"2010"},{"issue":"PP","key":"10.1016\/j.imavis.2012.06.016_bb0380","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1109\/T-AFFC.2012.16","article-title":"Affective body expression perception and recognition: a survey","volume":"99","author":"Kleinsmith","year":"2012","journal-title":"IEEE Trans. Affective Comput."},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0385","doi-asserted-by":"crossref","first-page":"215","DOI":"10.1177\/0146167299025002007","article-title":"Consequences of automatic evaluation: immediate behavioral predispositions to approach or avoid the stimulus","volume":"25","author":"Chen","year":"1999","journal-title":"Pers. Soc. Psychol. Bull."},{"key":"10.1016\/j.imavis.2012.06.016_bb0390","doi-asserted-by":"crossref","first-page":"421","DOI":"10.1037\/0022-3514.71.3.421","article-title":"Influence of overt head movements on memory for valenced words: a case of conceptual\u2013motor compatibility","volume":"71","author":"Forster","year":"1996","journal-title":"J. Pers. Soc. Psychol."},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0395","doi-asserted-by":"crossref","first-page":"241","DOI":"10.1080\/02699930302294","article-title":"Pleasure as a sign you can attend to something else: placing positive feelings within a general model of affect","volume":"17","author":"Carver","year":"2003","journal-title":"Cogn. Emotion"},{"key":"10.1016\/j.imavis.2012.06.016_bb0400","doi-asserted-by":"crossref","first-page":"51","DOI":"10.1016\/j.biopsycho.2003.07.005","article-title":"Emotion and motivated behavior: postural adjustments to affective picture viewing","volume":"66","author":"Hillman","year":"2004","journal-title":"Biol. 
Psychol."},{"key":"10.1016\/j.imavis.2012.06.016_bb0405","doi-asserted-by":"crossref","first-page":"79","DOI":"10.1007\/s10919-007-0045-3","article-title":"Recognition of emotions in gait patterns by means of artificial neural nets","volume":"32","author":"Janssen","year":"2008","journal-title":"J. Nonverbal Behav."},{"key":"10.1016\/j.imavis.2012.06.016_bb0410","doi-asserted-by":"crossref","first-page":"1050","DOI":"10.1109\/TSMCB.2010.2044040","article-title":"Recognition of affect based on gait patterns","volume":"40","author":"Karg","year":"2010","journal-title":"IEEE Trans. Syst. Man Cybern. B Cybern."},{"key":"10.1016\/j.imavis.2012.06.016_bb0415","series-title":"Proc. of IEEE Int. Conf. on Automatic Face and Gesture Recognition","first-page":"809","article-title":"Expression of emotional states during locomotion based on canonical parameters","author":"Inderbitzin","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0420","series-title":"Proc. of INTERSPEECH","first-page":"2362","article-title":"Context-sensitive multimodal emotion recognition from speech and facial expression using bidirectional LSTM modeling","author":"W\u00f6llmer","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0425","series-title":"Proc. of the Conf. on User Modeling","first-page":"50","article-title":"Recognizing emotion from postures: cross\u2013cultural differences in user modeling","author":"Kleinsmith","year":"2005"},{"key":"10.1016\/j.imavis.2012.06.016_bb0430","series-title":"Proc. of IEEE Int. Conf. 
on Acoustics, Speech and Signal Processing","first-page":"2288","article-title":"Tracking changes in continuous emotion states using body language and prosodic cues","author":"Metallinou","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0435","doi-asserted-by":"crossref","first-page":"16518","DOI":"10.1073\/pnas.0507650102","article-title":"Rapid perceptual integration of facial expression and emotional body language","volume":"102","author":"Meeren","year":"2005","journal-title":"Proc. Natl. Acad. Sci. U. S. A."},{"issue":"3","key":"10.1016\/j.imavis.2012.06.016_bb0440","doi-asserted-by":"crossref","first-page":"487","DOI":"10.1037\/1528-3542.7.3.487","article-title":"Body expressions influence recognition of emotions in the face and voice","volume":"7","author":"Van den Stock","year":"2007","journal-title":"Emotion"},{"key":"10.1016\/j.imavis.2012.06.016_bb0445","series-title":"Ch. Field Methods of the Project in Linguistic Change and Variation","first-page":"28","article-title":"Language in use","author":"Labov","year":"1984"},{"key":"10.1016\/j.imavis.2012.06.016_bb0450","series-title":"Proc. of Int. Conf. on Language Resources and Evaluation","first-page":"2065","article-title":"Annotating multi-media\/multi-modal resources with ELAN","author":"Brugman","year":"2004"},{"key":"10.1016\/j.imavis.2012.06.016_bb0455","series-title":"Proc. of the 7th European Conference on Speech Communication and Technology","first-page":"1367","article-title":"Anvil \u2014 a generic annotation tool for multimodal dialogue","author":"Kipp","year":"2001"},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0460","doi-asserted-by":"crossref","first-page":"1","DOI":"10.4018\/jse.2012010101","article-title":"Tracing emotion: an overview","volume":"3","author":"Cowie","year":"2012","journal-title":"J. Synth. 
Emotions"},{"issue":"5","key":"10.1016\/j.imavis.2012.06.016_bb0465","doi-asserted-by":"crossref","first-page":"991","DOI":"10.1109\/TRO.2007.904899","article-title":"Affective state estimation for human\u2013robot interaction","volume":"23","author":"Kulic","year":"2007","journal-title":"IEEE Trans. Robot."},{"key":"10.1016\/j.imavis.2012.06.016_bb0470","series-title":"Proceedings 37th IEEE International Conference on Acoustics, Speech, and Signal Processing","first-page":"341","article-title":"Automatic recognition of emotion evoked by general sound events","author":"Schuller","year":"2012"},{"key":"10.1016\/j.imavis.2012.06.016_bb0475","series-title":"The Cognitive Psychophysiology of Emotion: Anxiety and the Anxiety Disorders","author":"Lang","year":"1985"},{"key":"10.1016\/j.imavis.2012.06.016_bb0480","series-title":"Proc. of Int. Conf. on Affective Computing and Intelligent Interaction Workshops","first-page":"1","article-title":"The emotion slider: a self-report device for the continuous measurement of emotion","author":"Laurans","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0485","series-title":"Proc. of Int. Conf. on Affective Computing and Intelligent Interaction Workshops","first-page":"1","article-title":"Perception of synthetic emotion expressions in speech: categorical and dimensional annotations","author":"Kessens","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0490","series-title":"Proc. ASRU","first-page":"381","article-title":"Evaluation of natural emotions using self assessment manikins","author":"Grimm","year":"2005"},{"key":"10.1016\/j.imavis.2012.06.016_bb0495","series-title":"Proc. of IEEE Int. Conf. 
on Multimedia and Expo","first-page":"865","article-title":"The Vera am Mittag German audio-visual emotional speech database","author":"Grimm","year":"2008"},{"key":"10.1016\/j.imavis.2012.06.016_bb0500","first-page":"128","article-title":"Advances in behavioral science using automated facial image analysis and synthesis","author":"Cohn","year":"2010","journal-title":"IEEE Signal Process. Mag."},{"key":"10.1016\/j.imavis.2012.06.016_bb0505","series-title":"Proc. of Int. Conf. on Affective Computing and Intelligent Interaction","first-page":"107","article-title":"Recording affect in the field: towards methods and metrics for improving ground truth labels","author":"Healey","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0510","series-title":"Proc. of IEEE Int. Conf. on Automatic Face and Gesture Recognition","first-page":"57","article-title":"Painful data: the UNBC-McMaster shoulder pain expression archive database","author":"Lucey","year":"2011"},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0515","doi-asserted-by":"crossref","first-page":"42","DOI":"10.1109\/T-AFFC.2011.25","article-title":"A multi-modal database for affect recognition and implicit tagging","volume":"3","author":"Soleymani","year":"2012","journal-title":"IEEE Trans. Affective Comput."},{"key":"10.1016\/j.imavis.2012.06.016_bb0520","series-title":"Proc. of the Affective Brain\u2013Computer Interfaces Workshop","first-page":"27","article-title":"EEG analysis for implicit tagging of video data","author":"Koelstra","year":"2009"},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0525","doi-asserted-by":"crossref","first-page":"18","DOI":"10.1109\/T-AFFC.2011.15","article-title":"DEAP: a database for emotion analysis using physiological signals","volume":"3","author":"Koelstra","year":"2012","journal-title":"IEEE Trans. 
Affective Comput."},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0530","doi-asserted-by":"crossref","first-page":"5","DOI":"10.1109\/T-AFFC.2011.20","article-title":"The SEMAINE database: annotated multimodal records of emotionally coloured conversations between a person and a limited agent","volume":"3","author":"McKeown","year":"2012","journal-title":"IEEE Trans. Affective Comput."},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0535","doi-asserted-by":"crossref","first-page":"32","DOI":"10.1109\/T-AFFC.2011.26","article-title":"The Belfast induced natural emotion database","volume":"3","author":"Sneddon","year":"2012","journal-title":"IEEE Trans. Affective Comput."},{"key":"10.1016\/j.imavis.2012.06.016_bb0540","series-title":"Proc. of LREC Workshop on Multimodal Corpora","article-title":"The USC CreativeIT database: a multimodal database of theatrical improvisation","author":"Metallinou","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0545","series-title":"Proc. of Int. Conf. on Affective Computing and Intelligent Interaction","first-page":"367","article-title":"A multimodal database for mimicry analysis","author":"Sun","year":"2011"},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0550","doi-asserted-by":"crossref","first-page":"3","DOI":"10.1109\/T-AFFC.2012.10","article-title":"Guest editorial: special section on naturalistic affect resources for system building and evaluation","volume":"3","author":"Schuller","year":"2012","journal-title":"IEEE Trans. Affective Comput."},{"key":"10.1016\/j.imavis.2012.06.016_bb0555","series-title":"Proc. of IEEE Int. Conf. on Automatic Face and Gesture Recognition","first-page":"786","article-title":"Bilingual acoustic feature selection for emotion estimation using a 3D continuous model","author":"Espinosa","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0560","series-title":"Proc. of IEEE Int. Conf. 
on Automatic Face and Gesture Recognition","first-page":"780","article-title":"A novel perceptual feature set for audio emotion recognition","author":"Sezgin","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0565","series-title":"Proc. of IEEE Int. Conf. on Automatic Face and Gesture Recognition","first-page":"835","article-title":"Obtaining speech assets for judgement analysis on low-pass filtered emotional speech","author":"Snel","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0570","series-title":"Proc. of IEEE Int. Conf. on Automatic Face and Gesture Recognition","first-page":"16","article-title":"Output-associative RVM regression for dimensional and continuous emotion prediction","author":"Nicolaou","year":"2011"},{"issue":"13\u201315","key":"10.1016\/j.imavis.2012.06.016_bb0575","doi-asserted-by":"crossref","first-page":"2553","DOI":"10.1016\/j.neucom.2007.11.043","article-title":"User and context adaptive neural networks for emotion recognition","volume":"71","author":"Caridakis","year":"2008","journal-title":"Neurocomputing"},{"key":"10.1016\/j.imavis.2012.06.016_bb0580","unstructured":"I. Kanluan, M. Grimm, K. Kroschel, Audio-visual emotion recognition using an emotion recognition space concept, Proc. of the 16th European Signal Processing Conference, 2008."},{"key":"10.1016\/j.imavis.2012.06.016_bb0585","series-title":"Proc. of Human Language Technology Conf. North Am. Chapter of the Assoc. Computational Linguistics","first-page":"201","article-title":"Predicting emotion in spoken dialogue from multiple knowledge sources","author":"Forbes-Riley","year":"2004"},{"key":"10.1016\/j.imavis.2012.06.016_bb0590","first-page":"92","article-title":"Modeling naturalistic affective states via facial, vocal and bodily expressions recognition","volume":"vol. 4451","author":"Karpouzis","year":"2007"},{"key":"10.1016\/j.imavis.2012.06.016_bb0595","series-title":"Ch. 
Bimodal Emotion Recognition using Speech and Physiological Changes","first-page":"265","article-title":"Robust speech recognition and understanding","author":"Kim","year":"2007"},{"key":"10.1016\/j.imavis.2012.06.016_bb0600","series-title":"Proc. of IEEE Int. Conf. on Pattern Recognition","first-page":"3695","article-title":"Audio-visual classification and fusion of spontaneous affective data in likelihood space","author":"Nicolaou","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0605","series-title":"Proc. of IEEE\/RSJ Int. Conf. on Intelligent Robots and Systems","first-page":"2662","article-title":"An empirical study of machine learning techniques for affect recognition in human-robot interaction","author":"Liu","year":"2005"},{"issue":"10","key":"10.1016\/j.imavis.2012.06.016_bb0610","doi-asserted-by":"crossref","first-page":"1175","DOI":"10.1109\/34.954607","article-title":"Toward machine emotional intelligence: analysis of affective physiological state","volume":"23","author":"Picard","year":"2001","journal-title":"IEEE Trans. Pattern Anal. Mach. Intell."},{"key":"10.1016\/j.imavis.2012.06.016_bb0615","series-title":"Proc. of IEEE Int. Symp. on Consumer Electronics","first-page":"1","article-title":"Emotion-aware technologies for consumer electronics","author":"Gu","year":"2008"},{"key":"10.1016\/j.imavis.2012.06.016_bb0620","series-title":"Proc. of Int. Conf. on Pattern Recognition","first-page":"4242","article-title":"EEG-based emotion recognition using self-organizing map for boundary detection","author":"Khosrowabadi","year":"2010"},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0625","doi-asserted-by":"crossref","first-page":"309","DOI":"10.1109\/TITB.2009.2038481","article-title":"On the classification of emotional biosignals evoked while viewing affective pictures: an integrated data-mining-based approach for healthcare applications","volume":"14","author":"Frantzidis","year":"2010","journal-title":"IEEE Trans. Inf. Technol. 
Biomed."},{"key":"10.1016\/j.imavis.2012.06.016_bb0630","series-title":"Proc. of ACII 2011 Affective Brain-Computer Interfaces Workshop","first-page":"436","article-title":"Online recognition of facial actions for natural EEG-based BCI applications","author":"Heger","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0635","series-title":"Proc. of ACII 2011 Affective Brain\u2013Computer Interfaces Workshop","first-page":"435","article-title":"Nijholt, affective brain\u2013computer interfaces","author":"Christian M\u00fchl","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0640","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1088\/1741-2560\/8\/2\/025005","article-title":"Towards passive brain\u2013computer interfaces: applying brain\u2013computer interface technology to human-machine systems in general","volume":"8","author":"Zander","year":"2011","journal-title":"J. Neural Eng."},{"key":"10.1016\/j.imavis.2012.06.016_bb0645","series-title":"Proc. of IEEE Workshop on Automatic Speech Recognition and Understanding","first-page":"552","article-title":"Acoustic emotion recognition: a benchmark comparison of performances","author":"Schuller","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0650","series-title":"Proc. of 9th Interspeech Conf.","first-page":"597","article-title":"Abandoning emotion classes \u2014 towards continuous emotion recognition with modelling of long-range dependencies","author":"W\u00f6llmer","year":"2008"},{"key":"10.1016\/j.imavis.2012.06.016_bb0655","series-title":"Proc. of IEEE Automatic Speech Recognition and Understanding Workshop","first-page":"381","article-title":"Emotion estimation in speech using a 3D emotion space concept","author":"Grimm","year":"2005"},{"key":"10.1016\/j.imavis.2012.06.016_bb0660","series-title":"Proc. of Int. Conf. 
on Affective Computing and Intelligent Interaction","first-page":"488","article-title":"The humaine database: addressing the needs of the affective computing community","author":"Douglas-Cowie","year":"2007"},{"key":"10.1016\/j.imavis.2012.06.016_bb0665","series-title":"Proc. Int. Workshop on Opinion Mining and Sentiment Analysis","first-page":"205","article-title":"Linking humour to blogs analysis: affective traits in posts","author":"Reyes","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0670","series-title":"Intelligent Information Access, Vol. 301 of Studies in Computational Intelligence","first-page":"21","article-title":"Annotating and identifying emotions in text","author":"Strapparava","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0675","series-title":"Proc. International Symposium on Neural Networks","first-page":"611","article-title":"Detecting emotions in social affective situations using the EmotiNet knowledge base","volume":"vol. 3","author":"Balahur","year":"2011"},{"issue":"4","key":"10.1016\/j.imavis.2012.06.016_bb0680","doi-asserted-by":"crossref","first-page":"192","DOI":"10.1109\/T-AFFC.2011.17","article-title":"Recognizing affect from linguistic information in 3D continuous space","volume":"2","author":"Schuller","year":"2011","journal-title":"IEEE Trans. Affect. Comput."},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0685","doi-asserted-by":"crossref","first-page":"4","DOI":"10.1016\/j.csl.2009.12.003","article-title":"Whodunnit \u2014 searching for the most important feature types signalling emotion-related user states in speech","volume":"25","author":"Batliner","year":"2011","journal-title":"Comp. Speech Lang."},{"key":"10.1016\/j.imavis.2012.06.016_bb0690","series-title":"Proc. of IEEE Int. Conf. 
on Automatic Face and Gesture Recognition","first-page":"322","article-title":"String-based audiovisual fusion of behavioural events for the assessment of dimensional affect","author":"Eyben","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0695","series-title":"Proc. INTERSPEECH","first-page":"478","article-title":"Emotion recognition using imperfect speech recognition","author":"Metze","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0700","first-page":"448","article-title":"Learning and knowledge-based sentiment analysis in movie review key excerpts","volume":"vol. 6456\/2010","author":"Schuller","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0705","series-title":"Knowledge-Based and Intelligent Information and Engineering Systems, Vol. 6279 of Lecture Notes in Computer Science","first-page":"385","article-title":"Senticspace: visualizing opinions and sentiments in a multi-dimensional vector space","author":"Cambria","year":"2010"},{"issue":"1\u20132","key":"10.1016\/j.imavis.2012.06.016_bb0710","doi-asserted-by":"crossref","first-page":"4","DOI":"10.1016\/j.cviu.2006.10.016","article-title":"Vision-based human motion analysis: an overview","volume":"108","author":"Poppe","year":"2007","journal-title":"Comput. Vision and Image Understanding"},{"issue":"6","key":"10.1016\/j.imavis.2012.06.016_bb0715","doi-asserted-by":"crossref","first-page":"976","DOI":"10.1016\/j.imavis.2009.11.014","article-title":"A survey on vision-based human action recognition","volume":"28","author":"Poppe","year":"2010","journal-title":"Image Vision Comput."},{"key":"10.1016\/j.imavis.2012.06.016_bb0720","series-title":"Proc. of IEEE Conf. on Computer Vision and Pattern Recognition Workshops","first-page":"17","article-title":"Affect valence inference from facial action unit spectrograms","author":"McDuff","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0725","series-title":"Proc. 
of Computer Vision and Pattern Recognition Workshops","first-page":"1","article-title":"Technique for automatic emotion recognition by body gesture analysis","author":"Glowinski","year":"2008"},{"key":"10.1016\/j.imavis.2012.06.016_bb0730","doi-asserted-by":"crossref","first-page":"423","DOI":"10.1016\/j.neunet.2005.03.004","article-title":"Emotion recognition through facial expression analysis based on a neurofuzzy method","volume":"18","author":"Ioannou","year":"2005","journal-title":"J. Neural Networks"},{"issue":"7","key":"10.1016\/j.imavis.2012.06.016_bb0735","doi-asserted-by":"crossref","first-page":"1325","DOI":"10.1109\/TMM.2008.2004911","article-title":"Affective level video segmentation by utilizing the pleasure\u2013arousal\u2013dominance information","volume":"10","author":"Arifin","year":"2008","journal-title":"IEEE Trans. Multimedia"},{"key":"10.1016\/j.imavis.2012.06.016_bb0740","series-title":"Proc. of LREC Int. Workshop on Multimodal Corpora: Advances in Capturing, Coding and Analyzing Multimodality","first-page":"43","article-title":"Automatic segmentation of spontaneous data using dimensional labels from multiple coders","author":"Nicolaou","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0745","series-title":"Proc. of Int. Conf. on Affective Computing and Intelligent Interaction Workshops","first-page":"1","article-title":"Gesture and emotion: can basic gestural form features discriminate emotions?","author":"Kipp","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0750","series-title":"IEEE Int. Workshop on Robot and Human Interactive Communication","first-page":"178","article-title":"Effect of sensor fusion for recognition of emotional states using voice, face image and thermal image of face","author":"Yoshitomi","year":"2000"},{"key":"10.1016\/j.imavis.2012.06.016_bb0755","series-title":"Proc. of the IEEE Int. Conf. 
on Robotics, Automation and Mechatronics","first-page":"1","article-title":"Infrared thermal sensing of positive and negative affective states","author":"Khan","year":"2006"},{"issue":"4","key":"10.1016\/j.imavis.2012.06.016_bb0760","doi-asserted-by":"crossref","first-page":"979","DOI":"10.1109\/TBME.2009.2035926","article-title":"Classifying affective states using thermal infrared imaging of the human face","volume":"57","author":"Nhan","year":"2010","journal-title":"IEEE Trans. Biomed. Eng."},{"key":"10.1016\/j.imavis.2012.06.016_bb0765","series-title":"Proc. of Int. Conf. of the IEEE Engineering in Medicine and Biology Society","first-page":"247","article-title":"Thermal signatures of emotional arousal: a functional infrared imaging study","author":"Merla","year":"2007"},{"key":"10.1016\/j.imavis.2012.06.016_bb0770","series-title":"Proc. of Int. Conf. on Affective Computing and Intelligent Interaction Workshops","first-page":"1","article-title":"Pad-based multimodal affective fusion","author":"Gilroy","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0775","series-title":"Proc. of ACM Int. Conf. on Multimodal Interfaces","first-page":"146","article-title":"Modeling naturalistic affective states via facial and vocal expressions recognition","author":"Caridakis","year":"2006"},{"key":"10.1016\/j.imavis.2012.06.016_bb0780","series-title":"Proc. of the 22nd Annual Meeting of the Int. Society for Psychophysics","first-page":"245","article-title":"Joint model-parameter validation of self-estimates of valence and arousal: probing a differential-weighting model of affective intensity","author":"Oliveira","year":"2006"},{"issue":"4","key":"10.1016\/j.imavis.2012.06.016_bb0785","doi-asserted-by":"crossref","first-page":"323","DOI":"10.1023\/A:1024484306654","article-title":"Arousal and valence in the direct scaling of emotional response to film clips","volume":"21","author":"Alvarado","year":"1997","journal-title":"Motiv. 
Emot."},{"key":"10.1016\/j.imavis.2012.06.016_bb0790","series-title":"Proc. of 1st Int'l. Audio\/Visual Emotion Challenge and Workshop","first-page":"415","article-title":"Avec 2011 \u2014 the first audio\/visual emotion challenge and workshop \u2014 an introduction","author":"Schuller","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0795","series-title":"Proc. of IEEE CVPR Workshop on Gesture Recognition","first-page":"20","article-title":"Designing frameworks for automatic affect prediction and classification in dimensional space","author":"Nicolaou","year":"2011"},{"issue":"12","key":"10.1016\/j.imavis.2012.06.016_bb0800","doi-asserted-by":"crossref","first-page":"2067","DOI":"10.1109\/TPAMI.2008.26","article-title":"Emotion recognition based on physiological changes in music listening","volume":"30","author":"Kim","year":"2008","journal-title":"IEEE Trans. Pattern Anal. Mach. Intell."},{"key":"10.1016\/j.imavis.2012.06.016_bb0805","series-title":"Proc. of ACM Multimedia","first-page":"933","article-title":"A multi-layer hybrid framework for dimensional emotion classification","author":"Nicolaou","year":"2011"},{"issue":"10","key":"10.1016\/j.imavis.2012.06.016_bb0810","doi-asserted-by":"crossref","first-page":"1447","DOI":"10.1016\/S0893-6080(01)00106-X","article-title":"Oriented principal component analysis for large margin classifiers","volume":"14","author":"Bermejo","year":"2001","journal-title":"Neural Networks"},{"key":"10.1016\/j.imavis.2012.06.016_bb0815","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1155\/2010\/319406","article-title":"The SEMAINE API: Towards a standards-based framework for building emotion-oriented systems","volume":"2010","author":"Schr\u00f6der","year":"2010","journal-title":"Adv. Hum. Mach. Interact."},{"key":"10.1016\/j.imavis.2012.06.016_bb0820","series-title":"Proc. 
of ACM Multimedia","first-page":"1459","article-title":"OpenSMILE \u2014 the munich versatile and fast open-source audio feature extractor","author":"Eyben","year":"2010"},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0825","doi-asserted-by":"crossref","first-page":"335","DOI":"10.1109\/TITB.2010.2042608","article-title":"A novel middleware solution to improve ubiquitous healthcare systems aided by affective information","volume":"14","author":"Taleb","year":"2010","journal-title":"IEEE Trans. Inf. Technol. Biomed."},{"key":"10.1016\/j.imavis.2012.06.016_bb0830","series-title":"Proc. of IEEE Int. Conf. on Automatic Face and Gesture Recognition","first-page":"298","article-title":"The computer expression recognition toolbox (cert)","author":"Littlewort","year":"2011"},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0835","doi-asserted-by":"crossref","first-page":"106","DOI":"10.1109\/T-AFFC.2011.7","article-title":"Towards a minimal representation of affective gestures","volume":"2","author":"Glowinski","year":"2011","journal-title":"IEEE Trans. Affective Comput."},{"key":"10.1016\/j.imavis.2012.06.016_bb0840","unstructured":"Humaine: http:\/\/emotion-research.net."},{"issue":"4","key":"10.1016\/j.imavis.2012.06.016_bb0845","doi-asserted-by":"crossref","first-page":"67","DOI":"10.1109\/MSP.2009.932562","article-title":"Sensitive talking heads","volume":"26","author":"Huang","year":"2009","journal-title":"IEEE Signal Process. Mag."},{"key":"10.1016\/j.imavis.2012.06.016_bb0850","first-page":"1989","article-title":"Do different emotional valences have same effects on spatial attention?","volume":"vol. 
4","author":"Shen","year":"2010"},{"issue":"4","key":"10.1016\/j.imavis.2012.06.016_bb0855","doi-asserted-by":"crossref","first-page":"82","DOI":"10.1109\/MMUL.2003.1237553","article-title":"Live soundscape composition based on synthetic emotions","volume":"10","author":"Wassermann","year":"2003","journal-title":"IEEE MultiMedia"},{"key":"10.1016\/j.imavis.2012.06.016_bb0860","series-title":"Proc. of IEEE Int. Symp. in Robot and Human Interactive Communication","first-page":"464","article-title":"Towards an affect space for robots to display emotional body language","author":"Beck","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0865","series-title":"Proc. of ACM\/IEEE Int. Conf. on Human-Robot Interaction","first-page":"305","article-title":"Automatic analysis of affective postures and body motion to detect engagement with a game companion","author":"Sanghvi","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0870","series-title":"Proc. of IEEE Int. Symp. in Robot and Human Interactive Communication","first-page":"258","article-title":"Towards mapping emotive gait patterns from human to robot","author":"Karg","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb0875","series-title":"Proc. of Int. Conf. on Virtual Rehabilitation","first-page":"160","article-title":"Emotion-aware system for upper extremity rehabilitation","author":"Mihelj","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0880","series-title":"Proc. of Int. Conf. on Mobile Data Management: Systems, Services and Middleware","first-page":"430","article-title":"Design and implementation of mobile personal emotion monitoring system","author":"Tsai","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0885","series-title":"Proc. of Int. 
Workshop on Wearable and Implantable Body Sensor Networks","first-page":"156","article-title":"The design and analysis of a real-time, continuous arousal monitor","author":"Grundlehner","year":"2009"},{"issue":"1535","key":"10.1016\/j.imavis.2012.06.016_bb0890","doi-asserted-by":"crossref","first-page":"3575","DOI":"10.1098\/rstb.2009.0143","article-title":"Future affective technology for autism and emotion communication","volume":"364","author":"Picard","year":"2009","journal-title":"Philos. Trans. R. Soc. Lond. B Biol. Sci."},{"key":"10.1016\/j.imavis.2012.06.016_bb0895","series-title":"Proc. of IEEE Int. Conf. on Automatic Face and Gesture Recognition","first-page":"30","article-title":"Automated measurement of children's facial expressions during problem solving tasks","author":"Littlewort","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0900","series-title":"Proc. of IEEE Conf. on Computer Vision and Pattern Recognition Workshop on Human Communicative Behavior","first-page":"20","article-title":"Toward an optimal affect-sensitive instructional system of cognitive skills","author":"Whitehill","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0905","series-title":"Proc. of IEEE Symp. on Intelligent Agents","first-page":"23","article-title":"How emotional mechanism helps episodic learning in a cognitive agent","author":"Faghihi","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0910","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1155\/2010\/263593","article-title":"Emotion on the road \u2014 necessity, acceptance, and feasibility of affective computing in the car","volume":"2010","author":"Eyben","year":"2010","journal-title":"Adv. Hum. Mach. Interact."},{"key":"10.1016\/j.imavis.2012.06.016_bb0915","series-title":"Proc. of IEEE Int. Conf. 
on Multimedia and Expo","first-page":"566","article-title":"An improved valence\u2013arousal emotion space for video affective content representation and recognition","author":"Sun","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0920","series-title":"Proc. of IEEE Int. Conf. on Multimedia and Expo","first-page":"1436","article-title":"Queries and tags in affect-based multimedia retrieval","author":"Kierkels","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0925","series-title":"Proc. of Int. Conf. on Affective Computing and Intelligent Interaction Workshops","first-page":"1","article-title":"A collaborative personalized affective video retrieval system","author":"Soleymani","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0930","series-title":"Proc. of IEEE Int. Conf. on Automatic Face and Gesture Recognition","first-page":"803","article-title":"Continuous emotion detection in response to music videos","author":"Soleymani","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0935","first-page":"180","article-title":"Gamble\u2014a multiuser game with an embodied conversational agent","volume":"vol. 3711","author":"Rehm","year":"2005"},{"key":"10.1016\/j.imavis.2012.06.016_bb0940","doi-asserted-by":"crossref","first-page":"1027","DOI":"10.1109\/TSMCB.2010.2103557","article-title":"Automatic recognition of non-acted affective postures","volume":"41","author":"Kleinsmith","year":"2011","journal-title":"IEEE Trans. Syst. Man Cybern. B Cybern."},{"key":"10.1016\/j.imavis.2012.06.016_bb0945","unstructured":"Affectiva's homepage: http:\/\/www.affectiva.com\/."},{"key":"10.1016\/j.imavis.2012.06.016_bb0950","series-title":"Proceedings INTERSPEECH 2009, 10th Annual Conference of the International Speech Communication Association","first-page":"312","article-title":"The interspeech 2009 emotion challenge","author":"Schuller","year":"2009"},{"key":"10.1016\/j.imavis.2012.06.016_bb0955","series-title":"Proc. 
of INTERSPEECH","first-page":"2794","article-title":"The interspeech 2010 paralinguistic challenge","author":"Schuller","year":"2010"},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0960","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1109\/T-AFFC.2010.11","article-title":"Editorial","volume":"1","author":"Gratch","year":"2010","journal-title":"IEEE Trans. Affective Comput."},{"issue":"2","key":"10.1016\/j.imavis.2012.06.016_bb0965","doi-asserted-by":"crossref","first-page":"64","DOI":"10.1109\/T-AFFC.2011.16","article-title":"Introduction to the affect-based human behavior understanding special issue","volume":"2","author":"Salah","year":"2011","journal-title":"IEEE Trans. Affective Comput."},{"issue":"9\/10","key":"10.1016\/j.imavis.2012.06.016_bb0970","doi-asserted-by":"crossref","first-page":"1059","DOI":"10.1016\/j.specom.2011.07.003","article-title":"Introduction to the special issue on sensing emotion and affect \u2014 facing realism in speech processing","volume":"53","author":"Schuller","year":"2011","journal-title":"Speech Commun."},{"issue":"15","key":"10.1016\/j.imavis.2012.06.016_bb0975","first-page":"1","article-title":"Editorial emotion and mental state recognition from speech","volume":"2012","author":"Epps","year":"2012","journal-title":"EURASIP J. Adv. Signal Process."},{"issue":"1","key":"10.1016\/j.imavis.2012.06.016_bb0980","article-title":"Benefits and limitations of continuous representations of emotions in affective computing: introduction to the special issue","volume":"3","author":"Hudlicka","year":"2012","journal-title":"J. Synth. 
Emotions"},{"key":"10.1016\/j.imavis.2012.06.016_bb0985","series-title":"Affective Computing and Interaction: Psychological, Cognitive and Neuroscientific Perspectives","author":"Gokcay","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0990","series-title":"A Blueprint for Affective Computing","author":"Scherer","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb0995","series-title":"Advances in Emotion Recognition","author":"Konar","year":"2012"},{"key":"10.1016\/j.imavis.2012.06.016_bb1000","series-title":"Proc. of IEEE Int. Conf. on Automatic Face and Gesture Recognition","first-page":"354","article-title":"Acted vs. natural frustration and delight: many people smile in natural frustration","author":"Hoque","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb1005","series-title":"Proc. INTERSPEECH","first-page":"1553","article-title":"Using multiple databases for training in emotion recognition: to unite or to vote?","author":"Schuller","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb1010","series-title":"Proc. of IEEE Workshop on Automatic Speech Recognition and Understanding","first-page":"523","article-title":"Unsupervised learning in cross-corpus acoustic emotion recognition","author":"Zhang","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb1015","series-title":"Proc. IEEE Int. Conf. on Acoustics, Speech and Signal Processing","first-page":"5150","article-title":"Learning with synthesized speech for automatic emotion recognition","author":"Schuller","year":"2010"},{"key":"10.1016\/j.imavis.2012.06.016_bb1020","series-title":"Proc. of Int. Conf. on Affective Computing and Intelligent Interaction","first-page":"289","article-title":"Automatic understanding of affective and social signals by multimodal mimicry recognition","author":"Sun","year":"2011"},{"key":"10.1016\/j.imavis.2012.06.016_bb1025","first-page":"1566","article-title":"Performance modeling and prediction of face recognition systems","volume":"vol. 
2","author":"Wang","year":"2006"},{"key":"10.1016\/j.imavis.2012.06.016_bb1030","series-title":"Proc. of Affective Computing and Intelligent Interaction Workshops","first-page":"1","article-title":"Social agents: the first generations","author":"Heylen","year":"2009"}],"container-title":["Image and Vision Computing"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/api.elsevier.com\/content\/article\/PII:S0262885612001084?httpAccept=text\/xml","content-type":"text\/xml","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/api.elsevier.com\/content\/article\/PII:S0262885612001084?httpAccept=text\/plain","content-type":"text\/plain","content-version":"vor","intended-application":"text-mining"}],"deposited":{"date-parts":[[2018,11,22]],"date-time":"2018-11-22T01:56:18Z","timestamp":1542851778000},"score":1,"resource":{"primary":{"URL":"https:\/\/linkinghub.elsevier.com\/retrieve\/pii\/S0262885612001084"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2013,2]]},"references-count":205,"journal-issue":{"issue":"2","published-print":{"date-parts":[[2013,2]]}},"alternative-id":["S0262885612001084"],"URL":"https:\/\/doi.org\/10.1016\/j.imavis.2012.06.016","relation":{},"ISSN":["0262-8856"],"issn-type":[{"value":"0262-8856","type":"print"}],"subject":[],"published":{"date-parts":[[2013,2]]}}}