{"status":"ok","message-type":"work","message-version":"1.0.0","message":{"indexed":{"date-parts":[[2025,2,21]],"date-time":"2025-02-21T06:16:15Z","timestamp":1740118575482,"version":"3.37.3"},"reference-count":60,"publisher":"Elsevier BV","license":[{"start":{"date-parts":[[2021,11,1]],"date-time":"2021-11-01T00:00:00Z","timestamp":1635724800000},"content-version":"tdm","delay-in-days":0,"URL":"https:\/\/www.elsevier.com\/tdm\/userlicense\/1.0\/"},{"start":{"date-parts":[[2021,8,11]],"date-time":"2021-08-11T00:00:00Z","timestamp":1628640000000},"content-version":"vor","delay-in-days":0,"URL":"http:\/\/creativecommons.org\/licenses\/by\/4.0\/"}],"funder":[{"DOI":"10.13039\/501100001602","name":"Science Foundation Ireland","doi-asserted-by":"publisher","id":[{"id":"10.13039\/501100001602","id-type":"DOI","asserted-by":"publisher"}]},{"DOI":"10.13039\/501100014826","name":"ADAPT - Centre for Digital Content Technology","doi-asserted-by":"publisher","award":["13\/CDA\/2135","13\/RC\/2016"],"id":[{"id":"10.13039\/501100014826","id-type":"DOI","asserted-by":"publisher"}]}],"content-domain":{"domain":["elsevier.com","sciencedirect.com"],"crossmark-restriction":true},"short-container-title":["Computers & Graphics"],"published-print":{"date-parts":[[2021,11]]},"DOI":"10.1016\/j.cag.2021.07.022","type":"journal-article","created":{"date-parts":[[2021,8,8]],"date-time":"2021-08-08T06:58:40Z","timestamp":1628405920000},"page":"81-92","update-policy":"https:\/\/doi.org\/10.1016\/elsevier_cm_policy","source":"Crossref","is-referenced-by-count":12,"special_numbering":"C","title":["Model for predicting perception of facial action unit activation using virtual humans"],"prefix":"10.1016","volume":"100","author":[{"given":"Rachel","family":"McDonnell","sequence":"first","affiliation":[]},{"given":"Katja","family":"Zibrek","sequence":"additional","affiliation":[]},{"given":"Emma","family":"Carrigan","sequence":"additional","affiliation":[]},{"given":"Rozenn","family":"Dahyot","sequence":"additional","affiliation":[]}],"member":"78","reference":[{"key":"10.1016\/j.cag.2021.07.022_bib0001","series-title":"International conference on intelligent virtual agents","first-page":"286","article-title":"Ada and grace: toward realistic and engaging virtual museum guides","author":"Swartout","year":"2010"},{"key":"10.1016\/j.cag.2021.07.022_bib0002","series-title":"Medicine meets virtual reality","first-page":"133","article-title":"The virtual standardized patient","author":"Hubal","year":"2000"},{"issue":"8","key":"10.1016\/j.cag.2021.07.022_bib0003","first-page":"2","article-title":"Practice and theory of blendshape facial models.","volume":"1","author":"Lewis","year":"2014","journal-title":"Eurographics (State Art Rep)"},{"year":"1978","series-title":"Facial action coding system: a technique for the measurement of facial movement","author":"Ekman","key":"10.1016\/j.cag.2021.07.022_bib0004"},{"key":"10.1016\/j.cag.2021.07.022_bib0005","series-title":"Proceedings of the 9th international conference on motion in games","first-page":"73","article-title":"Improving blendshape performance for crowds with gpu and gpgpu techniques","author":"Costigan","year":"2016"},{"year":"2018","series-title":"A preliminary investigation into the impact of training for example-based facial blendshape creation","author":"Carrigan","key":"10.1016\/j.cag.2021.07.022_bib0006"},{"key":"10.1016\/j.cag.2021.07.022_bib0007","series-title":"Proceedings of the 41st annual European association for computer graphics 
conference","first-page":"219","article-title":"Expression packing: as-few-as-possible training expressions for blendshape transfer","volume":"39","author":"Carrigan","year":"2020"},{"year":"2013","series-title":"Face perception","author":"Bruce","key":"10.1016\/j.cag.2021.07.022_bib0008"},{"issue":"3","key":"10.1016\/j.cag.2021.07.022_bib0009","doi-asserted-by":"crossref","first-page":"482","DOI":"10.1037\/0033-295X.105.3.482","article-title":"What is \u201cspecial\u201d about face perception?","volume":"105","author":"Farah","year":"1998","journal-title":"Psychol Rev"},{"issue":"11","key":"10.1016\/j.cag.2021.07.022_bib0010","doi-asserted-by":"crossref","first-page":"4302","DOI":"10.1523\/JNEUROSCI.17-11-04302.1997","article-title":"The fusiform face area: a module in human extrastriate cortex specialized for face perception","volume":"17","author":"Kanwisher","year":"1997","journal-title":"J Neurosci"},{"issue":"1","key":"10.1016\/j.cag.2021.07.022_bib0011","doi-asserted-by":"crossref","first-page":"81","DOI":"10.1111\/j.1471-6402.2000.tb01024.x","article-title":"The gender stereotyping of emotions","volume":"24","author":"Plant","year":"2000","journal-title":"Psychol Women Q"},{"issue":"1","key":"10.1016\/j.cag.2021.07.022_bib0012","doi-asserted-by":"crossref","first-page":"87","DOI":"10.1037\/1528-3542.4.1.87","article-title":"Gender and culture differences in emotion.","volume":"4","author":"Fischer","year":"2004","journal-title":"Emotion"},{"issue":"4","key":"10.1016\/j.cag.2021.07.022_bib0013","doi-asserted-by":"crossref","first-page":"378","DOI":"10.1037\/1528-3542.4.4.378","article-title":"Facial appearance, gender, and emotion expression.","volume":"4","author":"Hess","year":"2004","journal-title":"Emotion"},{"key":"10.1016\/j.cag.2021.07.022_bib0014","doi-asserted-by":"crossref","first-page":"208","DOI":"10.3389\/fpsyg.2020.00208","article-title":"The own-race bias for face recognition in a multiracial society","volume":"11","author":"Wong","year":"2020","journal-title":"Front Psychol"},{"key":"10.1016\/j.cag.2021.07.022_bib0015","series-title":"Investigating perceptually based models to predict importance of facial blendshapes","isbn-type":"print","author":"Carrigan","year":"2020","ISBN":"https:\/\/id.crossref.org\/isbn\/9781450381710"},{"key":"10.1016\/j.cag.2021.07.022_bib0016","doi-asserted-by":"crossref","first-page":"321","DOI":"10.1016\/S0079-6123(06)56018-2","article-title":"Processing of facial identity and expression: a psychophysical, physiological, and computational perspective","volume":"156","author":"Schwaninger","year":"2006","journal-title":"Prog Brain Res"},{"issue":"8","key":"10.1016\/j.cag.2021.07.022_bib0017","doi-asserted-by":"crossref","first-page":"1413","DOI":"10.1111\/j.1551-6709.2009.01059.x","article-title":"Two routes to face perception: evidence from psychophysics and computational modeling","volume":"33","author":"Schwaninger","year":"2009","journal-title":"Cogn Sci"},{"issue":"3","key":"10.1016\/j.cag.2021.07.022_bib0018","doi-asserted-by":"crossref","first-page":"305","DOI":"10.1111\/j.2044-8295.1986.tb02199.x","article-title":"Understanding face recognition","volume":"77","author":"Bruce","year":"1986","journal-title":"Br J Psychol"},{"issue":"5","key":"10.1016\/j.cag.2021.07.022_bib0019","doi-asserted-by":"crossref","first-page":"222","DOI":"10.1111\/j.1467-8721.2006.00440.x","article-title":"Perception and emotion: how we recognize facial expressions","volume":"15","author":"Adolphs","year":"2006","journal-title":"Curr Dir Psychol 
Sci"},{"key":"10.1016\/j.cag.2021.07.022_bib0020","doi-asserted-by":"crossref","first-page":"1","DOI":"10.1016\/j.visres.2019.06.005","article-title":"Face perception: a brief journey through recent discoveries and current directions","volume":"157","author":"Oruc","year":"2019","journal-title":"Vision Res"},{"issue":"4","key":"10.1016\/j.cag.2021.07.022_bib0021","doi-asserted-by":"crossref","first-page":"587","DOI":"10.1037\/0021-9010.76.4.587","article-title":"Other-race face perception.","volume":"76","author":"Lindsay","year":"1991","journal-title":"J Appl Psychol"},{"issue":"9","key":"10.1016\/j.cag.2021.07.022_bib0022","doi-asserted-by":"crossref","first-page":"1117","DOI":"10.1068\/p5098","article-title":"An encoding advantage for own-race versus other-race faces","volume":"32","author":"Walker","year":"2003","journal-title":"Perception"},{"issue":"6","key":"10.1016\/j.cag.2021.07.022_bib0023","doi-asserted-by":"crossref","first-page":"1099","DOI":"10.1080\/17470210802617654","article-title":"Short article: why mix-ups don\u2019t happen in the nursery: evidence for an experience-based interpretation of the other-age effect","volume":"62","author":"Cassia","year":"2009","journal-title":"Q J Exp Psychol"},{"issue":"2","key":"10.1016\/j.cag.2021.07.022_bib0024","doi-asserted-by":"crossref","first-page":"498","DOI":"10.1016\/j.neuropsychologia.2009.10.007","article-title":"The role of face shape and pigmentation in other-race face perception: an electrophysiological study","volume":"48","author":"Balas","year":"2010","journal-title":"Neuropsychologia"},{"issue":"4","key":"10.1016\/j.cag.2021.07.022_bib0025","doi-asserted-by":"crossref","first-page":"241","DOI":"10.1023\/A:1024952730333","article-title":"The intensity of emotional facial expressions and decoding accuracy","volume":"21","author":"Hess","year":"1997","journal-title":"J Nonverbal Behav"},{"issue":"3","key":"10.1016\/j.cag.2021.07.022_bib0026","doi-asserted-by":"crossref","first-page":"471","DOI":"10.1037\/a0012771","article-title":"Detection of emotional faces: salient physical features guide effective visual search.","volume":"137","author":"Calvo","year":"2008","journal-title":"J Exp Psychol: Gen"},{"issue":"4","key":"10.1016\/j.cag.2021.07.022_bib0027","doi-asserted-by":"crossref","first-page":"634","DOI":"10.3758\/BF03206544","article-title":"Photographs of facial expression: accuracy, response times, and ratings of intensity","volume":"36","author":"Palermo","year":"2004","journal-title":"Behav Res Methods, Instrum Comput"},{"issue":"1\u20132","key":"10.1016\/j.cag.2021.07.022_bib0028","doi-asserted-by":"crossref","first-page":"22","DOI":"10.1007\/s00426-003-0157-2","article-title":"Positive facial expressions are recognized faster than negative facial expressions, but why?","volume":"69","author":"Lepp\u00e4nen","year":"2004","journal-title":"Psychol Res"},{"issue":"4","key":"10.1016\/j.cag.2021.07.022_bib0029","doi-asserted-by":"crossref","first-page":"498","DOI":"10.1080\/13506285.2013.807901","article-title":"Anger superiority effect: the importance of dynamic emotional facial expressions","volume":"21","author":"Ceccarini","year":"2013","journal-title":"Vis Cogn"},{"issue":"3","key":"10.1016\/j.cag.2021.07.022_bib0030","doi-asserted-by":"crossref","first-page":"184","DOI":"10.1111\/j.0956-7976.2005.00801.x","article-title":"Transmitting and decoding facial expressions","volume":"16","author":"Smith","year":"2005","journal-title":"Psychol 
Sci"},{"issue":"16","key":"10.1016\/j.cag.2021.07.022_bib0031","doi-asserted-by":"crossref","first-page":"4434","DOI":"10.1523\/JNEUROSCI.1704-15.2016","article-title":"A neural basis of facial action recognition in humans","volume":"36","author":"Srinivasan","year":"2016","journal-title":"J Neurosci"},{"issue":"5","key":"10.1016\/j.cag.2021.07.022_bib0032","doi-asserted-by":"crossref","DOI":"10.1371\/journal.pone.0177239","article-title":"Mapping the emotional face. how individual face parts contribute to successful emotion recognition","volume":"12","author":"Wegrzyn","year":"2017","journal-title":"PLoS One"},{"issue":"3","key":"10.1016\/j.cag.2021.07.022_bib0033","doi-asserted-by":"crossref","first-page":"152","DOI":"10.1016\/j.cag.2011.12.002","article-title":"Perception-driven facial expression synthesis","volume":"36","author":"Yu","year":"2012","journal-title":"Comput Graph"},{"key":"10.1016\/j.cag.2021.07.022_bib0034","series-title":"International conference on automatic face & gesture recognition","first-page":"448","article-title":"Reverse engineering psychologically valid facial expressions of emotion into social robots","author":"Chen","year":"2018"},{"issue":"8","key":"10.1016\/j.cag.2021.07.022_sbref0035","doi-asserted-by":"crossref","DOI":"10.1167\/8.8.1","article-title":"The contribution of different facial regions to the recognition of conversational expressions","volume":"8","author":"Nusseck","year":"2008","journal-title":"J Vis"},{"issue":"4","key":"10.1016\/j.cag.2021.07.022_bib0036","doi-asserted-by":"crossref","first-page":"860","DOI":"10.1037\/a0022758","article-title":"Happy mouth and sad eyes: scanning emotional facial expressions.","volume":"11","author":"Eisenbarth","year":"2011","journal-title":"Emotion"},{"key":"10.1016\/j.cag.2021.07.022_bib0037","article-title":"About brows: emotional and conversational signals","author":"Ekman","year":"1979","journal-title":"Hum Ethol"},{"issue":"5","key":"10.1016\/j.cag.2021.07.022_bib0038","doi-asserted-by":"crossref","first-page":"529","DOI":"10.1068\/p7823","article-title":"Eye shape illusions induced by eyebrow positions","volume":"44","author":"Matsushita","year":"2015","journal-title":"Perception"},{"issue":"3","key":"10.1016\/j.cag.2021.07.022_bib0039","doi-asserted-by":"crossref","first-page":"285","DOI":"10.1068\/p5027","article-title":"The role of eyebrows in face recognition","volume":"32","author":"Sadr","year":"2003","journal-title":"Perception"},{"issue":"2","key":"10.1016\/j.cag.2021.07.022_bib0040","doi-asserted-by":"crossref","first-page":"97","DOI":"10.1109\/34.908962","article-title":"Recognizing action units for facial expression analysis","volume":"23","author":"Tian","year":"2001","journal-title":"IEEE Trans Pattern Anal Mach Intell"},{"issue":"1","key":"10.1016\/j.cag.2021.07.022_bib0041","doi-asserted-by":"crossref","first-page":"35","DOI":"10.1017\/S0048577299971184","article-title":"Automated face analysis by feature point tracking has high concurrent validity with manual FACS coding","volume":"36","author":"Cohn","year":"1999","journal-title":"Psychophysiology"},{"key":"10.1016\/j.cag.2021.07.022_bib0042","article-title":"Facial action unit detection using attention and relation learning","author":"Shao","year":"2019","journal-title":"IEEE Trans Affect Comput"},{"key":"10.1016\/j.cag.2021.07.022_bib0043","first-page":"1","article-title":"Multi-level uncorrelated discriminative shared gaussian process for multi-view facial expression recognition","author":"Kumar","year":"2020","journal-title":"Vis 
Comput"},{"year":"1978","series-title":"Facial action coding system","author":"Ekman","key":"10.1016\/j.cag.2021.07.022_bib0044"},{"issue":"3","key":"10.1016\/j.cag.2021.07.022_bib0045","doi-asserted-by":"crossref","first-page":"399","DOI":"10.1145\/1015706.1015736","article-title":"Deformation transfer for triangle meshes","volume":"23","author":"Sumner","year":"2004","journal-title":"ACM Trans Graph (TOG)"},{"key":"10.1016\/j.cag.2021.07.022_bib0046","series-title":"ACM transactions on graphics (TOG)","first-page":"32","article-title":"Example-based facial rigging","volume":"29","author":"Li","year":"2010"},{"key":"10.1016\/j.cag.2021.07.022_bib0047","series-title":"SIGGRAPH ASIA 2016 technical briefs","first-page":"1","article-title":"Semantically-aware blendshape rigs from facial performance measurements","author":"Ma","year":"2016"},{"key":"10.1016\/j.cag.2021.07.022_bib0048","series-title":"Proceedings of the 24th annual conference on computer graphics and interactive techniques","first-page":"209","article-title":"Surface simplification using quadric error metrics","author":"Garland","year":"1997"},{"key":"10.1016\/j.cag.2021.07.022_bib0049","first-page":"53","article-title":"Directx 10 blend shapes: breaking the limits","volume":"3","author":"Lorach","year":"2007","journal-title":"GPU Gems"},{"key":"10.1016\/j.cag.2021.07.022_bib0050","article-title":"Skinned instancing","author":"Dudash","year":"2007","journal-title":"NVidia white paper"},{"key":"10.1016\/j.cag.2021.07.022_bib0051","series-title":"Computer graphics forum","first-page":"101","article-title":"Perceptual metrics for static and dynamic triangle meshes","volume":"32","author":"Corsini","year":"2013"},{"issue":"4","key":"10.1016\/j.cag.2021.07.022_bib0052","doi-asserted-by":"crossref","first-page":"600","DOI":"10.1109\/TIP.2003.819861","article-title":"Image quality assessment: from error visibility to structural similarity","volume":"13","author":"Wang","year":"2004","journal-title":"IEEE Trans Image Process"},{"issue":"2","key":"10.1016\/j.cag.2021.07.022_bib0053","doi-asserted-by":"crossref","first-page":"220","DOI":"10.1109\/TVCG.2010.38","article-title":"A perception correlated comparison method for dynamic meshes","volume":"17","author":"Vasa","year":"2010","journal-title":"IEEE Trans Vis Comput Graph"},{"issue":"3","key":"10.1016\/j.cag.2021.07.022_bib0054","doi-asserted-by":"crossref","first-page":"262","DOI":"10.1177\/1948550610389080","article-title":"Subtly different positive emotions can be distinguished by their facial expressions","volume":"2","author":"Mortillaro","year":"2011","journal-title":"Soc Psychol Personal Sci"},{"issue":"9","key":"10.1016\/j.cag.2021.07.022_bib0055","doi-asserted-by":"crossref","first-page":"3293","DOI":"10.1109\/TCYB.2018.2840090","article-title":"Improving speech related facial action unit recognition by audiovisual information fusion","volume":"49","author":"Meng","year":"2018","journal-title":"IEEE Trans Cybern"},{"key":"10.1016\/j.cag.2021.07.022_bib0056","series-title":"Applications and science of neural networks, fuzzy systems, and evolutionary computation VI","first-page":"64","article-title":"Realistic avatar eye and head animation using a neurobiological model of visual attention","volume":"5200","author":"Itti","year":"2003"},{"key":"10.1016\/j.cag.2021.07.022_bib0057","series-title":"Proceedings of the 6th international conference on multimodal interfaces","first-page":"205","article-title":"Analysis of emotion recognition using facial expressions, speech and 
multimodal information","author":"Busso","year":"2004"},{"key":"10.1016\/j.cag.2021.07.022_bib0058","doi-asserted-by":"crossref","first-page":"e453","DOI":"10.7717\/peerj.453","article-title":"Scikit-image: image processing in python","volume":"2","author":"van der Walt","year":"2014","journal-title":"PeerJ"},{"year":"2008","series-title":"An introduction to generalized linear models","author":"Dobson","key":"10.1016\/j.cag.2021.07.022_bib0059"},{"issue":"14","key":"10.1016\/j.cag.2021.07.022_bib0060","doi-asserted-by":"crossref","first-page":"3313","DOI":"10.1016\/j.neuropsychologia.2012.09.038","article-title":"Perception of emotions from facial expressions in high-functioning adults with autism","volume":"50","author":"Kennedy","year":"2012","journal-title":"Neuropsychologia"}],"container-title":["Computers & Graphics"],"original-title":[],"language":"en","link":[{"URL":"https:\/\/api.elsevier.com\/content\/article\/PII:S0097849321001631?httpAccept=text\/xml","content-type":"text\/xml","content-version":"vor","intended-application":"text-mining"},{"URL":"https:\/\/api.elsevier.com\/content\/article\/PII:S0097849321001631?httpAccept=text\/plain","content-type":"text\/plain","content-version":"vor","intended-application":"text-mining"}],"deposited":{"date-parts":[[2024,3,21]],"date-time":"2024-03-21T05:38:42Z","timestamp":1710999522000},"score":1,"resource":{"primary":{"URL":"https:\/\/linkinghub.elsevier.com\/retrieve\/pii\/S0097849321001631"}},"subtitle":[],"short-title":[],"issued":{"date-parts":[[2021,11]]},"references-count":60,"alternative-id":["S0097849321001631"],"URL":"https:\/\/doi.org\/10.1016\/j.cag.2021.07.022","relation":{},"ISSN":["0097-8493"],"issn-type":[{"type":"print","value":"0097-8493"}],"subject":[],"published":{"date-parts":[[2021,11]]},"assertion":[{"value":"Elsevier","name":"publisher","label":"This article is maintained by"},{"value":"Model for predicting perception of facial action unit activation using virtual humans","name":"articletitle","label":"Article Title"},{"value":"Computers & Graphics","name":"journaltitle","label":"Journal Title"},{"value":"https:\/\/doi.org\/10.1016\/j.cag.2021.07.022","name":"articlelink","label":"CrossRef DOI link to publisher maintained version"},{"value":"article","name":"content_type","label":"Content Type"},{"value":"\u00a9 2021 The Authors. Published by Elsevier Ltd.","name":"copyright","label":"Copyright"}]}}
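
The JSON above is a Crossref REST API work record for DOI 10.1016/j.cag.2021.07.022. As a minimal sketch of how such a record can be retrieved and read, assuming the public api.crossref.org endpoint and the third-party requests library (both assumptions, not part of the record itself):

import requests  # third-party: pip install requests

# Fetch the Crossref work record for this DOI from the public REST API.
DOI = "10.1016/j.cag.2021.07.022"
resp = requests.get(f"https://api.crossref.org/works/{DOI}", timeout=30)
resp.raise_for_status()
work = resp.json()["message"]  # payload mirrors the "message" object above

# Read a few of the fields present in the record.
title = work["title"][0]
authors = ", ".join(f'{a["given"]} {a["family"]}' for a in work["author"])
print(title)
print(authors)
print(work["container-title"][0], work["volume"], work["page"])  # journal, volume, pages
print("cited references:", work["references-count"])

Crossref asks API clients to identify themselves (for example via a mailto query parameter or User-Agent header) to use its "polite" pool; the sketch omits this for brevity.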