<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Publishing DTD v1.1d3 20150301//EN" "http://jats.nlm.nih.gov/publishing/1.1d3/JATS-journalpublishing1.dtd">
<article article-type="research-article" dtd-version="1.1d3" xml:lang="en" xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink">
<front>
<journal-meta>
<journal-id journal-id-type="nlm-ta">PLoS ONE</journal-id>
<journal-id journal-id-type="publisher-id">plos</journal-id>
<journal-id journal-id-type="pmc">plosone</journal-id>
<journal-title-group>
<journal-title>PLOS ONE</journal-title>
</journal-title-group>
<issn pub-type="epub">1932-6203</issn>
<publisher>
<publisher-name>Public Library of Science</publisher-name>
<publisher-loc>San Francisco, CA USA</publisher-loc>
</publisher>
</journal-meta>
<article-meta>
<article-id pub-id-type="doi">10.1371/journal.pone.0231756</article-id>
<article-id pub-id-type="publisher-id">PONE-D-19-09982</article-id>
<article-categories>
<subj-group subj-group-type="heading">
<subject>Research Article</subject>
</subj-group>
<subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Anatomy</subject><subj-group><subject>Head</subject><subj-group><subject>Face</subject></subj-group></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Medicine and health sciences</subject><subj-group><subject>Anatomy</subject><subj-group><subject>Head</subject><subj-group><subject>Face</subject></subj-group></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Psychology</subject><subj-group><subject>Emotions</subject></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Social sciences</subject><subj-group><subject>Psychology</subject><subj-group><subject>Emotions</subject></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Psychology</subject><subj-group><subject>Behavior</subject></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Social sciences</subject><subj-group><subject>Psychology</subject><subj-group><subject>Behavior</subject></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Neuroscience</subject><subj-group><subject>Cognitive science</subject><subj-group><subject>Cognition</subject><subj-group><subject>Memory</subject><subj-group><subject>Face recognition</subject></subj-group></subj-group></subj-group></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Neuroscience</subject><subj-group><subject>Learning and memory</subject><subj-group><subject>Memory</subject><subj-group><subject>Face recognition</subject></subj-group></subj-group></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Neuroscience</subject><subj-group><subject>Cognitive science</subject><subj-group><subject>Cognitive psychology</subject><subj-group><subject>Perception</subject><subj-group><subject>Face recognition</subject></subj-group></subj-group></subj-group></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Psychology</subject><subj-group><subject>Cognitive psychology</subject><subj-group><subject>Perception</subject><subj-group><subject>Face recognition</subject></subj-group></subj-group></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Social sciences</subject><subj-group><subject>Psychology</subject><subj-group><subject>Cognitive psychology</subject><subj-group><subject>Perception</subject><subj-group><subject>Face recognition</subject></subj-group></subj-group></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Anatomy</subject><subj-group><subject>Head</subject></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Medicine and health sciences</subject><subj-group><subject>Anatomy</subject><subj-group><subject>Head</subject></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Physical sciences</subject><subj-group><subject>Mathematics</subject><subj-group><subject>Probability theory</subject><subj-group><subject>Statistical distributions</subject></subj-group></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Anatomy</subject><subj-group><subject>Neck</subject></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Medicine and health sciences</subject><subj-group><subject>Anatomy</subject><subj-group><subject>Neck</subject></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Psychology</subject><subj-group><subject>Behavior</subject><subj-group><subject>Animal behavior</subject><subj-group><subject>Behavioral ecology</subject></subj-group></subj-group></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Social sciences</subject><subj-group><subject>Psychology</subject><subj-group><subject>Behavior</subject><subj-group><subject>Animal behavior</subject><subj-group><subject>Behavioral ecology</subject></subj-group></subj-group></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Zoology</subject><subj-group><subject>Animal behavior</subject><subj-group><subject>Behavioral ecology</subject></subj-group></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Biology and life sciences</subject><subj-group><subject>Ecology</subject><subj-group><subject>Behavioral ecology</subject></subj-group></subj-group></subj-group><subj-group subj-group-type="Discipline-v3">
<subject>Ecology and environmental sciences</subject><subj-group><subject>Ecology</subject><subj-group><subject>Behavioral ecology</subject></subj-group></subj-group></subj-group></article-categories>
<title-group>
<article-title>Are there non-verbal signals of guilt?</article-title>
<alt-title alt-title-type="running-head">Are there non-verbal signals of guilt?</alt-title>
</title-group>
<contrib-group>
<contrib contrib-type="author" corresp="yes" xlink:type="simple">
<contrib-id authenticated="true" contrib-id-type="orcid">http://orcid.org/0000-0002-6052-1073</contrib-id>
<name name-style="western">
<surname>Julle-Danière</surname>
<given-names>Eglantine</given-names>
</name>
<role content-type="http://credit.casrai.org/">Conceptualization</role>
<role content-type="http://credit.casrai.org/">Data curation</role>
<role content-type="http://credit.casrai.org/">Formal analysis</role>
<role content-type="http://credit.casrai.org/">Investigation</role>
<role content-type="http://credit.casrai.org/">Methodology</role>
<role content-type="http://credit.casrai.org/">Project administration</role>
<role content-type="http://credit.casrai.org/">Resources</role>
<role content-type="http://credit.casrai.org/">Software</role>
<role content-type="http://credit.casrai.org/">Visualization</role>
<role content-type="http://credit.casrai.org/">Writing – original draft</role>
<role content-type="http://credit.casrai.org/">Writing – review &amp; editing</role>
<xref ref-type="corresp" rid="cor001">*</xref>
<xref ref-type="aff" rid="aff001"/>
</contrib>
<contrib contrib-type="author" xlink:type="simple">
<name name-style="western">
<surname>Whitehouse</surname>
<given-names>Jamie</given-names>
</name>
<role content-type="http://credit.casrai.org/">Formal analysis</role>
<role content-type="http://credit.casrai.org/">Resources</role>
<role content-type="http://credit.casrai.org/">Software</role>
<role content-type="http://credit.casrai.org/">Visualization</role>
<role content-type="http://credit.casrai.org/">Writing – review &amp; editing</role>
<xref ref-type="aff" rid="aff001"/>
</contrib>
<contrib contrib-type="author" xlink:type="simple">
<name name-style="western">
<surname>Mielke</surname>
<given-names>Alexander</given-names>
</name>
<role content-type="http://credit.casrai.org/">Formal analysis</role>
<role content-type="http://credit.casrai.org/">Writing – review &amp; editing</role>
<xref ref-type="aff" rid="aff001"/>
</contrib>
<contrib contrib-type="author" xlink:type="simple">
<name name-style="western">
<surname>Vrij</surname>
<given-names>Aldert</given-names>
</name>
<role content-type="http://credit.casrai.org/">Conceptualization</role>
<role content-type="http://credit.casrai.org/">Funding acquisition</role>
<role content-type="http://credit.casrai.org/">Supervision</role>
<role content-type="http://credit.casrai.org/">Writing – review &amp; editing</role>
<xref ref-type="aff" rid="aff001"/>
</contrib>
<contrib contrib-type="author" xlink:type="simple">
<name name-style="western">
<surname>Gustafsson</surname>
<given-names>Erik</given-names>
</name>
<role content-type="http://credit.casrai.org/">Conceptualization</role>
<role content-type="http://credit.casrai.org/">Supervision</role>
<role content-type="http://credit.casrai.org/">Writing – review &amp; editing</role>
<xref ref-type="aff" rid="aff001"/>
</contrib>
<contrib contrib-type="author" xlink:type="simple">
<contrib-id authenticated="true" contrib-id-type="orcid">http://orcid.org/0000-0002-4480-6781</contrib-id>
<name name-style="western">
<surname>Micheletta</surname>
<given-names>Jérôme</given-names>
</name>
<role content-type="http://credit.casrai.org/">Funding acquisition</role>
<role content-type="http://credit.casrai.org/">Visualization</role>
<role content-type="http://credit.casrai.org/">Writing – review &amp; editing</role>
<xref ref-type="aff" rid="aff001"/>
</contrib>
<contrib contrib-type="author" xlink:type="simple">
<contrib-id authenticated="true" contrib-id-type="orcid">http://orcid.org/0000-0001-6303-7458</contrib-id>
<name name-style="western">
<surname>Waller</surname>
<given-names>Bridget M.</given-names>
</name>
<role content-type="http://credit.casrai.org/">Conceptualization</role>
<role content-type="http://credit.casrai.org/">Formal analysis</role>
<role content-type="http://credit.casrai.org/">Funding acquisition</role>
<role content-type="http://credit.casrai.org/">Supervision</role>
<role content-type="http://credit.casrai.org/">Writing – original draft</role>
<role content-type="http://credit.casrai.org/">Writing – review &amp; editing</role>
<xref ref-type="aff" rid="aff001"/>
</contrib>
</contrib-group>
<aff id="aff001"><addr-line>Department of Psychology, University of Portsmouth, Portsmouth, United Kingdom</addr-line></aff>
<contrib-group>
<contrib contrib-type="editor" xlink:type="simple">
<name name-style="western">
<surname>Livingstone</surname>
<given-names>Steven R.</given-names>
</name>
<role>Editor</role>
<xref ref-type="aff" rid="edit1"/>
</contrib>
</contrib-group>
<aff id="edit1"><addr-line>University of Otago, NEW ZEALAND</addr-line></aff>
<author-notes>
<fn fn-type="conflict" id="coi001">
<p>The author(s) declared that there were no conflicts of interest with respect to the authorship or the publication of this article.</p>
</fn>
<corresp id="cor001">* E-mail: <email xlink:type="simple">eglantine.julle-daniere@port.ac.uk</email></corresp>
</author-notes>
<pub-date pub-type="epub">
<day>24</day>
<month>4</month>
<year>2020</year>
</pub-date>
<pub-date pub-type="collection">
<year>2020</year>
</pub-date>
<volume>15</volume>
<issue>4</issue>
<elocation-id>e0231756</elocation-id>
<history>
<date date-type="received">
<day>8</day>
<month>4</month>
<year>2019</year>
</date>
<date date-type="accepted">
<day>31</day>
<month>3</month>
<year>2020</year>
</date>
</history>
<permissions>
<copyright-year>2020</copyright-year>
<copyright-holder>Julle-Danière et al.</copyright-holder>
<license xlink:href="http://creativecommons.org/licenses/by/4.0/" xlink:type="simple">
<license-p>This is an open access article distributed under the terms of the <ext-link ext-link-type="uri" xlink:href="http://creativecommons.org/licenses/by/4.0/" xlink:type="simple">Creative Commons Attribution License</ext-link>, which permits unrestricted use, distribution, and reproduction in any medium, provided the original author and source are credited.</license-p>
</license>
</permissions>
<self-uri content-type="pdf" xlink:href="info:doi/10.1371/journal.pone.0231756"/>
<abstract>
<p>Guilt is a complex emotion with a potentially important social function of stimulating cooperative behaviours towards and from others, but whether the feeling of guilt is associated with a recognisable pattern of nonverbal behaviour is unknown. We examined the production and perception of guilt in two different studies, with a total of 238 participants with various places of origin. Guilt was induced experimentally, eliciting patterns of movement that were associated with both the participants’ self-reported feelings of guilt and judges’ impressions of their guilt. Guilt was most closely associated with frowning and neck touching. While there were differences between self-reported guilt and perception of guilt, the findings suggest that there are consistent patterns that could be considered a non-verbal signal of guilt in humans.</p>
</abstract>
<funding-group>
<award-group id="award001">
<funding-source>
<institution-wrap>
<institution-id institution-id-type="funder-id">http://dx.doi.org/10.13039/501100000275</institution-id>
<institution>Leverhulme Trust</institution>
</institution-wrap>
</funding-source>
<award-id>RPG-2016-206</award-id>
<principal-award-recipient>
<contrib-id authenticated="true" contrib-id-type="orcid">http://orcid.org/0000-0001-6303-7458</contrib-id>
<name name-style="western">
<surname>Waller</surname>
<given-names>Bridget M.</given-names>
</name>
</principal-award-recipient>
</award-group>
<award-group id="award002">
<funding-source>
<institution-wrap>
<institution-id institution-id-type="funder-id">http://dx.doi.org/10.13039/501100000275</institution-id>
<institution>Leverhulme Trust</institution>
</institution-wrap>
</funding-source>
<award-id>RPG-2016-206</award-id>
<principal-award-recipient>
<name name-style="western">
<surname>Vrij</surname>
<given-names>Aldert</given-names>
</name>
</principal-award-recipient>
</award-group>
<award-group id="award003">
<funding-source>
<institution-wrap>
<institution-id institution-id-type="funder-id">http://dx.doi.org/10.13039/501100000275</institution-id>
<institution>Leverhulme Trust</institution>
</institution-wrap>
</funding-source>
<award-id>RPG-2018-334</award-id>
<principal-award-recipient>
<contrib-id authenticated="true" contrib-id-type="orcid">http://orcid.org/0000-0002-4480-6781</contrib-id>
<name name-style="western">
<surname>Micheletta</surname>
<given-names>Jérôme</given-names>
</name>
</principal-award-recipient>
</award-group>
<funding-statement>The studies were funded by a Leverhulme Trust Research Project Grant Cultural variation in the social function and expression of guilt awarded to the seventh and fourth authors (RPG-2016-206) and the Leverhulme Trust Research Project Grant Rethinking complexity in facial communication systems awarded to the sixth author (RPG-2018-334). The funders had no role in study design, data collection and analysis, decision to publish, or preparation of the manuscript.</funding-statement>
</funding-group>
<counts>
<fig-count count="2"/>
<table-count count="5"/>
<page-count count="27"/>
</counts>
<custom-meta-group>
<custom-meta id="data-availability">
<meta-name>Data Availability</meta-name>
<meta-value>All relevant data used for analysis are within the paper and its Supporting Information files. The R codes used to obtain the results presented in this paper are also included in the Supporting Information files.</meta-value>
</custom-meta>
</custom-meta-group>
</article-meta>
</front>
<body>
<sec id="sec001" sec-type="intro">
<title>Introduction</title>
<p>Humans are highly cooperative with both relatives and strangers [<xref ref-type="bibr" rid="pone.0231756.ref001">1</xref>], and the need for cooperation may have provided a powerful selection pressure behind many of the behaviours that we consider uniquely human. Guilt is an emotional and cognitive experience arising when someone feels that they did something wrong. It could have evolved due to its potential adaptive function, within social interaction, of stimulating pro-social behaviours towards and from others. Guilt is classified as a moral, self-conscious emotion, along with pride, shame and embarrassment [<xref ref-type="bibr" rid="pone.0231756.ref002">2</xref>], and is one of the most social, other-oriented emotions that people experience throughout life [<xref ref-type="bibr" rid="pone.0231756.ref003">3</xref>,<xref ref-type="bibr" rid="pone.0231756.ref004">4</xref>]. Despite a growing literature on the social consequences of feeling guilty [<xref ref-type="bibr" rid="pone.0231756.ref004">4</xref>–<xref ref-type="bibr" rid="pone.0231756.ref006">6</xref>], little is known about the behavioural mechanisms underpinning the social functions of guilt, specifically whether guilt is associated with a specific facial expression or nonverbal signal that others can recognise. If people can recognise guilt in others, this might explain how guilt can facilitate cooperation and pro-social behaviours within social interaction. People often state that they can detect a feeling of guilt in others [<xref ref-type="bibr" rid="pone.0231756.ref007">7</xref>], but with some notable exceptions [<xref ref-type="bibr" rid="pone.0231756.ref008">8</xref>], to date, a facial expression of guilt has not been identified scientifically.</p>
<p>Moral emotions are thought to facilitate the complex navigation of social interactions and relationships [<xref ref-type="bibr" rid="pone.0231756.ref009">9</xref>–<xref ref-type="bibr" rid="pone.0231756.ref011">11</xref>], allowing one to consider behaviour in light of social norms and the differing perspective of others. Early accounts of guilt cast it primarily as a self-regulatory emotion prompting individuals to reflect on their mistakes and ultimately feel better about themselves [<xref ref-type="bibr" rid="pone.0231756.ref004">4</xref>]. It has been shown to have a potentially positive function within social interaction of stimulating pro-social behaviours towards and from others, promoting actions towards those who have been wronged specifically [<xref ref-type="bibr" rid="pone.0231756.ref004">4</xref>–<xref ref-type="bibr" rid="pone.0231756.ref006">6</xref>]. Experiments have shown that guilt can prompt people to specific actions towards others, such as helping behaviours [<xref ref-type="bibr" rid="pone.0231756.ref012">12</xref>] and prejudice-reducing behaviours [<xref ref-type="bibr" rid="pone.0231756.ref013">13</xref>]. The virtue of apologies and verbal admittance of wrong-doing has been well studied [<xref ref-type="bibr" rid="pone.0231756.ref014">14</xref>,<xref ref-type="bibr" rid="pone.0231756.ref015">15</xref>], revealing that people (victims of wrong-doing or by-standers) would be more lenient towards a wrong-doer who recognises their faults. For example, in legal contexts, judges and jurors claim that they know when a defendant is sorry for the crime they have committed [<xref ref-type="bibr" rid="pone.0231756.ref007">7</xref>], which can then impact on sentencing. Guilt, however, is not just a social emotion. 
Indeed, a <italic>Dobby Effect</italic> has been highlighted [<xref ref-type="bibr" rid="pone.0231756.ref016">16</xref>], refuting the all-social aspect of guilt, and showing that guilty people sometimes punish themselves in the absence of opportunity to make amends to the victim of their wrong-doing. The social aspect of guilt seems then linked to the context the guilty person finds themselves in: they will act pro-socially and make amends in social contexts but will engage in self-punishment when socially isolated [<xref ref-type="bibr" rid="pone.0231756.ref016">16</xref>]. Finally, guilt can be experienced automatically after committing a social transgression (self-induced), but can also be induced by others as a method of control to gain power within relationships [other induced; <xref ref-type="bibr" rid="pone.0231756.ref017">17</xref>]. Guilt can thus be a complex and powerful phenomenon within social negotiations, but whether guilt can be observed by others without being explicitly declared is unknown. If guilt can be detected in this way, the potential to affect social outcomes between individuals is increased.</p>
<p>Whether emotions (and which emotions) are associated with universally produced and recognised facial expressions is debated. The classic and largely dominant view, the Basic Emotion Theory [BET; <xref ref-type="bibr" rid="pone.0231756.ref018">18</xref>,<xref ref-type="bibr" rid="pone.0231756.ref019">19</xref>,<xref ref-type="bibr" rid="pone.0231756.ref020">20</xref>], is that primary, basic emotions [happiness, sadness, anger, surprise, disgust and fear - <xref ref-type="bibr" rid="pone.0231756.ref019">19</xref>] are considered innate to all human populations and universally expressed [<xref ref-type="bibr" rid="pone.0231756.ref020">20</xref>,<xref ref-type="bibr" rid="pone.0231756.ref021">21</xref>], and so likely resulting from specific functional adaptations [<xref ref-type="bibr" rid="pone.0231756.ref022">22</xref>]. In contrast, secondary emotions (of which guilt is one, along with embarrassment, shame, and contempt) are thought to differ significantly between cultures [<xref ref-type="bibr" rid="pone.0231756.ref023">23</xref>,<xref ref-type="bibr" rid="pone.0231756.ref024">24</xref>], their expressions subject to specific cultural display rules [<xref ref-type="bibr" rid="pone.0231756.ref020">20</xref>,<xref ref-type="bibr" rid="pone.0231756.ref024">24</xref>], and acquired and developed gradually during childhood [<xref ref-type="bibr" rid="pone.0231756.ref025">25</xref>]. Secondary emotions are more idiosyncratic and context-dependent, which is why it has been difficult to identify specific facial movements associated with the experience of those emotional states. The later ontogeny led scientists to explore the possible influence of environment on the development of secondary emotions [<xref ref-type="bibr" rid="pone.0231756.ref020">20</xref>], and through the impact of these variable environments, they are not thought to have a prototypical universal expression [<xref ref-type="bibr" rid="pone.0231756.ref026">26</xref>]. 
Within a Behavioural Ecological View of facial expressions [BEV; <xref ref-type="bibr" rid="pone.0231756.ref027">27</xref>,<xref ref-type="bibr" rid="pone.0231756.ref028">28</xref>], however, the distinction between primary and secondary emotions is less rigid. BEV argues that facial expressions indicate the sender’s most likely future behaviours (i.e., action tendencies) and thus function as important social signals in social interaction. Facial expressions benefit both the sender and receiver by reducing the need for conflict when interests are declared openly [<xref ref-type="bibr" rid="pone.0231756.ref001">1</xref>,<xref ref-type="bibr" rid="pone.0231756.ref017">17</xref>,<xref ref-type="bibr" rid="pone.0231756.ref027">27</xref>–<xref ref-type="bibr" rid="pone.0231756.ref030">30</xref>]. As such, both primary and secondary emotions <italic>can</italic> be associated with specific, readable, and recognisable facial signals, as it is not the emotion per se that is being transmitted, but instead the potential social action [<xref ref-type="bibr" rid="pone.0231756.ref028">28</xref>,<xref ref-type="bibr" rid="pone.0231756.ref030">30</xref>]. Therefore, if guilt is associated with a specific social outcome (e.g. making amends, increased likelihood to cooperate in the future), people could detect this from nonverbal behaviour, specifically from a facial signal with communicative value. Signals can therefore be understood as a way for an individual to manipulate or alter the behaviour of another individual [<xref ref-type="bibr" rid="pone.0231756.ref031">31</xref>–<xref ref-type="bibr" rid="pone.0231756.ref033">33</xref>]. Signals can also be used by others when deciding if and how to respond to a given situation [<xref ref-type="bibr" rid="pone.0231756.ref034">34</xref>]. 
The potentially important role of the face in social interactions led us to hypothesise that guilt would be associated with an identifiable facial signal (i.e., facial expression), and that non-verbal signals (i.e., self-directed behaviours) could also be present.</p>
<p>A non-verbal signal can include not only facial expressions (i.e., resulting from the contraction of specific facial muscles), but also head position, behaviours directed towards the head (e.g., touching the face or hair), body postures and gestures. Non-verbal behaviours (focussing here on facial expressions and actions directed towards the face) can be considered a signal if those behaviours are reliably associated with the experience of guilt and are accurately perceived by observers as an indication of guilt, as well as influencing the observers’ behaviours [<xref ref-type="bibr" rid="pone.0231756.ref033">33</xref>]. Here, we tried to identify non-verbal signals resulting from a specific cognitive appraisal (i.e., a situation designed to induce guilt; [<xref ref-type="bibr" rid="pone.0231756.ref035">35</xref>]), occurring concomitantly with a self-reported feeling of guilt. By doing this, we are following Scherer et al. [<xref ref-type="bibr" rid="pone.0231756.ref035">35</xref>]’s view that non-verbal signals can carry emotional meaning, as well as action tendencies which can both be perceived and interpreted by observers. Moreover, some researchers argue that the concept of emotion is constructed [<xref ref-type="bibr" rid="pone.0231756.ref036">36</xref>–<xref ref-type="bibr" rid="pone.0231756.ref038">38</xref>] as the result of a given experience, at a specific time, in a specific context [<xref ref-type="bibr" rid="pone.0231756.ref037">37</xref>]. As such, both theories [<xref ref-type="bibr" rid="pone.0231756.ref035">35</xref>,<xref ref-type="bibr" rid="pone.0231756.ref037">37</xref>] advocate for a less direct link between non-verbal signals and emotional states than previously argued by the Basic Emotion Theory [<xref ref-type="bibr" rid="pone.0231756.ref019">19</xref>], while still expecting non-verbal signals to have potential function and meaning.</p>
<p>Some secondary emotions [e.g. shame and embarrassment; <xref ref-type="bibr" rid="pone.0231756.ref008">8</xref>,<xref ref-type="bibr" rid="pone.0231756.ref039">39</xref>] have been associated with recognisable facial movements, but these emotions are often confused with each other. Guilt can also be mistaken or mislabelled as shame, and sometimes embarrassment, and research has tried to differentiate between those, not only in terms of the psychological meaning but also in terms of the behavioural signal [<xref ref-type="bibr" rid="pone.0231756.ref008">8</xref>,<xref ref-type="bibr" rid="pone.0231756.ref025">25</xref>,<xref ref-type="bibr" rid="pone.0231756.ref040">40</xref>]. The specific social context in which the facial expression is placed therefore can be important in the interpretation of these expressions [<xref ref-type="bibr" rid="pone.0231756.ref041">41</xref>,<xref ref-type="bibr" rid="pone.0231756.ref042">42</xref>]. Nevertheless, there must be some key physical elements to such expressions that underpin their recognition to make them in some way identifiable to others.</p>
<p>For instance, the action tendencies of shame, embarrassment and guilt, are rather different, and may thus manifest as physical differences in a behavioural signal. Behavioural responses to embarrassment and shame have been identified over the years [<xref ref-type="bibr" rid="pone.0231756.ref043">43</xref>–<xref ref-type="bibr" rid="pone.0231756.ref045">45</xref>]: embarrassment displays are marked by gaze down, controlled smiles, gaze shifts, and face touches [<xref ref-type="bibr" rid="pone.0231756.ref044">44</xref>], whereas a shameful display is marked with head and gaze down [<xref ref-type="bibr" rid="pone.0231756.ref043">43</xref>–<xref ref-type="bibr" rid="pone.0231756.ref045">45</xref>]. Embarrassment serves a reconciliatory and appeasement function, reconciling in social relations following transgressions [see <xref ref-type="bibr" rid="pone.0231756.ref046">46</xref> for review], whereas shame serves a reconciliatory and appeasement function following hierarchical transgressions. In contrast, a facial expression of guilt has not been clearly described. Guilt may have evolved in humans due to the value in indicating one’s willingness to make amends. Only one study has tried to identify a recognisable set of facial movements associated with the experience of guilt [<xref ref-type="bibr" rid="pone.0231756.ref008">8</xref>]. Three potential displays of guilt were presented on still photographs: a facial expression representing self-contempt, which has been shown to be associated with the experience of guilt [<xref ref-type="bibr" rid="pone.0231756.ref047">47</xref>]; a non-verbal display of sympathy [<xref ref-type="bibr" rid="pone.0231756.ref048">48</xref>], which could be part of the experience of guilt; and finally a facial expression of pain, considered as one antecedent of guilt [<xref ref-type="bibr" rid="pone.0231756.ref049">49</xref>]. 
Following the presentation of the still photographs, participants in this study had to select one emotion word among 14 different options (including a “no emotion” option). None of these conceptualised displays of guilt were identified as such by observers [<xref ref-type="bibr" rid="pone.0231756.ref008">8</xref>]. The authors speculated that participants may have struggled with identifying fixed displays compared to spontaneous dynamic stimuli of the same emotions [<xref ref-type="bibr" rid="pone.0231756.ref008">8</xref>,<xref ref-type="bibr" rid="pone.0231756.ref044">44</xref>]. This study motivated us to try a new methodology, with a bottom-up approach to try inducing guilt in the laboratory to collect spontaneous dynamic displays associated with the experience of guilt that we could then present to naïve observers.</p>
<sec id="sec002">
<title>Present investigation</title>
<p>Here, we examined variation in both the production and perception of the specific facial movements associated with guilt in a culturally diverse sample including participants with different geographic backgrounds, recruiting people from WEIRD and non-WEIRD countries [Western, Educated, Industrialised, Rich and Democratic societies; <xref ref-type="bibr" rid="pone.0231756.ref050">50</xref>]. We examined the production and perception of spontaneous facial expressions using a bottom-up approach to identify dynamic patterns in facial behaviour, departing from the classic method of coding the apex of an expression or movements of interest only [<xref ref-type="bibr" rid="pone.0231756.ref051">51</xref>,<xref ref-type="bibr" rid="pone.0231756.ref052">52</xref>]. We looked at the production of facial movements in individuals currently living in the UK but belonging to different cultures and originating from different countries to assess overall patterns produced in association with guilt and gain general knowledge, regardless of the origin of individuals. Firstly, we identified facial movements based on what people displayed when experiencing guilt. Secondly, we identified facial movements based on what people perceived as guilt. This study looked at the production of a facial expression of guilt using for the first time an experimental induction approach and an extensive dynamic facial movement coding system.</p>
</sec>
</sec>
<sec id="sec003">
<title>Study 1 – production of guilt</title>
<sec id="sec004" sec-type="materials|methods">
<title>Methods</title>
<sec id="sec005">
<title>Participants</title>
<p>One hundred and thirty-one participants took part in this study (94 females; <italic>M</italic><sub><italic>age</italic></sub> = 25.41, <italic>SD</italic> = 9.47; see <xref ref-type="supplementary-material" rid="pone.0231756.s019">S1 Study</xref> <italic>of Table 1</italic> in <italic>Supplementary Materials</italic> for details). Participants were recruited based on an opportunistic sampling method and were all UK residents at the time of the experiment (but included both UK and non-UK nationals). All of them received either course credit (if student) or £5 for their time. The whole experiment lasted 45 minutes on average. Participants had various ethnicities and nationalities, constituting a sample made of individuals with various Places of Origin [PoO—see <xref ref-type="supplementary-material" rid="pone.0231756.s019">S1 Study</xref> of Table 1 for details; <xref ref-type="bibr" rid="pone.0231756.ref050">50</xref>]. The project has been reviewed and approved by the Science Faculty Ethics Committee (SFEC) from the University of Portsmouth. Each participant signed an informed consent form granting authorisation for the use of the data for research purposes. The individuals pictured in this manuscript (<xref ref-type="fig" rid="pone.0231756.g002">Fig 2</xref> and <xref ref-type="supplementary-material" rid="pone.0231756.s004">S1 Video</xref>) have provided written informed consent (as outlined in PLOS consent form) to publish their image alongside the manuscript.</p>
</sec>
<sec id="sec006">
<title>General procedure</title>
<p>To begin, participants were given general instructions regarding the experiment and written consent was obtained. Participants were originally told that this study had a different aim—to assess how personality affects behaviour and facial expressions. Following these instructions, the rest of the tasks were displayed on a computer using the OpenSesame© software [<xref ref-type="bibr" rid="pone.0231756.ref053">53</xref>], and the participant was filmed for the remaining time (using a JVC Everio GZ-MG750, 25 frames/second, placed approximately 50 cm away from their face). The experiment consisted of 5 key steps, as outlined in <xref ref-type="fig" rid="pone.0231756.g001">Fig 1</xref> and explained in more detail below. Participants were fully debriefed at the end of the experiment.</p>
<fig id="pone.0231756.g001" position="float">
<object-id pub-id-type="doi">10.1371/journal.pone.0231756.g001</object-id>
<label>Fig 1</label>
<caption>
<title>General procedure.</title>
<p>A flowchart representing the procedure of the experiment in Study 1.</p>
</caption>
<graphic mimetype="image" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.g001" xlink:type="simple"/>
</fig>
<p>First (<xref ref-type="fig" rid="pone.0231756.g001">Fig 1</xref>: 1), participants were required to complete two personality questionnaires, the Ten-Item Personality Index [TIPI; <xref ref-type="bibr" rid="pone.0231756.ref054">54</xref>] and the Dirty Dozen [DD; <xref ref-type="bibr" rid="pone.0231756.ref055">55</xref>], followed by a mood-check questionnaire [Positive and Negative Affect Scales—PANAS; <xref ref-type="bibr" rid="pone.0231756.ref056">56</xref>]. Question order was randomised between individuals. Personality questionnaires were used as part of our cover-up story (examining the impact of personality on behaviours and facial expressions); to investigate whether main personality traits correlated with self-reported guilt (see Supplementary Material for the results). Second (<xref ref-type="fig" rid="pone.0231756.g001">Fig 1</xref>: 2), participants were prompted to pose six emotional facial expressions (anger, fear, disgust, happiness, surprise, and sadness) in succession and hold each of them for 8 seconds. This acted as a filler task to disassociate the participants from the previous questionnaires. We used a video camera to record facial expressions of emotional states produced in this task. The experimenter then pretended to turn off the camera, but in reality, kept recording the rest of the experiment. Third (<xref ref-type="fig" rid="pone.0231756.g001">Fig 1</xref>: 3), participants were asked to recall an autobiographical event, and write about this in some detail. Participants were prompted to either recall an event where they felt guilty regarding something they did for a relative (guilt condition), or an event where they felt proud (control condition). Participants were randomly assigned to a single condition (guilt or control). 
This third task was used as both a priming task to start inducing either guilt or pride in participants [as used in previous research; <xref ref-type="bibr" rid="pone.0231756.ref057">57</xref>], and was a necessary component of the following induction task. Fourth (<xref ref-type="fig" rid="pone.0231756.g001">Fig 1</xref>: 4), in participants who were assigned to the guilt condition, and recalled a guilty autobiographical event, guilt was induced further experimentally. Here, the experimenter asked the participant to save their written recall on a USB flash drive. Once returned to the experimenter, the participant was informed that the USB flash drive had become corrupt, and their data, among all the other data of other participants in the study, had been lost. During this social interaction between the experimenter and participant, it was clearly implied that it was the participant’s fault. They were told that this would be reported to the Principal Investigator and that there was nothing to be done at present. They were finally invited to resume the experiment. In the control condition, guilt was not induced, and participants were told that their written recall was saved correctly on the USB flash-drive and were asked to continue. Finally, (<xref ref-type="fig" rid="pone.0231756.g001">Fig 1</xref>: 5), participants completed a second PANAS questionnaire, after which they were informed about the true aim of the experiment, told that the camera had kept on recording and debriefed.</p>
<p>As mentioned, previous research used autobiographical recall as an induction task, relying on the fact that remembering a previous emotional state can elicit said emotion again [<xref ref-type="bibr" rid="pone.0231756.ref057">57</xref>]. Using this methodology, group differences have been found between guilt-recall and control-recall [<xref ref-type="bibr" rid="pone.0231756.ref006">6</xref>,<xref ref-type="bibr" rid="pone.0231756.ref058">58</xref>]. However, inducing an emotional state in the present is more ecologically valid and might standardise the feelings of guilt across participants to a greater extent [<xref ref-type="bibr" rid="pone.0231756.ref057">57</xref>]. Building on previous research [<xref ref-type="bibr" rid="pone.0231756.ref005">5</xref>], we therefore decided to use the autobiographical recall as a priming task, to get participants to start thinking about one of two emotions (pride or guilt), and then the experimental induction as a standardised induction of state guilt (i.e., feelings of guilt). We chose a positive secondary emotion for the control condition to make participants think about the recalled event in both conditions. We chose pride for the control condition as both pride and guilt are categorised as secondary emotions [<xref ref-type="bibr" rid="pone.0231756.ref002">2</xref>], presenting similar levels of arousal but opposite valence [pride being positive whereas guilt is negative; <xref ref-type="bibr" rid="pone.0231756.ref002">2</xref>]. Asking participants to recall an event of their day (e.g., their breakfast) did not seem as strenuous or emotional as recalling a time they felt guilt for something they did. The analysis of the autobiographical recalls is not presented here, but the length of the recalls was similar in the guilt (<italic>M</italic> = 147.4 words; SD = 99.36) and pride (<italic>M</italic> = 136.5 words; SD = 79.54) conditions (<italic>p</italic> = 0.5). 
This made us confident that the involvement in writing the recalls was similar between the conditions.</p>
</sec>
<sec id="sec007">
<title>Coding of facial movements</title>
<p>Videos collected during the experiment were first cropped to extract the relevant moment only: the induction task (<xref ref-type="fig" rid="pone.0231756.g001">Fig 1</xref>: 4); and were coded for facial movements using the Facial Action Coding System [FACS; <xref ref-type="bibr" rid="pone.0231756.ref059">59</xref>,<xref ref-type="bibr" rid="pone.0231756.ref060">60</xref>]. Self-directed behaviours (face and neck touch) were also coded due to their direct links to negative affective states [<xref ref-type="bibr" rid="pone.0231756.ref061">61</xref>]. All facial movements produced by the participant during the induction task (<xref ref-type="fig" rid="pone.0231756.g001">Fig 1</xref>: 4) were coded for Action Units (single muscle movements; AUs) or Action Descriptors (one or more unspecified muscle movements; ADs), in both the guilt (average duration = 73.66 sec; <italic>SD</italic> = 46.56 sec; see Video1) and the control (average duration = 7.90 sec; <italic>SD</italic> = 4.27 sec) conditions. The bootstrapping approach chosen for statistical analyses (detailed below) accounts for the fact that the responses in the two conditions differ in duration. Videos of posed emotional facial expressions were FACS coded and used for the reliability, but were not analysed further. A full list of coded movements, defined by the FACS, can be found in <xref ref-type="supplementary-material" rid="pone.0231756.s019">S1 Study</xref> of Table 2. A heat map presenting AUs produced through time is presented in <xref ref-type="supplementary-material" rid="pone.0231756.s001">S1 Fig</xref>. Blushing was originally part of the ethogram but as no occurrence was observed, it was removed from further analysis. Coding was conducted on each frame of the videos by the first author. During coding, the first author was blind to the conditions.</p>
<p>For every participant, we obtained the total number of frames of different AU/ADs produced (i.e., the number of instances, from start to end, for each given AU/AD) in a given condition as well as the overall duration (i.e., the total time the AU/AD was expressed on a face) an AU/AD was produced for [<xref ref-type="bibr" rid="pone.0231756.ref062">62</xref>]. We were also able to extract temporal data, giving us the state of each AU/AD at a given frame in the video (absent, present at small intensity or present at high intensity; 25 frames per second). All coding was conducted using the Interact© software [<xref ref-type="bibr" rid="pone.0231756.ref063">63</xref>].</p>
<p>We used a binomial exact test as criteria for exclusion of specific facial muscles from subsequent analyses—if any AU/AD was produced by fewer participants than the calculated criteria (here, the criteria given by the binomial test was a minimum of 39 participants in both conditions), this AU/AD was not explored further in an attempt to maintain a robust dataset. The binomial exact test allowed us to keep facial movements produced significantly more than chance: if at least 39 participants produced the movement, then this movement reliably occurs across participants and did not result from individual differences. Based on previous literature associating the experience of guilt with the experience of self-contempt, and pain [<xref ref-type="bibr" rid="pone.0231756.ref008">8</xref>] and the common confusion between shame and guilt [<xref ref-type="bibr" rid="pone.0231756.ref008">8</xref>,<xref ref-type="bibr" rid="pone.0231756.ref025">25</xref>,<xref ref-type="bibr" rid="pone.0231756.ref040">40</xref>], we also kept AUs previously shown to be associated with shame [AUs 54+64; <xref ref-type="bibr" rid="pone.0231756.ref008">8</xref>] and self-directed behaviours, previously associated with stress/pain and discomfort (neck and face touching). After the binomial test, 24AU/ADs out of a possible 39 observed in our data (see <xref ref-type="supplementary-material" rid="pone.0231756.s019">S1 Study</xref> of Table 2 and <xref ref-type="supplementary-material" rid="pone.0231756.s001">S1 Fig</xref>) and 117,781 frames were left in the guilt condition (12,472 frames in the control condition) for further analysis.</p>
<p>We conducted inter-rater reliability testing between the main coder (first author) and a second coder, both of whom are trained FACS coders. Reliability analysis on these 15 AUs was conducted on 5% of the video clips extracted from the videos collected during the study (42 of 820 videos, half of which were from the posed facial expression task and half from spontaneous facial expressions during the induction task, from both control and guilt conditions). Reliability analysis is important for FACS coding to ensure that the coding is unbiased, and all the produced movements were observed and reported by the main coder. For analysis, we calculated the Krippendorff’s alpha [<xref ref-type="bibr" rid="pone.0231756.ref064">64</xref>,<xref ref-type="bibr" rid="pone.0231756.ref065">65</xref>] using the “KAlpha” macro for use with IBM SPSS version 24 [<xref ref-type="bibr" rid="pone.0231756.ref066">66</xref>]. Krippendorff's alpha coefficients are considered reliable if the 95% confidence was greater than chance (i.e., if the lower bound was &gt;0). According to this index, the reliability coefficient was significantly greater than chance (α = 0.740; K-α 95% LCI: 0.684; K-α 95% UCI: 0.788), indicating that the two FACS coders shared a good reliability in their coding judgements given the coding scheme used here (full FACS coding, with duration and intensity). This K’s alpha is higher than the lowest acceptable limit (α ≥ 0.667) but is under the customary required benchmark (α ≥ 0.800); our results should thus be interpreted with caution and provide preliminary results regarding facial movements associated with the experience of guilt [<xref ref-type="bibr" rid="pone.0231756.ref067">67</xref>,<xref ref-type="bibr" rid="pone.0231756.ref068">68</xref>].</p>
</sec>
<sec id="sec008">
<title>Statistical analysis</title>
<p><italic>Guilt induction</italic>. To test for the success of the induction of guilt during the guilt induction task, we compared the affect data collected through the PANAS questionnaires (before vs. after induction) using a within-subjects <italic>t</italic>-test. We tested for a change in positive and negative affect before vs. after induction, and additionally, some specific emotional changes in guilt, shame, distress, and pride, which were all measured in the PANAS questionnaire.</p>
<p><italic>Facial expressions</italic>. The likelihood of an action unit to be active during any communication event is likely influenced by several interdependent factors. Among those, we can find the information that is transmitted and the context; inter-individual and cultural differences; temporal effects; the intensity of stimuli; random variation in expression; duration of expressions; interdependence in the co-occurrence of action units; and anatomical limits in which action units can be used at the same time. Furthermore, the likelihood of occurrence of any action unit is interdependent with the likelihood of using a certain number of action units at the same time. Statistical approaches that are often used when analysing FACS data make assumptions about the distribution of the underlying data (continuous variables, independence of cases) that are rarely met in facial expression datasets. Rather than testing whether the distributions of action units differ in samples within the confines of existing variance tests, researchers can use permutation and bootstrapping procedures that allow for controlling some of the aforementioned factors and provide statistically accurate measures of significance [<xref ref-type="bibr" rid="pone.0231756.ref069">69</xref>]. Here, we employ a bootstrapping approach to test whether action units differ between the experimental conditions (guilt or control) of this study.</p>
<p>The FACS coding information of the participant videos was structured frame-by-frame (as it was coded), with each selected action unit representing one column and their presence or absence coded as 1 or 0, respectively. Retaining the frame information means that facial expressions that are shown for longer influence the results accordingly. Frames in which it was not possible to see the whole face were removed. Frames contained between 0 and 15 of the selected action units (see AU selection above) active at the same time. All statistical tests here are direct comparisons of two distributions, to see whether they stem from the same or different underlying populations: a control distribution (e.g., control condition) and a test distribution (e.g., guilt condition), with the question invariably being whether the frequency of occurrence of any given action unit differs between the former and latter.</p>
<p>We applied a bootstrapping procedure to create the probability distribution of the occurrence of each action unit under the null hypothesis that they are from the same distribution as the control condition: by repeatedly taking random subsets of the control data, we establish a range of values the frequency of occurrence of an action unit could take if it was drawn from this population. We randomly selected individuals in the control condition [sampling with replacement approach; <xref ref-type="bibr" rid="pone.0231756.ref069">69</xref>] to account for the fact that there might be inter-individual differences in expressivity or use of action units. Thus, each individual in the control condition was sometimes included and sometimes excluded in generating the control distribution, ascertaining that the distribution was not skewed due to the properties of certain individuals. Cultural differences in the use of facial expressions [<xref ref-type="bibr" rid="pone.0231756.ref070">70</xref>–<xref ref-type="bibr" rid="pone.0231756.ref072">72</xref>] and the self-report of emotions [<xref ref-type="bibr" rid="pone.0231756.ref073">73</xref>,<xref ref-type="bibr" rid="pone.0231756.ref074">74</xref>] might exist and data might thus be following a hidden structure due to participants' PoO. Participants were clustered into two regions for PoO: European and East Asian. We balanced the assignment of individuals to the control distribution. We established the ratio of PoOs of participants in the test dataset and applied the same ratio to the control distribution. Thus, if for example the test dataset included ten participants who reported East Asian origins and five participants of European origins, then each randomised control dataset would maintain the 2-to-1 ratio between the two groups.</p>
<p>Using this procedure, we created 1000 bootstrapped control distributions for each statistical test that have the appropriate underlying data structure and address potential problems arising from inter-individual and cultural differences. We established the frequency of occurrence for each action unit for the test data (observed frequency) and for each action unit over all 1000 bootstraps (expected frequency if the data would arise from the same population). To test whether the frequency of occurrence was significantly higher or lower than expected, we report the z-value of the observed frequency compared to the control distribution (i.e., how many standard deviations does it differ from the mean). We assumed that the null hypothesis (the observed value for the test data is part of the same distribution that created the control condition) was rejected if the observed value was more extreme than 99% of bootstrapped values (two-sided testing). The p-value represents the likelihood of the observed frequency of an action unit in the test condition being lower or higher than the expected frequency of each bootstrap. We set our significance level at 0.01 to account for multiple testing while avoiding false rejections [<xref ref-type="bibr" rid="pone.0231756.ref075">75</xref>]. A p-value of 0.01 and a positive z-value indicates that in 990 out of 1000 bootstrapped selections of the control data, the action unit occurred less frequently than in the test data.</p>
<p>We tested four questions using this approach: first, we tested the overall difference between the control condition and the guilt condition of the experiment, to see whether there were differences in the facial expressions between experimental interventions. However, there were considerable differences between individuals in their reported feeling of guilt before and after the intervention in the guilt condition, with some individuals not reporting an increase in guilt (<italic>self-reported guilt after induction—self-reported guilt before induction ≤ 0</italic>). Thus, secondly, we investigated individuals who did not show any change in reported guilt (‘weak guilt’ sample, N = 19) and individuals who showed an increase in reported guilt (‘strong guilt’ sample, N = 45) separately to test whether these differences in reported guilt also showed in the facial activity. We tested both datasets against the control dataset (to establish whether guilt induction worked in the former group), and we finally tested the two guilt samples against each other to see if stronger reported guilt led to increased production of some action units. Since, with the bootstrapping approach, we are testing whether the distribution of each AU in the weak guilt sample and the strong guilt sample could stem from the control group, having more participants reporting an increased feeling of guilt will not impact the analysis conducted.</p>
</sec>
</sec>
<sec id="sec009" sec-type="results">
<title>Results</title>
<sec id="sec010">
<title>Guilt induction</title>
<p>In our guilt condition, participants reported more negative affect after (<italic>M</italic> = 21.89, <italic>SD</italic> = 8.23) the guilt induction task (<xref ref-type="fig" rid="pone.0231756.g001">Fig 1</xref>.1:4) compared to before (<italic>M</italic> = 18.61, <italic>SD</italic> = 8.56; t(65) = -2.68, p &lt; 0.001). They also experienced a decrease in positive affect after the induction (<italic>M</italic> = 20.27, <italic>SD</italic> = 8.08) compared to before (<italic>M</italic> = 29.73, <italic>SD</italic> = 6.12; t(65) = 9.02, p &lt; 0.001).</p>
<p>More specifically, we found an increase in guilty feelings after the induction task (<italic>M</italic> = 2.7, <italic>SD</italic> = 1.23) compared to before (<italic>M</italic> = 1.35, <italic>SD</italic> = 0.79; t(65) = -8.31, p &lt; 0.001; see <xref ref-type="supplementary-material" rid="pone.0231756.s019">S1 Study</xref> of Table 3). Participants also reported higher levels of shame after the induction (<italic>M</italic> = 2.24, <italic>SD</italic> = 1.12) compared to before the interaction with the experimenter (<italic>M</italic> = 1.33, <italic>SD</italic> = 0.83; t(65) = -5.91, p &lt; 0.001). Participants reported a significantly higher level of guilt than shame after the induction task (t(65) = -3.00, p = 0.0038).</p>
<p>Finally, participants reported an increase in distress after (<italic>M</italic> = 2.42, <italic>SD</italic> = 1.15) the induction task compared to before (<italic>M</italic> = 1.58, <italic>SD</italic> = 0.95; t(65) = -5.29, p &lt; 0.001), as well as a decreased pride after (<italic>M</italic> = 1.89, <italic>SD</italic> = 1.89) the induction compared to before (<italic>M</italic> = 2.35, <italic>SD</italic> = 1.06; t(65) = 3.84, p &lt; 0.001).</p>
<p>In the control condition, participants reported less positive affect after the interaction with the researcher (<italic>M</italic> = 25.18, <italic>SD</italic> = 11.06) compared to before (<italic>M</italic> = 30.46, <italic>SD</italic> = 8.47; t(64) = 4.11, p &lt; 0.001), but they also experienced a decrease in negative affect after the induction (<italic>M</italic> = 12.88, <italic>SD</italic> = 4.97) compared to before (<italic>M</italic> = 21.2, <italic>SD</italic> = 10.96; t(64) = 6.44, p &lt; 0.001; see <xref ref-type="supplementary-material" rid="pone.0231756.s019">S1 Study</xref> of Table 3).</p>
<p>More specifically, we found a decrease in distress after (<italic>M</italic> = 1.48, <italic>SD</italic> = 0.89) the induction task compared to before (<italic>M</italic> = 1.72, <italic>SD</italic> = 0.98; t(64) = 2.34, p = 0.0225), as well as increased pride after (<italic>M</italic> = 3.4, <italic>SD</italic> = 1.2) the induction compared to before (<italic>M</italic> = 2.46, <italic>SD</italic> = 1.15; t(64) = -6.78, p &lt; 0.001).</p>
<p>When comparing the affect data collected after induction between the control and the guilt conditions, participants reported higher positive affect (<italic>M</italic> = 4.82, <italic>SE</italic> = 1.65; t(65) = 2.93, p = 0.004) and pride (<italic>M</italic> = 1.26, <italic>SE</italic> = 0.20; t(65) = 6.28, p &lt; 0.001) in the control condition. Moreover, they reported lower negative affect (<italic>M</italic> = -5.72, <italic>SE</italic> = 1.33; t(65) = -4.30, p &lt; 0.001) in the control condition than in the guilt condition.</p>
<p>More specifically, they reported lower guilt (<italic>M</italic> = -1.42, <italic>SE</italic> = 0.174; t(65) = -8.34, p &lt; 0.001), distress (<italic>M</italic> = -0.52, <italic>SE</italic> = 0.19; t(65) = -2.70, p &lt; 0.001), shame (<italic>M</italic> = -0.37, <italic>SE</italic> = 0.18; t(65) = -2.17, p &lt; 0.001), and nervousness (<italic>M</italic> = -0.63, <italic>SE</italic> = 0.17; t(65) = -3.62, p &lt; 0.001) in the control condition compared to the guilt condition [<italic>means and SE presented characterise the difference between the values in the control and the values in the guilt conditions</italic>]. These results confirmed the effectiveness of the guilt induction method used; participants exposed to the guilt induction task reported higher levels of guilt and associated negative affect compared to those that were in the control condition.</p>
</sec>
<sec id="sec011">
<title>Comparison of guilt and control conditions</title>
<p>The results of the bootstrap test, creating expected distributions for action units based on the control condition and comparing those with the observed distribution of action units in the guilt condition, revealed that participants in the guilt condition exhibited facial muscle activation that was significantly different from the control condition. <xref ref-type="table" rid="pone.0231756.t001">Table 1</xref> presents the summary of the comparison for the entire guilt dataset. In the upper face, AU4 (Brow Lowerer) was more active in the guilt condition, produced more than twice as often as in the control condition. In the lower face, AU20 (Lip Stretch) was active more often than would have been predicted based on the control condition. Participants in the guilt condition turned their eyes and heads to the right (AU52 –Head Turn Right, AU62 –Eyes Turn Right) more than predicted. Most striking was the difference in the likelihood of participants to touch their neck, being almost twenty times more likely in the guilt condition than expected (see <xref ref-type="fig" rid="pone.0231756.g002">Fig 2</xref>). There was a trend for participants to touch their face more than expected. Participants in the guilt condition were significantly less likely to show activation of AU12 (Lip Corner Puller), AU14 (Dimpler), AU17 (Chin Raiser), AU51 (Head Turn Left), AU57 (Head Forward), AU61 (Eyes Turn Left), and AU64 (Eyes Down). Thus, those movements (presented in italics in <xref ref-type="table" rid="pone.0231756.t001">Table 1</xref>) were consistently more produced in the control condition and are not specific to the experience of guilt.</p>
<fig id="pone.0231756.g002" position="float">
<object-id pub-id-type="doi">10.1371/journal.pone.0231756.g002</object-id>
<label>Fig 2</label>
<caption>
<title>Images of guilty expressions taken from a video.</title>
<p>AUs 1+4+10+12+(20+)25+26+Neck Touch produced in this image; the perceived production of AU20 might be due to speech at the same time (participant apologising). <italic>The individual pictured in Fig 2 has provided written informed consent (as outlined in PLOS consent form) to publish their image alongside the manuscript</italic>.</p>
</caption>
<graphic mimetype="image" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.g002" xlink:type="simple"/>
</fig>
<table-wrap id="pone.0231756.t001" position="float">
<object-id pub-id-type="doi">10.1371/journal.pone.0231756.t001</object-id>
<label>Table 1</label> <caption><title>Comparison of the full dataset for the guilt condition (observed frequency) with the predicted distribution based on the control condition (expected frequency), after controlling for differences in the PoOs of participants.</title></caption>
<alternatives>
<graphic id="pone.0231756.t001g" mimetype="image" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.t001" xlink:type="simple"/>
<table>
<colgroup>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
</colgroup>
<thead>
<tr>
<th align="justify">AU</th>
<th align="right">Observed Frequency</th>
<th align="right">Expected Frequency</th>
<th align="right">z</th>
<th align="right">p-value</th>
</tr>
</thead>
<tbody>
<tr>
<td align="justify">1</td>
<td align="right">0.52</td>
<td align="right">0.51</td>
<td align="right">0.33</td>
<td align="right">0.357</td>
</tr>
<tr>
<td align="justify">2</td>
<td align="right">0.50</td>
<td align="right">0.50</td>
<td align="right">0.18</td>
<td align="right">0.418</td>
</tr>
<tr>
<td align="justify"><bold>4</bold></td>
<td align="right"><bold>0.19</bold></td>
<td align="right"><bold>0.08</bold></td>
<td align="right"><bold>8.82</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
<tr>
<td align="justify">5</td>
<td align="right">0.12</td>
<td align="right">0.11</td>
<td align="right">0.31</td>
<td align="right">0.381</td>
</tr>
<tr>
<td align="justify">7</td>
<td align="right">0.26</td>
<td align="right">0.32</td>
<td align="right">-1.48</td>
<td align="right">0.071</td>
</tr>
<tr>
<td align="justify">10</td>
<td align="right">0.26</td>
<td align="right">0.23</td>
<td align="right">1.12</td>
<td align="right">0.114</td>
</tr>
<tr>
<td align="justify"><italic>12</italic></td>
<td align="right"><italic>0</italic>.<italic>14</italic></td>
<td align="right"><italic>0</italic>.<italic>32</italic></td>
<td align="right"><italic>-5</italic>.<italic>78</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
</tr>
<tr>
<td align="justify"><italic>14</italic></td>
<td align="right"><italic>0</italic>.<italic>15</italic></td>
<td align="right"><italic>0</italic>.<italic>24</italic></td>
<td align="right"><italic>-3</italic>.<italic>66</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
</tr>
<tr>
<td align="justify"><italic>17</italic></td>
<td align="right"><italic>0</italic>.<italic>14</italic></td>
<td align="right"><italic>0</italic>.<italic>21</italic></td>
<td align="right"><italic>-3</italic>.<italic>05</italic></td>
<td align="right"><italic>0</italic>.<italic>003</italic></td>
</tr>
<tr>
<td align="justify">18</td>
<td align="right">0.13</td>
<td align="right">0.13</td>
<td align="right">0.03</td>
<td align="right">0.493</td>
</tr>
<tr>
<td align="justify"><bold>20</bold></td>
<td align="right"><bold>0.03</bold></td>
<td align="right"><bold>0.02</bold></td>
<td align="right"><bold>2.37</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
<tr>
<td align="justify">24</td>
<td align="right">0.15</td>
<td align="right">0.13</td>
<td align="right">0.78</td>
<td align="right">0.234</td>
</tr>
<tr>
<td align="justify"><italic>51</italic></td>
<td align="right"><italic>0</italic>.<italic>09</italic></td>
<td align="right"><italic>0</italic>.<italic>29</italic></td>
<td align="right"><italic>-5</italic>.<italic>66</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
</tr>
<tr>
<td align="justify"><bold>52</bold></td>
<td align="right"><bold>0.29</bold></td>
<td align="right"><bold>0.22</bold></td>
<td align="right"><bold>3.22</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
<tr>
<td align="justify">54</td>
<td align="right">0.15</td>
<td align="right">0.18</td>
<td align="right">-1.42</td>
<td align="right">0.072</td>
</tr>
<tr>
<td align="justify">55</td>
<td align="right">0.19</td>
<td align="right">0.23</td>
<td align="right">-1.23</td>
<td align="right">0.112</td>
</tr>
<tr>
<td align="justify">56</td>
<td align="right">0.17</td>
<td align="right">0.21</td>
<td align="right">-1.09</td>
<td align="right">0.132</td>
</tr>
<tr>
<td align="justify"><italic>57</italic></td>
<td align="right"><italic>0</italic>.<italic>16</italic></td>
<td align="right"><italic>0</italic>.<italic>25</italic></td>
<td align="right"><italic>-3</italic>.<italic>45</italic></td>
<td align="right"><italic>0</italic>.<italic>001</italic></td>
</tr>
<tr>
<td align="justify">59</td>
<td align="right">0.03</td>
<td align="right">0.03</td>
<td align="right">-1.11</td>
<td align="right">0.135</td>
</tr>
<tr>
<td align="justify"><italic>61</italic></td>
<td align="right"><italic>0</italic>.<italic>09</italic></td>
<td align="right"><italic>0</italic>.<italic>15</italic></td>
<td align="right"><italic>-4</italic>.<italic>97</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
</tr>
<tr>
<td align="justify"><bold>62</bold></td>
<td align="right"><bold>0.21</bold></td>
<td align="right"><bold>0.15</bold></td>
<td align="right"><bold>4.20</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
<tr>
<td align="justify"><italic>64</italic></td>
<td align="right"><italic>0</italic>.<italic>30</italic></td>
<td align="right"><italic>0</italic>.<italic>39</italic></td>
<td align="right"><italic>-3</italic>.<italic>58</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
</tr>
<tr>
<td align="justify">FaceTouch</td>
<td align="right">0.13</td>
<td align="right">0.10</td>
<td align="right">1.60</td>
<td align="right">0.041</td>
</tr>
<tr>
<td align="justify"><bold>NeckTouch</bold></td>
<td align="right"><bold>0.02</bold></td>
<td align="right"><bold>0.00</bold></td>
<td align="right"><bold>25.69</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
</tbody>
</table>
</alternatives>
<table-wrap-foot>
<fn id="t001fn001"><p>P-values denote the likelihood that the observed frequency of occurrence for an AU was more extreme than the predicted frequency. AU with significantly increased occurrence in bold, AU with significantly reduced occurrence in italics.</p></fn>
</table-wrap-foot>
</table-wrap>
<p>We found no significant relationship between self-reported shame or pride and AU4 (shame: ß = 3.36e-4; SE = 3.81e-4; p = 0.381; pride: ß = 3.46e-5; SE = 3.28e-4; p = 0.916), AU20 (shame: ß = -8.37e-4; SE = 1.56e-3; p = 0.593; pride: ß = 1.20e-3; SE = 1.34e-3; p = 0.376), AU52 (shame: ß = -4.11e-4; SE = 2.14e-4; p = 0.210; pride: ß = 1.88e-4; SE = 2.78e-4; p = 0.502), AU62 (shame: ß = 1.04e-3; SE = 6.36e-4; p = 0.109; pride: ß = -2.58e-4; SE = 5.47e-4; p = 0.639), or Neck Touch (shame: ß = 1.37e-3; SE = 1.45e-3; p = 0.347; pride: ß = -1.37e-3; SE = 1.24e-3; p = 0.273). Thus, when participants reported high levels of shame (or pride), the facial movements produced were not the ones they produced when reporting high levels of guilt, confirming that the movements identified here are more specific to the experience of guilt than to the experience of shame.</p>
</sec>
<sec id="sec012">
<title>Comparison of weak guilt and strong guilt samples</title>
<p>Participants reported different levels in how much their feeling of guilt changed due to the experimental intervention, with several participants not reporting any increase in feelings of guilt. Thus, we tested whether participants who reported no increase in feeling guilty still differed significantly from the control condition, whether the observed changes were equivalent to those in participants who reported increased feelings of guilt, and how the two subgroups differed from each other.</p>
<p>As reported in <xref ref-type="table" rid="pone.0231756.t002">Table 2</xref>, while there were some differences between the participants who reported guilt and those who did not, both groups showed increased activation in AU4, AU20, AU62, and both touched their necks more than expected given the control condition. Face touching only occurred significantly more than expected in the weak guilt condition, while AU52 occurred more frequently in individuals who reported an increase in guilt after induction.</p>
<table-wrap id="pone.0231756.t002" position="float">
<object-id pub-id-type="doi">10.1371/journal.pone.0231756.t002</object-id>
<label>Table 2</label> <caption><title>Comparison of the dataset for participants who reported no change in feelings of guilt (‘weak guilt’) and the dataset for participants who reported changes in feelings of guilt (‘strong guilt’) with the predicted distribution based on the control condition (expected frequency) after controlling for differences in the PoOs of participants.</title></caption>
<alternatives>
<graphic id="pone.0231756.t002g" mimetype="image" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.t002" xlink:type="simple"/>
<table>
<colgroup>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
</colgroup>
<thead>
<tr>
<th align="right" colspan="5">Weak Guilt</th>
<th align="right" colspan="4">Strong Guilt</th>
</tr>
<tr>
<th align="right">AU</th>
<th align="right">Observed Frequency</th>
<th align="right">Expected Frequency</th>
<th align="right">z</th>
<th align="right">p-value</th>
<th align="right">Observed Frequency</th>
<th align="right">Expected Frequency</th>
<th align="right">z</th>
<th align="right">p-value</th>
</tr>
</thead>
<tbody>
<tr>
<td align="left">1</td>
<td align="right">0.55</td>
<td align="right">0.52</td>
<td align="right">0.84</td>
<td align="right">0.199</td>
<td align="right">0.51</td>
<td align="right">0.51</td>
<td align="right">-0.04</td>
<td align="right">0.472</td>
</tr>
<tr>
<td align="left">2</td>
<td align="right">0.57</td>
<td align="right">0.51</td>
<td align="right">1.43</td>
<td align="right">0.090</td>
<td align="right">0.48</td>
<td align="right">0.50</td>
<td align="right">-0.52</td>
<td align="right">0.297</td>
</tr>
<tr>
<td align="left"><bold>4</bold></td>
<td align="right"><bold>0.29</bold></td>
<td align="right"><bold>0.08</bold></td>
<td align="right"><bold>10.56</bold></td>
<td align="right"><bold>0.000</bold></td>
<td align="right"><bold>0.17</bold></td>
<td align="right"><bold>0.08</bold></td>
<td align="right"><bold>5.77</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
<tr>
<td align="left"><italic>5</italic></td>
<td align="right"><italic>0</italic>.<italic>07</italic></td>
<td align="right"><italic>0</italic>.<italic>12</italic></td>
<td align="right"><italic>-2</italic>.<italic>32</italic></td>
<td align="right"><italic>0</italic>.<italic>009</italic></td>
<td align="right">0.15</td>
<td align="right">0.11</td>
<td align="right">1.53</td>
<td align="right">0.064</td>
</tr>
<tr>
<td align="left"><italic>7</italic></td>
<td align="right"><italic>0</italic>.<italic>21</italic></td>
<td align="right"><italic>0</italic>.<italic>32</italic></td>
<td align="right"><italic>-2</italic>.<italic>42</italic></td>
<td align="right"><italic>0</italic>.<italic>008</italic></td>
<td align="right">0.29</td>
<td align="right">0.31</td>
<td align="right">-0.51</td>
<td align="right">0.308</td>
</tr>
<tr>
<td align="left"><bold>10</bold></td>
<td align="right">0.20</td>
<td align="right">0.23</td>
<td align="right">-0.94</td>
<td align="right">0.177</td>
<td align="right"><bold>0.29</bold></td>
<td align="right"><bold>0.23</bold></td>
<td align="right"><bold>1.80</bold></td>
<td align="right"><bold>0.027</bold></td>
</tr>
<tr>
<td align="left"><italic>12</italic></td>
<td align="right"><italic>0</italic>.<italic>14</italic></td>
<td align="right"><italic>0</italic>.<italic>31</italic></td>
<td align="right"><italic>-4</italic>.<italic>96</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
<td align="right"><italic>0</italic>.<italic>15</italic></td>
<td align="right"><italic>0</italic>.<italic>32</italic></td>
<td align="right"><italic>-4</italic>.<italic>59</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
</tr>
<tr>
<td align="left"><italic>14</italic></td>
<td align="right"><italic>0</italic>.<italic>16</italic></td>
<td align="right"><italic>0</italic>.<italic>24</italic></td>
<td align="right"><italic>-3</italic>.<italic>00</italic></td>
<td align="right"><italic>0</italic>.<italic>001</italic></td>
<td align="right"><italic>0</italic>.<italic>16</italic></td>
<td align="right"><italic>0</italic>.<italic>24</italic></td>
<td align="right"><italic>-2</italic>.<italic>83</italic></td>
<td align="right"><italic>0</italic>.<italic>002</italic></td>
</tr>
<tr>
<td align="left"><italic>17</italic></td>
<td align="right"><italic>0</italic>.<italic>12</italic></td>
<td align="right"><italic>0</italic>.<italic>21</italic></td>
<td align="right"><italic>-3</italic>.<italic>56</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
<td align="right"><italic>0</italic>.<italic>15</italic></td>
<td align="right"><italic>0</italic>.<italic>21</italic></td>
<td align="right"><italic>-2</italic>.<italic>10</italic></td>
<td align="right"><italic>0</italic>.<italic>016</italic></td>
</tr>
<tr>
<td align="left">18</td>
<td align="right">0.15</td>
<td align="right">0.13</td>
<td align="right">0.51</td>
<td align="right">0.307</td>
<td align="right">0.13</td>
<td align="right">0.13</td>
<td align="right">-0.15</td>
<td align="right">0.442</td>
</tr>
<tr>
<td align="left"><bold>20</bold></td>
<td align="right"><bold>0.04</bold></td>
<td align="right"><bold>0.02</bold></td>
<td align="right"><bold>2.38</bold></td>
<td align="right"><bold>0.000</bold></td>
<td align="right"><bold>0.03</bold></td>
<td align="right"><bold>0.02</bold></td>
<td align="right"><bold>1.76</bold></td>
<td align="right"><bold>0.008</bold></td>
</tr>
<tr>
<td align="left">24</td>
<td align="right">0.13</td>
<td align="right">0.14</td>
<td align="right">-0.55</td>
<td align="right">0.290</td>
<td align="right">0.16</td>
<td align="right">0.14</td>
<td align="right">1.15</td>
<td align="right">0.128</td>
</tr>
<tr>
<td align="left"><italic>51</italic></td>
<td align="right"><italic>0</italic>.<italic>11</italic></td>
<td align="right"><italic>0</italic>.<italic>31</italic></td>
<td align="right"><italic>-5</italic>.<italic>22</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
<td align="right"><italic>0</italic>.<italic>09</italic></td>
<td align="right"><italic>0</italic>.<italic>29</italic></td>
<td align="right"><italic>-4</italic>.<italic>68</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
</tr>
<tr>
<td align="left"><bold>52</bold></td>
<td align="right">0.25</td>
<td align="right">0.21</td>
<td align="right">1.49</td>
<td align="right">0.057</td>
<td align="right"><bold>0.32</bold></td>
<td align="right"><bold>0.22</bold></td>
<td align="right"><bold>3.16</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
<tr>
<td align="left">54</td>
<td align="right">0.17</td>
<td align="right">0.18</td>
<td align="right">-0.30</td>
<td align="right">0.387</td>
<td align="right">0.15</td>
<td align="right">0.19</td>
<td align="right">-1.60</td>
<td align="right">0.054</td>
</tr>
<tr>
<td align="left">55</td>
<td align="right">0.20</td>
<td align="right">0.23</td>
<td align="right">-0.88</td>
<td align="right">0.188</td>
<td align="right">0.19</td>
<td align="right">0.23</td>
<td align="right">-1.05</td>
<td align="right">0.151</td>
</tr>
<tr>
<td align="left">56</td>
<td align="right">0.20</td>
<td align="right">0.21</td>
<td align="right">-0.23</td>
<td align="right">0.412</td>
<td align="right">0.16</td>
<td align="right">0.21</td>
<td align="right">-1.29</td>
<td align="right">0.094</td>
</tr>
<tr>
<td align="left"><italic>57</italic></td>
<td align="right"><italic>0</italic>.<italic>13</italic></td>
<td align="right"><italic>0</italic>.<italic>27</italic></td>
<td align="right"><italic>-4</italic>.<italic>59</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
<td align="right"><italic>0</italic>.<italic>19</italic></td>
<td align="right"><italic>0</italic>.<italic>25</italic></td>
<td align="right"><italic>-2</italic>.<italic>12</italic></td>
<td align="right"><italic>0</italic>.<italic>016</italic></td>
</tr>
<tr>
<td align="left"><italic>59</italic></td>
<td align="right"><italic>0</italic>.<italic>02</italic></td>
<td align="right"><italic>0</italic>.<italic>04</italic></td>
<td align="right"><italic>-2</italic>.<italic>88</italic></td>
<td align="right"><italic>0</italic>.<italic>002</italic></td>
<td align="right">0.04</td>
<td align="right">0.04</td>
<td align="right">0.08</td>
<td align="right">0.493</td>
</tr>
<tr>
<td align="left"><italic>61</italic></td>
<td align="right"><italic>0</italic>.<italic>06</italic></td>
<td align="right"><italic>0</italic>.<italic>15</italic></td>
<td align="right"><italic>-6</italic>.<italic>37</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
<td align="right"><italic>0</italic>.<italic>11</italic></td>
<td align="right"><italic>0</italic>.<italic>15</italic></td>
<td align="right"><italic>-2</italic>.<italic>87</italic></td>
<td align="right"><italic>0</italic>.<italic>003</italic></td>
</tr>
<tr>
<td align="left"><bold>62</bold></td>
<td align="right"><bold>0.24</bold></td>
<td align="right"><bold>0.15</bold></td>
<td align="right"><bold>5.31</bold></td>
<td align="right"><bold>0.000</bold></td>
<td align="right"><bold>0.20</bold></td>
<td align="right"><bold>0.15</bold></td>
<td align="right"><bold>2.52</bold></td>
<td align="right"><bold>0.003</bold></td>
</tr>
<tr>
<td align="left"><italic>64</italic></td>
<td align="right"><italic>0</italic>.<italic>30</italic></td>
<td align="right"><italic>0</italic>.<italic>37</italic></td>
<td align="right"><italic>-2</italic>.<italic>32</italic></td>
<td align="right"><italic>0</italic>.<italic>015</italic></td>
<td align="right"><italic>0</italic>.<italic>30</italic></td>
<td align="right"><italic>0</italic>.<italic>39</italic></td>
<td align="right"><italic>-2</italic>.<italic>95</italic></td>
<td align="right"><italic>0</italic>.<italic>001</italic></td>
</tr>
<tr>
<td align="left"><bold>FaceTouch</bold></td>
<td align="right"><bold>0.16</bold></td>
<td align="right"><bold>0.11</bold></td>
<td align="right"><bold>2.25</bold></td>
<td align="right"><bold>0.009</bold></td>
<td align="right">0.12</td>
<td align="right">0.10</td>
<td align="right">0.86</td>
<td align="right">0.214</td>
</tr>
<tr>
<td align="left"><bold>NeckTouch</bold></td>
<td align="right"><bold>0.01</bold></td>
<td align="right"><bold>0.00</bold></td>
<td align="right"><bold>10.15</bold></td>
<td align="right"><bold>0.000</bold></td>
<td align="right"><bold>0.02</bold></td>
<td align="right"><bold>0.00</bold></td>
<td align="right"><bold>26.69</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
</tbody>
</table>
</alternatives>
<table-wrap-foot>
<fn id="t002fn001"><p>P-values denote the likelihood that the observed frequency of occurrence for an AU was more extreme than the predicted frequency. AU with significantly increased occurrence in bold, AU with reduced occurrence in italics.</p></fn>
</table-wrap-foot>
</table-wrap>
<p>In the direct comparison between the two groups (individuals who expressed changes in feeling of guilt and those who did not; <xref ref-type="table" rid="pone.0231756.t003">Table 3</xref>), individuals who reported strong feelings of guilt were more likely than expected to show activation of AU5 (Upper Lid Raiser), AU10 (Upper Lip Raiser), AU57, AU59 (up-down head shake—nodding) and AU61 (corroborating the results of comparing each of the two with the control condition–<xref ref-type="table" rid="pone.0231756.t002">Table 2</xref>). AU5 was reduced in participants who reported weak guilt compared to control (<xref ref-type="table" rid="pone.0231756.t002">Table 2</xref>). Activation of AU61 was reduced in both subgroups of the guilt condition (compared to control; <xref ref-type="table" rid="pone.0231756.t002">Table 2</xref>) but was significantly more likely to occur in participants who reported strong guilt. AU57 and AU59 also occurred more in the strong guilt condition (<xref ref-type="table" rid="pone.0231756.t003">Table 3</xref>). No action unit occurred significantly more in the weak guilt than in the strong guilt condition: the action units identified were thus associated with feelings of self-reported guilt.</p>
<table-wrap id="pone.0231756.t003" position="float">
<object-id pub-id-type="doi">10.1371/journal.pone.0231756.t003</object-id>
<label>Table 3</label> <caption><title>Comparison of the dataset for participant who reported changes in feelings of guilt (observed frequency) with the predicted distribution based on participants who reported no changes in feelings of guilt (expected frequency) after controlling for differences in the PoOs of participants.</title></caption>
<alternatives>
<graphic id="pone.0231756.t003g" mimetype="image" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.t003" xlink:type="simple"/>
<table>
<colgroup>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
</colgroup>
<thead>
<tr>
<th align="justify">AU</th>
<th align="right">Observed Frequency</th>
<th align="right">Expected Frequency</th>
<th align="right">z</th>
<th align="right">p-value</th>
</tr>
</thead>
<tbody>
<tr>
<td align="justify">1</td>
<td align="right">0.51</td>
<td align="right">0.54</td>
<td align="right">-0.58</td>
<td align="right">0.261</td>
</tr>
<tr>
<td align="justify">2</td>
<td align="right">0.48</td>
<td align="right">0.56</td>
<td align="right">-1.54</td>
<td align="right">0.071</td>
</tr>
<tr>
<td align="justify">4</td>
<td align="right">0.17</td>
<td align="right">0.24</td>
<td align="right">-1.56</td>
<td align="right">0.058</td>
</tr>
<tr>
<td align="justify"><bold>5</bold></td>
<td align="right"><bold>0.15</bold></td>
<td align="right"><bold>0.08</bold></td>
<td align="right"><bold>4.34</bold></td>
<td align="right"><bold>0.001</bold></td>
</tr>
<tr>
<td align="justify">7</td>
<td align="right">0.29</td>
<td align="right">0.21</td>
<td align="right">1.25</td>
<td align="right">0.103</td>
</tr>
<tr>
<td align="justify"><bold>10</bold></td>
<td align="right"><bold>0.29</bold></td>
<td align="right"><bold>0.19</bold></td>
<td align="right"><bold>2.63</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
<tr>
<td align="justify">12</td>
<td align="right">0.15</td>
<td align="right">0.15</td>
<td align="right">0.03</td>
<td align="right">0.490</td>
</tr>
<tr>
<td align="justify">14</td>
<td align="right">0.16</td>
<td align="right">0.15</td>
<td align="right">0.13</td>
<td align="right">0.467</td>
</tr>
<tr>
<td align="justify">17</td>
<td align="right">0.15</td>
<td align="right">0.14</td>
<td align="right">0.49</td>
<td align="right">0.282</td>
</tr>
<tr>
<td align="justify">18</td>
<td align="right">0.13</td>
<td align="right">0.15</td>
<td align="right">-0.73</td>
<td align="right">0.228</td>
</tr>
<tr>
<td align="justify">20</td>
<td align="right">0.03</td>
<td align="right">0.04</td>
<td align="right">-0.25</td>
<td align="right">0.380</td>
</tr>
<tr>
<td align="justify">24</td>
<td align="right">0.16</td>
<td align="right">0.13</td>
<td align="right">0.95</td>
<td align="right">0.181</td>
</tr>
<tr>
<td align="justify">51</td>
<td align="right">0.09</td>
<td align="right">0.10</td>
<td align="right">-0.30</td>
<td align="right">0.393</td>
</tr>
<tr>
<td align="justify">52</td>
<td align="right">0.32</td>
<td align="right">0.25</td>
<td align="right">1.50</td>
<td align="right">0.066</td>
</tr>
<tr>
<td align="justify">54</td>
<td align="right">0.15</td>
<td align="right">0.17</td>
<td align="right">-0.74</td>
<td align="right">0.255</td>
</tr>
<tr>
<td align="justify">55</td>
<td align="right">0.19</td>
<td align="right">0.20</td>
<td align="right">-0.17</td>
<td align="right">0.434</td>
</tr>
<tr>
<td align="justify">56</td>
<td align="right">0.16</td>
<td align="right">0.20</td>
<td align="right">-0.51</td>
<td align="right">0.402</td>
</tr>
<tr>
<td align="justify"><bold>57</bold></td>
<td align="right"><bold>0.19</bold></td>
<td align="right"><bold>0.13</bold></td>
<td align="right"><bold>2.57</bold></td>
<td align="right"><bold>0.002</bold></td>
</tr>
<tr>
<td align="justify"><bold>59</bold></td>
<td align="right"><bold>0.04</bold></td>
<td align="right"><bold>0.02</bold></td>
<td align="right"><bold>2.39</bold></td>
<td align="right"><bold>0.004</bold></td>
</tr>
<tr>
<td align="justify"><bold>61</bold></td>
<td align="right"><bold>0.11</bold></td>
<td align="right"><bold>0.07</bold></td>
<td align="right"><bold>2.18</bold></td>
<td align="right"><bold>0.009</bold></td>
</tr>
<tr>
<td align="justify">62</td>
<td align="right">0.20</td>
<td align="right">0.24</td>
<td align="right">-1.36</td>
<td align="right">0.079</td>
</tr>
<tr>
<td align="justify">64</td>
<td align="right">0.30</td>
<td align="right">0.30</td>
<td align="right">-0.17</td>
<td align="right">0.392</td>
</tr>
<tr>
<td align="justify">FaceTouch</td>
<td align="right">0.12</td>
<td align="right">0.16</td>
<td align="right">-0.75</td>
<td align="right">0.214</td>
</tr>
<tr>
<td align="justify">NeckTouch</td>
<td align="right">0.02</td>
<td align="right">0.01</td>
<td align="right">1.47</td>
<td align="right">0.065</td>
</tr>
</tbody>
</table>
</alternatives>
<table-wrap-foot>
<fn id="t003fn001"><p>P-values denote the likelihood that the observed frequency of occurrence for an AU was more extreme than the predicted frequency. AU with significantly increased occurrence in bold, AU with significantly decreased occurrence in italics.</p></fn>
</table-wrap-foot>
</table-wrap>
<p>Thus, in summary, there is strong evidence that AU4, AU20, AU52, and AU62, as well as the touching of the neck, were produced significantly more frequently by all participants in the guilt condition (<xref ref-type="table" rid="pone.0231756.t001">Table 1</xref>).</p>
</sec>
</sec>
<sec id="sec013" sec-type="conclusions">
<title>Discussion</title>
<p>This first study aimed at eliciting an emotional response associated with the experience of guilt. We identified a pattern of facial movements produced more when experiencing guilt and reporting higher levels of guilt; people frowned (AU4, Brow Lowerer), stretched their lips (AU20, Lip Stretcher) and touched their neck (Neck Touching), as well as looking towards the laptop from which they pulled out the USB stick (AUs52+62, Head Turn Right and Eyes Turn Right). Those specific behaviours were not associated with self-reported feelings of shame or pride. Previous research that identified behavioural displays associated with embarrassment (gaze down, controlled smiles, gaze shifts, and face touches [<xref ref-type="bibr" rid="pone.0231756.ref044">44</xref>]), shame (head and gaze down [<xref ref-type="bibr" rid="pone.0231756.ref043">43</xref>–<xref ref-type="bibr" rid="pone.0231756.ref045">45</xref>]) and pride (expanded posture, head tilted back, low-intensity non-Duchenne smile [<xref ref-type="bibr" rid="pone.0231756.ref076">76</xref>]) did not report AUs4+20, gaze right and Neck Touch as part of those displays. Therefore, these components seem specific to the experience of self-reported guilt that the participants reported in our study. This is the first study to identify a potential pattern of movements associated with felt guilt.</p>
<p>To determine whether these movements were also identified as guilt by observers when presented with contextual information, we conducted a second study. We asked new participants to examine the videos of participants from this first study during the induction of guilt (<xref ref-type="fig" rid="pone.0231756.g001">Fig 1</xref>:4) and rate them for emotion. We also asked these new participants to identify specific times when they thought they could see these specific emotions. This study aimed at identifying which specific facial movements were most closely associated with the perception of guilt.</p>
</sec>
</sec>
<sec id="sec014">
<title>Study 2 – Judgement of guilt</title>
<sec id="sec015" sec-type="materials|methods">
<title>Methods</title>
<sec id="sec016">
<title>Participants</title>
<p>One hundred and fourteen new participants (hereafter referred to as “judges”) were recruited for this second study (82 females; <italic>M</italic><sub><italic>age</italic></sub> = 29.96, <italic>SD</italic> = 11.48; see SM3 for details). None of the participants that took part in Study 1 was recruited for Study 2. As in Study 1, judges were recruited based on an opportunistic sampling method and were all UK residents at the time of the experiment (but included both UK and non-UK nationals; see <xref ref-type="supplementary-material" rid="pone.0231756.s019">S1 Study</xref> of Table 1 for details). All of them received either course credit (if a student) or £5 for their time. The whole experiment lasted approximately 35 minutes. The project has been reviewed and approved by the Science Faculty Ethics Committee (SFEC) from the University of Portsmouth. Each participant signed an informed consent form granting authorisation for the use of the data for research purposes.</p>
</sec>
<sec id="sec017">
<title>General procedure</title>
<p>At the start, participants were given general instructions regarding the experiment and written consent was obtained. Judges were originally told that this study had a different aim—to assess their abilities to detect facial expressions of emotion. We wanted to test whether people could see guilt on a face without actively probing guilt detection (i.e., asking only about guilt). Following this, the rest of the instructions and tasks were displayed on a computer through Qualtrics Survey Software [<xref ref-type="bibr" rid="pone.0231756.ref077">77</xref>]. This experiment consisted of two successive tasks. Firstly, judges were asked to complete a personality questionnaire, the Guilt and Shame Proneness Scale [GASP, <xref ref-type="bibr" rid="pone.0231756.ref078">78</xref>; order of questions was randomised between subjects]. Secondly, judges were asked to watch 20 consecutive videos of faces, and make a series of ratings regarding the emotional state of the stimulus individual (see below for more detail). Finally, the judges were informed about the true aim of the experiment and debriefed. The experiment was presented on desktop computers in one of the laboratories available at the University. Judges sat in front of the computer, the screen situated approximately 60cm away from their faces (face stimuli visual angle: 10° x 14°). The Qualtrics survey was presented in full-screen mode; videos were uploaded on YouTube, on a private account, and presented on Qualtrics as an embedded file. Judges had the opportunity to watch each video as many times as they wished to, and they could view it full screen. They could slow down the video but not watch it frame by frame and could scroll through the video.</p>
</sec>
<sec id="sec018">
<title>Stimuli</title>
<p>All experimental video stimuli were taken from Study 1 (participants experiencing the guilt induction task, in the guilt condition). Of the 64 participants allocated to the guilt condition in Study 1, we used 57 individuals (seven participants were omitted for spending 50% of the time or more out of sight). Control video stimuli were also taken from Study 1 (participants from the control condition). For this, 12 individuals were chosen randomly. All 57 guilt videos were clipped to 30–90 seconds, all 12 control videos were 7 seconds long on average, and audio was removed. The stimuli were generally centred in the video but participants in Study 1 were free to move their head and body (see <xref ref-type="supplementary-material" rid="pone.0231756.s004">S1 Video</xref>).</p>
</sec>
<sec id="sec019">
<title>Guilt judgements</title>
<p>Each judge watched 20 videos in succession (30- to 90-second-long videos), 16 guilt videos, and 4 control videos, out of the 69 videos selected for this study. The videos presented were randomised for each participant. Before viewing the videos, judges were provided with the following contextual information—‘the individual in the video had just been told they had wiped some important information from a USB flash drive’. The same contextual information was provided for all videos, guilt and control. This allowed us to test how many emotional expressions participants perceived in a specific guilt context. As previous research emphasised the importance of context in understanding facial expressions [<xref ref-type="bibr" rid="pone.0231756.ref041">41</xref>,<xref ref-type="bibr" rid="pone.0231756.ref079">79</xref>,<xref ref-type="bibr" rid="pone.0231756.ref080">80</xref>], we included contextual information to collect accurate, genuine, ecologically valid judgements. If participants were relying solely on the written context, they would see guilt on every face. However, if they looked at the face and used facial expressions to identify the emotional states, they would be able to see other emotions as well as guilt. While watching each video or right after viewing, judges were required to indicate how they thought the individual was feeling overall, using a sliding-scale (from 0–100%) for the five following emotional states: “uncomfortable”, “embarrassed”, “guilty”, “surprised”, and “other” (see <xref ref-type="supplementary-material" rid="pone.0231756.s003">S3 Fig</xref>). Those judgements were collected for the entire video, as a measure of the different emotional states the individual in the video seemed to experience, providing the judged guilt variable used in further analysis. 
Those five emotional states were selected based on the results from Study 1 [AUs indicative of these emotions; <xref ref-type="bibr" rid="pone.0231756.ref020">20</xref>,<xref ref-type="bibr" rid="pone.0231756.ref039">39</xref>].</p>
<p>In addition to the sliding-scale rating, judges were encouraged to report any instances of emotion, i.e. any moment within the video where the emotion occurred (hereafter, a pinpoint), allowing for their judgements to be localised to an exact time point. They could have reported that overall the individual in the video experienced 20% of discomfort; this allowed them to indicate when exactly in the video the individual was experiencing discomfort. They were encouraged to report times when the indicated emotion was the most clearly expressed on the face (i.e., apexes of emotional expressions). To do so, judges could stop the videos whenever they wanted, watch the video multiple times, and even slow down the videos. Judges could not report a specific frame in the video due to the format of the stimuli, but they could report a specific time (min:sec). Judges could make multiple pinpoints for multiple emotions, and multiple pinpoints per emotion. For example, they could report that in a video, the individual appears 50% guilty at 15 and 25 seconds in the video; or a judge could provide us with the information that an individual in a given video appears 50% embarrassed and 10% surprised at 35 seconds in the video, and 30% guilty at 40 seconds in the video.</p>
<p>When looking at the raw data, 623 instances of guilt were identified by all judges. This gave us a gross overview of the pinpoints reported. Some of these instances might be the same pinpoint (or unique instance), as multiple judges might have reported the same specific time. Moreover, 1,077 instances of surprise were reported, as well as 825 instances of discomfort and 676 instances of embarrassment. Judges seemed able to conceptually differentiate between those four emotional states as very few overlaps were made between them (see the “<italic>Descriptive analysis”</italic> sub-section in Results for details on guilt pinpoints).</p>
</sec>
<sec id="sec020">
<title>Compiling the dataset</title>
<p><italic>Guilt</italic>. Before analysis, the judgement data collected was combined with the FACS data produced in Study 1. The judges in this study reported 403 unique instances of guilt across the guilt videos and 36 unique instances across control videos, as identified by time-specific pinpoints on the video. We allowed for 0.5 seconds (or 12 video frames) of error around pinpoints, providing us with one second of video data per pinpoint in which judged guilt could have occurred. These pinpoints were synchronised with the FACS coding of the videos, to match judged guilt with any possible facial movements. We created these windows as the actual pinpoints reported by the judges lacked precision; when synchronising the pinpoints with the FACS coding, we reported pinpoints in the middle of the second identified. For instance, a pinpoint identified by judges at 5 sec would be reported in the FACS coding at 5 sec 500 msec. The 1-second window allowed us to capture the movements they perceived as conveying guilt. Moreover, as genuine expressions have been shown to have onsets ranging from 0.50 to 0.70 sec [<xref ref-type="bibr" rid="pone.0231756.ref081">81</xref>,<xref ref-type="bibr" rid="pone.0231756.ref082">82</xref>], creating a 1-second window around the identified pinpoints allowed us to capture the facial movements identified as conveying guilt by the judges. Multiple guilt windows could thus be created for a given video. We were not interested in capturing the unfolding of the entire expressions associated with guilty feelings, from onset to offset; rather we wanted to explore facial movements people associate with guilt. Thus, we allowed judges to watch the entire video and decide when guilt was most present on the face. Finally, any video data that occurred outside of these pinpoints (i.e. any part of the video that was not judged as guilty by any judge) was removed, providing us with a reduced dataset containing only judged guilt video frames. 
Creating the pinpoints and removing all frames occurring outside the pinpoints resulted in 8,934 video frames of FACS data (presence/absence of AU/ADs) from the guilt videos and 850 video frames from the control videos. This step was conducted to focus our data more on facial movements the judges could be considering guilty, and to reduce noise in the dataset. All the selected frames were retained for further analysis. We used the same 15 AUs as identified in Study 1 to run the following analysis (see <xref ref-type="supplementary-material" rid="pone.0231756.s019">S1 Study</xref> of Table 2 for details).</p>
</sec>
<sec id="sec021">
<title>Statistical analysis</title>
<p><italic>Guilt</italic>. First, to examine the judges’ ability to accurately perceive guilt on a face, we ran a Pearson’s correlation between the self-reported feeling of guilt of each participant and the averaged judged guilt per participant (i.e., video).</p>
<p><italic>Analysis of pinpoints</italic>. To test how the frames chosen by the judges as displaying guilt differed from those frames that were not judged to display guilt, we conducted analyses following the same method as described in Study 1. Here, we compared a) the action units in the pinpoints for the twelve rated videos from the control condition with all other frames of the same videos, and b) the action units in the pinpoints for the 57 videos from the experimental condition with all other frames from the same videos. Again, we created control distributions based on bootstraps for the control data (in both cases, non-pinpointed frames of the control and experimental condition, respectively) and we tested whether the occurrence of action units in the test data differed. Again, the randomisation was based on the level of individuals and we controlled for the PoO of participants chosen for the control distribution. We removed all frames that did not contain any action units from these analyses, as they would not be chosen by judges as displaying guilt.</p>
<p><italic>Judged guilt</italic>. To test whether participants judged to display an overall higher level of guilt differed in their properties or facial activity from those that were not judged to display guilt, we fitted a linear mixed model [<xref ref-type="bibr" rid="pone.0231756.ref083">83</xref>] with Gaussian error structure. Analyses were conducted in R v.3.6.1 [<xref ref-type="bibr" rid="pone.0231756.ref084">84</xref>]. We focused on the presence of four action units (AU4, AU10, AU20, Neck Touch) that were consistently produced more often in the different guilt conditions (see Study 1), and tested whether increased production of these signals in a video also increased how guilty the participant looked. The average guilt rating of judges (range 9.5–54.4, mean 32.8) was set as the response variable, and followed a normal distribution. Guilt ratings were available for 69 videos. Given that the guilt ratings for each video were averaged across judges, there were no random effects. As predictor variables, we set the ratio of frames in each video that contained AU4, AU10, AU20, and neck touching; a variable indicating how many of these four action units were observed in a video; the condition (control, experiment); and PoO of the participant.</p>
<p>All continuous variables were z-standardized to facilitate interpretation [<xref ref-type="bibr" rid="pone.0231756.ref085">85</xref>]. We compared the full model against a null model only containing the PoO, the condition, and the self-reported guilt change, to test whether the facial activity influenced perceived guilt at all [<xref ref-type="bibr" rid="pone.0231756.ref086">86</xref>]. To establish the significance of each predictor variable, we tested the full model against a reduced model not containing the variable [<xref ref-type="bibr" rid="pone.0231756.ref087">87</xref>] using the ‘drop1’ function in R. We tested for collinearity using Variance Inflation Factors [<xref ref-type="bibr" rid="pone.0231756.ref088">88</xref>] with the ‘vif’ function in the ‘car’ package [<xref ref-type="bibr" rid="pone.0231756.ref089">89</xref>]; collinearity of test variables was not an issue (maximum VIF 1.84).</p>
</sec>
</sec>
<sec id="sec022" sec-type="results">
<title>Results</title>
<sec id="sec023">
<title>Descriptive statistics</title>
<p>Overall, judges attributed a higher level of guilt to participants in the guilt videos (<italic>M</italic> = 35.65, <italic>SD</italic> = 9.46) compared to participants in control videos (<italic>M</italic> = 19.16, <italic>SD</italic> = 7.47; t(19.246) = -6.61, p &lt; 0.001). They also attributed a higher level of surprise to the guilt videos (<italic>M</italic> = 42.46, <italic>SD</italic> = 16.18) compared to the control videos (<italic>M</italic> = 21.48, <italic>SD</italic> = 12.93; t(2.051) = -4.88, p &lt; 0.001).</p>
<p>The judges reported 403 instances of guilt across the guilt videos, with an average of seven pinpoints per video, and 36 instances across the control videos. In 40 of those instances (10% of the total amount of guilt pinpoints identified across all videos), guilt was associated with one other emotion (guilt was associated with embarrassment in 45% of these 58 occurrences, with discomfort for 47.5% and surprise for 7%; see <xref ref-type="supplementary-material" rid="pone.0231756.s019">S1 Study</xref> of Table 2).</p>
<p>Judges made reliable ratings regarding the level of felt guilt: we found a positive correlation between the averaged judged guilt per individual and the individual self-reported guilt (r = 0.465, n = 69, p &lt; 0.001).</p>
</sec>
<sec id="sec024">
<title>Analysis of pinpoints</title>
<p><italic>Control videos</italic>. For the frames that were identified by judges in the videos belonging to the control condition of the experiment, the pinpointed frames differed significantly from other frames only by showing more activity in AU20 and in face touching (<xref ref-type="table" rid="pone.0231756.t004">Table 4</xref>). They also showed less activity of AU10 and AU61.</p>
<table-wrap id="pone.0231756.t004" position="float">
<object-id pub-id-type="doi">10.1371/journal.pone.0231756.t004</object-id>
<label>Table 4</label> <caption><title>Comparison of the dataset for the frames from control videos that were selected as displaying guilt by judges (observed frequency) with the dataset containing the remaining frames for the control videos (expected frequency), after controlling for differences in the reported PoOs of participants.</title></caption>
<alternatives>
<graphic id="pone.0231756.t004g" mimetype="image" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.t004" xlink:type="simple"/>
<table>
<colgroup>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
</colgroup>
<thead>
<tr>
<th align="justify">AU</th>
<th align="right">Observed Frequency</th>
<th align="right">Expected Frequency</th>
<th align="right">z</th>
<th align="right">p-value</th>
</tr>
</thead>
<tbody>
<tr>
<td align="justify">1</td>
<td align="right">0.41</td>
<td align="right">0.53</td>
<td align="right">-1.25</td>
<td align="right">0.096</td>
</tr>
<tr>
<td align="justify">2</td>
<td align="right">0.41</td>
<td align="right">0.53</td>
<td align="right">-1.25</td>
<td align="right">0.096</td>
</tr>
<tr>
<td align="justify">4</td>
<td align="right">0.05</td>
<td align="right">0.02</td>
<td align="right">1.86</td>
<td align="right">0.033</td>
</tr>
<tr>
<td align="justify">5</td>
<td align="right">0.05</td>
<td align="right">0.04</td>
<td align="right">1.14</td>
<td align="right">0.122</td>
</tr>
<tr>
<td align="justify">7</td>
<td align="right">0.34</td>
<td align="right">0.44</td>
<td align="right">-0.87</td>
<td align="right">0.184</td>
</tr>
<tr>
<td align="justify"><italic>10</italic></td>
<td align="right"><italic>0</italic>.<italic>03</italic></td>
<td align="right"><italic>0</italic>.<italic>22</italic></td>
<td align="right"><italic>-2</italic>.<italic>22</italic></td>
<td align="right"><italic>0</italic>.<italic>005</italic></td>
</tr>
<tr>
<td align="justify">12</td>
<td align="right">0.35</td>
<td align="right">0.36</td>
<td align="right">-0.12</td>
<td align="right">0.414</td>
</tr>
<tr>
<td align="justify">14</td>
<td align="right">0.29</td>
<td align="right">0.45</td>
<td align="right">-1.51</td>
<td align="right">0.059</td>
</tr>
<tr>
<td align="justify">17</td>
<td align="right">0.10</td>
<td align="right">0.26</td>
<td align="right">-1.52</td>
<td align="right">0.109</td>
</tr>
<tr>
<td align="justify">18</td>
<td align="right">0.01</td>
<td align="right">0.07</td>
<td align="right">-1.80</td>
<td align="right">0.035</td>
</tr>
<tr>
<td align="justify"><bold>20</bold></td>
<td align="right"><bold>0.04</bold></td>
<td align="right"><bold>0.01</bold></td>
<td align="right"><bold>7.82</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
<tr>
<td align="justify">24</td>
<td align="right">0.05</td>
<td align="right">0.07</td>
<td align="right">-0.69</td>
<td align="right">0.288</td>
</tr>
<tr>
<td align="justify">51</td>
<td align="right">0.21</td>
<td align="right">0.37</td>
<td align="right">-1.83</td>
<td align="right">0.037</td>
</tr>
<tr>
<td align="justify">52</td>
<td align="right">0.19</td>
<td align="right">0.38</td>
<td align="right">-1.94</td>
<td align="right">0.049</td>
</tr>
<tr>
<td align="justify">54</td>
<td align="right">0.20</td>
<td align="right">0.14</td>
<td align="right">1.56</td>
<td align="right">0.067</td>
</tr>
<tr>
<td align="justify">55</td>
<td align="right">0.11</td>
<td align="right">0.21</td>
<td align="right">-1.57</td>
<td align="right">0.068</td>
</tr>
<tr>
<td align="justify">56</td>
<td align="right">0.35</td>
<td align="right">0.22</td>
<td align="right">1.82</td>
<td align="right">0.044</td>
</tr>
<tr>
<td align="justify">57</td>
<td align="right">0.20</td>
<td align="right">0.18</td>
<td align="right">0.34</td>
<td align="right">0.342</td>
</tr>
<tr>
<td align="justify">59</td>
<td align="right">0.01</td>
<td align="right">0.04</td>
<td align="right">-1.54</td>
<td align="right">0.079</td>
</tr>
<tr>
<td align="justify"><italic>61</italic></td>
<td align="right"><italic>0</italic>.<italic>16</italic></td>
<td align="right"><italic>0</italic>.<italic>26</italic></td>
<td align="right"><italic>-2</italic>.<italic>55</italic></td>
<td align="right"><italic>0</italic>.<italic>003</italic></td>
</tr>
<tr>
<td align="justify">62</td>
<td align="right">0.12</td>
<td align="right">0.18</td>
<td align="right">-1.48</td>
<td align="right">0.065</td>
</tr>
<tr>
<td align="justify">64</td>
<td align="right">0.51</td>
<td align="right">0.49</td>
<td align="right">0.37</td>
<td align="right">0.334</td>
</tr>
<tr>
<td align="justify"><bold>FaceTouch</bold></td>
<td align="right"><bold>0.10</bold></td>
<td align="right"><bold>0.04</bold></td>
<td align="right"><bold>3.43</bold></td>
<td align="right"><bold>0.002</bold></td>
</tr>
<tr>
<td align="justify">NeckTouch</td>
<td align="right">0.00</td>
<td align="right">0.00</td>
<td align="right">0.00</td>
<td align="right">1.000</td>
</tr>
</tbody>
</table>
</alternatives>
<table-wrap-foot>
<fn id="t004fn001"><p>P-values denote the likelihood that the observed frequency of occurrence for an AU was more extreme than the predicted frequency. AU with significantly increased occurrence in bold, AU with significantly decreased occurrence in italics.</p></fn>
</table-wrap-foot>
</table-wrap>
<p><italic>Guilt videos</italic>. For the pinpoints selected by judges in guilt videos, these frames differed substantially from other frames in the same videos (<xref ref-type="table" rid="pone.0231756.t005">Table 5</xref>). They had increased activity for AU4, AU5, AU17, AU54, AU61, AU62, AU64, and for neck touching. They had decreased activity for AU57 and AU59. These results mirror some of the signals of guilt in Study 1, providing further evidence that AU4, AU20, and self-directed behaviour (neck touching) showed differences in production and perception of guilt. Judges seemed to use increased eye movement (AU61, AU62, AU64) as a sign of guilt.</p>
<table-wrap id="pone.0231756.t005" position="float">
<object-id pub-id-type="doi">10.1371/journal.pone.0231756.t005</object-id>
<label>Table 5</label> <caption><title>Comparison of the dataset for the frames from guilt videos that were selected as displaying guilt by judges (observed frequency) with the dataset containing the remaining frames for the same guilt videos (expected frequency), after controlling for differences in the reported PoO of participants.</title></caption>
<alternatives>
<graphic id="pone.0231756.t005g" mimetype="image" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.t005" xlink:type="simple"/>
<table>
<colgroup>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
<col align="left" valign="middle"/>
</colgroup>
<thead>
<tr>
<th align="justify">AU</th>
<th align="right">Observed Frequency</th>
<th align="right">Expected Frequency</th>
<th align="right">z</th>
<th align="right">p-value</th>
</tr>
</thead>
<tbody>
<tr>
<td align="justify">1</td>
<td align="right">0.56</td>
<td align="right">0.52</td>
<td align="right">1.11</td>
<td align="right">0.142</td>
</tr>
<tr>
<td align="justify">2</td>
<td align="right">0.53</td>
<td align="right">0.51</td>
<td align="right">0.53</td>
<td align="right">0.287</td>
</tr>
<tr>
<td align="justify"><bold>4</bold></td>
<td align="right"><bold>0.26</bold></td>
<td align="right"><bold>0.20</bold></td>
<td align="right"><bold>2.54</bold></td>
<td align="right"><bold>0.006</bold></td>
</tr>
<tr>
<td align="justify"><bold>5</bold></td>
<td align="right"><bold>0.15</bold></td>
<td align="right"><bold>0.11</bold></td>
<td align="right"><bold>2.98</bold></td>
<td align="right"><bold>0.004</bold></td>
</tr>
<tr>
<td align="justify">7</td>
<td align="right">0.29</td>
<td align="right">0.26</td>
<td align="right">0.59</td>
<td align="right">0.276</td>
</tr>
<tr>
<td align="justify">10</td>
<td align="right">0.33</td>
<td align="right">0.27</td>
<td align="right">2.00</td>
<td align="right">0.017</td>
</tr>
<tr>
<td align="justify">12</td>
<td align="right">0.16</td>
<td align="right">0.15</td>
<td align="right">0.42</td>
<td align="right">0.355</td>
</tr>
<tr>
<td align="justify">14</td>
<td align="right">0.15</td>
<td align="right">0.17</td>
<td align="right">-0.76</td>
<td align="right">0.230</td>
</tr>
<tr>
<td align="justify"><bold>17</bold></td>
<td align="right"><bold>0.20</bold></td>
<td align="right"><bold>0.13</bold></td>
<td align="right"><bold>5.25</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
<tr>
<td align="justify">18</td>
<td align="right">0.15</td>
<td align="right">0.14</td>
<td align="right">0.87</td>
<td align="right">0.194</td>
</tr>
<tr>
<td align="justify">20</td>
<td align="right">0.04</td>
<td align="right">0.04</td>
<td align="right">0.19</td>
<td align="right">0.429</td>
</tr>
<tr>
<td align="justify">24</td>
<td align="right">0.16</td>
<td align="right">0.15</td>
<td align="right">0.67</td>
<td align="right">0.238</td>
</tr>
<tr>
<td align="justify">51</td>
<td align="right">0.08</td>
<td align="right">0.10</td>
<td align="right">-1.22</td>
<td align="right">0.101</td>
</tr>
<tr>
<td align="justify">52</td>
<td align="right">0.31</td>
<td align="right">0.30</td>
<td align="right">0.36</td>
<td align="right">0.370</td>
</tr>
<tr>
<td align="justify"><bold>54</bold></td>
<td align="right"><bold>0.22</bold></td>
<td align="right"><bold>0.15</bold></td>
<td align="right"><bold>3.66</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
<tr>
<td align="justify">55</td>
<td align="right">0.20</td>
<td align="right">0.21</td>
<td align="right">-0.14</td>
<td align="right">0.419</td>
</tr>
<tr>
<td align="justify">56</td>
<td align="right">0.16</td>
<td align="right">0.19</td>
<td align="right">-0.97</td>
<td align="right">0.193</td>
</tr>
<tr>
<td align="justify"><italic>57</italic></td>
<td align="right"><italic>0</italic>.<italic>10</italic></td>
<td align="right"><italic>0</italic>.<italic>18</italic></td>
<td align="right"><italic>-3</italic>.<italic>94</italic></td>
<td align="right"><italic>0</italic>.<italic>000</italic></td>
</tr>
<tr>
<td align="justify"><italic>59</italic></td>
<td align="right"><italic>0</italic>.<italic>02</italic></td>
<td align="right"><italic>0</italic>.<italic>03</italic></td>
<td align="right"><italic>-2</italic>.<italic>45</italic></td>
<td align="right"><italic>0</italic>.<italic>006</italic></td>
</tr>
<tr>
<td align="justify"><bold>61</bold></td>
<td align="right"><bold>0.12</bold></td>
<td align="right"><bold>0.09</bold></td>
<td align="right"><bold>2.63</bold></td>
<td align="right"><bold>0.001</bold></td>
</tr>
<tr>
<td align="justify"><bold>62</bold></td>
<td align="right"><bold>0.26</bold></td>
<td align="right"><bold>0.21</bold></td>
<td align="right"><bold>3.99</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
<tr>
<td align="justify"><bold>64</bold></td>
<td align="right"><bold>0.36</bold></td>
<td align="right"><bold>0.30</bold></td>
<td align="right"><bold>3.52</bold></td>
<td align="right"><bold>0.000</bold></td>
</tr>
<tr>
<td align="justify">FaceTouch</td>
<td align="right">0.15</td>
<td align="right">0.13</td>
<td align="right">0.85</td>
<td align="right">0.203</td>
</tr>
<tr>
<td align="justify"><bold>NeckTouch</bold></td>
<td align="right"><bold>0.03</bold></td>
<td align="right"><bold>0.02</bold></td>
<td align="right"><bold>2.81</bold></td>
<td align="right"><bold>0.002</bold></td>
</tr>
</tbody>
</table>
</alternatives>
<table-wrap-foot>
<fn id="t005fn001"><p>P-values denote the likelihood that the observed frequency of occurrence for an AU was more extreme than the predicted frequency. AU with significantly increased occurrence in bold, AU with significantly decreased occurrence in italics.</p></fn>
</table-wrap-foot>
</table-wrap>
</sec>
<sec id="sec025">
<title>Judged guilt</title>
<p>The full null model comparison revealed a significant impact of facial activity on judged guilt (X<sup>2</sup> = 1774.8, df = 5, p &lt; 0.001). Of the test predictors, the activity of AU4 in a video (X<sup>2</sup> = 817.8, df = 1, p &lt; 0.001) and the amount of neck touching (X<sup>2</sup> = 234.4, df = 1, p = 0.041) both positively influenced perceived guilt, while AU10 and AU20 did not show any effect; neither did the overall number of the four action units present. There was no impact of the PoO of the participant, nor was there here an impact of the self-reported guilt. Videos from the guilt condition were rated as displaying higher guilt than videos from the control condition (X<sup>2</sup> = 928.9, df = 1, p &lt; 0.001).</p>
</sec>
</sec>
<sec id="sec026" sec-type="conclusions">
<title>Discussion</title>
<p>This study aimed to identify which facial movements were perceived as guilt when guilt was induced in a laboratory experiment. We found that judges gave a higher rating of guilt in videos where people were seen frowning (AU4 Brow Lowerer) and touching their neck (Neck Touching). We used instances when judges reported seeing guilt to create 1-second windows of interest and conduct our analysis only on those time windows of guilt. Doing this, we identified facial movements reliably associated with the perceived expression of guilt. Judges reported other emotions at the same time as guilt in only 14% of the guilt pinpoints. Moreover, pinpoints of guilt revealed specific facial movements that were not present in control videos. This made us fairly confident that the facial expressions identified were associated with the experience (perception) of guilt.</p>
</sec>
</sec>
<sec id="sec027">
<title>General discussion</title>
<p>In two studies, we aimed to identify facial movements and behavioural displays associated with the experience of guilt in humans. In the first study, we examined the production of guilt using a novel induction technique. In the second study, we examined whether others perceived guilt from the face of those experiencing guilt. We used an extensive, bottom-up coding scheme to identify facial patterns associated with the experience (production and perception) of guilt as part of a dynamic sequence of behaviour, combined with a robust bootstrapping method to analyse our data.</p>
<p>We found a positive relationship between an individual’s level of self-reported guilt and the extent to which that individual was judged as feeling guilty by others. This supports the idea that guilt could have evolved as an observable phenomenon with a potential communicative social function. The patterns identified in this experiment showed some consistency between what people do when feeling guilty and what people see when identifying guilt. Our first study showed that guilt was associated with frowning, lip stretching and neck touching [AU4 Brow Lowerer, AU20 Lips Stretch; <xref ref-type="bibr" rid="pone.0231756.ref059">59</xref>], as well as looking towards the right (AU52 Head Right, AU62 Eyes Right), which was probably an artefact of the position of the computer. Our second study showed that the identification of guilt in others was associated with frowning, eyes widening, and neck touching [AU4 Brow Lowerer, AU5 Upper Lid Raiser, AU10 Upper Lip Raiser; <xref ref-type="bibr" rid="pone.0231756.ref059">59</xref>], as well as looking down and sideways (AU54 Head Down, AU61 Eyes Left, AU62 Eyes Right, AU64 Eyes Down), another potential artefact due to the experimental set-up. Thus, it seems that in this study, guilt was associated with a non-verbal pattern of frowning and neck touching.</p>
<p>Using a bottom-up methodology allowed us not only to approach our question without any a priori assumptions regarding the results, but it also increased the likelihood that the movements identified in our studies (AU4, AU20, and neck touch) are associated with the experience of guilt and no other secondary moral emotion. Indeed, the “guilt” pinpoints identified by the judges (Study 2) were mainly instances of identification of guilt alone, with only 14% of the total number of guilt pinpoints associated with more than one emotion (see <xref ref-type="supplementary-material" rid="pone.0231756.s019">S1 Study</xref> of Table 2). This allowed us to focus our analysis on facial movements associated with the experience of guilt only. Moreover, even though guilt is often mistaken for embarrassment or shame, the embarrassed display has been characterised by the joint production of gaze down, controlled smiles, head turns, gaze shifts, face touches [<xref ref-type="bibr" rid="pone.0231756.ref044">44</xref>], and the occasional blushing [<xref ref-type="bibr" rid="pone.0231756.ref090">90</xref>]; and the typical face of shame was described with head and gaze movements down [<xref ref-type="bibr" rid="pone.0231756.ref043">43</xref>–<xref ref-type="bibr" rid="pone.0231756.ref045">45</xref>]. None of the movements we found associated with the expression of guilt were associated with those of other negative self-conscious emotions. During the AU selection process, most facial movements associated with either embarrassment or shame were discarded from further analysis, with the only exception of face touching. Face touch can emphasise embarrassment displays, but it is not necessary for the identification of embarrassment [<xref ref-type="bibr" rid="pone.0231756.ref044">44</xref>]. 
A previous study suggested a link between blushing and admission of guilt [<xref ref-type="bibr" rid="pone.0231756.ref091">91</xref>]; combining FACS analysis with thermal imaging techniques might have revealed changes in facial temperature in guilty participants, which could be unconsciously used by observers in their judgments.</p>
<p>This bottom-up methodology also diverges from previous research examining the facial display of guilt, which is why we may have found a more concrete candidate for the display of guilt. One notable previous study used a literature-based conceptualisation of the experience of guilt to present three candidate displays to their participants [<xref ref-type="bibr" rid="pone.0231756.ref008">8</xref>]. In that study, using a top-down approach, the participants were presented with displays selected based on previous literature, which associated the experience of guilt with the experience of self-contempt, sympathy, and pain. The authors tested whether their conceptualisation of guilt accurately described a facial display associated with the experience of the emotion. The results were not conclusive as the candidate displays were more often associated with emotions other than guilt [<xref ref-type="bibr" rid="pone.0231756.ref008">8</xref>]. A more recent study associated the experience of guilty feeling with increased skin conductance and gaze avoidance [<xref ref-type="bibr" rid="pone.0231756.ref092">92</xref>]. We did not find gaze avoidance (i.e. actively avoiding looking in another person’s direction) to be part of the facial signal of guilt, even though participants in the guilt condition looked down and around more than participants in the control condition. Yet, this could be due to our experimental design: participants in the guilt condition might have been looking down at the laptop more than people in the control condition. It is thus unclear in our design whether guilty participants avoided eye-contact or focused on an object associated with their wrongdoing (the laptop could be incriminated for the deletion of data on the USB stick, removing the fault from them).</p>
<p>Both the production and perception of guilt were associated with self-directed behaviour (i.e., scratching, neck or face touching), which are often classified as displacement behaviours, and are defined as a group of behaviours that appear irrelevant to the situation in which they are displayed, but can gain communicative value over time [<xref ref-type="bibr" rid="pone.0231756.ref061">61</xref>]. The production of such behaviours has been shown to increase in stressful, negative, situations [<xref ref-type="bibr" rid="pone.0231756.ref093">93</xref>,<xref ref-type="bibr" rid="pone.0231756.ref094">94</xref>]. Self-directed behaviours may be used when individuals try to distance and protect themselves from an unpleasant situation, acting as a short-term diversion of attention, which could, in turn, reduce the negative feeling associated with the situation at hand [<xref ref-type="bibr" rid="pone.0231756.ref093">93</xref>,<xref ref-type="bibr" rid="pone.0231756.ref095">95</xref>,<xref ref-type="bibr" rid="pone.0231756.ref096">96</xref>]. Self-directed behaviour could thus help regulate the level of stress associated with emotionally challenging situations [<xref ref-type="bibr" rid="pone.0231756.ref094">94</xref>], such as the guilt induction experienced by our participants in Study 1. Indeed, some studies have shown that self-directed behaviours are common in situations such as embarrassment [<xref ref-type="bibr" rid="pone.0231756.ref044">44</xref>], discomfort [<xref ref-type="bibr" rid="pone.0231756.ref020">20</xref>], and anxiety and guilt [<xref ref-type="bibr" rid="pone.0231756.ref097">97</xref>]; the latter study focussed on hand movements and found a correlation between the production of self-directed behaviours (i.e., scratching) and anxiety and guilt feelings. In our study, we found that the experience of guilt was associated with self-directed behaviours (neck touching), which appears to be in line with previous research. 
However, the production of self-directed behaviours could be due to the experimental design: participants were seated at a table, in front of a computer. Nevertheless, the setup is unlikely to have elicited those movements, as participants in the control condition, also seated at a computer, did not display as many self-directed behaviours.</p>
<p>More recent conceptualisations of emotional experiences [<xref ref-type="bibr" rid="pone.0231756.ref027">27</xref>,<xref ref-type="bibr" rid="pone.0231756.ref028">28</xref>,<xref ref-type="bibr" rid="pone.0231756.ref035">35</xref>–<xref ref-type="bibr" rid="pone.0231756.ref037">37</xref>] argue for a less universal and omnipotent link between the experience of an emotion and behavioural outcomes. In an emotional context, multiple systems will be triggered (e.g., cognitive processes, physiological systems, motor expressions; [<xref ref-type="bibr" rid="pone.0231756.ref035">35</xref>]), leading to multiple behavioural outcomes (e.g. facial signals), one of which might be used by observers when responding to the situation [<xref ref-type="bibr" rid="pone.0231756.ref035">35</xref>]. As such, an individual feeling guilty might produce multiple facial signals, one of which will be more strongly associated with the subjective, constructed, feeling of guilt (e.g., frown, lips stretch and neck touching); an observer might perceive those facial signals and rely mainly on specific ones to interpret the emotional state of the guilty individual (e.g., frown and neck touching).</p>
<p>It is important to remain cautious in the interpretation of our data. We need to acknowledge that although neck touching was present more in association with feelings of guilt, only 12.5% of the individuals displayed neck touching. Self-directed behaviours, however, were displayed by over 64% of the individuals during the guilt induction. Even though few participants displayed neck touching, our results showed it is a significant signal of guilt. We need to consider the possibility that by reducing our dataset to 1-second windows, we could have excluded non-verbal signals important for the onset of the experience of guilt. By focussing on the apexes of the expressions, we might have lost secondary signals contributing to the reliable identification of guilty signals. Our results provide preliminary information regarding the non-verbal signals exhibited more in association with guilty feelings. A follow-up study, using a reduced ethogram focussing on the movements identified here, could allow us to reach a better agreement score between coders and thus increase the K’s alpha and the validity of our results [<xref ref-type="bibr" rid="pone.0231756.ref067">67</xref>,<xref ref-type="bibr" rid="pone.0231756.ref068">68</xref>]. We also need to consider the fact that providing contextual information might have influenced the judges in their decisions. To assess the impact of context, we conducted a follow-up study comparing the judgements made with and without contextual information provided [<xref ref-type="bibr" rid="pone.0231756.ref098">98</xref>]. Our judgement study also presents some linguistic limitations. Even if there are differences in the appraisal and behavioural outcomes between shame and guilt, it has been previously shown that English speakers use “guilt” and “shame” interchangeably [<xref ref-type="bibr" rid="pone.0231756.ref099">99</xref>]. 
To overcome this conceptual barrier, we conducted another judgement study, without providing contextual information [<xref ref-type="bibr" rid="pone.0231756.ref098">98</xref>,<xref ref-type="bibr" rid="pone.0231756.ref100">100</xref>]. We hoped to gauge how the expression of guilt is perceived when no verbal/written content needs to be understood first. Moreover, to compare various judgement methodologies [emotion words vs action tendencies vs dimensions; <xref ref-type="bibr" rid="pone.0231756.ref101">101</xref>], we conducted another follow-up study to help us have a better understanding of how people conceptualise the facial expression produced when experiencing guilt, using different types of words and classification methodologies [forced choice vs free labelling vs dimensions; <xref ref-type="bibr" rid="pone.0231756.ref100">100</xref>]. This way, we hoped to introduce more variability in the emotional judgements, looking at patterns of mislabelling of guilty displays.</p>
<p>These are the first studies to look at the genuine expression of guilt and the perception of secondary emotion using spontaneous dynamic stimuli. Judges had to rely on genuine, dynamically presented facial expressions to recognise and rate emotions. They were exploratory studies, using simple analysis and focussing on the behavioural signals associated with a guilt-inducing situation. We have however collected more extensive data; now that we identified a facial signal associated with the experience of guilt, more in-depth analysis (such as lens modelling [<xref ref-type="bibr" rid="pone.0231756.ref035">35</xref>]) would be an interesting step to further break down the mechanisms associated with guilt.</p>
<p>Our experiments support a drive towards a new scientific culture, studying facial expressions using novel approaches removed from the dichotomous debate about nature vs nurture [<xref ref-type="bibr" rid="pone.0231756.ref073">73</xref>,<xref ref-type="bibr" rid="pone.0231756.ref102">102</xref>]. Previous research extensively looked at the behavioural consequences of guilty feelings: it can promote directed action towards those who have been wronged [<xref ref-type="bibr" rid="pone.0231756.ref004">4</xref>], it can reduce prejudice behaviours [<xref ref-type="bibr" rid="pone.0231756.ref013">13</xref>] and increase generosity [<xref ref-type="bibr" rid="pone.0231756.ref006">6</xref>]. We focussed on the first reactions people have when realising they did something wrong and the guilty feelings emerge; we were able to identify reliable candidates characterising the experience of self-reported guilt. Building on this, we conducted a study to investigate guilty people’s propensity to repair the relationship, as well as the impact of a facial expression on the reaction of the person wronged, i.e. the victim [<xref ref-type="bibr" rid="pone.0231756.ref103">103</xref>]. Together, our results suggest that guilt is expressed on the face and communicates the experience of guilt to others through a signal.</p>
</sec>
<sec id="sec028">
<title>Supporting information</title>
<supplementary-material id="pone.0231756.s001" mimetype="image/tiff" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s001" xlink:type="simple">
<label>S1 Fig</label>
<caption>
<title>Post-induction affect change.</title>
<p>The variations in self-reported affect (guilt, shame, distress, and pride) are presented for each participant (grey dots/lines) before and after induction (see <xref ref-type="fig" rid="pone.0231756.g001">Fig 1</xref> for details). The central tendencies presented in Table 3 of <xref ref-type="supplementary-material" rid="pone.0231756.s019">S1 Study</xref> are displayed here by the thick line.</p>
<p>(TIF)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s002" mimetype="image/tiff" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s002" xlink:type="simple">
<label>S2 Fig</label>
<caption>
<title>Representation of the temporal production of facial movements.</title>
<p>The number of participants who produced each AU through time is presented on this heat map. Cells in white indicate that the AU was produced equally in the guilt condition and the control condition at a given time; cells in red indicate the AU was produced more by participants in the guilt condition; cells in blue indicate the AU was more produced by participants in the control condition. Time is presented in seconds. Gradients of red and blue represent the difference between the proportion of participants displaying AU in guilt condition and the proportion of participants displaying AU in control condition; the darker the colour, the greater the difference (<italic>no statistical analysis conducted here</italic>). The patterns (dots and lines) were added to help increase the readability of the figure: cells with dots mean the AU was more produced in the guilt condition at this time and cells with lines mean the AU was more produced in the control condition (how much more produced is given by the shade of the colour).</p>
<p>(TIF)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s003" mimetype="image/tiff" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s003" xlink:type="simple">
<label>S3 Fig</label>
<caption>
<title>Judgement ratings.</title>
<p>A representation of the different judgements made by judges: a. ruler ratings on 5 emotions; b. reporting pinpoints section; c. overall confidence regarding judgement.</p>
<p>(TIF)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s004" mimetype="video/x-msvideo" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s004" xlink:type="simple">
<label>S1 Video</label>
<caption>
<title>Video taken during the guilt induction task presenting the succession of AUs produced by a participant.</title>
<p>(AVI)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s005" mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s005" xlink:type="simple">
<label>S1 Data</label>
<caption>
<title/>
<p>(XLSX)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s006" mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s006" xlink:type="simple">
<label>S2 Data</label>
<caption>
<title/>
<p>(XLSX)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s007" mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s007" xlink:type="simple">
<label>S3 Data</label>
<caption>
<title/>
<p>(XLSX)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s008" mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s008" xlink:type="simple">
<label>S4 Data</label>
<caption>
<title/>
<p>(XLSX)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s009" mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s009" xlink:type="simple">
<label>S5 Data</label>
<caption>
<title/>
<p>(XLSX)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s010" mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s010" xlink:type="simple">
<label>S6 Data</label>
<caption>
<title/>
<p>(XLSX)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s011" mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s011" xlink:type="simple">
<label>S7 Data</label>
<caption>
<title/>
<p>(XLSX)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s012" mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s012" xlink:type="simple">
<label>S8 Data</label>
<caption>
<title/>
<p>(XLSX)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s013" mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s013" xlink:type="simple">
<label>S9 Data</label>
<caption>
<title/>
<p>(XLSX)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s014" mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s014" xlink:type="simple">
<label>S10 Data</label>
<caption>
<title/>
<p>(XLSX)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s015" mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s015" xlink:type="simple">
<label>S11 Data</label>
<caption>
<title/>
<p>(XLSX)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s016" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s016" xlink:type="simple">
<label>S1 File</label>
<caption>
<title/>
<p>(R)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s017" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s017" xlink:type="simple">
<label>S2 File</label>
<caption>
<title/>
<p>(R)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s018" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s018" xlink:type="simple">
<label>S3 File</label>
<caption>
<title/>
<p>(R)</p>
</caption>
</supplementary-material>
<supplementary-material id="pone.0231756.s019" mimetype="application/vnd.openxmlformats-officedocument.wordprocessingml.document" position="float" xlink:href="info:doi/10.1371/journal.pone.0231756.s019" xlink:type="simple">
<label>S1 Study</label>
<caption>
<title/>
<p>(DOCX)</p>
</caption>
</supplementary-material>
</sec>
</body>
<back>
<ack>
<p>We thank the Leverhulme Trust Research Project Grant for funding this project (RPG- 2016–206 to BMW and AV, RPG-2018-334 to JM). We also thank the research assistants for help throughout data collection, and Hoi-Lam Jim and Marta Doroszuk for help with reliability analysis. Finally, we would like to thank the reviewers of this manuscript for helping us improve our work.</p>
</ack>
<ref-list>
<title>References</title>
<ref id="pone.0231756.ref001"><label>1</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Tomasello</surname> <given-names>M</given-names></name> (<year>2008</year>) <source>Origins of human communication</source>. <publisher-loc>Cambridge, MA</publisher-loc>: <publisher-name>MIT Press</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref002"><label>2</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Haidt</surname> <given-names>J</given-names></name> (<year>2003</year>) <chapter-title>The moral emotions</chapter-title>. In: <name name-style="western"><surname>Davidson</surname> <given-names>RJ</given-names></name>, <name name-style="western"><surname>Schere</surname> <given-names>KR</given-names></name>, <name name-style="western"><surname>Goldsmith</surname> <given-names>HH</given-names></name>, editors. <source>Handbook of affective sciences</source>. <publisher-name>Oxford University Press</publisher-name>. pp. <fpage>852</fpage>–<lpage>870</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref003"><label>3</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Carroll</surname> <given-names>J</given-names></name> (<year>1985</year>) <source>Guilt: the grey eminence behind character, history, and culture</source>: <publisher-name>Routledge &amp; Kegan Paul Books</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref004"><label>4</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Cryder</surname> <given-names>CE</given-names></name>, <name name-style="western"><surname>Springer</surname> <given-names>S</given-names></name>, <name name-style="western"><surname>Morewedge</surname> <given-names>CK</given-names></name> (<year>2012</year>) <article-title>Guilty Feelings, Targeted Actions</article-title>. <source>Personality and Social Psychology Bulletin</source> <volume>38</volume>: <fpage>607</fpage>–<lpage>618</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1177/0146167211435796" xlink:type="simple">10.1177/0146167211435796</ext-link></comment> <object-id pub-id-type="pmid">22337764</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref005"><label>5</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>De Hooge</surname> <given-names>IE</given-names></name>, <name name-style="western"><surname>Zeelenberg</surname> <given-names>M</given-names></name>, <name name-style="western"><surname>Breugelmans</surname> <given-names>SM</given-names></name> (<year>2007</year>) <article-title>Moral sentiments and cooperation: Differential influences of shame and guilt</article-title>. <source>Cognition and Emotion</source> <volume>21</volume>: <fpage>1025</fpage>–<lpage>1042</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref006"><label>6</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Ketelaar</surname> <given-names>T</given-names></name>, <name name-style="western"><surname>Au</surname> <given-names>WT</given-names></name> (<year>2003</year>) <article-title>The effects of feelings of guilt on the behaviour of uncooperative individuals in repeated social bargaining games: An affect-as-information interpretation of the role of emotion in social interaction</article-title>. <source>Cognition &amp; Emotion</source> <volume>17</volume>: <fpage>429</fpage>–<lpage>453</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref007"><label>7</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Weisman</surname> <given-names>MR</given-names></name> (<year>2014</year>) <source>Showing remorse: Law and the social control of emotion</source>: <publisher-name>Ashgate Publishing, Ltd</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref008"><label>8</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Keltner</surname> <given-names>D</given-names></name>, <name name-style="western"><surname>Buswell</surname> <given-names>BN</given-names></name> (<year>1996</year>) <article-title>Evidence for the distinctness of embarrassment, shame, and guilt: A study of recalled antecedents and facial expressions of emotion</article-title>. <source>Cognition &amp; Emotion</source> <volume>10</volume>: <fpage>155</fpage>–<lpage>171</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref009"><label>9</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Keltner</surname> <given-names>D</given-names></name>, <name name-style="western"><surname>Gross</surname> <given-names>JJ</given-names></name> (<year>1999</year>) <article-title>Functional accounts of emotions</article-title>. <source>Cognition &amp; Emotion</source> <volume>13</volume>: <fpage>467</fpage>–<lpage>480</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref010"><label>10</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Muris</surname> <given-names>P</given-names></name>, <name name-style="western"><surname>Meesters</surname> <given-names>C</given-names></name> (<year>2014</year>) <article-title>Small or big in the eyes of the other: on the developmental psychopathology of self-conscious emotions as shame, guilt, and pride</article-title>. <source>Clinical child and family psychology review</source> <volume>17</volume>: <fpage>19</fpage>–<lpage>40</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1007/s10567-013-0137-z" xlink:type="simple">10.1007/s10567-013-0137-z</ext-link></comment> <object-id pub-id-type="pmid">23712881</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref011"><label>11</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Vaish</surname> <given-names>A</given-names></name> (<year>2018</year>) <article-title>The prosocial functions of early social emotions: the case of guilt</article-title>. <source>Current opinion in psychology</source> <volume>20</volume>: <fpage>25</fpage>–<lpage>29</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1016/j.copsyc.2017.08.008" xlink:type="simple">10.1016/j.copsyc.2017.08.008</ext-link></comment> <object-id pub-id-type="pmid">28830002</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref012"><label>12</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Zemack-Rugar</surname> <given-names>Y</given-names></name>, <name name-style="western"><surname>Bettman</surname> <given-names>JR</given-names></name>, <name name-style="western"><surname>Fitzsimons</surname> <given-names>GJ</given-names></name> (<year>2007</year>) <article-title>The effects of nonconsciously priming emotion concepts on behavior</article-title>. <source>Journal of personality and social psychology</source> <volume>93</volume>: <fpage>927</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037/0022-3514.93.6.927" xlink:type="simple">10.1037/0022-3514.93.6.927</ext-link></comment> <object-id pub-id-type="pmid">18072846</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref013"><label>13</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Amodio</surname> <given-names>DM</given-names></name>, <name name-style="western"><surname>Devine</surname> <given-names>PG</given-names></name>, <name name-style="western"><surname>Harmon-Jones</surname> <given-names>E</given-names></name> (<year>2007</year>) <article-title>A dynamic model of guilt implications for motivation and self-regulation in the context of prejudice</article-title>. <source>Psychological Science</source> <volume>18</volume>: <fpage>524</fpage>–<lpage>530</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1111/j.1467-9280.2007.01933.x" xlink:type="simple">10.1111/j.1467-9280.2007.01933.x</ext-link></comment> <object-id pub-id-type="pmid">17576266</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref014"><label>14</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Ohbuchi</surname> <given-names>K-i</given-names></name>, <name name-style="western"><surname>Kameda</surname> <given-names>M</given-names></name>, <name name-style="western"><surname>Agarie</surname> <given-names>N</given-names></name> (<year>1989</year>) <article-title>Apology as aggression control: its role in mediating appraisal of and response to harm</article-title>. <source>Journal of personality and social psychology</source> <volume>56</volume>: <fpage>219</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037//0022-3514.56.2.219" xlink:type="simple">10.1037//0022-3514.56.2.219</ext-link></comment> <object-id pub-id-type="pmid">2926625</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref015"><label>15</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>O'Malley</surname> <given-names>MN</given-names></name>, <name name-style="western"><surname>Greenberg</surname> <given-names>J</given-names></name> (<year>1983</year>) <article-title>Sex differences in restoring justice: The down payment effect</article-title>. <source>Journal of Research in personality</source> <volume>17</volume>: <fpage>174</fpage>–<lpage>185</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref016"><label>16</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Nelissen</surname> <given-names>R</given-names></name>, <name name-style="western"><surname>Zeelenberg</surname> <given-names>M</given-names></name> (<year>2009</year>) <article-title>When guilt evokes self-punishment: evidence for the existence of a Dobby Effect</article-title>. <source>Emotion</source> <volume>9</volume>: <fpage>118</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037/a0014540" xlink:type="simple">10.1037/a0014540</ext-link></comment> <object-id pub-id-type="pmid">19186924</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref017"><label>17</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Baumeister</surname> <given-names>RF</given-names></name>, <name name-style="western"><surname>Stillwell</surname> <given-names>AM</given-names></name>, <name name-style="western"><surname>Heatherton</surname> <given-names>TF</given-names></name> (<year>1994</year>) <article-title>Guilt: an interpersonal approach</article-title>. <source>Psychological bulletin</source> <volume>115</volume>: <fpage>243</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037/0033-2909.115.2.243" xlink:type="simple">10.1037/0033-2909.115.2.243</ext-link></comment> <object-id pub-id-type="pmid">8165271</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref018"><label>18</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Crivelli</surname> <given-names>C</given-names></name>, <name name-style="western"><surname>Fridlund</surname> <given-names>AJ</given-names></name> (<year>2019</year>) <article-title>Inside-Out: From Basic Emotions Theory to the Behavioral Ecology View</article-title>. <source>Journal of Nonverbal Behavior</source>: <fpage>1</fpage>–<lpage>34</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1007/s10919-018-0289-0" xlink:type="simple">10.1007/s10919-018-0289-0</ext-link></comment></mixed-citation></ref>
<ref id="pone.0231756.ref019"><label>19</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Ekman</surname> <given-names>P</given-names></name>, <name name-style="western"><surname>Cordaro</surname> <given-names>D</given-names></name> (<year>2011</year>) <article-title>What is meant by calling emotions basic</article-title>. <source>Emotion review</source> <volume>3</volume>: <fpage>364</fpage>–<lpage>370</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref020"><label>20</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Ekman</surname> <given-names>P</given-names></name>, <name name-style="western"><surname>Friesen</surname> <given-names>WV</given-names></name> (<year>1969</year>) <article-title>The repertoire of non-verbal behavior: Categories, origins, usage and coding</article-title>. <source>Semiotica</source> <volume>1</volume>: <fpage>49</fpage>–<lpage>98</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref021"><label>21</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Brown</surname> <given-names>DE</given-names></name> (<year>1991</year>) <source>Human universals</source>: <publisher-name>McGraw-Hill</publisher-name> <publisher-loc>New York</publisher-loc>.</mixed-citation></ref>
<ref id="pone.0231756.ref022"><label>22</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Ekman</surname> <given-names>P</given-names></name>, <name name-style="western"><surname>Keltner</surname> <given-names>D</given-names></name> (<year>1970</year>) <article-title>Universal facial expressions of emotion</article-title>. <source>California Mental Health Research Digest</source> <volume>8</volume>: <fpage>151</fpage>–<lpage>158</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref023"><label>23</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Bedford</surname> <given-names>OA</given-names></name>, <name name-style="western"><surname>Hwang</surname> <given-names>KK</given-names></name> (<year>2003</year>) <article-title>Guilt and Shame in Chinese Culture: A Cross‐cultural Framework from the Perspective of Morality and Identity</article-title>. <source>Journal for the Theory of Social Behaviour</source> <volume>33</volume>: <fpage>127</fpage>–<lpage>144</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref024"><label>24</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Matsumoto</surname> <given-names>D</given-names></name>, <name name-style="western"><surname>Yoo</surname> <given-names>SH</given-names></name>, <name name-style="western"><surname>Hirayama</surname> <given-names>S</given-names></name>, <name name-style="western"><surname>Petrova</surname> <given-names>G</given-names></name> (<year>2005</year>) <article-title>Development and validation of a measure of display rule knowledge: the display rule assessment inventory</article-title>. <source>Emotion</source> <volume>5</volume>: <fpage>23</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037/1528-3542.5.1.23" xlink:type="simple">10.1037/1528-3542.5.1.23</ext-link></comment> <object-id pub-id-type="pmid">15755217</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref025"><label>25</label><mixed-citation publication-type="other" xlink:type="simple">Tangney JP (1999) The self-conscious emotions: Shame, guilt, embarrassment and pride.</mixed-citation></ref>
<ref id="pone.0231756.ref026"><label>26</label><mixed-citation publication-type="other" xlink:type="simple">Izard CE (1994) Innate and universal facial expressions: evidence from developmental and cross-cultural research.</mixed-citation></ref>
<ref id="pone.0231756.ref027"><label>27</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Fridlund</surname> <given-names>AJ</given-names></name> (<year>1994</year>) <source>Human facial expression—An evolutionary view</source>. <publisher-loc>London</publisher-loc>: <publisher-name>Academic Press</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref028"><label>28</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Fridlund</surname> <given-names>AJ</given-names></name> (<year>2017</year>) <chapter-title>The behavioral ecology view of facial displays: 25 years later</chapter-title>. In: <name name-style="western"><surname>Russell</surname> <given-names>J-MF-DJA</given-names></name>, editor. <source>The science of facial expression</source>: <publisher-name>Oxford University Press</publisher-name>. pp. <fpage>77</fpage>–<lpage>92</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref029"><label>29</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Ekman</surname> <given-names>P</given-names></name>, <name name-style="western"><surname>Friesen</surname> <given-names>WV</given-names></name> (<year>1971</year>) <article-title>Constants across cultures in the face and emotion</article-title>. <source>Journal of personality and social psychology</source> <volume>17</volume>: <fpage>124</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037/h0030377" xlink:type="simple">10.1037/h0030377</ext-link></comment> <object-id pub-id-type="pmid">5542557</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref030"><label>30</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Waller</surname> <given-names>BM</given-names></name>, <name name-style="western"><surname>Whitehouse</surname> <given-names>J</given-names></name>, <name name-style="western"><surname>Micheletta</surname> <given-names>J</given-names></name> (<year>2016</year>) <article-title>Macaques can predict social outcomes from facial expressions</article-title>. <source>Animal cognition</source> <volume>19</volume>: <fpage>1031</fpage>–<lpage>1036</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1007/s10071-016-0992-3" xlink:type="simple">10.1007/s10071-016-0992-3</ext-link></comment> <object-id pub-id-type="pmid">27155662</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref031"><label>31</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Krebs</surname> <given-names>JR</given-names></name>, <name name-style="western"><surname>Davies</surname> <given-names>N</given-names></name> (<year>1993</year>) <source>An introduction to behavioural ecology</source>: <publisher-name>Blackwell Scientific Publications</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref032"><label>32</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Krebs</surname> <given-names>JR</given-names></name>, <name name-style="western"><surname>Dawkins</surname> <given-names>R</given-names></name> (<year>1978</year>) <chapter-title>Animal signals: information or manipulation</chapter-title>. In: <name name-style="western"><surname>Krebs</surname> <given-names>JR</given-names></name>, <name name-style="western"><surname>Dawkins</surname> <given-names>R</given-names></name>, editors. <source>Behavioural ecology: An evolutionary approach</source>: <publisher-name>Blackwell Scientifc Publications</publisher-name>. pp. <fpage>282</fpage>–<lpage>309</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref033"><label>33</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Dezecache</surname> <given-names>G</given-names></name>, <name name-style="western"><surname>Mercier</surname> <given-names>H</given-names></name>, <name name-style="western"><surname>Scott-Phillips</surname> <given-names>TC</given-names></name> (<year>2013</year>) <article-title>An evolutionary approach to emotional communication</article-title>. <source>Journal of Pragmatics</source> <volume>59</volume>: <fpage>221</fpage>–<lpage>233</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref034"><label>34</label><mixed-citation publication-type="other" xlink:type="simple">Bradbury JW, Vehrencamp SL (1998) Principles of animal communication.</mixed-citation></ref>
<ref id="pone.0231756.ref035"><label>35</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Scherer</surname> <given-names>KR</given-names></name>, <name name-style="western"><surname>Mortillaro</surname> <given-names>M</given-names></name>, <name name-style="western"><surname>Mehu</surname> <given-names>M</given-names></name> (<year>2013</year>) <article-title>Understanding the mechanisms underlying the production of facial expression of emotion: A componential perspective</article-title>. <source>Emotion Review</source> <volume>5</volume>: <fpage>47</fpage>–<lpage>53</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref036"><label>36</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Barrett</surname> <given-names>LF</given-names></name> (<year>2017</year>) <article-title>The theory of constructed emotion: an active inference account of interoception and categorization</article-title>. <source>Social cognitive and affective neuroscience</source> <volume>12</volume>: <fpage>1</fpage>–<lpage>23</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1093/scan/nsw154" xlink:type="simple">10.1093/scan/nsw154</ext-link></comment> <object-id pub-id-type="pmid">27798257</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref037"><label>37</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Barrett</surname> <given-names>LF</given-names></name> (<year>2017</year>) <source>How emotions are made: The secret life of the brain</source>: <publisher-name>Houghton Mifflin Harcourt</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref038"><label>38</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Barrett</surname> <given-names>LF</given-names></name>, <name name-style="western"><surname>Mesquita</surname> <given-names>B</given-names></name>, <name name-style="western"><surname>Gendron</surname> <given-names>M</given-names></name> (<year>2011</year>) <article-title>Context in emotion perception</article-title>. <source>Current Directions in Psychological Science</source> <volume>20</volume>: <fpage>286</fpage>–<lpage>290</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref039"><label>39</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Ekman</surname> <given-names>P</given-names></name>. <source>Universals and cultural differences in facial expressions of emotion</source>; <year>1971</year>. <publisher-name>University of Nebraska Press</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref040"><label>40</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Teroni</surname> <given-names>F</given-names></name>, <name name-style="western"><surname>Deonna</surname> <given-names>JA</given-names></name> (<year>2008</year>) <article-title>Differentiating shame from guilt</article-title>. <source>Consciousness and cognition</source> <volume>17</volume>: <fpage>725</fpage>–<lpage>740</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1016/j.concog.2008.02.002" xlink:type="simple">10.1016/j.concog.2008.02.002</ext-link></comment> <object-id pub-id-type="pmid">18445530</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref041"><label>41</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Aviezer</surname> <given-names>H</given-names></name>, <name name-style="western"><surname>Hassin</surname> <given-names>R</given-names></name>, <name name-style="western"><surname>Bentin</surname> <given-names>S</given-names></name>, <name name-style="western"><surname>Trope</surname> <given-names>Y</given-names></name> (<year>2008</year>) <article-title>Putting facial expressions back in context</article-title>. <source>First impressions</source>: <fpage>255</fpage>–<lpage>286</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref042"><label>42</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Hess</surname> <given-names>U</given-names></name>, <name name-style="western"><surname>Blaison</surname> <given-names>C</given-names></name>, <name name-style="western"><surname>Kafetsios</surname> <given-names>K</given-names></name> (<year>2016</year>) <article-title>Judging facial emotion expressions in context: The influence of culture and self-construal orientation</article-title>. <source>Journal of Nonverbal Behavior</source> <volume>40</volume>: <fpage>55</fpage>–<lpage>64</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref043"><label>43</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Izard</surname> <given-names>CE</given-names></name> (<year>1977</year>) <source>Human emotions</source>. <publisher-loc>New-York</publisher-loc>: <publisher-name>Plenum Press</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref044"><label>44</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Keltner</surname> <given-names>D</given-names></name> (<year>1995</year>) <article-title>Signs of appeasement: Evidence for the distinct displays of embarrassment, amusement, and shame</article-title>. <source>Journal of personality and social psychology</source> <volume>68</volume>: <fpage>441</fpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref045"><label>45</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Lewis</surname> <given-names>M</given-names></name>, <name name-style="western"><surname>Alessandri</surname> <given-names>SM</given-names></name>, <name name-style="western"><surname>Sullivan</surname> <given-names>MW</given-names></name> (<year>1992</year>) <article-title>Differences in shame and pride as a function of children's gender and task difficulty</article-title>. <source>Child development</source> <volume>63</volume>: <fpage>630</fpage>–<lpage>638</lpage>. <object-id pub-id-type="pmid">1600827</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref046"><label>46</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Keltner</surname> <given-names>D</given-names></name>, <name name-style="western"><surname>Anderson</surname> <given-names>C</given-names></name> (<year>2000</year>) <article-title>Saving face for Darwin: The functions and uses of embarrassment</article-title>. <source>Current directions in psychological science</source> <volume>9</volume>: <fpage>187</fpage>–<lpage>192</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref047"><label>47</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Higgins</surname> <given-names>ET</given-names></name> (<year>1987</year>) <article-title>Self-discrepancy: a theory relating self and affect</article-title>. <source>Psychological review</source> <volume>94</volume>: <fpage>319</fpage>. <object-id pub-id-type="pmid">3615707</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref048"><label>48</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Eisenberg</surname> <given-names>N</given-names></name>, <name name-style="western"><surname>Fabes</surname> <given-names>RA</given-names></name>, <name name-style="western"><surname>Miller</surname> <given-names>PA</given-names></name>, <name name-style="western"><surname>Fultz</surname> <given-names>J</given-names></name>, <name name-style="western"><surname>Shell</surname> <given-names>R</given-names></name>, <etal>et al</etal>. (<year>1989</year>) <article-title>Relation of sympathy and personal distress to prosocial behavior: a multimethod study</article-title>. <source>Journal of personality and social psychology</source> <volume>57</volume>: <fpage>55</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037//0022-3514.57.1.55" xlink:type="simple">10.1037//0022-3514.57.1.55</ext-link></comment> <object-id pub-id-type="pmid">2754604</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref049"><label>49</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Emde</surname> <given-names>RN</given-names></name>, <name name-style="western"><surname>Johnson</surname> <given-names>WF</given-names></name>, <name name-style="western"><surname>Easterbrooks</surname> <given-names>MA</given-names></name> (<year>1987</year>) <article-title>The do's and don'ts of early moral development: Psychoanalytic tradition and current research</article-title>. <source>The emergence of morality in young children</source>: <fpage>245</fpage>–<lpage>276</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref050"><label>50</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Henrich</surname> <given-names>J</given-names></name>, <name name-style="western"><surname>Heine</surname> <given-names>SJ</given-names></name>, <name name-style="western"><surname>Norenzayan</surname> <given-names>A</given-names></name> (<year>2010</year>) <article-title>The weirdest people in the world?</article-title> <source>Behavioral and brain sciences</source> <volume>33</volume>: <fpage>61</fpage>–<lpage>83</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1017/S0140525X0999152X" xlink:type="simple">10.1017/S0140525X0999152X</ext-link></comment> <object-id pub-id-type="pmid">20550733</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref051"><label>51</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Galati</surname> <given-names>D</given-names></name>, <name name-style="western"><surname>Sini</surname> <given-names>B</given-names></name>, <name name-style="western"><surname>Schmidt</surname> <given-names>S</given-names></name>, <name name-style="western"><surname>Tinti</surname> <given-names>C</given-names></name> (<year>2003</year>) <article-title>Spontaneous facial expressions in congenitally blind and sighted children aged 8–11</article-title>. <source>Journal of Visual Impairment and Blindness</source> <volume>97</volume>: <fpage>418</fpage>–<lpage>428</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref052"><label>52</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Schmidt</surname> <given-names>KL</given-names></name>, <name name-style="western"><surname>Cohn</surname> <given-names>JF</given-names></name>, <name name-style="western"><surname>Tian</surname> <given-names>Y</given-names></name> (<year>2003</year>) <article-title>Signal characteristics of spontaneous facial expressions: Automatic movement in solitary and social smiles</article-title>. <source>Biological psychology</source> <volume>65</volume>: <fpage>49</fpage>–<lpage>66</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1016/s0301-0511(03)00098-x" xlink:type="simple">10.1016/s0301-0511(03)00098-x</ext-link></comment> <object-id pub-id-type="pmid">14638288</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref053"><label>53</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Mathôt</surname> <given-names>S</given-names></name>, <name name-style="western"><surname>Schreij</surname> <given-names>D</given-names></name>, <name name-style="western"><surname>Theeuwes</surname> <given-names>J</given-names></name> (<year>2012</year>) <article-title>OpenSesame: An open-source, graphical experiment builder for the social sciences</article-title>. <source>Behavior research methods</source> <volume>44</volume>: <fpage>314</fpage>–<lpage>324</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3758/s13428-011-0168-7" xlink:type="simple">10.3758/s13428-011-0168-7</ext-link></comment> <object-id pub-id-type="pmid">22083660</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref054"><label>54</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Gosling</surname> <given-names>SD</given-names></name>, <name name-style="western"><surname>Rentfrow</surname> <given-names>PJ</given-names></name>, <name name-style="western"><surname>Swann</surname> <given-names>WB</given-names></name> (<year>2003</year>) <article-title>A very brief measure of the Big-Five personality domains</article-title>. <source>Journal of Research in Personality</source> <volume>37</volume>: <fpage>504</fpage>–<lpage>528</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref055"><label>55</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Jonason</surname> <given-names>PK</given-names></name>, <name name-style="western"><surname>Webster</surname> <given-names>GD</given-names></name> (<year>2010</year>) <article-title>The Dirty Dozen: A Concise Measure of the Dark Triad</article-title>. <source>Psychological Assessment</source> <volume>22</volume>: <fpage>420</fpage>–<lpage>432</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037/a0019265" xlink:type="simple">10.1037/a0019265</ext-link></comment> <object-id pub-id-type="pmid">20528068</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref056"><label>56</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Watson</surname> <given-names>D</given-names></name>, <name name-style="western"><surname>Clark</surname> <given-names>LA</given-names></name>, <name name-style="western"><surname>Tellegen</surname> <given-names>A</given-names></name> (<year>1988</year>) <article-title>Development and validation of brief measures of positive and negative affect: the PANAS scales</article-title>. <source>Journal of personality and social psychology</source> <volume>54</volume>: <fpage>1063</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037//0022-3514.54.6.1063" xlink:type="simple">10.1037//0022-3514.54.6.1063</ext-link></comment> <object-id pub-id-type="pmid">3397865</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref057"><label>57</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Rebega</surname> <given-names>OL</given-names></name>, <name name-style="western"><surname>Apostol</surname> <given-names>L</given-names></name>, <name name-style="western"><surname>Benga</surname> <given-names>O</given-names></name>, <name name-style="western"><surname>Miclea</surname> <given-names>M</given-names></name> (<year>2013</year>) <article-title>Inducing Guilt: A Literature Review</article-title>. <source>Procedia-Social and Behavioral Sciences</source> <volume>78</volume>: <fpage>536</fpage>–<lpage>540</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref058"><label>58</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>De Hooge</surname> <given-names>IE</given-names></name>, <name name-style="western"><surname>Nelissen</surname> <given-names>R</given-names></name>, <name name-style="western"><surname>Breugelmans</surname> <given-names>SM</given-names></name>, <name name-style="western"><surname>Zeelenberg</surname> <given-names>M</given-names></name> (<year>2011</year>) <article-title>What is moral about guilt? Acting “prosocially” at the disadvantage of others</article-title>. <source>Journal of personality and social psychology</source> <volume>100</volume>: <fpage>462</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037/a0021459" xlink:type="simple">10.1037/a0021459</ext-link></comment> <object-id pub-id-type="pmid">21244173</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref059"><label>59</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Ekman</surname> <given-names>P</given-names></name>, <name name-style="western"><surname>Friesen</surname> <given-names>WV</given-names></name> (<year>1978</year>) <source>Facial action coding system</source>. <publisher-loc>Palo Alto</publisher-loc>: <publisher-name>Consulting Psychologists Press</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref060"><label>60</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Ekman</surname> <given-names>P</given-names></name>, <name name-style="western"><surname>Friesen</surname> <given-names>WV</given-names></name>, <name name-style="western"><surname>Hager</surname> <given-names>JC</given-names></name> (<year>2002</year>) <source>Facial action coding system—investigator’s guide</source>. <publisher-name>Research Nexus</publisher-name>, <publisher-loc>Salt Lake City</publisher-loc>.</mixed-citation></ref>
<ref id="pone.0231756.ref061"><label>61</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Troisi</surname> <given-names>A</given-names></name> (<year>2002</year>) <article-title>Displacement activities as a behavioral measure of stress in nonhuman primates and human subjects</article-title>. <source>Stress</source> <volume>5</volume>: <fpage>47</fpage>–<lpage>54</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1080/102538902900012378" xlink:type="simple">10.1080/102538902900012378</ext-link></comment> <object-id pub-id-type="pmid">12171766</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref062"><label>62</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Martin</surname> <given-names>P</given-names></name>, <name name-style="western"><surname>Bateson</surname> <given-names>P</given-names></name> (<year>1993</year>) <source>Measuring behaviour: an introductory guide</source>: <publisher-name>Cambridge University Press</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref063"><label>63</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Mangold</surname> <given-names>P</given-names></name> (<year>1998</year>) <source>Interact [computer software]</source>. <publisher-loc>Arnstorf, Germany</publisher-loc>: <publisher-name>Mangold International</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref064"><label>64</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Krippendorff</surname> <given-names>K</given-names></name> (<year>1970</year>) <article-title>Bivariate agreement coefficients for reliability of data</article-title>. <source>Sociological methodology</source> <volume>2</volume>: <fpage>139</fpage>–<lpage>150</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref065"><label>65</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Hayes</surname> <given-names>AF</given-names></name>, <name name-style="western"><surname>Krippendorff</surname> <given-names>K</given-names></name> (<year>2007</year>) <article-title>Answering the call for a standard reliability measure for coding data</article-title>. <source>Communication methods and measures</source> <volume>1</volume>: <fpage>77</fpage>–<lpage>89</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref066"><label>66</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>IBM</surname> <given-names>C</given-names></name> (<year>2016</year>) <source>SPSS for Windows, version 24</source>. <publisher-name>IBM Corp</publisher-name> <publisher-loc>Armonk (NY)</publisher-loc>.</mixed-citation></ref>
<ref id="pone.0231756.ref067"><label>67</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Krippendorff</surname> <given-names>K</given-names></name> (<year>2004</year>) <article-title>Reliability in content analysis: Some common misconceptions and recommendations</article-title>. <source>Human communication research</source> <volume>30</volume>: <fpage>411</fpage>–<lpage>433</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref068"><label>68</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Krippendorff</surname> <given-names>K</given-names></name> (<year>2018</year>) <source>Content analysis: An introduction to its methodology</source>: <publisher-name>Sage publications</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref069"><label>69</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Snijders</surname> <given-names>TA</given-names></name>, <name name-style="western"><surname>Borgatti</surname> <given-names>SP</given-names></name> (<year>1999</year>) <article-title>Non-parametric standard errors and tests for network statistics</article-title>. <source>Connections</source> <volume>22</volume>: <fpage>161</fpage>–<lpage>170</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref070"><label>70</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Cousins</surname> <given-names>SD</given-names></name> (<year>1989</year>) <article-title>Culture and self-perception in Japan and the United States</article-title>. <source>Journal of Personality and Social Psychology</source> <volume>56</volume>: <fpage>124</fpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref071"><label>71</label><mixed-citation publication-type="other" xlink:type="simple">De Leersnyder J, Mesquita B (2015) How salient cultural concerns shape emotions: A behavioral coding study on biculturals’ emotional frame switching.</mixed-citation></ref>
<ref id="pone.0231756.ref072"><label>72</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Jack</surname> <given-names>RE</given-names></name>, <name name-style="western"><surname>Caldara</surname> <given-names>R</given-names></name>, <name name-style="western"><surname>Schyns</surname> <given-names>PG</given-names></name> (<year>2012</year>) <article-title>Internal representations reveal cultural diversity in expectations of facial expressions of emotion</article-title>. <source>Journal of Experimental Psychology: General</source> <volume>141</volume>: <fpage>19</fpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref073"><label>73</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Crivelli</surname> <given-names>C</given-names></name>, <name name-style="western"><surname>Russell</surname> <given-names>JA</given-names></name>, <name name-style="western"><surname>Jarillo</surname> <given-names>S</given-names></name>, <name name-style="western"><surname>Fernández-Dols</surname> <given-names>J-M</given-names></name> (<year>2016</year>) <article-title>The fear gasping face as a threat display in a Melanesian society</article-title>. <source>Proceedings of the National Academy of Sciences</source> <volume>113</volume>: <fpage>12403</fpage>–<lpage>12407</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref074"><label>74</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Crivelli</surname> <given-names>C</given-names></name>, <name name-style="western"><surname>Russell</surname> <given-names>JA</given-names></name>, <name name-style="western"><surname>Jarillo</surname> <given-names>S</given-names></name>, <name name-style="western"><surname>Fernández-Dols</surname> <given-names>J-M</given-names></name> (<year>2017</year>) <article-title>Recognizing spontaneous facial expressions of emotion in a small-scale society of Papua New Guinea</article-title>. <source>Emotion</source> <volume>17</volume>: <fpage>337</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037/emo0000236" xlink:type="simple">10.1037/emo0000236</ext-link></comment> <object-id pub-id-type="pmid">27736108</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref075"><label>75</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Benjamini</surname> <given-names>Y</given-names></name>, <name name-style="western"><surname>Yekutieli</surname> <given-names>D</given-names></name> (<year>2001</year>) <article-title>The control of the false discovery rate in multiple testing under dependency</article-title>. <source>The annals of statistics</source> <volume>29</volume>: <fpage>1165</fpage>–<lpage>1188</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref076"><label>76</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Tracy</surname> <given-names>JL</given-names></name>, <name name-style="western"><surname>Robins</surname> <given-names>RW</given-names></name> (<year>2008</year>) <article-title>The nonverbal expression of pride: evidence for cross-cultural recognition</article-title>. <source>Journal of personality and social psychology</source> <volume>94</volume>: <fpage>516</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037/0022-3514.94.3.516" xlink:type="simple">10.1037/0022-3514.94.3.516</ext-link></comment> <object-id pub-id-type="pmid">18284295</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref077"><label>77</label><mixed-citation publication-type="other" xlink:type="simple">Qualtrics software (2012) Qualtrics. Available from <ext-link ext-link-type="uri" xlink:href="http://qualtrics.com" xlink:type="simple">http://qualtrics.com</ext-link>.</mixed-citation></ref>
<ref id="pone.0231756.ref078"><label>78</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Cohen</surname> <given-names>TR</given-names></name>, <name name-style="western"><surname>Wolf</surname> <given-names>ST</given-names></name>, <name name-style="western"><surname>Panter</surname> <given-names>AT</given-names></name>, <name name-style="western"><surname>Insko</surname> <given-names>CA</given-names></name> (<year>2011</year>) <article-title>Introducing the GASP scale: a new measure of guilt and shame proneness</article-title>. <source>Journal of personality and social psychology</source> <volume>100</volume>: <fpage>947</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1037/a0022641" xlink:type="simple">10.1037/a0022641</ext-link></comment> <object-id pub-id-type="pmid">21517196</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref079"><label>79</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Fernández-Dols</surname> <given-names>JM</given-names></name>, <name name-style="western"><surname>Sierra</surname> <given-names>B</given-names></name>, <name name-style="western"><surname>Ruiz-Belda</surname> <given-names>MA</given-names></name> (<year>1993</year>) <article-title>On the clarity of expressive and contextual information in the recognition of emotions: A methodological critique</article-title>. <source>European Journal of Social Psychology</source> <volume>23</volume>: <fpage>195</fpage>–<lpage>202</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref080"><label>80</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Hess</surname> <given-names>U</given-names></name>, <name name-style="western"><surname>Banse</surname> <given-names>R</given-names></name>, <name name-style="western"><surname>Kappas</surname> <given-names>A</given-names></name> (<year>1995</year>) <article-title>The intensity of facial expression is determined by underlying affective state and social situation</article-title>. <source>Journal of personality and social psychology</source> <volume>69</volume>: <fpage>280</fpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref081"><label>81</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Cohn</surname> <given-names>JF</given-names></name>, <name name-style="western"><surname>Schmidt</surname> <given-names>K</given-names></name> (<year>2003</year>) <chapter-title>The timing of facial motion in posed and spontaneous smiles</chapter-title>. <source>Active Media Technology</source>: <publisher-name>World Scientific</publisher-name>. pp. <fpage>57</fpage>–<lpage>69</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref082"><label>82</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Schmidt</surname> <given-names>KL</given-names></name>, <name name-style="western"><surname>Bhattacharya</surname> <given-names>S</given-names></name>, <name name-style="western"><surname>Denlinger</surname> <given-names>R</given-names></name> (<year>2009</year>) <article-title>Comparison of deliberate and spontaneous facial movement in smiles and eyebrow raises</article-title>. <source>Journal of nonverbal behavior</source> <volume>33</volume>: <fpage>35</fpage>–<lpage>45</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1007/s10919-008-0058-6" xlink:type="simple">10.1007/s10919-008-0058-6</ext-link></comment> <object-id pub-id-type="pmid">20333273</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref083"><label>83</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Baayen</surname> <given-names>R</given-names></name> (<year>2008</year>) <source>Analyzing Linguistic Data: A practical introduction to statistics using R</source>. <publisher-name>Cambridge University Press</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref084"><label>84</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Team</surname> <given-names>RD</given-names></name> (<year>2016</year>) <source>R: A Language and Environment for Statistical Computing</source>. <publisher-name>R Foundation for Statistical Computing</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref085"><label>85</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Schielzeth</surname> <given-names>H</given-names></name> (<year>2010</year>) <article-title>Simple means to improve the interpretability of regression coefficients</article-title>. <source>Methods in Ecology and Evolution</source> <volume>1</volume>: <fpage>103</fpage>–<lpage>113</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref086"><label>86</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Schielzeth</surname> <given-names>H</given-names></name>, <name name-style="western"><surname>Forstmeier</surname> <given-names>W</given-names></name> (<year>2008</year>) <article-title>Conclusions beyond support: overconfident estimates in mixed models</article-title>. <source>Behavioral Ecology</source> <volume>20</volume>: <fpage>416</fpage>–<lpage>420</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1093/beheco/arn145" xlink:type="simple">10.1093/beheco/arn145</ext-link></comment> <object-id pub-id-type="pmid">19461866</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref087"><label>87</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Dobson</surname> <given-names>AJ</given-names></name>, <name name-style="western"><surname>Barnett</surname> <given-names>AG</given-names></name> (<year>2008</year>) <source>An introduction to generalized linear models</source>: <publisher-name>Chapman and Hall/CRC</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref088"><label>88</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Field</surname> <given-names>A</given-names></name>, <name name-style="western"><surname>Miles</surname> <given-names>J</given-names></name>, <name name-style="western"><surname>Field</surname> <given-names>Z</given-names></name> (<year>2012</year>) <source>Discovering statistics using R</source>: <publisher-name>Sage publications</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref089"><label>89</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Fox</surname> <given-names>J</given-names></name>, <name name-style="western"><surname>Weisberg</surname> <given-names>S</given-names></name>, <name name-style="western"><surname>Adler</surname> <given-names>D</given-names></name>, <name name-style="western"><surname>Bates</surname> <given-names>D</given-names></name>, <name name-style="western"><surname>Baud-Bovy</surname> <given-names>G</given-names></name>, <etal>et al</etal>. (<year>2012</year>) <source>Package ‘car’</source>. <publisher-loc>Vienna</publisher-loc>: <publisher-name>R Foundation for Statistical Computing</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref090"><label>90</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>de Jong</surname> <given-names>PJ</given-names></name>, <name name-style="western"><surname>Dijk</surname> <given-names>C</given-names></name> (<year>2013</year>) <article-title>Social effects of facial blushing: influence of context and actor versus observer perspective</article-title>. <source>Social and Personality Psychology Compass</source> <volume>7</volume>: <fpage>13</fpage>–<lpage>26</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref091"><label>91</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>De Jong</surname> <given-names>PJ</given-names></name>, <name name-style="western"><surname>Peters</surname> <given-names>ML</given-names></name>, <name name-style="western"><surname>De Cremer</surname> <given-names>D</given-names></name> (<year>2003</year>) <article-title>Blushing may signify guilt: Revealing effects of blushing in ambiguous social situations</article-title>. <source>Motivation and emotion</source> <volume>27</volume>: <fpage>225</fpage>–<lpage>249</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref092"><label>92</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Yu</surname> <given-names>H</given-names></name>, <name name-style="western"><surname>Duan</surname> <given-names>Y</given-names></name>, <name name-style="western"><surname>Zhou</surname> <given-names>X</given-names></name> (<year>2017</year>) <article-title>Guilt in the eyes: Eye movement and physiological evidence for guilt-induced social avoidance</article-title>. <source>Journal of Experimental Social Psychology</source> <volume>71</volume>: <fpage>128</fpage>–<lpage>137</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref093"><label>93</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Mohiyeddini</surname> <given-names>C</given-names></name>, <name name-style="western"><surname>Bauer</surname> <given-names>S</given-names></name>, <name name-style="western"><surname>Semple</surname> <given-names>S</given-names></name> (<year>2013</year>) <article-title>Displacement behaviour is associated with reduced stress levels among men but not women</article-title>. <source>PloS one</source> <volume>8</volume>: <fpage>e56355</fpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.1371/journal.pone.0056355" xlink:type="simple">10.1371/journal.pone.0056355</ext-link></comment> <object-id pub-id-type="pmid">23457555</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref094"><label>94</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Mohiyeddini</surname> <given-names>C</given-names></name>, <name name-style="western"><surname>Semple</surname> <given-names>S</given-names></name> (<year>2013</year>) <article-title>Displacement behaviour regulates the experience of stress in men</article-title>. <source>Stress</source> <volume>16</volume>: <fpage>163</fpage>–<lpage>171</lpage>. <comment>doi: <ext-link ext-link-type="uri" xlink:href="https://doi.org/10.3109/10253890.2012.707709" xlink:type="simple">10.3109/10253890.2012.707709</ext-link></comment> <object-id pub-id-type="pmid">23017012</object-id></mixed-citation></ref>
<ref id="pone.0231756.ref095"><label>95</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Chance</surname> <given-names>MR</given-names></name>. <source>An interpretation of some agonistic postures; the role of “cut-off” acts and postures</source>; <year>1962</year>. <publisher-name>Academic Press</publisher-name> <publisher-loc>London</publisher-loc>. pp. <fpage>71</fpage>–<lpage>89</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref096"><label>96</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Sgoifo</surname> <given-names>A</given-names></name>, <name name-style="western"><surname>Braglia</surname> <given-names>F</given-names></name>, <name name-style="western"><surname>Costoli</surname> <given-names>T</given-names></name>, <name name-style="western"><surname>Musso</surname> <given-names>E</given-names></name>, <name name-style="western"><surname>Meerlo</surname> <given-names>P</given-names></name>, <etal>et al</etal>. (<year>2003</year>) <article-title>Cardiac autonomic reactivity and salivary cortisol in men and women exposed to social stressors: relationship with individual ethological profile</article-title>. <source>Neuroscience &amp; Biobehavioral Reviews</source> <volume>27</volume>: <fpage>179</fpage>–<lpage>188</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref097"><label>97</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Ekman</surname> <given-names>P</given-names></name>, <name name-style="western"><surname>Friesen</surname> <given-names>WV</given-names></name> (<year>1972</year>) <article-title>Hand movements</article-title>. <source>Journal of communication</source> <volume>22</volume>: <fpage>353</fpage>–<lpage>374</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref098"><label>98</label><mixed-citation publication-type="other" xlink:type="simple">Julle-Danière E, Whitehouse J, Harris C, Chung M, Vrij A, et al. (in prep) Guilt outside of context.</mixed-citation></ref>
<ref id="pone.0231756.ref099"><label>99</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Fessler</surname> <given-names>D</given-names></name> (<year>2004</year>) <article-title>Shame in two cultures: Implications for evolutionary approaches</article-title>. <source>Journal of Cognition and Culture</source> <volume>4</volume>: <fpage>207</fpage>–<lpage>262</lpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref100"><label>100</label><mixed-citation publication-type="book" xlink:type="simple"><name name-style="western"><surname>Julle-Danière</surname> <given-names>E</given-names></name> (<year>2019</year>) <source>The expression, experience, and social consequences of guilt: A cross-cultural study</source>: <publisher-name>University of Portsmouth</publisher-name>.</mixed-citation></ref>
<ref id="pone.0231756.ref101"><label>101</label><mixed-citation publication-type="other" xlink:type="simple">Julle-Danière E, Whitehouse J, Vrij A, Gustafsson E, Waller BM (under review) Are there non-verbal signals of guilt? PLOS One.</mixed-citation></ref>
<ref id="pone.0231756.ref102"><label>102</label><mixed-citation publication-type="journal" xlink:type="simple"><name name-style="western"><surname>Jack</surname> <given-names>RE</given-names></name>, <name name-style="western"><surname>Sun</surname> <given-names>W</given-names></name>, <name name-style="western"><surname>Delis</surname> <given-names>I</given-names></name>, <name name-style="western"><surname>Garrod</surname> <given-names>OG</given-names></name>, <name name-style="western"><surname>Schyns</surname> <given-names>PG</given-names></name> (<year>2016</year>) <article-title>Four not six: Revealing culturally common facial expressions of emotion</article-title>. <source>Journal of Experimental Psychology: General</source> <volume>145</volume>: <fpage>708</fpage>.</mixed-citation></ref>
<ref id="pone.0231756.ref103"><label>103</label><mixed-citation publication-type="other" xlink:type="simple">Julle-Danière E, Whitehouse J, Vrij A, Gustafsson E, Waller BM (in prep) The social outcomes of experiencing and seeing guilt.</mixed-citation></ref>
</ref-list>
</back>
</article>