Publications
2021
Williams, Duncan; Daly, Ian; Al-Taie, Inas; Di Giuseppantonio Di Franco, Paola; Tymkiw, Michael
Neuro-curation: A case study on the use of sonic enhancement of virtual museum exhibits Conference
Audio Mostly 2021, 2021.
BibTeX | Tags: Affective computing, Emotion, Music generation
@conference{Williams2021,
  title     = {Neuro-curation: A case study on the use of sonic enhancement of virtual museum exhibits},
  author    = {Duncan Williams and Ian Daly and Inas Al-Taie and Paola Di Giuseppantonio Di Franco and Michael Tymkiw},
  year      = {2021},
  date      = {2021-08-09},
  booktitle = {Audio Mostly 2021},
  keywords  = {Affective computing, Emotion, Music generation},
  pubstate  = {published},
  tppubtype = {conference}
}
2020
Daly, Ian; Nicolaou, Nicoletta; Williams, Duncan; Hwang, Faustina; Kirke, Alexis; Miranda, Eduardo; Nasuto, Slawomir J.
Neural and physiological data from participants listening to affective music Journal Article
In: Scientific Data, 2020.
Abstract | BibTeX | Tags: Affective composition, Affective computing, BCI, BCMI, Data, EEG, Emotion, fMRI, Music
@article{Daly2020data,
  author    = {Ian Daly and Nicoletta Nicolaou and Duncan Williams and Faustina Hwang and Alexis Kirke and Eduardo Miranda and Slawomir J. Nasuto},
  title     = {Neural and physiological data from participants listening to affective music},
  journal   = {Scientific Data},
  year      = {2020},
  date      = {2020-05-07},
  abstract  = {Music provides a means of communicating affective meaning. However, the neurological mechanisms by which music induces affect are not fully understood. Our project sought to investigate this through a series of experiments into how humans react to affective musical stimuli and how physiological and neurological signals recorded from those participants change in accordance with self-reported changes in affect. In this paper, the datasets recorded over the course of this project are presented, including details of the musical stimuli, participant reports of their felt changes in affective states as they listened to the music, and concomitant recordings of physiological and neurological activity. We also include non-identifying meta data on our participant populations for purposes of further exploratory analysis. This data provides a large and valuable novel resource for researchers investigating emotion, music, and how they affect our neural and physiological activity.},
  keywords  = {Affective composition, Affective computing, BCI, BCMI, Data, EEG, Emotion, fMRI, Music},
  pubstate  = {published},
  tppubtype = {article}
}
Daly, Ian; Williams, Duncan
“Hello Computer, How Am I Feeling?”, Case Studies of Neural Technology to Measure Emotions Book Chapter
In: Springer, 2020, ISBN: 978-3-030-34783-3.
Links | BibTeX | Tags: Affective composition, Affective computing, BCI, Emotion, Music, Music generation
@inbook{Daly2020book,
  title     = {“Hello Computer, How Am I Feeling?”, Case Studies of Neural Technology to Measure Emotions},
  author    = {Ian Daly and Duncan Williams},
  doi       = {10.1007/978-3-030-34784-0_11},
  isbn      = {978-3-030-34783-3},
  year      = {2020},
  date      = {2020-02-28},
  publisher = {Springer},
  keywords  = {Affective composition, Affective computing, BCI, Emotion, Music, Music generation},
  pubstate  = {published},
  tppubtype = {inbook}
}
2019
Daly, Ian; Williams, Duncan; Hwang, Faustina; Kirke, Alexis; Miranda, Eduardo; Nasuto, Slawomir J.
Electroencephalography reflects the activity of sub-cortical brain regions during approach-withdrawal behaviour while listening to music Journal Article
In: Scientific Reports, 2019.
Links | BibTeX | Tags: Affective composition, EEG, Emotion, fMRI, Music, Music generation
@article{Daly2019-fMRI,
  title     = {Electroencephalography reflects the activity of sub-cortical brain regions during approach-withdrawal behaviour while listening to music},
  author    = {Ian Daly and Duncan Williams and Faustina Hwang and Alexis Kirke and Eduardo Miranda and Slawomir J. Nasuto},
  url       = {https://www.nature.com/articles/s41598-019-45105-2},
  doi       = {10.1038/s41598-019-45105-2},
  year      = {2019},
  date      = {2019-06-03},
  journal   = {Scientific Reports},
  keywords  = {Affective composition, EEG, Emotion, fMRI, Music, Music generation},
  pubstate  = {published},
  tppubtype = {article}
}
Daly, Ian; Bourgaize, Jake; Vernitski, Alexei
Mathematical mindsets increase student motivation: Evidence from the EEG Journal Article
In: Trends in Neuroscience and Education, 2019.
BibTeX | Tags: Affective computing, Education, EEG, Emotion, Mathematical mindsets
@article{Daly2019,
  author    = {Ian Daly and Jake Bourgaize and Alexei Vernitski},
  title     = {Mathematical mindsets increase student motivation: Evidence from the EEG},
  journal   = {Trends in Neuroscience and Education},
  year      = {2019},
  date      = {2019-04-11},
  keywords  = {Affective computing, Education, EEG, Emotion, Mathematical mindsets},
  pubstate  = {published},
  tppubtype = {article}
}
2018
Williams, Duncan; Daly, Ian
BCI for ensemble music making and performance: why and how (not how and why) Conference
Together in Music: Expression, Performance and Communication in Ensembles, National Centre for Early Music, York, 2018.
BibTeX | Tags: BCMI, EEG, Emotion, Music generation
@conference{Williams2018,
  author    = {Duncan Williams and Ian Daly},
  title     = {BCI for ensemble music making and performance: why and how (not how and why)},
  booktitle = {Together in Music: Expression, Performance and Communication in Ensembles},
  address   = {National Centre for Early Music, York},
  year      = {2018},
  date      = {2018-01-04},
  keywords  = {BCMI, EEG, Emotion, Music generation},
  pubstate  = {published},
  tppubtype = {conference}
}
2017
Daly, Ian
Affective Brain-Computer Interfacing and Methods for Affective State Detection Book Chapter
In: Nam, Chang S.; Nijholt, Anton; Lotte, Fabien (Ed.): BRAIN-COMPUTER INTERFACES HANDBOOK Technological and Theoretical Advances , Chapter 8, 2017.
BibTeX | Tags: BCI, BCMI, Classification, EEG, Emotion, Music
@inbook{Daly2016chap,
  title     = {Affective Brain-Computer Interfacing and Methods for Affective State Detection},
  author    = {Ian Daly},
  editor    = {Chang S. Nam and Anton Nijholt and Fabien Lotte},
  year      = {2017},
  date      = {2017-07-25},
  booktitle = {Brain-Computer Interfaces Handbook: Technological and Theoretical Advances},
  chapter   = {8},
  keywords  = {BCI, BCMI, Classification, EEG, Emotion, Music},
  pubstate  = {published},
  tppubtype = {inbook}
}
2016
Williams, Duncan; Mears, Jamie; Kirke, Alexis; Miranda, Eduardo; Daly, Ian; Malik, Asad; Weaver, James; Hwang, Faustina; Nasuto, Slawomir
A Perceptual and Affective Evaluation of an Affectively-Driven Engine for Video Game Soundtracking Journal Article
In: ACM Computers in Entertainment, 2016.
BibTeX | Tags: Affective composition, Affective computing, Emotion, Music generation
@article{Williams2016,
  title     = {A Perceptual and Affective Evaluation of an Affectively-Driven Engine for Video Game Soundtracking},
  author    = {Duncan Williams and Jamie Mears and Alexis Kirke and Eduardo Miranda and Ian Daly and Asad Malik and James Weaver and Faustina Hwang and Slawomir Nasuto},
  year      = {2016},
  date      = {2016-06-29},
  journal   = {ACM Computers in Entertainment},
  keywords  = {Affective composition, Affective computing, Emotion, Music generation},
  pubstate  = {published},
  tppubtype = {article}
}
Daly, Ian; Williams, Duncan; Kirke, Alexis; Weaver, James; Malik, Asad; Hwang, Faustina; Miranda, Eduardo; Nasuto, Slawomir J.
Affective Brain-Computer Music Interfacing Journal Article
In: Journal of Neural Engineering, vol. 13, no. 4, 2016.
BibTeX | Tags: aBCMI, Affective composition, BCI, BCMI, Case based reasoning, EEG, Emotion, Hybrid BCI, Music generation
@article{Daly2016aBCMI,
  title     = {Affective Brain-Computer Music Interfacing},
  author    = {Ian Daly and Duncan Williams and Alexis Kirke and James Weaver and Asad Malik and Faustina Hwang and Eduardo Miranda and Slawomir J. Nasuto},
  year      = {2016},
  date      = {2016-06-21},
  journal   = {Journal of Neural Engineering},
  volume    = {13},
  number    = {4},
  doi       = {10.1088/1741-2560/13/4/046022},
  keywords  = {aBCMI, Affective composition, BCI, BCMI, Case based reasoning, EEG, Emotion, Hybrid BCI, Music generation},
  pubstate  = {published},
  tppubtype = {article}
}
Daly, Ian; Chen, Long; Zhou, Sijie; Jin, Jing
An Investigation Into The Use Of Six Facially Encoded Emotions In Brain-Computer Interfacing Journal Article
In: Brain Computer Interfaces, 2016.
BibTeX | Tags: BCI, Emotion, Event-related potential, Facially encoded emotion, Oddball paradigm
@article{Daly2016faces,
  author    = {Ian Daly and Long Chen and Sijie Zhou and Jing Jin},
  title     = {An Investigation Into The Use Of Six Facially Encoded Emotions In Brain-Computer Interfacing},
  journal   = {Brain Computer Interfaces},
  year      = {2016},
  date      = {2016-02-01},
  keywords  = {BCI, Emotion, Event-related potential, Facially encoded emotion, Oddball paradigm},
  pubstate  = {published},
  tppubtype = {article}
}
2015
Daly, Ian; Williams, Duncan; Hallowell, James; Hwang, Faustina; Kirke, Alexis; Malik, Asad; Weaver, James; Miranda, Eduardo; Nasuto, Slawomir J.
Music-induced emotions can be predicted from a combination of brain activity and acoustic features Journal Article
In: Brain and Cognition, vol. 101, pp. 1-11, 2015.
Abstract | Links | BibTeX | Tags: Acoustic features, EEG, Emotion, Music
@article{DalyPred2015,
  title     = {Music-induced emotions can be predicted from a combination of brain activity and acoustic features},
  author    = {Ian Daly and Duncan Williams and James Hallowell and Faustina Hwang and Alexis Kirke and Asad Malik and James Weaver and Eduardo Miranda and Slawomir J. Nasuto},
  url       = {http://www.iandaly.co.uk/wp-content/uploads/2016/01/music-induced-emotions-can-be-predicted-from-a-combination-of-brain-activity-and-acoustic-features.pdf},
  doi       = {10.1016/j.bandc.2015.08.003},
  year      = {2015},
  date      = {2015-12-01},
  journal   = {Brain and Cognition},
  volume    = {101},
  pages     = {1--11},
  abstract  = {It is widely acknowledged that music can communicate and induce a wide range of emotions in the listener. However, music is a highly-complex audio signal composed of a wide range of complex time- and frequency-varying components. Additionally, music-induced emotions are known to differ greatly between listeners. Therefore, it is not immediately clear what emotions will be induced in a given individual by a piece of music. We attempt to predict the music-induced emotional response in a listener by measuring the activity in the listeners electroencephalogram (EEG). We combine these measures with acoustic descriptors of the music, an approach that allows us to consider music as a complex set of time-varying acoustic features, independently of any specific music theory. Regression models are found which allow us to predict the music-induced emotions of our participants with a correlation between the actual and predicted responses of up to r=0.234,p<0.001. This regression fit suggests that over 20% of the variance of the participant's music induced emotions can be predicted by their neural activity and the properties of the music. Given the large amount of noise, non-stationarity, and non-linearity in both EEG and music, this is an encouraging result. Additionally, the combination of measures of brain activity and acoustic features describing the music played to our participants allows us to predict music-induced emotions with significantly higher accuracies than either feature type alone (p<0.01).},
  keywords  = {Acoustic features, EEG, Emotion, Music},
  pubstate  = {published},
  tppubtype = {article}
}
Williams, Duncan; Kirke, Alexis; Miranda, Eduardo; Daly, Ian; Hallowell, James; Weaver, James; Malik, Asad; Roesch, Etienne; Hwang, Faustina; Nasuto, Slawomir
Investigating Perceived Emotional Correlates of Rhythmic Density in Algorithmic Music Composition Journal Article
In: ACM Transactions on Applied Perception (TAP), vol. 12, no. 3, pp. 1-21, 2015.
Abstract | Links | BibTeX | Tags: Affective composition, Emotion, Music generation
@article{WilliamsRD2015,
  title     = {Investigating Perceived Emotional Correlates of Rhythmic Density in Algorithmic Music Composition},
  author    = {Duncan Williams and Alexis Kirke and Eduardo Miranda and Ian Daly and James Hallowell and James Weaver and Asad Malik and Etienne Roesch and Faustina Hwang and Slawomir Nasuto},
  doi       = {10.1145/2749466},
  year      = {2015},
  date      = {2015-07-01},
  journal   = {ACM Transactions on Applied Perception (TAP)},
  volume    = {12},
  number    = {3},
  pages     = {1--21},
  abstract  = {Affective algorithmic composition is a growing field that combines perceptually motivated affective computing strategies with novel music generation. This article presents work toward the development of one application. The long-term goal is to develop a responsive and adaptive system for inducing affect that is both controlled and validated by biophysical measures. Literature documenting perceptual responses to music identifies a variety of musical features and possible affective correlations, but perceptual evaluations of these musical features for the purposes of inclusion in a music generation system are not readily available. A discrete feature, rhythmic density (a function of note duration in each musical bar, regardless of tempo), was selected because it was shown to be well-correlated with affective responses in existing literature. A prototype system was then designed to produce controlled degrees of variation in rhythmic density via a transformative algorithm. A two-stage perceptual evaluation of a stimulus set created by this prototype was then undertaken. First, listener responses from a pairwise scaling experiment were analyzed via Multidimensional Scaling Analysis (MDS). The statistical best-fit solution was rotated such that stimuli with the largest range of variation were placed across the horizontal plane in two dimensions. In this orientation, stimuli with deliberate variation in rhythmic density appeared farther from the source material used to generate them than from stimuli generated by random permutation. Second, the same stimulus set was then evaluated according to the order suggested in the rotated two-dimensional solution in a verbal elicitation experiment. A Verbal Protocol Analysis (VPA) found that listener perception of the stimulus set varied in at least two commonly understood emotional descriptors, which might be considered affective correlates of rhythmic density. 
Thus, these results further corroborate previous studies wherein musical parameters are monitored for changes in emotional expression and that some similarly parameterized control of perceived emotional content in an affective algorithmic composition system can be achieved and provide a methodology for evaluating and including further possible musical features in such a system. Some suggestions regarding the test procedure and analysis techniques are also documented here.},
  keywords  = {Affective composition, Emotion, Music generation},
  pubstate  = {published},
  tppubtype = {article}
}
2014
Daly, Ian; Roesch, Etienne; Weaver, James; Nasuto, Slawomir J.
Machine learning to identify neural correlates of music and emotions Book Chapter
In: Eduardo Reck Miranda, Julien Castet (Ed.): pp. 89-103, Springer, 2014, ISBN: 978-1-4471-6583-5.
Abstract | Links | BibTeX | Tags: EEG, Emotion, Machine learning, Models of emotion, Music
@inbook{Daly2014mu,
  title     = {Machine learning to identify neural correlates of music and emotions},
  author    = {Ian Daly and Etienne Roesch and James Weaver and Slawomir J. Nasuto},
  editor    = {Eduardo Reck Miranda and Julien Castet},
  url       = {http://www.iandaly.co.uk/wp-content/uploads/2016/01/Machine-learning-to-identify-neural-correlates-of-music-and-emotions.pdf},
  doi       = {10.1007/978-1-4471-6584-2_5},
  isbn      = {978-1-4471-6583-5},
  year      = {2014},
  date      = {2014-10-04},
  pages     = {89--103},
  publisher = {Springer},
  abstract  = {While music is widely understood to induce an emotional response in the listener, the exact nature of that response and its neural correlates are not yet fully explored. Furthermore, the large number of features which may be extracted from, and used to describe, neurological data, music stimuli, and emotional responses, means that the relationships between these datasets produced during music listening tasks or the operation of a brain–computer music interface (BCMI) are likely to be complex and multidimensional. As such, they may not be apparent from simple visual inspection of the data alone. Machine learning, which is a field of computer science that aims at extracting information from data, provides an attractive framework for uncovering stable relationships between datasets and has been suggested as a tool by which neural correlates of music and emotion may be revealed. In this chapter, we provide an introduction to the use of machine learning methods for identifying neural correlates of musical perception and emotion. We then provide examples of machine learning methods used to study the complex relationships between neurological activity, musical stimuli, and/or emotional responses.},
  keywords  = {EEG, Emotion, Machine learning, Models of emotion, Music},
  pubstate  = {published},
  tppubtype = {inbook}
}
Williams, Duncan; Kirke, Alexis; Miranda, Eduardo; Roesch, Etienne; Daly, Ian; Nasuto, Slawomir
Investigating affect in algorithmic composition systems Journal Article
In: Psychology of Music, pp. 1-24, 2014.
Abstract | Links | BibTeX | Tags: Emotion, Music, Music generation
@article{Williams2014,
  title     = {Investigating affect in algorithmic composition systems},
  author    = {Duncan Williams and Alexis Kirke and Eduardo Miranda and Etienne Roesch and Ian Daly and Slawomir Nasuto},
  doi       = {10.1177/0305735614543282},
  year      = {2014},
  date      = {2014-09-15},
  journal   = {Psychology of Music},
  pages     = {1--24},
  abstract  = {There has been a significant amount of work implementing systems for algorithmic composition with the intention of targeting specific emotional responses in the listener, but a full review of this work is not currently available. This gap creates a shared obstacle to those entering the field. Our aim is thus to give an overview of progress in the area of these affectively driven systems for algorithmic composition. Performative and transformative systems are included and differentiated where appropriate, highlighting the challenges these systems now face if they are to be adapted to, or have already incorporated, some form of affective control. Possible real-time applications for such systems, utilizing affectively driven algorithmic composition and biophysical sensing to monitor and induce affective states in the listener are suggested.},
  keywords  = {Emotion, Music, Music generation},
  pubstate  = {published},
  tppubtype = {article}
}
Daly, Ian; Malik, Asad; Hwang, Faustina; Roesch, Etienne; Weaver, James; Kirke, Alexis; Williams, Duncan; Miranda, Eduardo; Nasuto, Slawomir J.
Neural correlates of emotional responses to music: an EEG study Journal Article
In: Neuroscience letters, vol. 573, pp. 52–57, 2014.
Abstract | Links | BibTeX | Tags: Asymmetry, EEG, Emotion, Functional connectivity, Music
@article{Daly2014NC,
  title     = {Neural correlates of emotional responses to music: an EEG study},
  author    = {Ian Daly and Asad Malik and Faustina Hwang and Etienne Roesch and James Weaver and Alexis Kirke and Duncan Williams and Eduardo Miranda and Slawomir J. Nasuto},
  url       = {http://www.iandaly.co.uk/wp-content/uploads/2016/01/Neural-correlates-of-emotional-responses-to-music-an-EEG-study-pub.pdf},
  doi       = {10.1016/j.neulet.2014.05.003},
  year      = {2014},
  date      = {2014-06-24},
  journal   = {Neuroscience letters},
  volume    = {573},
  pages     = {52--57},
  abstract  = {This paper presents an EEG study into the neural correlates of music-induced emotions. We presented participants with a large dataset containing musical pieces in different styles, and asked them to report on their induced emotional responses.
We found neural correlates of music-induced emotion in a number of frequencies over the pre-frontal cortex. Additionally, we found a set of patterns of functional connectivity, defined by inter-channel coherence measures, to be significantly different between groups of music-induced emotional responses.},
  keywords  = {Asymmetry, EEG, Emotion, Functional connectivity, Music},
  pubstate  = {published},
  tppubtype = {article}
}
We found neural correlates of music-induced emotion in a number of frequencies over the pre-frontal cortex. Additionally, we found a set of patterns of functional connectivity, defined by inter-channel coherence measures, to be significantly different between groups of music-induced emotional responses.