Publications
2020
Daly, Ian; Nicolaou, Nicoletta; Williams, Duncan; Hwang, Faustina; Kirke, Alexis; Miranda, Eduardo; Nasuto, Slawomir J.
Neural and physiological data from participants listening to affective music Journal Article
In: Scientific Data, 2020.
Abstract | BibTeX | Tags: Affective composition, Affective computing, BCI, BCMI, Data, EEG, Emotion, fMRI, Music
@article{Daly2020data,
  author    = {Ian Daly and Nicoletta Nicolaou and Duncan Williams and Faustina Hwang and Alexis Kirke and Eduardo Miranda and Slawomir J. Nasuto},
  title     = {Neural and physiological data from participants listening to affective music},
  journal   = {Scientific Data},
  year      = {2020},
  date      = {2020-05-07},
  abstract  = {Music provides a means of communicating affective meaning. However, the neurological mechanisms by which music induces affect are not fully understood. Our project sought to investigate this through a series of experiments into how humans react to affective musical stimuli and how physiological and neurological signals recorded from those participants change in accordance with self-reported changes in affect. In this paper, the datasets recorded over the course of this project are presented, including details of the musical stimuli, participant reports of their felt changes in affective states as they listened to the music, and concomitant recordings of physiological and neurological activity. We also include non-identifying meta data on our participant populations for purposes of further exploratory analysis. This data provides a large and valuable novel resource for researchers investigating emotion, music, and how they affect our neural and physiological activity.},
  keywords  = {Affective composition, Affective computing, BCI, BCMI, Data, EEG, Emotion, fMRI, Music},
  pubstate  = {published},
  tppubtype = {article}
}
Daly, Ian; Williams, Duncan
“Hello Computer, How Am I Feeling?”, Case Studies of Neural Technology to Measure Emotions Book Chapter
In: Springer, 2020, ISBN: 978-3-030-34783-3.
Links | BibTeX | Tags: Affective composition, Affective computing, BCI, Emotion, Music, Music generation
@inbook{Daly2020book,
title = {“Hello Computer, How Am I Feeling?”, Case Studies of Neural Technology to Measure Emotions},
author = {Ian Daly and Duncan Williams},
doi = {10.1007/978-3-030-34784-0_11},
isbn = {978-3-030-34783-3},
year = {2020},
date = {2020-02-28},
publisher = {Springer},
keywords = {Affective composition, Affective computing, BCI, Emotion, Music, Music generation},
pubstate = {published},
tppubtype = {inbook}
}
2019
Daly, Ian; Williams, Duncan; Hwang, Faustina; Kirke, Alexis; Miranda, Eduardo; Nasuto, Slawomir J.
Electroencephalography reflects the activity of sub-cortical brain regions during approach-withdrawal behaviour while listening to music Journal Article
In: Scientific Reports, 2019.
Links | BibTeX | Tags: Affective composition, EEG, Emotion, fMRI, Music, Music generation
@article{Daly2019-fMRI,
title = {Electroencephalography reflects the activity of sub-cortical brain regions during approach-withdrawal behaviour while listening to music},
author = {Ian Daly and Duncan Williams and Faustina Hwang and Alexis Kirke and Eduardo Miranda and Slawomir J. Nasuto},
url = {https://www.nature.com/articles/s41598-019-45105-2},
doi = {10.1038/s41598-019-45105-2},
year = {2019},
date = {2019-06-03},
journal = {Scientific Reports},
keywords = {Affective composition, EEG, Emotion, fMRI, Music, Music generation},
pubstate = {published},
tppubtype = {article}
}
2017
Daly, Ian
Affective Brain-Computer Interfacing and Methods for Affective State Detection Book Chapter
In: Nam, Chang S.; Nijholt, Anton; Lotte, Fabien (Ed.): BRAIN-COMPUTER INTERFACES HANDBOOK Technological and Theoretical Advances , Chapter 8, 2017.
BibTeX | Tags: BCI, BCMI, Classification, EEG, Emotion, Music
@inbook{Daly2016chap,
title = {Affective Brain-Computer Interfacing and Methods for Affective State Detection},
author = {Ian Daly},
editor = {Chang S. Nam and Anton Nijholt and Fabien Lotte},
year = {2017},
date = {2017-07-25},
booktitle = {BRAIN-COMPUTER INTERFACES HANDBOOK Technological and Theoretical Advances},
chapter = {8},
keywords = {BCI, BCMI, Classification, EEG, Emotion, Music},
pubstate = {published},
tppubtype = {inbook}
}
Williams, Duncan; Kirke, Alexis; Miranda, Eduardo; Daly, Ian; Hwang, Faustina; Weaver, James; Nasuto, Slawomir J.
Affective Calibration of Musical Feature Sets in an Emotionally Intelligent Music Composition System Journal Article
In: ACM Transactions on Applied Perception (TAP), vol. 14, no. 3, 2017.
Links | BibTeX | Tags: Affective composition, Affective computing, Music, Music generation
@article{Williams2017,
title = {Affective Calibration of Musical Feature Sets in an Emotionally Intelligent Music Composition System},
author = {Duncan Williams and Alexis Kirke and Eduardo Miranda and Ian Daly and Faustina Hwang and James Weaver and Slawomir J. Nasuto},
url = {http://dl.acm.org/citation.cfm?id=3059005},
year = {2017},
date = {2017-05-22},
journal = {ACM Transactions on Applied Perception (TAP)},
volume = {14},
number = {3},
keywords = {Affective composition, Affective computing, Music, Music generation},
pubstate = {published},
tppubtype = {article}
}
Daly, Ian; Ho, Aileen; Marcon, Julien; Hwang, Faustina; Williams, Duncan; Kirke, Alexis; Miranda, Eduardo; Nasuto, Slawomir
Affective Brain Computer Music Interfacing: A Case Study Of Use By An Individual With Huntington’s Disease Conference
Graz BCI conference 2017, 2017.
BibTeX | Tags: affect, BCI EEG, BCMI, Huntington's disease, Music
@conference{Daly2017,
title = {Affective Brain Computer Music Interfacing: A Case Study Of Use By An Individual With Huntington’s Disease},
author = {Ian Daly and Aileen Ho and Julien Marcon and Faustina Hwang and Duncan Williams and Alexis Kirke and Eduardo Miranda and Slawomir Nasuto},
year = {2017},
date = {2017-05-05},
booktitle = {Graz BCI conference 2017},
keywords = {affect, BCI, EEG, BCMI, Huntington's disease, Music},
pubstate = {published},
tppubtype = {conference}
}
2015
Daly, Ian; Williams, Duncan; Hallowell, James; Hwang, Faustina; Kirke, Alexis; Malik, Asad; Weaver, James; Miranda, Eduardo; Nasuto, Slawomir J.
Music-induced emotions can be predicted from a combination of brain activity and acoustic features Journal Article
In: Brain and Cognition, vol. 101, pp. 1-11, 2015.
Abstract | Links | BibTeX | Tags: Acoustic features, EEG, Emotion, Music
@article{DalyPred2015,
title = {Music-induced emotions can be predicted from a combination of brain activity and acoustic features},
author = {Ian Daly and Duncan Williams and James Hallowell and Faustina Hwang and Alexis Kirke and Asad Malik and James Weaver and Eduardo Miranda and Slawomir J. Nasuto},
url = {http://www.iandaly.co.uk/wp-content/uploads/2016/01/music-induced-emotions-can-be-predicted-from-a-combination-of-brain-activity-and-acoustic-features.pdf},
doi = {10.1016/j.bandc.2015.08.003},
year = {2015},
date = {2015-12-01},
journal = {Brain and Cognition},
volume = {101},
pages = {1--11},
abstract = {It is widely acknowledged that music can communicate and induce a wide range of emotions in the listener. However, music is a highly-complex audio signal composed of a wide range of complex time- and frequency-varying components. Additionally, music-induced emotions are known to differ greatly between listeners. Therefore, it is not immediately clear what emotions will be induced in a given individual by a piece of music. We attempt to predict the music-induced emotional response in a listener by measuring the activity in the listeners electroencephalogram (EEG). We combine these measures with acoustic descriptors of the music, an approach that allows us to consider music as a complex set of time-varying acoustic features, independently of any specific music theory. Regression models are found which allow us to predict the music-induced emotions of our participants with a correlation between the actual and predicted responses of up to r=0.234,p<0.001. This regression fit suggests that over 20% of the variance of the participant's music induced emotions can be predicted by their neural activity and the properties of the music. Given the large amount of noise, non-stationarity, and non-linearity in both EEG and music, this is an encouraging result. Additionally, the combination of measures of brain activity and acoustic features describing the music played to our participants allows us to predict music-induced emotions with significantly higher accuracies than either feature type alone (p<0.01).},
keywords = {Acoustic features, EEG, Emotion, Music},
pubstate = {published},
tppubtype = {article}
}
2014
Daly, Ian; Williams, Duncan; Hwang, Faustina; Kirke, Alexis; Malik, Asad; Roesch, Etienne; Weaver, James; Miranda, Eduardo; Nasuto, Slawomir
Investigating music tempo as a feedback mechanism for closed-loop BCI control Journal Article
In: Brain-Computer Interfaces, vol. 1, no. 3, pp. 158-169, 2014.
Abstract | Links | BibTeX | Tags: BCI, BCMI, ERD, Motor imagery, Music, Tempo
@article{Daly2014tempoBCI,
title = {Investigating music tempo as a feedback mechanism for closed-loop BCI control},
author = {Ian Daly and Duncan Williams and Faustina Hwang and Alexis Kirke and Asad Malik and Etienne Roesch and James Weaver and Eduardo Miranda and Slawomir Nasuto},
url = {http://www.iandaly.co.uk/wp-content/uploads/2016/01/tempoBCI.pdf},
doi = {10.1080/2326263X.2014.979728},
year = {2014},
date = {2014-10-17},
journal = {Brain-Computer Interfaces},
volume = {1},
number = {3},
pages = {158--169},
abstract = {The feedback mechanism used in a brain-computer interface (BCI) forms an integral part of the closed-loop learning process required for successful operation of a BCI. However, ultimate success of the BCI may be dependent upon the modality of the feedback used. This study explores the use of music tempo as a feedback mechanism in BCI and compares it to the more commonly used visual feedback mechanism. Three different feedback modalities are compared for a kinaesthetic motor imagery BCI: visual, auditory via music tempo, and a combined visual and auditory feedback modality. Visual feedback is provided via the position, on the y-axis, of a moving ball. In the music feedback condition, the tempo of a piece of continuously generated music is dynamically adjusted via a novel music-generation method. All the feedback mechanisms allowed users to learn to control the BCI. However, users were not able to maintain as stable control with the music tempo feedback condition as they could in the visual feedback and combined conditions. Additionally, the combined condition exhibited significantly less inter-user variability, suggesting that multi-modal feedback may lead to more robust results. Finally, common spatial patterns are used to identify participant-specific spatial filters for each of the feedback modalities. The mean optimal spatial filter obtained for the music feedback condition is observed to be more diffuse and weaker than the mean spatial filters obtained for the visual and combined feedback conditions.},
keywords = {BCI, BCMI, ERD, Motor imagery, Music, Tempo},
pubstate = {published},
tppubtype = {article}
}
Daly, Ian; Roesch, Etienne; Weaver, James; Nasuto, Slawomir J.
Machine learning to identify neural correlates of music and emotions Book Chapter
In: Eduardo Reck Miranda, Julien Castet (Ed.): pp. 89-103, Springer, 2014, ISBN: 978-1-4471-6583-5.
Abstract | Links | BibTeX | Tags: EEG, Emotion, Machine learning, Models of emotion, Music
@inbook{Daly2014mu,
title = {Machine learning to identify neural correlates of music and emotions},
author = {Ian Daly and Etienne Roesch and James Weaver and Slawomir J. Nasuto},
editor = {Eduardo Reck Miranda and Julien Castet},
url = {http://www.iandaly.co.uk/wp-content/uploads/2016/01/Machine-learning-to-identify-neural-correlates-of-music-and-emotions.pdf},
doi = {10.1007/978-1-4471-6584-2_5},
isbn = {978-1-4471-6583-5},
year = {2014},
date = {2014-10-04},
pages = {89--103},
publisher = {Springer},
abstract = {While music is widely understood to induce an emotional response in the listener, the exact nature of that response and its neural correlates are not yet fully explored. Furthermore, the large number of features which may be extracted from, and used to describe, neurological data, music stimuli, and emotional responses, means that the relationships between these datasets produced during music listening tasks or the operation of a brain–computer music interface (BCMI) are likely to be complex and multidimensional. As such, they may not be apparent from simple visual inspection of the data alone. Machine learning, which is a field of computer science that aims at extracting information from data, provides an attractive framework for uncovering stable relationships between datasets and has been suggested as a tool by which neural correlates of music and emotion may be revealed. In this chapter, we provide an introduction to the use of machine learning methods for identifying neural correlates of musical perception and emotion. We then provide examples of machine learning methods used to study the complex relationships between neurological activity, musical stimuli, and/or emotional responses.},
keywords = {EEG, Emotion, Machine learning, Models of emotion, Music},
pubstate = {published},
tppubtype = {inbook}
}
Williams, Duncan; Kirke, Alexis; Miranda, Eduardo; Roesch, Etienne; Daly, Ian; Nasuto, Slawomir
Investigating affect in algorithmic composition systems Journal Article
In: Psychology of Music, pp. 1-24, 2014.
Abstract | Links | BibTeX | Tags: Emotion, Music, Music generation
@article{Williams2014,
title = {Investigating affect in algorithmic composition systems},
author = {Duncan Williams and Alexis Kirke and Eduardo Miranda and Etienne Roesch and Ian Daly and Slawomir Nasuto},
doi = {10.1177/0305735614543282},
year = {2014},
date = {2014-09-15},
journal = {Psychology of Music},
pages = {1--24},
abstract = {There has been a significant amount of work implementing systems for algorithmic composition with the intention of targeting specific emotional responses in the listener, but a full review of this work is not currently available. This gap creates a shared obstacle to those entering the field. Our aim is thus to give an overview of progress in the area of these affectively driven systems for algorithmic composition. Performative and transformative systems are included and differentiated where appropriate, highlighting the challenges these systems now face if they are to be adapted to, or have already incorporated, some form of affective control. Possible real-time applications for such systems, utilizing affectively driven algorithmic composition and biophysical sensing to monitor and induce affective states in the listener are suggested.},
keywords = {Emotion, Music, Music generation},
pubstate = {published},
tppubtype = {article}
}
Daly, Ian; Williams, Duncan; Hwang, Faustina; Kirke, Alexis; Malik, Asad; Roesch, Etienne; Weaver, James; Miranda, Eduardo; Nasuto, Slawomir J.
Brain-computer music interfacing for continuous control of musical tempo Conference
Proceedings of the Graz Brain-computer interface conference 2014, 2014.
Abstract | Links | BibTeX | Tags: BCI, BCMI, Music, Tempo
@conference{Daly2014tempoconf,
title = {Brain-computer music interfacing for continuous control of musical tempo},
author = {Ian Daly and Duncan Williams and Faustina Hwang and Alexis Kirke and Asad Malik and Etienne Roesch and James Weaver and Eduardo Miranda and Slawomir J. Nasuto},
doi = {10.3217/978-3-85125-378-8-4},
year = {2014},
date = {2014-09-01},
booktitle = {Proceedings of the Graz Brain-computer interface conference 2014},
abstract = {A Brain-computer music interface (BCMI) is developed to allow for continuous modification of the tempo of dynamically generated music. Six out of seven participants are able to control the BCMI at significant accuracies and their performance is observed to increase over time.},
keywords = {BCI, BCMI, Music, Tempo},
pubstate = {published},
tppubtype = {conference}
}
Daly, Ian; Hallowell, James; Hwang, Faustina; Kirke, Alexis; Malik, Asad; Roesch, Etienne; Weaver, James; Williams, Duncan; Miranda, Eduardo; Nasuto, Slawomir J.
Changes in music tempo entrain movement related brain activity Conference
Engineering in Medicine and Biology Society (EMBC), 2014 36th Annual International Conference of the IEEE, IEEE, 2014, ISSN: 1557-170X.
Links | BibTeX | Tags: EEG, Entrainment, ERD, Music, Tempo
@conference{Daly2014embc,
title = {Changes in music tempo entrain movement related brain activity},
author = {Ian Daly and James Hallowell and Faustina Hwang and Alexis Kirke and Asad Malik and Etienne Roesch and James Weaver and Duncan Williams and Eduardo Miranda and Slawomir J. Nasuto},
url = {http://www.iandaly.co.uk/wp-content/uploads/2016/01/TempoEntrainmentOfERD.pdf},
doi = {10.1109/EMBC.2014.6944647},
issn = {1557-170X},
year = {2014},
date = {2014-08-30},
booktitle = {Engineering in Medicine and Biology Society (EMBC), 2014 36th Annual International Conference of the IEEE},
pages = {4595--4598},
publisher = {IEEE},
keywords = {EEG, Entrainment, ERD, Music, Tempo},
pubstate = {published},
tppubtype = {conference}
}
Daly, Ian; Malik, Asad; Hwang, Faustina; Roesch, Etienne; Weaver, James; Kirke, Alexis; Williams, Duncan; Miranda, Eduardo; Nasuto, Slawomir J.
Neural correlates of emotional responses to music: an EEG study Journal Article
In: Neuroscience letters, vol. 573, pp. 52–57, 2014.
Abstract | Links | BibTeX | Tags: Asymmetry, EEG, Emotion, Functional connectivity, Music
@article{Daly2014NC,
title = {Neural correlates of emotional responses to music: an EEG study},
author = {Ian Daly and Asad Malik and Faustina Hwang and Etienne Roesch and James Weaver and Alexis Kirke and Duncan Williams and Eduardo Miranda and Slawomir J. Nasuto},
url = {http://www.iandaly.co.uk/wp-content/uploads/2016/01/Neural-correlates-of-emotional-responses-to-music-an-EEG-study-pub.pdf},
doi = {10.1016/j.neulet.2014.05.003},
year = {2014},
date = {2014-06-24},
journal = {Neuroscience letters},
volume = {573},
pages = {52--57},
abstract = {This paper presents an EEG study into the neural correlates of music-induced emotions. We presented participants with a large dataset containing musical pieces in different styles, and asked them to report on their induced emotional responses.
We found neural correlates of music-induced emotion in a number of frequencies over the pre-frontal cortex. Additionally, we found a set of patterns of functional connectivity, defined by inter-channel coherence measures, to be significantly different between groups of music-induced emotional responses.},
keywords = {Asymmetry, EEG, Emotion, Functional connectivity, Music},
pubstate = {published},
tppubtype = {article}
}