@article{jsdp2009emotion,
  author        = {},
  title         = {Emotion Classification Using Brain and Peripheral Signals},
  journal       = {Signal and Data Processing},
  volume        = {6},
  number        = {1},
  year          = {2009},
  url           = {http://jsdp.rcisp.ac.ir/article-1-743-en.html},
  eprint        = {http://jsdp.rcisp.ac.ir/article-1-743-en.pdf},
  abstract      = {Abstract Emotions play a powerful and significant role in human beings everyday life. They motivate us, impact our beliefs and decision making and would affect some cognitive processes like creativity, attention, and memory. Nowadays the use of emotion in computers is an increasingly in vogue field. In many ways emotions are one of the last and least explored frontiers of intuitive human-computer interactions. This can perhaps be explained by the fact that computers are traditionally viewed as logical and rational tools which is incompatible with the often irrational and seeming illogical nature of emotions. It is apparent that we as humans, in spite of having extremely good abilities at felling and expressing emotions, still cannot agree on how they should best be defined. until now, there are a bunch of good reasons which supports that emotion is a fitting topic for Human-Computer Interaction research. Human beings who are emotional creatures should theoretically be able to interact more effectively with computers which can account for these emotions. So Emotions assessed would make some improvement in HCI. The goal of our research is to perform a multimodal fusion between EEG's and peripheral physiological signals for emotion detection. The input signals were electroencephalogram, galvanic skin resistance, blood pressure and respiration, which can reflect the influence of emotion on the central nervous system and autonomic nervous system respectively. The acquisition protocol is based on a subset of pictures which correspond to three specific areas of valance-arousal emotional space (positively excited, negatively excited, and calm). The features extracted from input signals, and to improve the results of brain signals, nonlinear features as correlation dimension, largest lyapunov exponent and fractal dimension is used. The performance of four classifiers: LDA, QDA, KNN, SVM has been evaluated on different feature sets: peripheral signals, EEG's, and both. Synchronization likelihood is used as a channel selection algorithm and the performance of two feature selection algorithms; Genetic Algorithm and Mutual information is evaluated. The best result of accuracy in EEG signals is 63.3% with QDA as classifier, the best result of peripheral signals is 61.67% and the best of both is 63.3% with QDA. In comparison among the results of different feature sets, EEG signals seem to perform better than other physiological signals, and the results presented showed that EEG's can be used to assess emotional states of a user. Also, fusion provides more robust results since some participants had better scores with peripheral signals than with EEG's and vice-versa.},
  internal-note = {TODO(review): author field is empty in the source record -- fill in from the publisher page; empty doi field removed, add a real DOI if one exists},
}