journal articles
Andreas Bulling; Jamie A. Ward; Hans Gellersen; Gerhard Tröster
Eye Movement Analysis for Activity Recognition Using Electrooculography
IEEE Transactions on Pattern Analysis and Machine Intelligence, 33 (4), pp. 741-753, 2011.
PDF: https://perceptual.mpi-inf.mpg.de/files/2013/03/bulling11_pami.pdf
DOI: 10.1109/TPAMI.2010.86
Abstract: In this work we investigate eye movement analysis as a new sensing modality for activity recognition. Eye movement data was recorded using an electrooculography (EOG) system. We first describe and evaluate algorithms for detecting three eye movement characteristics from EOG signals - saccades, fixations, and blinks - and propose a method for assessing repetitive patterns of eye movements. We then devise 90 different features based on these characteristics and select a subset of them using minimum redundancy maximum relevance feature selection (mRMR). We validate the method using an eight participant study in an office environment using an example set of five activity classes: copying a text, reading a printed paper, taking hand-written notes, watching a video, and browsing the web. We also include periods with no specific activity (the NULL class). Using a support vector machine (SVM) classifier and a person-independent (leave-one-out) training scheme, we obtain an average precision of 76.1% and recall of 70.5% over all classes and participants. The work demonstrates the promise of eye-based activity recognition (EAR) and opens up discussion on the wider applicability of EAR to other activities that are difficult, or even impossible, to detect using common sensing modalities.
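A minimal sketch of the evaluation pipeline this abstract describes: feature selection followed by an SVM, trained person-independently and scored leave-one-participant-out. This is not the authors' implementation; synthetic data stands in for the 90 EOG-derived features, and a mutual-information ranking is used here as a stand-in for mRMR, which scikit-learn does not provide.

```python
# Sketch only: person-independent, leave-one-participant-out evaluation
# of a feature-selection + SVM classifier on synthetic stand-in data.
import numpy as np
from sklearn.feature_selection import SelectKBest, mutual_info_classif
from sklearn.model_selection import LeaveOneGroupOut
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC

rng = np.random.default_rng(0)
n_participants, n_windows, n_features, n_classes = 8, 60, 90, 6
X = rng.normal(size=(n_participants * n_windows, n_features))   # stand-in features
y = rng.integers(0, n_classes, size=len(X))                      # activity labels incl. NULL
groups = np.repeat(np.arange(n_participants), n_windows)         # participant IDs

clf = make_pipeline(
    StandardScaler(),
    SelectKBest(mutual_info_classif, k=30),  # feature subset (mRMR substitute)
    SVC(kernel="linear"),
)

scores = []
for train_idx, test_idx in LeaveOneGroupOut().split(X, y, groups):
    clf.fit(X[train_idx], y[train_idx])                # train on all other participants
    scores.append(clf.score(X[test_idx], y[test_idx])) # test on the held-out participant
print(f"mean leave-one-participant-out accuracy: {np.mean(scores):.2f}")
```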
Andreas Bulling; Daniel Roggen; Gerhard Tröster
What's in the Eyes for Context-Awareness?
IEEE Pervasive Computing, 10 (2), pp. 48-57, 2011.
PDF: https://perceptual.mpi-inf.mpg.de/files/2013/03/bulling11_pcm.pdf
DOI: 10.1109/MPRV.2010.49
Abstract: Eye movements are a rich source of information about a person's context. Analyzing the link between eye movements and cognition might even allow us to develop cognition-aware pervasive computing systems that assess a person's cognitive context.
conference papers
Andreas Bulling; Daniel Roggen
Recognition of Visual Memory Recall Processes Using Eye Movement Analysis
In: Proc. of the 13th International Conference on Ubiquitous Computing (UbiComp 2011), pp. 455-464, 2011.
PDF: https://perceptual.mpi-inf.mpg.de/files/2013/03/bulling11_ubicomp.pdf
Abstract: Physical activity, location, as well as a person's psychophysiological and affective state are common dimensions for developing context-aware systems in ubiquitous computing. An important yet missing contextual dimension is the cognitive context that comprises all aspects related to mental information processing, such as perception, memory, knowledge, or learning. In this work we investigate the feasibility of recognising visual memory recall. We use a recognition methodology that combines minimum redundancy maximum relevance feature selection (mRMR) with a support vector machine (SVM) classifier. We validate the methodology in a dual user study with a total of fourteen participants looking at familiar and unfamiliar pictures from four picture categories: abstract, landscapes, faces, and buildings. Using person-independent training, we are able to discriminate between familiar and unfamiliar abstract pictures with a top recognition rate of 84.3% (89.3% recall, 21.0% false positive rate) over all participants. We show that eye movement analysis is a promising approach to infer the cognitive context of a person and discuss the key challenges for the real-world implementation of eye-based cognition-aware systems.
Bernd Tessendorf; Andreas Bulling; Daniel Roggen; Thomas Stiefmeier; Manuela Feilner; Peter Derleth; Gerhard Tröster
Kontexterkennung für Hörgeräte mittels zusätzlicher Sensormodalitäten (Context Recognition for Hearing Instruments Using Additional Sensor Modalities)
In: Proc. of the 37th Annual Convention for Acoustics (DAGA 2011), 2011.
Bernd Tessendorf; Andreas Bulling; Daniel Roggen; Thomas Stiefmeier; Manuela Feilner; Peter Derleth; Gerhard Tröster
Recognition of Hearing Needs From Body and Eye Movements to Improve Hearing Instruments
In: Proc. of the 9th International Conference on Pervasive Computing, pp. 314-331, Springer, 2011.
PDF: https://perceptual.mpi-inf.mpg.de/files/2013/03/tessendorf11_pervasive.pdf
Abstract: Hearing instruments (HIs) have emerged as true pervasive computers as they continuously adapt the hearing program to the user's context. However, current HIs are not able to distinguish different hearing needs in the same acoustic environment. In this work, we explore how information derived from body and eye movements can be used to improve the recognition of such hearing needs. We conduct an experiment to provoke an acoustic environment in which different hearing needs arise: active conversation and working while colleagues are having a conversation in a noisy office environment. We record body movements on nine body locations, eye movements using electrooculography (EOG), and sound using commercial HIs for eleven participants. Using a support vector machine (SVM) classifier and person-independent training we improve the accuracy of 77% based on sound to an accuracy of 92% using body movements. With a view to a future implementation into a HI we then perform a detailed analysis of the sensors attached to the head. We achieve the best accuracy of 86% using eye movements compared to 84% for head movements. Our work demonstrates the potential of additional sensor modalities for future HIs and motivates investigating the wider applicability of this approach to further hearing situations and needs.
Jayson Turner; Andreas Bulling; Hans Gellersen
Combining Gaze with Manual Interaction to Extend Physical Reach
In: Proc. of the 1st International Workshop on Pervasive Eye Tracking and Mobile Eye-Based Interaction (PETMEI 2011), pp. 33-36, ACM Press, 2011.
PDF: https://perceptual.mpi-inf.mpg.de/files/2013/03/turner11_petmei.pdf
Abstract: Situated public displays and interactive surfaces are becoming ubiquitous in our daily lives. Issues arise with these devices when attempting to interact over a distance or with content that is physically out of reach. In this paper we outline three techniques that combine gaze with manual hand-controlled input to move objects. We demonstrate and discuss how these techniques could be applied to two scenarios involving (1) a multi-touch surface and (2) a public display and a mobile device.
Eduardo Velloso; Andreas Bulling; Hans Gellersen
Towards Qualitative Assessment of Weight Lifting Exercises Using Body-Worn Sensors
In: Proc. of the 13th International Conference on Ubiquitous Computing (UbiComp 2011), pp. 587-588, ACM Press, 2011, ISBN: 978-1-4503-0630-0.
PDF: https://perceptual.mpi-inf.mpg.de/files/2013/03/velloso11_ubicomp.pdf
Abstract: Sports exercises are beneficial for general health and fitness. Some exercises such as weight lifting are particularly error-prone and using incorrect techniques can result in serious injuries. The current work aims to develop a weight lifting assistant that relies on motion sensors mounted on the body and integrated into gym equipment that provides qualitative feedback on the user's performance. We believe that by comparing motion data recorded from different parts of the body with a mathematical model of the correct technique, we will be able to qualitatively assess the user's performance, and provide a score and suggestions for improvement.
Eduardo Velloso; Débora Cardador; Katia Vega; Wallace Ugulino; Andreas Bulling; Hans Gellersen; Hugo Fuks
The Web of Things as an Infrastructure for Improving Users' Health and Wellbeing
In: Proc. of the 2nd Workshop of the Brazilian Institute for Web Science Research, 2011.
PDF: https://perceptual.mpi-inf.mpg.de/files/2013/03/velloso11_wsr.pdf
Abstract: This position paper outlines the authors' vision of how the Web of Things, using interconnected devices including sensor nodes, mobile phones and conventional computers, can help improve the overall health and wellbeing of its users. We describe ongoing work being carried out by our research group both at PUC-Rio and at Lancaster University as well as the motivating background.
Mélodie Vidal; Andreas Bulling; Hans Gellersen
Analysing EOG Signal Features for the Discrimination of Eye Movements with Wearable Devices
In: Proc. of the 1st International Workshop on Pervasive Eye Tracking and Mobile Eye-Based Interaction (PETMEI 2011), pp. 15-20, ACM Press, 2011.
PDF: https://perceptual.mpi-inf.mpg.de/files/2013/03/vidal11_petmei.pdf
Abstract: Eye tracking research in human-computer interaction and experimental psychology traditionally focuses on stationary devices and a small number of common eye movements. The advent of pervasive eye tracking promises new applications, such as eye-based mental health monitoring or eye-based activity and context recognition. These applications might require further research on additional eye movement types such as smooth pursuits and the vestibulo-ocular reflex as these movements have not been studied as extensively as saccades, fixations and blinks. In this paper we report our first step towards an effective discrimination of these movements. In a user study we collect naturalistic eye movements from 19 people using the two most common measurement techniques (EOG and IR-based). We develop a set of basic signal features that we extract from the collected eye movement data and show that a feature-based approach has the potential to discriminate between saccades, smooth pursuits, and vestibulo-ocular reflex movements.
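For illustration only, a few window-level signal features of the kind that can help separate fast (saccadic) from slow (pursuit-like) movement in a single EOG channel. The sampling rate, thresholds, and feature set below are assumptions for this sketch, not the features proposed in the paper.

```python
# Sketch only: simple velocity/amplitude features over one window of a
# 1-D horizontal EOG signal (arbitrary units).
import numpy as np

def eog_window_features(eog, fs=128.0):
    """Return a few basic features for one window of a 1-D EOG signal."""
    velocity = np.gradient(eog) * fs  # first derivative, a.u. per second
    return {
        "max_abs_velocity": float(np.max(np.abs(velocity))),    # high for saccades
        "mean_abs_velocity": float(np.mean(np.abs(velocity))),  # moderate for pursuit
        "signal_range": float(np.ptp(eog)),                     # overall amplitude
        "velocity_std": float(np.std(velocity)),
    }

# Example: a step-like segment (saccade-like) vs. a slow ramp (pursuit-like)
fs = 128.0
t = np.arange(0, 1.0, 1.0 / fs)
saccade_like = np.where(t < 0.5, 0.0, 1.0) + 0.01 * np.random.randn(t.size)
pursuit_like = t + 0.01 * np.random.randn(t.size)
print(eog_window_features(saccade_like, fs))
print(eog_window_features(pursuit_like, fs))
```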
Yanxia Zhang; Andreas Bulling; Hans Gellersen
Discrimination of Gaze Directions Using Low-Level Eye Image Features
In: Proc. of the 1st International Workshop on Pervasive Eye Tracking and Mobile Eye-Based Interaction (PETMEI 2011), pp. 9-13, ACM Press, 2011.
PDF: https://perceptual.mpi-inf.mpg.de/files/2013/03/zhang11_petmei.pdf
Abstract: In mobile daily life settings, video-based gaze tracking faces challenges associated with changes in lighting conditions and artefacts in the video images caused by head and body movements. These challenges call for the development of new methods that are robust to such influences. In this paper we investigate the problem of gaze estimation, more specifically how to discriminate different gaze directions from eye images. In a 17 participant user study we record eye images for 13 different gaze directions from a standard webcam. We extract a total of 50 features from these images that encode information on color, intensity and orientations. Using mRMR feature selection and a k-nearest neighbor (kNN) classifier we show that we can estimate these gaze directions with a mean recognition performance of 86%.
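A minimal sketch of the classification step this abstract describes: a k-nearest-neighbour classifier over low-level feature vectors. Random vectors stand in for the 50 colour/intensity/orientation features extracted from eye images; k and the train/test split are arbitrary choices, and the paper's mRMR selection step is omitted.

```python
# Sketch only: kNN over stand-in eye-image feature vectors for 13 gaze directions.
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

rng = np.random.default_rng(1)
n_samples, n_features, n_directions = 500, 50, 13
X = rng.normal(size=(n_samples, n_features))        # stand-in image features
y = rng.integers(0, n_directions, size=n_samples)   # one of 13 gaze directions

X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.3, random_state=0)
knn = KNeighborsClassifier(n_neighbors=5).fit(X_tr, y_tr)  # fit on training split
print(f"held-out accuracy: {knn.score(X_te, y_te):.2f}")
```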
book chapters
Andreas Bulling; Andrew T. Duchowski; Päivi Majaranta
Proc. 1st International Workshop on Pervasive Eye Tracking and Mobile Eye-Based Interaction (PETMEI)
In: Proc. ACM International Joint Conference on Pervasive and Ubiquitous Computing (UbiComp), 2011.
DOI: 10.1145/2030112.2030248
Abstract: Recent developments in mobile eye tracking equipment and automated eye movement analysis point the way toward unobtrusive eye-based human-computer interfaces that are pervasively usable in everyday life. We call this new paradigm pervasive eye tracking - continuous eye monitoring and analysis 24/7. PETMEI 2011 provides a forum for researchers from human-computer interaction, context-aware computing, and eye tracking to discuss techniques and applications that go beyond classical eye tracking and stationary eye-based interaction. We aim to discuss the implications of pervasive eye tracking for context-aware computing and to identify the key research challenges of mobile eye-based interaction. The long-term goal is to create a strong interdisciplinary research community linking these research fields together and to establish the workshop as the premier forum for research on pervasive eye tracking and mobile eye-based interaction.