2022
Rowland Goddy-Worlu; Martha Dais Ferreira; Matthew Peachey; Claire Nicholas; James Forren; Derek Reilly
Capture and Recognition of Bead Weaving Activities using Hand Skeletal Data and an LSTM Deep Neural Network Proceedings Article
In: Proceedings of the 5th IEEE International Conference on Artificial Intelligence & Virtual Reality (AIVR 2022), IEEE, 2022.
BibTeX | Tags: augmented reality, craft, creativity support tools, cultural heritage, dynamic guides, gesture, machine learning
@inproceedings{GoddyWorlu2022,
  title     = {Capture and Recognition of Bead Weaving Activities using Hand Skeletal Data and an {LSTM} Deep Neural Network},
  author    = {Rowland Goddy-Worlu and Martha Dais Ferreira and Matthew Peachey and Claire Nicholas and James Forren and Derek Reilly},
  year      = {2022},
  date      = {2022-12-12},
  urldate   = {2022-12-12},
  booktitle = {Proceedings of the 5th IEEE International Conference on Artificial Intelligence \& Virtual Reality (AIVR 2022)},
  publisher = {IEEE},
  keywords  = {augmented reality, craft, creativity support tools, cultural heritage, dynamic guides, gesture, machine learning},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Thiago Malheiros Porcino; Seyed Adel Ghaeinian; Juliano Franz; Joseph Malloch; Derek Reilly
Design of an Extended Reality Collaboration Architecture for Mixed Immersive and Multi-Surface Interaction Proceedings Article
In: The 21st IFIP International Conference on Entertainment Computing (ICEC 2022), IFIP, 2022.
BibTeX | Tags: 3-D user interface, AR, architecture, collaborative systems, connected devices, embodied interaction, gesture, immersive visualization, information propagation, natural user interface, spatial annotation, temporal objects, toolkit
@inproceedings{PorcinoICEC2022,
  title     = {Design of an Extended Reality Collaboration Architecture for Mixed Immersive and Multi-Surface Interaction},
  author    = {Thiago Malheiros Porcino and Seyed Adel Ghaeinian and Juliano Franz and Joseph Malloch and Derek Reilly},
  year      = {2022},
  date      = {2022-11-01},
  urldate   = {2022-11-01},
  booktitle = {The 21st IFIP International Conference on Entertainment Computing (ICEC 2022)},
  publisher = {IFIP},
  keywords  = {3-D user interface, AR, architecture, collaborative systems, connected devices, embodied interaction, gesture, immersive visualization, information propagation, natural user interface, spatial annotation, temporal objects, toolkit},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2021
Christian Frisson; Mathias Bredholt; Joseph Malloch; Marcelo M. Wanderley
MapLooper: Live-looping of Distributed Gesture-to-Sound Mappings Proceedings Article
In: Proceedings of the International Conference on New Interfaces for Musical Expression, Shanghai, China, 2021.
BibTeX | Tags: control mapping, digital musical instruments, gesture, live looping
@inproceedings{Frisson2021,
  title     = {{MapLooper}: Live-looping of Distributed Gesture-to-Sound Mappings},
  author    = {Christian Frisson and Mathias Bredholt and Joseph Malloch and Marcelo M. Wanderley},
  year      = {2021},
  date      = {2021-06-15},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  address   = {Shanghai, China},
  keywords  = {control mapping, digital musical instruments, gesture, live looping},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2018
Felwah Alqahtani; Derek Reilly
It’s the Gesture That (re)Counts: Annotating While Running to Recall Affective Experience Conference
Proceedings of Graphics Interface (GI 2018), Toronto, Canada, 2018.
Abstract | Links | BibTeX | Tags: embodied interaction, emotional recall, gesture, mobile, running, spatial annotation, visualization
@conference{Alqahtani2018,
  title     = {It's the Gesture That {(re)Counts}: Annotating While Running to Recall Affective Experience},
  author    = {Felwah Alqahtani and Derek Reilly},
  doi       = {10.20380/GI2018.12},
  year      = {2018},
  date      = {2018-05-08},
  booktitle = {Proceedings of Graphics Interface (GI 2018)},
  address   = {Toronto, Canada},
  abstract  = {We present results from a study exploring whether gestural annotations of felt emotion presented on a map-based visualization can support recall of affective experience during recreational runs. We compare gestural annotations with audio and video notes and a ``mental note'' baseline. In our study, 20 runners were asked to record their emotional state at regular intervals while running a familiar route. Each runner used one of the four methods to capture emotion over four separate runs. Five days after the last run, runners used an interactive map-based visualization to review and recall their running experiences. Results indicate that gestural annotation promoted recall of affective experience more effectively than the baseline condition, as measured by confidence in recall and detail provided. Gestural annotation was also comparable to video and audio annotation in terms of recollection confidence and detail. Audio annotation supported recall primarily through the runner's spoken annotation, but sound in the background was sometimes used. Video annotation yielded the most detail, much directly related to visual cues in the video, however using video annotations required runners to stop during their runs. Given these results we propose that background logging of ambient sounds and video may supplement gestural annotation.},
  keywords  = {embodied interaction, emotional recall, gesture, mobile, running, spatial annotation, visualization},
  pubstate  = {published},
  tppubtype = {conference}
}
2017
Joseph Malloch; Carla Griggio; Joanna McGrenere; Wendy E. Mackay
Fieldward and Pathward: Dynamic Guides for Defining your own Gestures Conference
Proceedings of the ACM SIGCHI Conference on Human Factors in Computing Systems (CHI 2017), ACM Denver, USA, 2017.
Abstract | Links | BibTeX | Tags: dynamic guides, feedforward, gesture, mobile, progressive feedforward
@conference{Malloch2017_CHI,
  title        = {{Fieldward} and {Pathward}: Dynamic Guides for Defining your own Gestures},
  author       = {Joseph Malloch and Carla Griggio and Joanna McGrenere and Wendy E. Mackay},
  doi          = {10.1145/3025453.3025764},
  year         = {2017},
  date         = {2017-05-06},
  booktitle    = {Proceedings of the ACM SIGCHI Conference on Human Factors in Computing Systems (CHI 2017)},
  address      = {Denver, USA},
  organization = {ACM},
  abstract     = {Although users accomplish ever more tasks on touch-enabled mobile devices, gesture-based interaction remains limited and almost never customizable by users. Our goal is to help users create gestures that are both personally memorable and reliably recognized by a touch-enabled mobile device. We address these competing requirements with two dynamic guides that use progressive feedforward to interactively visualize the ``negative space'' of unused gestures: the Pathward technique suggests four possible completions to the current gesture, and the Fieldward technique uses color gradients to reveal optimal directions for creating recognizable gestures. We ran a two-part experiment in which 27 participants each created 42 personal gesture shortcuts on a smartphone, using Pathward, Fieldward or No Feedforward. The Fieldward technique best supported the most common user strategy, i.e. to create a memorable gesture first and then adapt it to be recognized by the system. Users preferred the Fieldward technique to Pathward or No Feedforward, and remembered gestures more easily when using the technique. Dynamic guides can help developers design novel gesture vocabularies and support users as they design custom gestures for mobile applications.},
  keywords     = {dynamic guides, feedforward, gesture, mobile, progressive feedforward},
  pubstate     = {published},
  tppubtype    = {conference}
}
Bin Hannan, Nabil; Khalid Tearo; Joseph Malloch; Derek Reilly
Once More, with Feeling: Expressing Emotional Intensity in Touchscreen Gestures Conference
Proceedings of the 22nd International Conference on Intelligent User Interfaces (IUI 2017), ACM, 2017.
Abstract | Links | BibTeX | Tags: emotion, gesture, touchscreen
@conference{Bin_Hannan2017_IUI,
  title     = {Once More, with Feeling: Expressing Emotional Intensity in Touchscreen Gestures},
  author    = {Bin Hannan, Nabil and Khalid Tearo and Joseph Malloch and Derek Reilly},
  doi       = {10.1145/3025171.3025182},
  year      = {2017},
  date      = {2017-03-14},
  booktitle = {Proceedings of the 22nd International Conference on Intelligent User Interfaces (IUI 2017)},
  pages     = {427--437},
  publisher = {ACM},
  abstract  = {In this paper, we explore how people use touchscreens to express emotional intensity, and whether these intensities can be understood by oneself at a later date or by others. In a controlled study, 26 participants were asked to express a set of emotions mapped to predefined gestures, at range of different intensities. One week later, participants were asked to identify the emotional intensity visualized in animations of the gestures made by themselves and by other participants. Our participants expressed emotional intensity using gesture length, pressure, and speed primarily; the choice of attributes was impacted by the specific emotion, and the range and rate of increase of these attributes varied by individual and by emotion. Recognition accuracy of emotional intensity was higher at extreme ends, and was higher for one's own gestures than those made by others. The attributes of size and pressure (mapped to color in the animation) were most readily interpreted, while speed was more difficult to differentiate. We discuss human gesture drawing patterns to express emotional intensities and implications for developers of annotation systems and other touchscreen interfaces that wish to capture affect.},
  keywords  = {emotion, gesture, touchscreen},
  pubstate  = {published},
  tppubtype = {conference}
}
Nabil Bin Hannan; Derek Reilly
User Awareness when Expressing Emotional Intensity using Touchscreen Gestures Workshop
Workshop on Awareness Interfaces and Interactions (AWARE) at IUI 2017, Limassol, Cyprus, 2017.
BibTeX | Tags: emotional recall, gesture
@workshop{BinHannan2017b,
  title     = {User Awareness when Expressing Emotional Intensity using Touchscreen Gestures},
  author    = {Nabil Bin Hannan and Derek Reilly},
  year      = {2017},
  date      = {2017-03-02},
  booktitle = {Workshop on Awareness Interfaces and Interactions (AWARE) at IUI 2017},
  address   = {Limassol, Cyprus},
  keywords  = {emotional recall, gesture},
  pubstate  = {published},
  tppubtype = {workshop}
}
2016
Aniruddha Waje; Khalid Tearo; Raghav V. Sampangi; Derek Reilly
Grab This, Swipe That: Combining Tangible and Gestural Interaction in Multiple Display Collaborative Gameplay Conference
Proceedings of the 2016 ACM International Conference on Interactive Surfaces and Spaces (ISS '16), ACM, Niagara Falls, Canada, 2016.
Abstract | Links | BibTeX | Tags: children, collaborative systems, embodied interaction, gesture, museum, tabletop, tangible interfaces
@conference{Waje2016,
  title     = {Grab This, Swipe That: Combining Tangible and Gestural Interaction in Multiple Display Collaborative Gameplay},
  author    = {Aniruddha Waje and Khalid Tearo and Raghav V. Sampangi and Derek Reilly},
  doi       = {10.1145/2992154.2996794},
  year      = {2016},
  date      = {2016-11-06},
  booktitle = {Proceedings of the 2016 ACM International Conference on Interactive Surfaces and Spaces (ISS '16)},
  publisher = {ACM},
  address   = {Niagara Falls, Canada},
  abstract  = {In this paper, we explore the use of multimodal interfaces (tangible and gestural) in collaborative gameplay. In our setup, gestural interaction is performed to perform tasks with reference content on a wall display, in support of a main activity involving tangible interaction on a tabletop display. We designed two games using this configuration, in order to explore how children share these multimodal tasks during collaborative gameplay. We conducted a pilot within-subjects user study, piloting the games with 35 children between the ages of 8 and 15, and considered the impact of a number of factors (group size, age range, game type and interaction style) on gameplay. We present lessons learned from prototyping and pilot testing tangible+gestural multi-display systems, and describe the resulting refinements made in our design and implementation.},
  keywords  = {children, collaborative systems, embodied interaction, gesture, museum, tabletop, tangible interfaces},
  pubstate  = {published},
  tppubtype = {conference}
}
Nabil Bin Hannan; Felwah Alqahtani; Derek Reilly
JogChalking: Capturing and Visualizing Affective Experience for Recreational Runners Conference
Proceedings of the 2016 ACM Conference Companion Publication on Designing Interactive Systems (DIS 2016), Brisbane, Australia, 2016.
Links | BibTeX | Tags: design, emotional recall, gesture, mobile annotation, running, visualization
@conference{BinHannan2016,
  title     = {JogChalking: Capturing and Visualizing Affective Experience for Recreational Runners},
  author    = {Nabil Bin Hannan and Felwah Alqahtani and Derek Reilly},
  doi       = {10.1145/2908805.2909406},
  year      = {2016},
  date      = {2016-06-06},
  booktitle = {Proceedings of the 2016 ACM Conference Companion Publication on Designing Interactive Systems (DIS 2016)},
  address   = {Brisbane, Australia},
  keywords  = {design, emotional recall, gesture, mobile annotation, running, visualization},
  pubstate  = {published},
  tppubtype = {conference}
}