2023
Nasirinejad, Majid; Reilly, Derek
mF+C: Linking Suboptimal Projections to Detail on Handheld Devices Journal Article
In: International Journal of Human-Computer Studies (IJHCS), 2023, ISSN: 1071-5819.
Tags: mobile, projection, ubiquitous computing, visualization
@article{Nasirinejad2023,
title = {mF+C: Linking Suboptimal Projections to Detail on Handheld Devices},
author = {Majid Nasirinejad and Derek Reilly},
doi = {10.1016/j.ijhcs.2023.103170},
issn = {1071-5819},
year = {2023},
date = {2023-10-14},
journal = {International Journal of Human-Computer Studies (IJHCS)},
abstract = {Mobile Focus + Context (mF+C) involves using a handheld device as a focus screen for content on an immersive display or mobile projector. In this work we examine how using a focus device can mitigate poor context image quality due to environmental factors. In an exploratory study we compare three techniques for linking focus and context: lens-focus (Lens), where the device works as a mobile lens held parallel to and in front of the context, center-focus (Centered), where the user holds the device in the center of the projection and pans both context and focus by swiping, and marker-focus (Marker), where the focus region is highlighted on the context, and the user pans the focus by swiping. Participants performed searching, tracing, and detail acquisition tasks with maps and electronics diagrams under a range of simulated projection conditions. All techniques were able to mitigate poor projection quality and performed comparably in time and precision, but the effectiveness of each technique was impacted by task type. There was lower variation in time between participants using Lens for tracing tasks than with the other techniques, but wider variation for searching tasks. Tasks completed using sub-optimal projections involved more time spent looking at the context image than tasks with clear projections; however, this difference is less pronounced for the Lens technique. We propose a hybrid Lens-Marker approach for mobile Focus+Context applications in dynamic environments.},
keywords = {mobile, projection, ubiquitous computing, visualization},
pubstate = {published},
tppubtype = {article}
}
2022
Franz, Juliano; Reilly, Derek
Ride With Me: Exploring Group Road Cycling Through Contextual Design Proceedings Article
In: Proceedings of ACM SIGCHI Conference on Designing Interactive Systems (DIS 2022), ACM, 2022.
Tags: annotation, collaborative systems, cycling, design, ethnography, mobile, visualization
@inproceedings{FranzDIS2022,
title = {Ride With Me: Exploring Group Road Cycling Through Contextual Design},
author = {Juliano Franz and Derek Reilly},
year = {2022},
date = {2022-06-20},
urldate = {2022-06-20},
booktitle = {Proceedings of ACM SIGCHI Conference on Designing Interactive Systems (DIS 2022)},
publisher = {ACM},
keywords = {annotation, collaborative systems, cycling, design, ethnography, mobile, visualization},
pubstate = {published},
tppubtype = {inproceedings}
}
2018
Alqahtani, Felwah; Reilly, Derek
It’s the Gesture That (re)Counts: Annotating While Running to Recall Affective Experience Conference
Proceedings of Graphics Interface (GI 2018), Toronto, Canada, 2018.
Tags: embodied interaction, emotional recall, gesture, mobile, running, spatial annotation, visualization
@conference{Alqahtani2018,
title = {It’s the Gesture That (re)Counts: Annotating While Running to Recall Affective Experience},
author = {Felwah Alqahtani and Derek Reilly},
doi = {10.20380/GI2018.12},
year = {2018},
date = {2018-05-08},
booktitle = {Proceedings of Graphics Interface (GI 2018)},
address = {Toronto, Canada},
abstract = {We present results from a study exploring whether gestural annotations of felt emotion presented on a map-based visualization can support recall of affective experience during recreational runs. We compare gestural annotations with audio and video notes and a “mental note” baseline. In our study, 20 runners were asked to record their emotional state at regular intervals while running a familiar route. Each runner used one of the four methods to capture emotion over four separate runs. Five days after the last run, runners used an interactive map-based visualization to review and recall their running experiences. Results indicate that gestural annotation promoted recall of affective experience more effectively than the baseline condition, as measured by confidence in recall and detail provided. Gestural annotation was also comparable to video and audio annotation in terms of recollection confidence and detail. Audio annotation supported recall primarily through the runner's spoken annotation, though background sounds were sometimes used as cues. Video annotation yielded the most detail, much of it directly related to visual cues in the video; however, using video annotations required runners to stop during their runs. Given these results, we propose that background logging of ambient sounds and video may supplement gestural annotation.},
keywords = {embodied interaction, emotional recall, gesture, mobile, running, spatial annotation, visualization},
pubstate = {published},
tppubtype = {conference}
}
2016
Hannan, Nabil Bin; Alqahtani, Felwah; Reilly, Derek
JogChalking: Capturing and Visualizing Affective Experience for Recreational Runners Conference
Proceedings of the 2016 ACM Conference Companion Publication on Designing Interactive Systems (DIS 2016), Brisbane, Australia, 2016.
Tags: design, emotional recall, gesture, mobile annotation, running, visualization
@conference{BinHannan2016,
title = {JogChalking: Capturing and Visualizing Affective Experience for Recreational Runners},
author = {Nabil Bin Hannan and Felwah Alqahtani and Derek Reilly},
doi = {10.1145/2908805.2909406},
year = {2016},
date = {2016-06-06},
booktitle = {Proceedings of the 2016 ACM Conference Companion Publication on Designing Interactive Systems (DIS 2016)},
address = {Brisbane, Australia},
keywords = {design, emotional recall, gesture, mobile annotation, running, visualization},
pubstate = {published},
tppubtype = {conference}
}