2025
Rowland Goddy-Worlu; Nirmal Adhikari; Derek Reilly
Virtual Reality for Active Aging: First-Time Experiences of Older Adults with 'First Steps.' Conference
In: Our Future is Aging: Multidisciplinary Research Informing People, Policy and Practice, 2025.
@conference{Goddy-Worlu2025,
  title     = {Virtual Reality for Active Aging: First-Time Experiences of Older Adults with 'First Steps.'},
  author    = {Rowland Goddy-Worlu and Nirmal Adhikari and Derek Reilly},
  year      = {2025},
  date      = {2025-06-04},
  urldate   = {2025-06-04},
  booktitle = {Our Future is Aging: Multidisciplinary Research Informing People, Policy and Practice},
  abstract  = {Virtual Reality (VR) is increasingly recognized in gerontology as a tool for promoting physical activities, enhancing cognitive engagement, reducing isolation, and fostering interactive experiences among older adults. However, seniors often face challenges when learning VR, including physical limitations. Partnering with a local active aging community, we explore First Steps - a beginner-friendly VR application for Meta Quest, across two monthly sessions involving five community members to identify the challenges in using VR and applications of interest. Each session begins with a brief VR exposure, followed by initial impressions, extended VR engagement, and a focus group discussion. Our thematic analysis of the focus group and facilitator debrief sessions revealed key themes that provide insights into the group's various challenges, perceptions and interests. Our findings highlight both challenges - device discomfort, controller interaction difficulties, accessibility, and safety concerns - and opportunities, including enhanced social engagement, facilitator support, immersive enjoyment and potential for unlocking unlived adventures. These insights underscore VR's potential to enrich the lives of older adults when thoughtfully adapted to an active aging community's unique needs and interests for their members.},
  keywords  = {VR},
  pubstate  = {published},
  tppubtype = {conference}
}
Aayush Shrestha; Joseph Malloch
Virtual Worlds Beyond Sight: Designing and Evaluating an Audio-Haptic System for Non-Visual VR Exploration Proceedings Article
In: Proceedings of the ACM SIGCHI Conference on Human Factors in Computing Systems (CHI 2025), pp. 1–19, ACM, 2025.
Abstract | Links | BibTeX | Tags: assistive technology, haptics, navigation, spatial audio, virtual environment, VR
@inproceedings{Shrestha2025,
  title     = {Virtual Worlds Beyond Sight: Designing and Evaluating an Audio-Haptic System for Non-Visual VR Exploration},
  author    = {Aayush Shrestha and Joseph Malloch},
  url       = {https://dl.acm.org/doi/10.1145/3706598.3713400},
  doi       = {10.1145/3706598.3713400},
  year      = {2025},
  date      = {2025-04-26},
  urldate   = {2025-04-26},
  booktitle = {Proceedings of the ACM SIGCHI Conference on Human Factors in Computing Systems (CHI 2025)},
  number    = {812},
  pages     = {1--19},
  publisher = {ACM},
  abstract  = {Contemporary research in Virtual Reality for users who are visually impaired often employs navigation and interaction modalities that are either non-conventional, constrained by physical spaces, or both. We designed and examined a hapto-acoustic VR system that mitigates this by enabling non-visual exploration of large virtual environments using white cane simulation and walk-in place locomotion. The system features a complex urban cityscape incorporating a physical cane prototype coupled with a virtual cane for rendering surface textures, and an omnidirectional slide mill for navigation. In addition, spatialized audio is rendered based on the progression of sound through the geometry around the user. A study involving twenty sighted participants evaluated the system through three formative tasks while blindfolded to simulate absolute blindness. Participants were highly successful in completing all the tasks while effectively navigating through the environment. Our work highlights the potential for accessible, non-visual VR experiences, achievable even with minimal training and little prior exposure to VR.},
  keywords  = {assistive technology, haptics, navigation, spatial audio, virtual environment, VR},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2024
Aayush Shrestha
Virtual Worlds Beyond Sight: Designing and Evaluating an Audio-Haptic System for Non-Visual VR Exploration Masters Thesis
Dalhousie University, 2024.
Abstract | BibTeX | Tags: assistive technology, haptics, navigation, spatial audio, VR
@mastersthesis{Shrestha2024,
  title     = {Virtual Worlds Beyond Sight: Designing and Evaluating an Audio-Haptic System for Non-Visual VR Exploration},
  author    = {Aayush Shrestha},
  year      = {2024},
  date      = {2024-08-09},
  school    = {Dalhousie University},
  abstract  = {Virtual Reality (VR), predominantly focusing on visuospatial renderings in its contemporary approach, has created a conservative narrative, making VR solely analogous to a mediated visual experience. While accessibility is included in the developmental phase of commercial VR applications, it is often considered an add-on, resulting in sub-par virtual experiences that often exclude visually impaired users. This research addresses these limitations by designing a hapto-acoustic VR system that leverages spatial audio and haptic feedback for sensory substitution of visual dominance in VR. A large-scale urban virtual environment (VE) was created using the Unity Game Engine, incorporating a physical cane prototype coupled with a virtual cane for interaction and an omnidirectional slide mill for navigation. A user study with 20 normally sighted participants evaluated and compared the system's effectiveness in texture differentiation and navigation tasks under two conditions: with visual cues and exclusively through audio-haptic feedback. The study results indicated that even with minimal training and limited prior VR experience, participants could navigate the environment effectively in non-visual conditions, though at the cost of increased cognitive load and error rates compared to visual conditions. The evaluation highlights the necessity for improved feedback mechanisms and suggests further validation with visually impaired users. The overall research contributes to the development of accessible VR systems through a novel white cane prototype, realistic spatial audio effects and a comprehensive evaluation demonstrating the system's potential in aiding non-visual navigation in a complex, large-scale VE while also engendering empathetic literacy among sighted users.},
  keywords  = {assistive technology, haptics, navigation, spatial audio, VR},
  pubstate  = {published},
  tppubtype = {mastersthesis}
}
2022
Thiago Malheiros Porcino; Derek Reilly; Daniela Trevisan; Esteban Clua
A guideline proposal for minimizing cybersickness in VR-based serious games and applications Proceedings Article
In: Proceedings of the IEEE 10th International Conference on Serious Games and Applications for Health (SeGAH 2022), IEEE, 2022.
BibTeX | Tags: cybersickness, exergames, gamification, health, VR
@inproceedings{PorcinoSeGAH2022,
  title     = {A guideline proposal for minimizing cybersickness in VR-based serious games and applications},
  author    = {Thiago Malheiros Porcino and Derek Reilly and Daniela Trevisan and Esteban Clua},
  year      = {2022},
  date      = {2022-08-10},
  urldate   = {2022-08-10},
  booktitle = {Proceedings of the IEEE 10th International Conference on Serious Games and Applications for Health (SeGAH 2022)},
  publisher = {IEEE},
  keywords  = {cybersickness, exergames, gamification, health, VR},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
2021
Sathaporn "Hubert" Hu; Joseph Malloch; Derek Reilly
A Comparative Evaluation of Techniques for Locating Out of View Targets in Virtual Reality Proceedings Article
In: Proceedings of the Graphics Interface 2021 Conference, 2021.
Abstract | Links | BibTeX | Tags: peripheral vision, target acquisition, VR
@inproceedings{Hu2021,
  title     = {A Comparative Evaluation of Techniques for Locating Out of View Targets in Virtual Reality},
  author    = {Sathaporn "Hubert" Hu and Joseph Malloch and Derek Reilly},
  doi       = {10.20380/GI2021.32},
  year      = {2021},
  date      = {2021-05-27},
  urldate   = {2021-05-27},
  booktitle = {Proceedings of the Graphics Interface 2021 Conference},
  abstract  = {In this work, we present the design and comparative evaluation of techniques for increasing awareness of out-of-view targets in virtual reality. We first compare two variants of SOUS--a technique that guides the user to out-of-view targets using circle cues in their peripheral vision--with the existing FlyingARrow technique, in which arrows fly from the user's central (foveal) vision toward the target. fSOUS, a variant with low visual salience, performed well in a simple environment but not in visually complex environments, while bSOUS, a visually salient variant, yielded faster target selection than both fSous and FlyingARrow across all environments. We then compare hybrid techniques in which aspects of SOUS relating to unobtrusiveness and visual persistence were reflected in design modifications made to FlyingARrow. Increasing persistence by adding trails to arrows improved performance but there were concerns about obtrusiveness, while other modifications yielded slower and less accurate target acquisition. Nevertheless, since fSOUS and bSOUS are exclusively for head-mounted display with wide field-of-view, FlyingARrow with trail can still be beneficial for devices with limited field-of-view.},
  keywords  = {peripheral vision, target acquisition, VR},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Abbey Singh; Ramanpreet Kaur; Peter Haltner; Matthew Peachey; Mar Gonzalez-Franco; Joseph Malloch; Derek Reilly
Story CreatAR: a Toolkit for Spatially-Adaptive Augmented Reality Storytelling Conference
Proceedings of IEEE VR 2021, IEEE VR, 2021.
Links | BibTeX | Tags: AR, open source software, proxemics, space syntax, VR
@conference{Singh2021,
  title     = {Story CreatAR: a Toolkit for Spatially-Adaptive Augmented Reality Storytelling},
  author    = {Abbey Singh and Ramanpreet Kaur and Peter Haltner and Matthew Peachey and Mar Gonzalez-Franco and Joseph Malloch and Derek Reilly},
  url       = {https://gem.cs.dal.ca/wp-content/uploads/2023/06/Story-CreatAR-a-Toolkit-for-Spatially-Adaptive-Augmented-Reality-Storytelling.pdf},
  year      = {2021},
  date      = {2021-02-01},
  urldate   = {2021-02-01},
  booktitle = {Proceedings of IEEE VR 2021},
  publisher = {IEEE VR},
  keywords  = {AR, open source software, proxemics, space syntax, VR},
  pubstate  = {published},
  tppubtype = {conference}
}
2020
Mar Gonzalez-Franco; Zelia Egan; Matthew Peachey; Angus Antley; Tanmay Randhavane; Payod Panda; Yaying Zhang; Derek Reilly; Tabitha C Peck; Andrea Stevenson Won; Cheng Yao Wang; Anthony Steed; Eyal Ofek
MoveBox: Democratizing MoCap for the Microsoft Rocketbox Avatar Library Proceedings Article
In: Proceedings of the 3rd International Conference on Artificial Intelligence & Virtual Reality, special session on Avatars for AI and VR, IEEE, 2020.
BibTeX | Tags: AR, mixed reality, open source software, VR
@inproceedings{MoveBox2020,
  title     = {MoveBox: Democratizing MoCap for the Microsoft Rocketbox Avatar Library},
  author    = {Mar Gonzalez-Franco and Zelia Egan and Matthew Peachey and Angus Antley and Tanmay Randhavane and Payod Panda and Yaying Zhang and Derek Reilly and Tabitha C Peck and Andrea Stevenson Won and Cheng Yao Wang and Anthony Steed and Eyal Ofek},
  year      = {2020},
  date      = {2020-12-14},
  urldate   = {2020-12-14},
  booktitle = {Proceedings of the 3rd International Conference on Artificial Intelligence \& Virtual Reality, special session on Avatars for AI and VR},
  publisher = {IEEE},
  keywords  = {AR, mixed reality, open source software, VR},
  pubstate  = {published},
  tppubtype = {inproceedings}
}