2024
Shrestha, Aayush
Virtual Worlds Beyond Sight: Designing and Evaluating an Audio-Haptic System for Non-Visual VR Exploration Masters Thesis
Dalhousie University, 2024.
@mastersthesis{Shrestha2024,
title = {Virtual Worlds Beyond Sight: Designing and Evaluating an Audio-Haptic System for Non-Visual VR Exploration},
author = {Aayush Shrestha},
year = {2024},
date = {2024-08-09},
school = {Dalhousie University},
abstract = {Contemporary Virtual Reality (VR), with its predominant focus on visuospatial rendering, has created a conservative narrative that equates VR with a mediated visual experience. While accessibility is included in the development phase of commercial VR applications, it is often treated as an add-on, resulting in sub-par virtual experiences that often exclude visually impaired users. This research addresses these limitations by designing a hapto-acoustic VR system that leverages spatial audio and haptic feedback for sensory substitution of visual dominance in VR. A large-scale urban virtual environment (VE) was created using the Unity Game Engine, incorporating a physical cane prototype coupled with a virtual cane for interaction and an omnidirectional slide mill for navigation. A user study with 20 normally sighted participants evaluated and compared the system's effectiveness in texture differentiation and navigation tasks under two conditions: with visual cues and exclusively through audio-haptic feedback. The study results indicated that even with minimal training and limited prior VR experience, participants could navigate the environment effectively in non-visual conditions, though at the cost of increased cognitive load and error rates compared to visual conditions. The evaluation highlights the necessity for improved feedback mechanisms and suggests further validation with visually impaired users. The overall research contributes to the development of accessible VR systems through a novel white cane prototype, realistic spatial audio effects, and a comprehensive evaluation demonstrating the system's potential in aiding non-visual navigation in a complex, large-scale VE while also engendering empathetic literacy among sighted users.},
keywords = {assistive technology, haptics, navigation, spatial audio, VR},
pubstate = {published},
tppubtype = {mastersthesis}
}
Ghaeinian, Seyed Adel
Design and Implementation of an Interactive Visual Querying System for Maritime Data Masters Thesis
Dalhousie University, 2024.
@mastersthesis{Ghaeinian2024,
title = {Design and Implementation of an Interactive Visual Querying System for Maritime Data},
author = {Seyed Adel Ghaeinian},
year = {2024},
date = {2024-06-10},
school = {Dalhousie University},
abstract = {Automatic Identification System (AIS) data is a crucial foundation of maritime operations such as navigation, traffic management, and safety monitoring. This requires robust management, visualization, and interactive visual analytics systems for the operators. Since usability, effectiveness, and accuracy are key factors in such maritime operations, it is important to design, implement, and evaluate these tools meticulously and to incorporate the latest advancements in user interaction and analytics. This research explores the design and implementation of a novel system architecture for maritime data querying and exploration, enabling enhanced user interactions through direct-manipulation techniques. This system architecture provides a collaborative environment that incorporates Mixed Reality (MR) and a touchable tabletop interface, as well as the iterative design and evaluation of a graph-based Visual Query Builder (VQB). The aim of the VQB interface is to allow non-experts to explore and query maritime data without technical skills while enhancing decision-making and spatial awareness in maritime operations. This research further evaluates the VQB interface in terms of efficiency, accuracy, and user preferences through a user study. For this study, we developed a baseline web-based textual interface for SPARQL queries, enhanced with auto-correction features and additional spatial querying capabilities, to effectively measure and fairly compare the performance of the VQB interface. Twenty students from the Faculty of Computer Science, none with prior knowledge of RDF data querying or the SPARQL language, participated in this study. Overall, the results were promising: participants using the VQB interface showed higher efficiency and accuracy as well as lower perceived workload compared to the baseline. These findings highlight the role of visual query interfaces in improving the user experience and elevating efficiency, especially for non-experts, allowing them to explore and query maritime data without needing to learn technical skills.},
keywords = {AMNIS, tabletop displays, visualization},
pubstate = {published},
tppubtype = {mastersthesis}
}
"Hubert" Hu, Sathaporn
2024.
Links | BibTeX | Tags: AR, ARTIV, immersive visualization, statistics
@phdthesis{Hu2024,
title = {A Tablet + Augmented Reality Interface for Interactive Multiple Linear Regression with Geospatial Data},
author = {Sathaporn "Hubert" Hu},
url = {http://hdl.handle.net/10222/83437},
year = {2024},
date = {2024-01-29},
urldate = {2024-01-29},
school = {Dalhousie University},
keywords = {AR, ARTIV, immersive visualization, statistics},
pubstate = {published},
tppubtype = {phdthesis}
}
2023
Franz, Juliano
Enhancing Collaboration Through Role-Specific Information Sharing PhD Thesis
Dalhousie University, 2023.
@phdthesis{Franz2023,
title = {Enhancing Collaboration Through Role-Specific Information Sharing},
author = {Juliano Franz},
url = {https://dalspace.library.dal.ca/bitstream/handle/10222/82339/JulianoFranz2023.pdf},
year = {2023},
date = {2023-03-17},
school = {Dalhousie University},
abstract = {Information sharing, either directly or through summaries and proxies, is one of the pillars of collaborative systems in human-computer interaction. Representing and sharing role-specific information in collocated systems is one of the many challenges in groupware. In my thesis, I explore methods and their impacts on sharing role-based information in applications where users do not have uniform access to information, either because they are using different technologies to access data or because there is no support for the flow of information. I present tools spanning both "What You See Is What I See" and "What You See Is Not What I See" approaches, displaying either raw or summarized information to users. I present three studies leading towards an approach that supports road cyclists in performing better as a unit by sharing their exertion information among all group members while leveraging technology ubiquitous to them. I start my work with a study exploring how to enable shared augmented reality experiences in museums when only one of the group members has access to an augmented reality headset, either because there are not enough devices available or because someone might feel uncomfortable using one in public. I propose two approaches for AR experiences (Over-the-Shoulder AR and Semantic Linking) and a complementary technique (Indicator Rings), evaluate them in a long-term in-the-wild study, and discuss their impacts on museum scenarios and other applications. Later, I present a design research study exploring the work done by road cyclists while training together, following a Contextual Design methodology. I expose their strategies and challenges with group coordination and communication and uncover a lack of support in standard tools for sharing individual metrics such as effort. Finally, I present my last study comparing the impacts on performance and self-reported metrics of two methods for road cyclists to share their exertion levels with group members while exercising: Paceguide and RPE View. I then discuss the broader implications of my work for the HCI community while proposing future research avenues in Sport-HCI and other co-located work domains.},
keywords = {augmented reality, collaborative systems, cycling},
pubstate = {published},
tppubtype = {phdthesis}
}
2021
Haltner, Peter
A Comparative Evaluation of Augmented Reality Learning Techniques and Traditional Learning Materials for Bead Weaving Masters Thesis
Dalhousie University, 2021.
@mastersthesis{Haltner2021,
title = {A Comparative Evaluation of Augmented Reality Learning Techniques and Traditional Learning Materials for Bead Weaving},
author = {Peter Haltner},
year = {2021},
date = {2021-12-13},
school = {Dalhousie University},
abstract = {The most common learning materials for handcraft today are videos and figures, which are limited in their ability to express embodied knowledge in the way an in-person tutor can. I developed EmbodiAR, an application for headworn augmented reality (AR) displays designed to teach beginner bead weaving patterns using virtual 3D hands that show weaving sequences recorded from an experienced bead weaver and a dynamic 3D bead model showing how the work progresses. Using a mixed within/between-subjects user study (n=30), I compared the AR learning materials with videos and figures, and I compared learning material placement at the area of work or off to the side. Quantitative and qualitative data analysis shows that the new AR learning materials had comparable effectiveness to the more traditional videos and figures. Hand visualizations were found to lack crucial context, however, making them less useful than the animated 3D model of beadwork. Additionally, extra measures to prevent obstruction are required when placing learning materials at the area of work.},
keywords = {},
pubstate = {published},
tppubtype = {mastersthesis}
}
Singh, Abbey
Story CreatAR: a spatial analysis tool for space-adaptive story content placement Masters Thesis
Dalhousie University, 2021.
@mastersthesis{Singh2021b,
title = {Story CreatAR: a spatial analysis tool for space-adaptive story content placement},
author = {Abbey Singh},
year = {2021},
date = {2021-12-06},
urldate = {2021-12-06},
school = {Dalhousie University},
abstract = {Immersive locative augmented reality (AR) provides rich storytelling possibilities. When authors place content in a locative AR narrative, they need to consider the relationships between the environment, the player's perspective, and the story events. This becomes more challenging and time-consuming when designing for multiple and/or unknown locations. We present Story CreatAR, a locative authoring tool that uses spatial analysis techniques to facilitate multi-site deployment. Story CreatAR was iteratively designed to help authors consider, test, and adjust spatial relationships in their story. We evaluated Story CreatAR in several ways: authors used Story CreatAR with a graph representation of their story and with a script, and developers did the same. Through a thematic analysis of user comments and behaviour we find that authors had difficulty understanding spatial analysis concepts and thinking in the abstract about placement across multiple sites. However, Story CreatAR encouraged authors to consider how the physical environment impacts their stories, and authors desired additional spatial analysis features.},
keywords = {AR},
pubstate = {published},
tppubtype = {mastersthesis}
}
Alnusayri, Mohammed
ProxemicUI: Iterative Design and Evaluation of a Flexible and Generic Framework for Proxemics Aware Applications PhD Thesis
Dalhousie University, 2021.
@phdthesis{Alnusayri2021,
title = {ProxemicUI: Iterative Design and Evaluation of a Flexible and Generic Framework for Proxemics Aware Applications},
author = {Mohammed Alnusayri},
year = {2021},
date = {2021-08-19},
urldate = {2021-08-19},
school = {Dalhousie University},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}
Peachey, Matthew
Effectively and Efficiently Representing Hand-Tracking Data using Named Signal Instances Bachelor Thesis
Dalhousie University, 2021.
@bachelorthesis{Peachey2021,
title = {Effectively and Efficiently Representing Hand-Tracking Data using Named Signal Instances},
author = {Matthew Peachey},
year = {2021},
date = {2021-04-15},
urldate = {2021-04-15},
institution = {Dalhousie University},
abstract = {Hand-tracking has been an important research topic in both Human-Computer Interaction and Machine Learning in recent years. It has many meaningful use cases, ranging from input mechanisms for virtual and augmented reality systems to sign language recognition and translation services. Several high-quality research projects and consumer-grade products have been introduced recently, which has made hand-tracking more accessible to both developers and users than ever before. While many resources have been spent on developing these highly accurate hand-tracking services, significantly less effort has been spent on defining effective and efficient representations for the output data that these services report. As a solution to these shortcomings, this thesis introduces the concept of "named instances", which aims to provide a conceptual framework for representing hand-tracking data in a way that is accessible to users with all manner of unique use cases.},
keywords = {},
pubstate = {published},
tppubtype = {bachelorthesis}
}
2018
Salimian, Hossein
Exploring the Impact of Asymmetrical Interfaces on Presence and Group Awareness in Mixed Reality Collaborative Environments PhD Thesis
Dalhousie University, 2018.
@phdthesis{Salimian2018b,
title = {Exploring the Impact of Asymmetrical Interfaces on Presence and Group Awareness in Mixed Reality Collaborative Environments},
author = {Hossein Salimian},
url = {https://gem.cs.dal.ca/wp-content/uploads/2022/06/salimian-MohamadHossein-PhD-December-2018.pdf},
year = {2018},
date = {2018-12-18},
urldate = {2018-12-18},
school = {Dalhousie University},
abstract = {Mixed Reality (MR) can be used for mixed presence collaboration by connecting physical and virtual worlds to create an integrated space: remote collaborators connect virtually to a physical workspace inhabited by collocated collaborators. While a “What-You-See-Is-What-I-See” (WYSIWIS) approach holds benefits for group awareness in traditional desktop collaborative systems, WYSIWIS is made problematic by the fundamental asymmetry of many MR configurations. This thesis examines the relationship between interface asymmetry, group awareness, and a sense of co-presence in MR collaborative spaces. We conducted a user study with three scenarios involving hiding and sharing blended physical-virtual documents around a fused physical-virtual tabletop. The remote collaborator was presented to collocated collaborators as an avatar in a VE in tableside and circumambient display conditions. Collocated collaborators actively sought information about how the physical and virtual environments were mapped chiefly when this was relevant to the tasks, although the circumambient displays generated more curiosity than a single tableside display about how the spaces were connected. Most participants felt that keeping documents away from the tabletop was sufficient to hide them from the remote collaborator, but indications that remote participants could somehow “see” around the physical environment in WYSIWIS fashion led some participants to trust the integrated physical-virtual environment less. We further investigated how the nature of WYSIWIS abstraction in a collaborative MR environment impacts collaborators’ awareness and feeling of co-presence, specifically for tasks involving 3D artefacts. Collocated collaborators used a tabletop display, while remote collaborators used either a tabletop display or a head-mounted display and physical proxy table to work on tasks involving 3D object manipulation. The results of the study suggest that an immersive VE significantly increases group awareness and the feeling of being co-present for both remote and collocated collaborators in comparison to a pure WYSIWIS tabletop configuration. Presenting 3D models in front of the remote participant above the virtual tabletop (Hover) or within the virtual tabletop (Fishtank) did not yield significant differences in group awareness or presence, despite Fishtank providing a more WYSIWIS experience. In addition, a significant percentage of remote participants preferred presentation over the virtual tabletop. The lack of toolkit support for our research motivated us to combine the software technologies and algorithms used in our work to create a Unity toolkit for rapidly prototyping immersive mixed reality collaborative environments (IMRCE). The IMRCE toolkit helps developers add five components to their systems: hand tracking (visualized and synchronized on all clients), position tracking, touch gestures, virtual reality interaction, and client/server functionality. We evaluated the usability of our toolkit by conducting an A/B comparison between IMRCE and common Unity libraries. The results showed that the IMRCE toolkit made a significant improvement in time to completion, lines of code, number of features, and number of bugs in comparison to development without IMRCE.},
keywords = {},
pubstate = {published},
tppubtype = {phdthesis}
}
2015
Alnusayri, Mohammed
Proximity Table: Exploring Tabletop Interfaces that Respond to Body Position and Motion Masters Thesis
Dalhousie University, 2015.
@mastersthesis{Alnusayri2015,
title = {Proximity Table: Exploring Tabletop Interfaces that Respond to Body Position and Motion},
author = {Mohammed Alnusayri},
url = {https://gem.cs.dal.ca/wp-content/uploads/2022/06/Alnusayri-Mohammed-MCSc-CSCI-March-2015.pdf},
year = {2015},
date = {2015-04-06},
school = {Dalhousie University},
keywords = {},
pubstate = {published},
tppubtype = {mastersthesis}
}