XR Research
Brewed for Industry
Served as Academic Publications
Wang, Haohong; Gao, Shenghao; Zhao, Yimin; Song, Maojia; Wang, Heng; Rompapas, Damien
2022 IEEE 5th International Conference on Electronics Technology (ICET), IEEE, 2022.
@conference{9824583,
  title     = {The Mind Commands You: Combining Brain-Computer Interactions with Augmented Reality to Control Internet of Things ({IoT}) Tools, and Robotic Platforms},
  author    = {Wang, Haohong and Gao, Shenghao and Zhao, Yimin and Song, Maojia and Wang, Heng and Rompapas, Damien},
  doi       = {10.1109/ICET55676.2022.9824583},
  year      = {2022},
  date      = {2022-05-13},
  urldate   = {2022-05-13},
  booktitle = {2022 IEEE 5th International Conference on Electronics Technology (ICET)},
  pages     = {1026--1031},
  publisher = {IEEE},
  abstract  = {Many researchers are exploring the use of Brain Computer Interfaces (BCI) in combination with Internet of Things (IoT) tools, and robotic control. However, the training and application process for BCI controls can be difficult to achieve partially due to the required double-attention of both the target matter, and the on screen feedback loop used during training. Enter Augmented Reality (AR), a technique for embedding computer generated content (CG) in the user’s view of the environment. In this research, we describe a system that explores the combination of a BCI training environment with AR technologies for both training and run time usage. We show the key advantage of this combination, allowing the user to focus directly on the subject matter. While our work is in the prototype phase, we show that this combination of AR and BCI has the potential to be effective in the training and usage of BCI interfaces.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Lawrence, Louise; Lee, Gun; Billinghurst, Mark; Rompapas, Damien
2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), IEEE, 2022.
@conference{lawrence2022colar,
  title     = {3DColAR: Exploring 3D Color Selection and Surface Painting for Head Worn Augmented Reality using Hand Gestures},
  author    = {Louise Lawrence and Gun Lee and Mark Billinghurst and Damien Rompapas},
  url       = {https://www.beerlabs.com.au/wp-content/uploads/2022/07/Exploring_3D_Color_Selection_and_Surface_Painting_for_Head_Worn_AR_using_Hand_Gestures.pdf},
  doi       = {10.1109/VRW55335.2022.00338},
  year      = {2022},
  date      = {2022-03-12},
  urldate   = {2022-03-12},
  booktitle = {2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
  publisher = {IEEE},
  abstract  = {Color selection and surface painting has been largely unexplored in head-worn Augmented Reality (AR) using hand gestures. In this demonstration we present 3DColAR: A system that implements several 2D and 3D techniques for color selection. We also implement two key approaches for painting a virtual 3D model using mid-air hand gestures. This includes a virtual pen which the user can grasp using their hand, akin to a real pen and the use of the user's fingertip directly on the virtual 3D model. We hope to explore how these various techniques effect user's efficiency and accuracy when performing surface painting of virtual objects using mid-air hand gestures via. several user studies.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Campbell, James; Cassinelli, Alvaro; Saakes, Daniel; Rompapas, Damien
B-Handy: An Augmented Reality System for Biomechanical Measurement Conference
2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW), IEEE 2022.
@conference{9757687,
  title     = {B-Handy: An Augmented Reality System for Biomechanical Measurement},
  author    = {James Campbell and Alvaro Cassinelli and Daniel Saakes and Damien Rompapas},
  url       = {https://www.beerlabs.com.au/wp-content/uploads/2022/07/B_Handy__An_Augmented_Reality_System_for_Biomechanics.pdf},
  doi       = {10.1109/VRW55335.2022.00339},
  year      = {2022},
  date      = {2022-03-12},
  urldate   = {2022-03-12},
  booktitle = {2022 IEEE Conference on Virtual Reality and 3D User Interfaces Abstracts and Workshops (VRW)},
  publisher = {IEEE},
  abstract  = {The study of bio-mechanics allows us to infer measurements for every day objects without needing measurement tools. A limitation of this comes from the complex mental transformations of space involved. The efficiency of this task degrades the larger these measurements become. We present B-Handy, a system that offloads this mental workload by providing visual transformations of space in the form of tracking and duplicating the user's hand in AR. It is our hope that this system will simplify the complexity of these mental transformations and increase the efficiency of bio-mechanical measurements.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Rompapas, Damien Constantine; Campbell, James; Ta, Vincent; Cassinelli, Alvaro
Project Ariel: An Open Source Augmented Reality Headset for Industrial Applications Conference
2021.
@conference{Rompapas2018d,
  title     = {Project Ariel: An Open Source Augmented Reality Headset for Industrial Applications},
  author    = {Damien Constantine Rompapas and James Campbell and Vincent Ta and Alvaro Cassinelli},
  url       = {https://www.beerlabs.com.au/wp-content/uploads/2021/12/Project_Ariel__An_Open_Source_Augmented_Reality_Headset_for_Industrial_Applications.pdf},
  doi       = {10.1145/3460418.3479359},
  year      = {2021},
  date      = {2021-09-21},
  urldate   = {2021-09-21},
  abstract  = {Some of the biggest challenges in applying Augmented Reality (AR) technologies to the industry floor are in the form factor, and safety requirements of the head worn display. This includes alleviating issues such as peripheral view occlusion, and adaptation to personal protective equipment. In this work we present the design of Project Ariel, an Open Source 3D printable display specifically designed for use in industrial environments. It is our hope that with this technology, the average tradesman can utilize the powerful visualizations AR has to offer, significantly improving their daily work flow.},
  keywords  = {Augmented Reality; Headset Design; Optical See-Through; Open Source},
  pubstate  = {published},
  tppubtype = {conference}
}
KEYWORDS
Augmented Reality; Headset Design; Optical See-Through; Open Source;
Reddy, G S Rajeshekar; Rompapas, Damien
Liquid Hands: Evoking Emotional States via Augmented Reality Music Visualizations Conference
IMX '21: ACM International Conference on Interactive Media Experiences, ACM, 2021.
@conference{10.1145/3452918.3465496,
  title     = {Liquid Hands: Evoking Emotional States via Augmented Reality Music Visualizations},
  author    = {G S Rajeshekar Reddy and Damien Rompapas},
  doi       = {10.1145/3452918.3465496},
  year      = {2021},
  date      = {2021-06-16},
  urldate   = {2021-06-16},
  booktitle = {IMX '21: ACM International Conference on Interactive Media Experiences},
  pages     = {305--310},
  publisher = {ACM},
  abstract  = {Music performances have transformed in unprecedented ways with the advent of digital music. Plenty of music visualizers enhance live performances in various forms, including LED display boards and holographic illustrations. However, the impracticability of live performances due to the CoVID-19 outbreak has led to event organizers adopting alternatives in virtual environments. In this work, we propose Liquid Hands, an Augmented Reality (AR) music visualizer system, wherein three-dimensional particles react to the flow of music, forming a visually aesthetic escapade. With hand-particle interactions, Liquid Hands aims to enrich the music listening experience in one’s personal space and bridge the gap between virtual and physical concerts. We intend to explore the emotions our system induces by conducting a pilot study, in which we measure the user’s psychological state through Electroencephalography (EEG). We hypothesize that the proposed system will evoke emotions akin to those exhibited in live music performances.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Reichherzer, Carolin; Fraser, Jack; Rompapas, Damien Constantine; Billinghurst, Mark
SecondSight: A Framework for Cross-Device Augmented Reality Interfaces Conference
CHI EA '21: Extended Abstracts of the 2021 CHI Conference on Human Factors in Computing Systems, 2021.
@conference{10.1145/3411763.3451839,
  title     = {SecondSight: A Framework for Cross-Device Augmented Reality Interfaces},
  author    = {Carolin Reichherzer and Jack Fraser and Damien Constantine Rompapas and Mark Billinghurst},
  doi       = {10.1145/3411763.3451839},
  year      = {2021},
  date      = {2021-05-13},
  urldate   = {2021-05-13},
  booktitle = {CHI EA '21: Extended Abstracts of the 2021 CHI Conference on Human Factors in Computing Systems},
  publisher = {ACM},
  abstract  = {This paper describes a modular framework developed to facilitate the design space exploration of cross-device Augmented Reality (AR) interfaces that combine an AR head-mounted display (HMD) with a smartphone. Currently, there is a growing interest in how AR HMDs can be used with smartphones to improve the user’s AR experience. In this work, we describe a framework that enables rapid prototyping and evaluation of an interface. Our system enables different modes of interaction, content placement, and simulated AR HMD field of view to assess which combination is best suited to inform future researchers on design recommendations. We provide examples of how the framework could be used to create sample applications, the types of the studies which could be supported, and example results from a simple pilot study.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Rompapas, Damien; Steven, Yoon; Chan, Jonothan
A Hybrid Approach to Teaching Computational Thinking at a K-1 and K-2 Level Conference
Proceedings of the CTE-STEM, 2021, 2021.
@conference{rompapas2021hybrid,
  title     = {A Hybrid Approach to Teaching Computational Thinking at a K-1 and K-2 Level},
  author    = {Damien Rompapas and Yoon Steven and Jonothan Chan},
  url       = {https://www.beerlabs.com.au/wp-content/uploads/2022/07/A_Hybrid_Approach_to_Teaching_Computational_Thinking_at_a_K_1_and_K_2_Level.pdf},
  year      = {2021},
  date      = {2021-05-13},
  urldate   = {2021-05-13},
  booktitle = {Proceedings of the CTE-STEM, 2021},
  abstract  = {Computational Thinking (CT) has been described as taking an approach to solving problems, designing systems and understanding human behavior that draws on concepts fundamental to computing. It is the ability to integrate human creativity and insight with concepts derived from Computer Science. We argue that it is best to learn the fundamentals of CT at a young age, when the mind is most malleable, instead of much later when these concepts are taught as part of Computer Science courses. However, challenges arise not only when trying to teach these complex concepts to young children, but also when applying these teachings through kindergarten environments. We present a definition of the basic fundamental CT concepts and then describe a unique hybrid approach of offline and online activities to teach these fundamentals to students at the kindergarten (K1 and K2) level (children aged 4-6 years old). Finally we validate this approach with a pilot class to determine its learning effectiveness.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Rompapas, Damien Constantine; Quiros, Daniel Flores; Rodda, Charlton; Brown, Bryan Christopher; Zerkin, Noah Benjamin; Cassinelli, Alvaro
Project Esky: an Open Source Software Framework for High Fidelity Extended Reality Conference
2021.
@conference{Rompapas2018c,
  title     = {Project Esky: an Open Source Software Framework for High Fidelity Extended Reality},
  author    = {Damien Constantine Rompapas and Daniel Flores Quiros and Charlton Rodda and Bryan Christopher Brown and Noah Benjamin Zerkin and Alvaro Cassinelli},
  url       = {https://www.beerlabs.com.au/wp-content/uploads/2021/12/Esky__ISMAR_Submission.pdf},
  year      = {2021},
  date      = {2021-05-08},
  urldate   = {2021-05-08},
  abstract  = {This demonstration showcases a complete Open-Source Augmented Reality (AR) modular platform capable of high fidelity natural handinteractions with virtual content, high field of view, and spatial mapping for environment interactions. We do this via several live desktop demonstrations. Finally, included in this demonstration is a completed open source schematic, allowing anyone interested in utilizing our proposed platform to engage with high fidelity AR. It is our hope that the work described in this demo will be a stepping stone towards bringing high-fidelity AR content to researchers and commodity users alike.},
  keywords  = {Augmented Reality, High Fidelity, Collaborative Augmented Reality, Open Source Platforms},
  pubstate  = {published},
  tppubtype = {conference}
}
Keywords: Augmented Reality, High Fidelity, Collaborative Augmented Reality, Open Source Platforms
Rompapas, Damien; Sandor, Christian; Plopski, Alexander; Daniel Saakes, Dong Hyeok Yun; Taketomi, Takafumi; Kato, Hirokazu
Holoroyale: A Large Scale High Fidelity Augmented Reality Game Conference
2018, ISBN: 978-1-4503-5949-8/18/10.
@conference{Rompapas2018,
  title     = {Holoroyale: A Large Scale High Fidelity Augmented Reality Game},
  author    = {Damien Rompapas and Christian Sandor and Alexander Plopski and Daniel Saakes and Dong Hyeok Yun and Takafumi Taketomi and Hirokazu Kato},
  url       = {https://www.beerlabs.com.au/wp-content/uploads/2021/12/HoloRoyale___UIST.pdf},
  doi       = {10.1145/3266037.3271637},
  isbn      = {978-1-4503-5949-8/18/10},
  year      = {2018},
  date      = {2018-10-11},
  urldate   = {2018-10-11},
  abstract  = {Recent years saw an explosion in Augmented Reality (AR) experiences for consumers. These experiences can be classified based on the scale of the interactive area (room vs city/global scale) , or the fidelity of the experience (high vs low) [4]. Experiences that target large areas, such as campus or world scale [7, 6], commonly have only rudimentary interactions with the physical world, and suffer from registration errors and jitter. We classify these experiences as large scale and low fidelity. On the other hand, various room sized experiences [5, 8] feature realistic interaction of virtual content with the real world. We classify these experiences as small scale and high fidelity.
Our work is the first to explore the domain of large scale high fidelity (LSHF) AR experiences. We build upon the small scale high fidelity capabilities of the Microsoft HoloLens to allow LSHF interactions. We demonstrate the capabilities of our system with a game specifically designed for LSHF
interactions, handling many challenges and limitations unique to the domain of LSHF AR through the game design.
Our contributions are twofold:
The lessons learned during the design and development of a system capable of LSHF AR interactions.
Identification of a set of reusable game elements specific to LSHF AR, including mechanisms for addressing spatio-temporal inconsistencies and crowd control. We believe our contributions will be fully applicable not only to games, but all LSHF AR experiences.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Recent years saw an explosion in Augmented Reality (AR) experiences for consumers. These experiences can be classified based on the scale of the interactive area (room vs city/global scale) , or the fidelity of the experience (high vs low) [4]. Experiences that target large areas, such as campus or world scale [7, 6], commonly have only rudimentary interactions with the physical world, and suffer from registration errors and jitter. We classify these experiences as large scale and low fidelity. On the other hand, various room sized experiences [5, 8] feature realistic interaction of virtual content with the real world. We classify these experiences as small scale and high fidelity.
Our work is the first to explore the domain of large scale high fidelity (LSHF) AR experiences. We build upon the small scale high fidelity capabilities of the Microsoft HoloLens to allow LSHF interactions. We demonstrate the capabilities of our system with a game specifically designed for LSHF
interactions, handling many challenges and limitations unique to the domain of LSHF AR through the game design.
Our contributions are twofold:
The lessons learned during the design and development of a system capable of LSHF AR interactions.
Identification of a set of reusable game elements specific to LSHF AR, including mechanisms for addressing spatio-temporal inconsistencies and crowd control. We believe our contributions will be fully applicable not only to games, but all LSHF AR experiences.
Santos, Marc Ericson C.; Rompapas, Damien; Nishiki, Yoshinari; Taketomi, Takafumi; Yamamoto, Goshiro
The COMPASS Framework for Digital Entertainment: Discussing Augmented Reality Activities for Scouts Conference
ACE '16: Proceedings of the 13th International Conference on Advances in Computer Entertainment Technology, 2016.
@conference{santos2016compass,
  title     = {The COMPASS Framework for Digital Entertainment: Discussing Augmented Reality Activities for Scouts},
  author    = {Marc Ericson C. Santos and Damien Rompapas and Yoshinari Nishiki and Takafumi Taketomi and Goshiro Yamamoto},
  doi       = {10.1145/3001773.3001799},
  year      = {2016},
  booktitle = {ACE '16: Proceedings of the 13th International Conference on Advances in Computer Entertainment Technology},
  publisher = {ACM},
  abstract  = {Entertainment is challenging to observe, especially with children, due to limited analytical tools. In response, we present a modified framework for entertainment computing, COMPASS -- COmbined Mental, PhysicAl, Social and Spatial factors, which we use to analyze augmented reality activities for cub scouts.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Sorry, no publications matched your criteria.