@inproceedings{72f2a0e27e2743748170b14125428c8f,
  title     = {Depth Based Interaction and Field of View Manipulation for Augmented Reality},
  abstract  = {In recent years, the market for portable devices has seen a large increase in the development of head mounted displays. While these displays provide many benefits to users, safety is still a concern. In particular, ensuring that content does not interfere with everyday activities and that users have adequate peripheral vision is very important for situational awareness. In this paper, I address these issues through the use of two novel display prototypes. The first is an optical see-through multi-focal plane display combined with an eye tracking interface. Through eye tracking and knowledge of the focal plane distances, I can calculate whether a user is looking at the environment or at a focal plane in the display. Any distracting text can then be quickly removed so that he or she has a clear view of the environment. The second prototype is a video see-through display which expands a user's environmental view through the use of 238{\textdegree} ultra wide field of view fisheye lenses. Based on the results of several initial evaluations, these new interfaces have the potential help users improve environmental awareness.},
  keywords  = {Augmented reality, Eye tracking, Fisheye vision, Multi-focal plane, Spatial interaction, Wide field of view},
  author    = {Orlosky, Jason},
  year      = {2014},
  month     = oct,
  day       = {5},
  doi       = {10.1145/2658779.2661164},
  language  = {English (US)},
  series    = {{UIST} 2014 - Adjunct Publication of the 27th Annual {ACM} Symposium on User Interface Software and Technology},
  publisher = {Association for Computing Machinery},
  pages     = {5--8},
  booktitle = {{UIST} 2014 - Adjunct Publication of the 27th Annual {ACM} Symposium on User Interface Software and Technology},
  note      = {27th Annual ACM Symposium on User Interface Software and Technology, UIST 2014 ; Conference date: 05-10-2014 Through 08-10-2014},
}