From 3ee5a4d00f57c1a1a181845e560a7ddaa5458e81 Mon Sep 17 00:00:00 2001
From: Andreas Bulling
Date: Mon, 13 Feb 2023 10:43:12 +0100
Subject: [PATCH] Update 'Home'
---
 Home.md | 124 ++++++++++++++++++++++++++++----------------------------
 1 file changed, 62 insertions(+), 62 deletions(-)

diff --git a/Home.md b/Home.md
index d926581..607c280 100644
--- a/Home.md
+++ b/Home.md

# OpenGaze: Open Source Toolkit for Camera-Based Gaze Estimation and Interaction

Appearance-based gaze estimation methods that require only an off-the-shelf camera have improved significantly and promise a wide range of new applications in gaze-based interaction and attentive user interfaces. However, these methods are not yet widely used in the human-computer interaction (HCI) community.

To democratize their use in HCI, we present OpenGaze, the first software toolkit developed specifically for gaze interface designers. OpenGaze is open source and aims to implement state-of-the-art methods for camera-based gaze estimation and interaction.

## Functionality

The toolkit performs the following gaze-related tasks (see the sketch after the list):

* **Gaze Estimation**
Estimate and show a user's gaze on a screen in real time.

[![Demo](https://img.youtube.com/vi/aenp4ZWjBZo/0.jpg)](https://youtu.be/aenp4ZWjBZo "Gaze Estimation")

 

+ +* **Gaze Visualization** +Plot gaze direction in images. + +[![Demo](https://img.youtube.com/vi/9Lujg3beiYI/0.jpg)](https://youtu.be/9Lujg3beiYI "Gaze Visualization") +

 

+ +* **Personal Calibration** +Perform personal calibration and remap the gaze target on a screen. + +[![Demo](https://img.youtube.com/vi/BjhZcRw4N-w/0.jpg)](https://youtu.be/BjhZcRw4N-w "Personal Calibration") +

 

+ +## Installation +[Unix Installation](https://git.hcics.simtech.uni-stuttgart.de/public-projects/opengaze/wiki/Unix-installation) + +## Use +[Command line arguments](https://git.hcics.simtech.uni-stuttgart.de/public-projects/opengaze/wiki/Command-line-arguments) + +## Citation +**If you use any of the resources provided on this page in any of your publications, please cite the following paper:** + +``` +Evaluation of Appearance-Based Methods and Implications for Gaze-Based Applications +Xucong Zhang, Yusuke Sugano, Andreas Bulling +Proc. ACM SIGCHI Conference on Human Factors in Computing Systems (CHI), 2019 +``` +[Project page](https://www.perceptualui.org/publications/zhang19_chi/) + +@inproceedings{zhang19_chi,
+title = {Evaluation of Appearance-Based Methods and Implications for Gaze-Based Applications},
+author = {Xucong Zhang and Yusuke Sugano and Andreas Bulling},
+doi = {10.1145/3290605.3300646},
+year = {2019},
+booktitle = {Proc. ACM SIGCHI Conference on Human Factors in Computing Systems (CHI)},
+abstract = {Appearance-based gaze estimation methods that only require an off-the-shelf camera have significantly improved but they are still not yet widely used in the human-computer interaction (HCI) community. This is partly because it remains unclear how they perform compared to model-based approaches as well as dominant, special-purpose eye tracking equipment. To address this limitation, we evaluate the performance of state-of-the-art appearance-based gaze estimation for interaction scenarios with and without personal calibration, indoors and outdoors, for different sensing distances, as well as for users with and without glasses. We discuss the obtained findings and their implications for the most important gaze-based applications, namely explicit eye input, attentive user interfaces, gaze-based user modelling, and passive eye monitoring. To democratise the use of appearance-based gaze estimation and interaction in HCI, we finally present OpenGaze (www.opengaze.org), the first software toolkit for appearance-based gaze estimation and interaction.}
+} + +## License + +The license agreement can be found in [LICENSE](https://git.perceptualui.org/public-projects/opengaze/blob/master/LICENSE). + +You have to respect boost, OpenFace and OpenCV licenses. + +Furthermore, you have to respect the licenses of the datasets used for [model training](https://git.perceptualui.org/public-projects/opengaze/wikis/Model-training). + +## Contact email: opengaze.toolkit@gmail.com \ No newline at end of file