forked from microsoft/OpenXR-Unity-MixedReality-Samples
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathFollowEyeGaze.cs
82 lines (72 loc) · 2.91 KB
/
FollowEyeGaze.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR;
using UnityEngine.XR.OpenXR.Features.Interactions;
namespace Microsoft.MixedReality.OpenXR.BasicSample
{
/// <summary>
/// Moves this GameObject to follow the user's eye gaze each frame, swapping the
/// renderer's material to indicate whether eye tracking is currently tracked.
/// </summary>
public class FollowEyeGaze : MonoBehaviour
{
    [SerializeField, Tooltip("The material to use when eye gaze isn't tracked")]
    private Material untrackedMaterial = null;

    // Reused scratch list for InputDevices.GetDevicesWithCharacteristics to avoid
    // a per-frame allocation while searching for an eye-tracking device.
    private static readonly List<InputDevice> InputDeviceList = new List<InputDevice>();

    // The eye-tracking device once acquired; default (invalid) until found.
    private InputDevice eyeTrackingDevice = default;

    private Renderer materialRenderer = null;

    // Captured from the renderer in Awake; restored whenever gaze is tracked.
    private Material trackedMaterial = null;

    // Starts true so the "unable to acquire device" warning is logged only once
    // per transition from valid to invalid, rather than every frame.
    private bool wasEyeTrackingValidLastFrame = true;

    /// <summary>
    /// Toggles the enabled state of this script to actively follow eye gaze or not.
    /// </summary>
    public void ToggleFollow() => gameObject.SetActive(!gameObject.activeSelf);

    private void Awake()
    {
        materialRenderer = GetComponent<Renderer>();
        if (materialRenderer != null)
        {
            // Renderer.material instantiates a per-object material copy; keep it
            // so we can switch back after showing the untracked material.
            trackedMaterial = materialRenderer.material;
        }
    }

    private void Update()
    {
        // (Re)acquire the eye-tracking device if we don't have a valid one.
        if (!eyeTrackingDevice.isValid)
        {
            InputDevices.GetDevicesWithCharacteristics(InputDeviceCharacteristics.EyeTracking, InputDeviceList);
            if (InputDeviceList.Count > 0)
            {
                eyeTrackingDevice = InputDeviceList[0];
            }

            if (!eyeTrackingDevice.isValid)
            {
                // Warn only on the transition to invalid, not every frame.
                if (wasEyeTrackingValidLastFrame)
                {
                    Debug.LogWarning("Unable to acquire eye tracking device. Have permissions been granted?");
                }
                wasEyeTrackingValidLastFrame = false;
                return;
            }
        }
        wasEyeTrackingValidLastFrame = true;

        // Gets gaze data from the device. All three reads must succeed for the
        // pose to be usable this frame.
        bool hasData = eyeTrackingDevice.TryGetFeatureValue(CommonUsages.isTracked, out bool isTracked);
        hasData &= eyeTrackingDevice.TryGetFeatureValue(EyeTrackingUsages.gazePosition, out Vector3 position);
        hasData &= eyeTrackingDevice.TryGetFeatureValue(EyeTrackingUsages.gazeRotation, out Quaternion rotation);

        if (isTracked && hasData)
        {
            if (materialRenderer != null)
            {
                materialRenderer.material = trackedMaterial;
            }

            // Place the object one unit in front of the gaze origin, along the
            // gaze direction, oriented to match the gaze rotation.
            transform.localPosition = position + (rotation * Vector3.forward);
            transform.localRotation = rotation;
        }
        else
        {
            // Gaze lost: show the untracked material but keep the last pose.
            if (materialRenderer != null)
            {
                materialRenderer.material = untrackedMaterial;
            }
        }
    }
}
}