OVRDisplay.cs
/************************************************************************************
Copyright : Copyright (c) Facebook Technologies, LLC and its affiliates. All rights reserved.
Your use of this SDK or tool is subject to the Oculus SDK License Agreement, available at
https://developer.oculus.com/licenses/oculussdk/
Unless required by applicable law or agreed to in writing, the Utilities SDK distributed
under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
ANY KIND, either express or implied. See the License for the specific language governing
permissions and limitations under the License.
************************************************************************************/
#if USING_XR_MANAGEMENT && USING_XR_SDK_OCULUS
#define USING_XR_SDK
#endif
#if UNITY_2020_1_OR_NEWER
#define REQUIRES_XR_SDK
#endif
using System;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using System.Globalization;
using UnityEngine;
using System.Collections.Generic;
#if USING_XR_SDK
using UnityEngine.XR;
using UnityEngine.Experimental.XR;
#endif
using InputTracking = UnityEngine.XR.InputTracking;
using Node = UnityEngine.XR.XRNode;
using Settings = UnityEngine.XR.XRSettings;
/// <summary>
/// Manages an Oculus Rift head-mounted display (HMD).
/// </summary>
public class OVRDisplay
{
    /// <summary>
    /// Contains the per-side field-of-view angles for one eye.
    /// Under Symmetric FOV mode, UpFov == DownFov and LeftFov == RightFov.
    /// </summary>
    public struct EyeFov
    {
        public float UpFov;
        public float DownFov;
        public float LeftFov;
        public float RightFov;
    }

    /// <summary>
    /// Specifies the size and field of view for one eye texture.
    /// </summary>
    public struct EyeRenderDesc
    {
        /// <summary>
        /// The horizontal and vertical size of the texture, in pixels.
        /// </summary>
        public Vector2 resolution;

        /// <summary>
        /// The horizontal and vertical field-of-view angles in degrees, kept for
        /// compatibility with the symmetric-FOV interface. Note that each component
        /// covers the full angle, i.e. both sides of the eye's view axis combined.
        /// </summary>
        public Vector2 fov;

        /// <summary>
        /// The full per-side field-of-view angles in degrees.
        /// When Asymmetric FOV is not enabled, each side reports the maximum angle
        /// for its axis, so the FOV is symmetric.
        /// </summary>
        public EyeFov fullFov;
    }
    /// <summary>
    /// Contains latency measurements for a single frame of rendering.
    /// </summary>
    public struct LatencyData
    {
        /// <summary>
        /// The time it took to render both eyes, in milliseconds.
        /// </summary>
        public float render;

        /// <summary>
        /// The time it took to perform TimeWarp, in milliseconds.
        /// </summary>
        public float timeWarp;

        /// <summary>
        /// The time between the end of TimeWarp and scan-out, in milliseconds.
        /// </summary>
        public float postPresent;

        public float renderError;
        public float timeWarpError;
    }
    private bool needsConfigureTexture;
    private EyeRenderDesc[] eyeDescs = new EyeRenderDesc[2];
    private bool recenterRequested = false;
    private int recenterRequestedFrameCount = int.MaxValue;
    private int localTrackingSpaceRecenterCount = 0;

    /// <summary>
    /// Creates an instance of OVRDisplay. Called by OVRManager.
    /// </summary>
    public OVRDisplay()
    {
        UpdateTextures();
    }
    /// <summary>
    /// Updates the internal state of the OVRDisplay. Called by OVRManager.
    /// </summary>
    public void Update()
    {
        UpdateTextures();

        if (recenterRequested && Time.frameCount > recenterRequestedFrameCount)
        {
            Debug.Log("Recenter event detected");
            if (RecenteredPose != null)
            {
                RecenteredPose();
            }
            recenterRequested = false;
            recenterRequestedFrameCount = int.MaxValue;
        }

        if (OVRPlugin.GetSystemHeadsetType() >= OVRPlugin.SystemHeadset.Oculus_Quest &&
            OVRPlugin.GetSystemHeadsetType() < OVRPlugin.SystemHeadset.Rift_DK1) // all Oculus Standalone headsets
        {
            int recenterCount = OVRPlugin.GetLocalTrackingSpaceRecenterCount();
            if (localTrackingSpaceRecenterCount != recenterCount)
            {
                Debug.Log("Recenter event detected");
                if (RecenteredPose != null)
                {
                    RecenteredPose();
                }
                localTrackingSpaceRecenterCount = recenterCount;
            }
        }
    }
    /// <summary>
    /// Occurs when the head pose is reset.
    /// </summary>
    public event System.Action RecenteredPose;

    /// <summary>
    /// Recenters the head pose.
    /// </summary>
    public void RecenterPose()
    {
#if USING_XR_SDK
        XRInputSubsystem currentInputSubsystem = OVRManager.GetCurrentInputSubsystem();
        if (currentInputSubsystem != null)
        {
            currentInputSubsystem.TryRecenter();
        }
#elif !REQUIRES_XR_SDK
#pragma warning disable 618
        InputTracking.Recenter();
#pragma warning restore 618
#endif

        // The current poses are cached for this frame and are not updated immediately after a
        // recenter request, so we wait until the next frame before triggering the RecenteredPose
        // delegate. That way the application can read the correct pose when the RecenteredPose
        // delegate gets called.
        recenterRequested = true;
        recenterRequestedFrameCount = Time.frameCount;

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
        OVRMixedReality.RecenterPose();
#endif
    }
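
    // Illustrative usage sketch (the method name below is hypothetical, not part of the SDK API):
    // a caller typically subscribes to RecenteredPose before requesting a recenter, since the
    // event fires on the frame after RecenterPose() is called.
    private static void Example_SubscribeAndRecenter(OVRDisplay display)
    {
        display.RecenteredPose += () => Debug.Log("Head pose was recentered");
        display.RecenterPose(); // the updated pose is available when the event fires next frame
    }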
    /// <summary>
    /// Gets the current linear acceleration of the head in meters per second squared.
    /// </summary>
    public Vector3 acceleration
    {
        get
        {
            if (!OVRManager.isHmdPresent)
                return Vector3.zero;

            Vector3 retVec = Vector3.zero;
            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Acceleration, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
                return retVec;
            return Vector3.zero;
        }
    }

    /// <summary>
    /// Gets the current angular acceleration of the head in radians per second squared about each axis.
    /// </summary>
    public Vector3 angularAcceleration
    {
        get
        {
            if (!OVRManager.isHmdPresent)
                return Vector3.zero;

            Vector3 retVec = Vector3.zero;
            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.AngularAcceleration, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
                return retVec;
            return Vector3.zero;
        }
    }

    /// <summary>
    /// Gets the current linear velocity of the head in meters per second.
    /// </summary>
    public Vector3 velocity
    {
        get
        {
            if (!OVRManager.isHmdPresent)
                return Vector3.zero;

            Vector3 retVec = Vector3.zero;
            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.Velocity, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
                return retVec;
            return Vector3.zero;
        }
    }

    /// <summary>
    /// Gets the current angular velocity of the head in radians per second about each axis.
    /// </summary>
    public Vector3 angularVelocity
    {
        get
        {
            if (!OVRManager.isHmdPresent)
                return Vector3.zero;

            Vector3 retVec = Vector3.zero;
            if (OVRNodeStateProperties.GetNodeStatePropertyVector3(Node.Head, NodeStatePropertyType.AngularVelocity, OVRPlugin.Node.Head, OVRPlugin.Step.Render, out retVec))
                return retVec;
            return Vector3.zero;
        }
    }
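
    // Illustrative usage sketch (the method name is hypothetical): sampling the head-motion
    // properties above, e.g. to drive a comfort vignette. Each property returns Vector3.zero
    // when no HMD is present, so no separate presence check is needed here.
    private static void Example_LogHeadMotion(OVRDisplay display)
    {
        Vector3 linearVelocity = display.velocity;          // meters per second
        Vector3 angularVelocity = display.angularVelocity;  // radians per second
        Debug.LogFormat("Head velocity: {0} m/s, angular velocity: {1} rad/s", linearVelocity, angularVelocity);
    }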
    /// <summary>
    /// Gets the resolution and field of view for the given eye.
    /// </summary>
    public EyeRenderDesc GetEyeRenderDesc(UnityEngine.XR.XRNode eye)
    {
        return eyeDescs[(int)eye];
    }
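
    // Illustrative usage sketch (the method name is hypothetical): reading the cached per-eye
    // render description, e.g. to log the eye-buffer size and symmetric FOV of the left eye.
    // Only XRNode.LeftEye and XRNode.RightEye are valid arguments, since eyeDescs has two entries.
    private static void Example_LogLeftEyeDesc(OVRDisplay display)
    {
        EyeRenderDesc desc = display.GetEyeRenderDesc(UnityEngine.XR.XRNode.LeftEye);
        Debug.LogFormat("Left eye buffer: {0}x{1}, FOV: {2}x{3} degrees",
            desc.resolution.x, desc.resolution.y, desc.fov.x, desc.fov.y);
    }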
    /// <summary>
    /// Gets the current measured latency values.
    /// </summary>
    public LatencyData latency
    {
        get
        {
            if (!OVRManager.isHmdPresent)
                return new LatencyData();

            string latency = OVRPlugin.latency;
            var r = new Regex("Render: ([0-9]+[.][0-9]+)ms, TimeWarp: ([0-9]+[.][0-9]+)ms, PostPresent: ([0-9]+[.][0-9]+)ms", RegexOptions.None);
            var ret = new LatencyData();
            Match match = r.Match(latency);
            if (match.Success)
            {
                // Parse with the invariant culture: the plugin formats these numbers with '.'
                // as the decimal separator regardless of the system locale.
                ret.render = float.Parse(match.Groups[1].Value, CultureInfo.InvariantCulture);
                ret.timeWarp = float.Parse(match.Groups[2].Value, CultureInfo.InvariantCulture);
                ret.postPresent = float.Parse(match.Groups[3].Value, CultureInfo.InvariantCulture);
            }
            return ret;
        }
    }
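
    // Illustrative usage sketch (the method name is hypothetical): sampling the parsed latency
    // numbers, e.g. for an on-screen performance HUD. All fields are zero when no HMD is present
    // or when the plugin string could not be parsed.
    private static void Example_LogLatency(OVRDisplay display)
    {
        LatencyData data = display.latency;
        Debug.LogFormat("Render: {0} ms, TimeWarp: {1} ms, PostPresent: {2} ms",
            data.render, data.timeWarp, data.postPresent);
    }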
    /// <summary>
    /// Gets the application's frame rate as reported by the Oculus plugin.
    /// </summary>
    public float appFramerate
    {
        get
        {
            if (!OVRManager.isHmdPresent)
                return 0;

            return OVRPlugin.GetAppFramerate();
        }
    }

    /// <summary>
    /// Gets the recommended MSAA level for optimal quality/performance on the current device.
    /// </summary>
    public int recommendedMSAALevel
    {
        get
        {
            int result = OVRPlugin.recommendedMSAALevel;
            // Unity treats 0, not 1, as "MSAA disabled", so map a 1x recommendation to 0.
            if (result == 1)
                result = 0;
            return result;
        }
    }
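
    // Illustrative usage sketch (the method name is hypothetical): applying the recommended MSAA
    // level to Unity's quality settings. A value of 0 disables MSAA, matching the mapping above.
    private static void Example_ApplyRecommendedMSAA(OVRDisplay display)
    {
        QualitySettings.antiAliasing = display.recommendedMSAALevel;
    }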
    /// <summary>
    /// Gets the list of available display frequencies supported by this hardware.
    /// </summary>
    public float[] displayFrequenciesAvailable
    {
        get { return OVRPlugin.systemDisplayFrequenciesAvailable; }
    }

    /// <summary>
    /// Gets and sets the current display frequency.
    /// </summary>
    public float displayFrequency
    {
        get
        {
            return OVRPlugin.systemDisplayFrequency;
        }
        set
        {
            OVRPlugin.systemDisplayFrequency = value;
        }
    }
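
    // Illustrative usage sketch (the method name is hypothetical): requesting the highest refresh
    // rate the device reports. The runtime may clamp or ignore values it does not support.
    private static void Example_RequestMaxDisplayFrequency(OVRDisplay display)
    {
        float[] frequencies = display.displayFrequenciesAvailable;
        if (frequencies != null && frequencies.Length > 0)
            display.displayFrequency = Mathf.Max(frequencies);
    }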
    private void UpdateTextures()
    {
        ConfigureEyeDesc(Node.LeftEye);
        ConfigureEyeDesc(Node.RightEye);
    }

    private void ConfigureEyeDesc(Node eye)
    {
        if (!OVRManager.isHmdPresent)
            return;

        int eyeTextureWidth = Settings.eyeTextureWidth;
        int eyeTextureHeight = Settings.eyeTextureHeight;

        eyeDescs[(int)eye] = new EyeRenderDesc();
        eyeDescs[(int)eye].resolution = new Vector2(eyeTextureWidth, eyeTextureHeight);

        OVRPlugin.Frustumf2 frust;
        if (OVRPlugin.GetNodeFrustum2((OVRPlugin.Node)eye, out frust))
        {
            eyeDescs[(int)eye].fullFov.LeftFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.LeftTan);
            eyeDescs[(int)eye].fullFov.RightFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.RightTan);
            eyeDescs[(int)eye].fullFov.UpFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.UpTan);
            eyeDescs[(int)eye].fullFov.DownFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.DownTan);
        }
        else
        {
            OVRPlugin.Frustumf frustOld = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);
            eyeDescs[(int)eye].fullFov.LeftFov = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
            eyeDescs[(int)eye].fullFov.RightFov = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
            eyeDescs[(int)eye].fullFov.UpFov = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
            eyeDescs[(int)eye].fullFov.DownFov = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
        }

        // Symmetric FOV uses the maximum angle on each axis, doubled to cover both sides.
        float maxFovX = Mathf.Max(eyeDescs[(int)eye].fullFov.LeftFov, eyeDescs[(int)eye].fullFov.RightFov);
        float maxFovY = Mathf.Max(eyeDescs[(int)eye].fullFov.UpFov, eyeDescs[(int)eye].fullFov.DownFov);
        eyeDescs[(int)eye].fov.x = maxFovX * 2.0f;
        eyeDescs[(int)eye].fov.y = maxFovY * 2.0f;

        if (!OVRPlugin.AsymmetricFovEnabled)
        {
            eyeDescs[(int)eye].fullFov.LeftFov = maxFovX;
            eyeDescs[(int)eye].fullFov.RightFov = maxFovX;
            eyeDescs[(int)eye].fullFov.UpFov = maxFovY;
            eyeDescs[(int)eye].fullFov.DownFov = maxFovY;
        }
    }
}