1 /* Converted to D from libovr\Src\OVR_CAPI.h by htod */
2 module ovr;
3 /************************************************************************************
4 
5 Filename    :   OVR_CAPI.h
6 Content     :   C Interface to Oculus tracking and rendering.
7 Created     :   November 23, 2013
8 Authors     :   Michael Antonov
9 
10 Copyright   :   Copyright 2014 Oculus VR, Inc. All Rights reserved.
11 
12 Licensed under the Oculus VR Rift SDK License Version 3.1 (the "License");
13 you may not use the Oculus VR Rift SDK except in compliance with the License,
14 which is provided at the time of installation or download, or which
15 otherwise accompanies this software in either electronic or hard copy form.
16 
17 You may obtain a copy of the License at
18 
19 http://www.oculusvr.com/licenses/LICENSE-3.1
20 
21 Unless required by applicable law or agreed to in writing, the Oculus VR SDK
22 distributed under the License is distributed on an "AS IS" BASIS,
23 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
24 See the License for the specific language governing permissions and
25 limitations under the License.
26 
27 ************************************************************************************/
28 
29 /// @file OVR_CAPI.h
30 /// Exposes all general Rift functionality.
31 ///
32 /// @mainpage Overview
33 /// Welcome to the Oculus SDK Reference Manual. All SDK functionality is exposed through a simple and portable C API, found in OVR_CAPI.h.
34 ///
35 /// Please see the Oculus Developer Guide for detailed information about using the SDK in your native applications.
36 
37 
38 import core.stdc.stdint;
39 
40 extern (C):
41 alias char ovrBool;
42 
43 //-----------------------------------------------------------------------------------
44 // ***** OVR_EXPORT definition
45 
46 
47 //#define ENABLE_LATENCY_TESTER
48 
49 //-----------------------------------------------------------------------------------
50 // ***** Simple Math Structures
51 
52 /// A 2D vector with integer components.
53 struct ovrVector2i_
54 {
55     int x;
56     int y;
57 }
58 alias ovrVector2i_ ovrVector2i;
59 /// A 2D size with integer components.
60 struct ovrSizei_
61 {
62     int w;
63     int h;
64 }
65 alias ovrSizei_ ovrSizei;
66 /// A 2D rectangle with a position and size.
67 /// All components are integers.
68 struct ovrRecti_
69 {
70     ovrVector2i Pos;
71     ovrSizei Size;
72 }
73 alias ovrRecti_ ovrRecti;
74 
75 /// A quaternion rotation.
76 struct ovrQuatf_
77 {
78     float x;
79     float y;
80     float z;
81     float w;
82 }
83 alias ovrQuatf_ ovrQuatf;
84 /// A 2D vector with float components.
85 struct ovrVector2f_
86 {
87     float x;
88     float y;
89 }
90 alias ovrVector2f_ ovrVector2f;
91 /// A 3D vector with float components.
92 struct ovrVector3f_
93 {
94     float x;
95     float y;
96     float z;
97 }
98 alias ovrVector3f_ ovrVector3f;
99 /// A 4x4 matrix with float elements.
100 struct ovrMatrix4f_
101 {
102     float [4][4]M;
103 }
104 alias ovrMatrix4f_ ovrMatrix4f;
105 /// Position and orientation together.
106 struct ovrPosef_
107 {
108     ovrQuatf Orientation;
109     ovrVector3f Position;
110 }
111 alias ovrPosef_ ovrPosef;
112 
113 /// A full pose (rigid body) configuration with first and second derivatives.
114 struct ovrPoseStatef_
115 {
116     ovrPosef ThePose;
117     ovrVector3f AngularVelocity;
118     ovrVector3f LinearVelocity;
119     ovrVector3f AngularAcceleration;
120     ovrVector3f LinearAcceleration;
121     double TimeInSeconds;
122 }
123 alias ovrPoseStatef_ ovrPoseStatef;
124 
/// Field Of View (FOV) in tangent of the angle units.
/// As an example, for a standard 90 degree vertical FOV, we would
/// have: { UpTan = tan(90 degrees / 2), DownTan = tan(90 degrees / 2) }.
struct ovrFovPort_
{
    float UpTan;    /// The tangent of the angle between the viewing vector and the top edge of the field of view.
    float DownTan;  /// The tangent of the angle between the viewing vector and the bottom edge of the field of view.
    float LeftTan;  /// The tangent of the angle between the viewing vector and the left edge of the field of view.
    float RightTan; /// The tangent of the angle between the viewing vector and the right edge of the field of view.
}
139 alias ovrFovPort_ ovrFovPort;
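// For illustration only (not part of the original header): a symmetric 90-degree
// FOV expressed in tangent units could be built like this.
//
//     import std.math : tan, PI;
//     ovrFovPort fov;
//     fov.UpTan = fov.DownTan = fov.LeftTan = fov.RightTan = tan(PI / 4); // tan(90 deg / 2)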
140 
141 
142 //-----------------------------------------------------------------------------------
143 // ***** HMD Types
144 
145 /// Enumerates all HMD types that we support.
146 enum
147 {
148     ovrHmd_None,
149     ovrHmd_DK1 = 3,
150     ovrHmd_DKHD,
151     ovrHmd_DK2 = 6,
152     ovrHmd_Other,
153 }
154 alias int ovrHmdType;
155 
/// HMD capability bits reported by device.
enum
{
    // Read-only flags.
    ovrHmdCap_Present           = 1,
    ovrHmdCap_Available         = 2,    /// Available for use, i.e. it is not already owned by another application.
    ovrHmdCap_Captured          = 4,

    // These flags are intended for use with the new driver display mode.
    ovrHmdCap_ExtendDesktop     = 8,

    // Modifiable flags (through ovrHmd_SetEnabledCaps).
    ovrHmdCap_NoMirrorToWindow  = 8192, /// Disables mirroring of HMD output to the window; may improve
                                        /// rendering performance slightly (only if 'ExtendDesktop' is off).
    ovrHmdCap_DisplayOff        = 64,
    ovrHmdCap_LowPersistence    = 128,
    ovrHmdCap_DynamicPrediction = 512,
    ovrHmdCap_NoVSync           = 4096, /// Support rendering without VSync for debugging.

    // These bits can be modified by ovrHmd_SetEnabledCaps.
    ovrHmdCap_Writable_Mask     = 13296,
    /// These flags are currently passed into the service. May change without notice.
    ovrHmdCap_Service_Mask      = 9200,
}
183 alias int ovrHmdCaps;
184 
185 
/// Tracking capability bits reported by the device.
/// Used with ovrHmd_ConfigureTracking.
enum
{
    ovrTrackingCap_Orientation      = 16,
    ovrTrackingCap_MagYawCorrection = 32,
    ovrTrackingCap_Position         = 64,
    /// Overrides the other flags. Indicates that the application
    /// doesn't care about tracking settings. This is the internal
    /// default before ovrHmd_ConfigureTracking is called.
    ovrTrackingCap_Idle             = 256,
}
198 alias int ovrTrackingCaps;
199 
200 /// Distortion capability bits reported by device.
201 /// Used with ovrHmd_ConfigureRendering and ovrHmd_CreateDistortionMesh.
202 
203 enum
204 {
205     ovrDistortionCap_Chromatic = 1,
206     ovrDistortionCap_TimeWarp,
207     ovrDistortionCap_Vignette = 8,
208     ovrDistortionCap_NoRestore = 16,
209     ovrDistortionCap_FlipInput = 32,
210     ovrDistortionCap_SRGB = 64,
211     ovrDistortionCap_Overdrive = 128,
212     ovrDistortionCap_ProfileNoTimewarpSpinWaits = 65536,
213 }
214 alias int ovrDistortionCaps;
215 
216 
217 /// Specifies which eye is being used for rendering.
218 /// This type explicitly does not include a third "NoStereo" option, as such is
219 /// not required for an HMD-centered API.
220 enum
221 {
222     ovrEye_Left,
223     ovrEye_Right,
224     ovrEye_Count,
225 }
226 alias int ovrEyeType;
227 
228 
229 
/// This is a complete descriptor of the HMD.
struct ovrHmdDesc_
{
    /// Internal handle of this HMD.
    void *Handle;

    /// This HMD's type.
    ovrHmdType Type;

    /// Name string describing the product: "Oculus Rift DK1", etc.
    char *ProductName;
    char *Manufacturer;

    /// HID Vendor and ProductId of the device.
    short VendorId;
    short ProductId;
    /// Sensor (and display) serial number.
    char [24]SerialNumber;
    /// Sensor firmware version.
    short FirmwareMajor;
    short FirmwareMinor;
    /// External tracking camera frustum dimensions (if present).
    float CameraFrustumHFovInRadians;
    float CameraFrustumVFovInRadians;
    float CameraFrustumNearZInMeters;
    float CameraFrustumFarZInMeters;

    /// Capability bits described by ovrHmdCaps.
    uint HmdCaps;
    /// Capability bits described by ovrTrackingCaps.
    uint TrackingCaps;
    /// Capability bits described by ovrDistortionCaps.
    uint DistortionCaps;

    /// These define the recommended and maximum optical FOVs for the HMD.
    ovrFovPort [2]DefaultEyeFov;
    ovrFovPort [2]MaxEyeFov;

    /// Preferred eye rendering order for best performance.
    /// Can help reduce latency on sideways-scanned screens.
    ovrEyeType [2]EyeRenderOrder;

    /// Resolution of the full HMD screen (both eyes) in pixels.
    ovrSizei Resolution;
    /// Location of the application window on the desktop (or 0,0).
    ovrVector2i WindowsPos;

    /// Display that the HMD should present on.
    /// TBD: It may be good to remove this information relying on WindowPos instead.
    /// Ultimately, we may need to come up with a more convenient alternative,
    /// such as API-specific functions that return adapter, or something that will
    /// work with our monitor driver.
    /// Windows: (e.g. "\\\\.\\DISPLAY3", can be used in EnumDisplaySettings/CreateDC).
    char *DisplayDeviceName;
    /// MacOS:
    int DisplayId;
}
288 alias ovrHmdDesc_ ovrHmdDesc;
289 
290 
291 /// Simple type ovrHmd is used in ovrHmd_* calls.
292 alias ovrHmdDesc *ovrHmd;
293 
294 
295 
296 /// Bit flags describing the current status of sensor tracking.
297 enum
298 {
299     ovrStatus_OrientationTracked = 1,
300     ovrStatus_PositionTracked,
301     ovrStatus_CameraPoseTracked = 4,
302     ovrStatus_PositionConnected = 32,
303     ovrStatus_HmdConnected = 128,
304 }
305 alias int ovrStatusBits;
306 
307 /// Specifies a reading we can query from the sensor.
308 struct ovrSensorData_
309 {
310     ovrVector3f Accelerometer;
311     ovrVector3f Gyro;
312     ovrVector3f Magnetometer;
313     float Temperature;
314     float TimeInSeconds;
315 }
316 alias ovrSensorData_ ovrSensorData;
317 
318 
/// Tracking state at a given absolute time (describes predicted HMD pose etc).
/// Returned by ovrHmd_GetTrackingState.
struct ovrTrackingState_
{
    /// Predicted head pose (and derivatives) at the requested absolute time.
    /// The look-ahead interval is equal to (HeadPose.TimeInSeconds - RawSensorData.TimeInSeconds).
    ovrPoseStatef HeadPose;

    /// Current pose of the external camera (if present).
    /// This pose includes camera tilt (roll and pitch). For a leveled coordinate
    /// system use LeveledCameraPose.
    ovrPosef CameraPose;

    /// Camera frame aligned with gravity.
    /// This value includes position and yaw of the camera, but not roll and pitch.
    /// It can be used as a reference point to render real-world objects in the correct location.
    ovrPosef LeveledCameraPose;

    /// The most recent sensor data received from the HMD.
    ovrSensorData RawSensorData;

    /// Tracking status described by ovrStatusBits.
    uint StatusFlags;
}
343 alias ovrTrackingState_ ovrTrackingState;
344 
345 
/// Frame timing data reported by ovrHmd_BeginFrameTiming() or ovrHmd_BeginFrame().
struct ovrFrameTiming_
{
    /// The amount of time that has passed since the previous frame's
    /// ThisFrameSeconds value (usable for movement scaling).
    /// This will be clamped to no more than 0.1 seconds to prevent
    /// excessive movement after pauses due to loading or initialization.
    float DeltaSeconds;

    /// It is generally expected that the following holds:
    /// ThisFrameSeconds < TimewarpPointSeconds < NextFrameSeconds <
    /// EyeScanoutSeconds[EyeOrder[0]] <= ScanoutMidpointSeconds <= EyeScanoutSeconds[EyeOrder[1]].

    /// Absolute time value when rendering of this frame began or is expected to
    /// begin. Generally equal to NextFrameSeconds of the previous frame. Can be used
    /// for animation timing.
    double ThisFrameSeconds;
    /// Absolute point when IMU expects to be sampled for this frame.
    double TimewarpPointSeconds;
    /// Absolute time when frame Present followed by GPU Flush will finish and the next frame begins.
    double NextFrameSeconds;

    /// Time when half of the screen will be scanned out. Can be passed as an absolute time
    /// to ovrHmd_GetTrackingState() to get the predicted general orientation.
    double ScanoutMidpointSeconds;
    /// Timing points when each eye will be scanned out to display. Used when rendering each eye.
    double [2]EyeScanoutSeconds;
}
374 alias ovrFrameTiming_ ovrFrameTiming;
375 
376 
377 
378 /// Rendering information for each eye. Computed by either ovrHmd_ConfigureRendering()
379 /// or ovrHmd_GetRenderDesc() based on the specified FOV. Note that the rendering viewport
380 /// is not included here as it can be specified separately and modified per frame through:
381 ///    (a) ovrHmd_GetRenderScaleAndOffset in the case of client rendered distortion,
382 /// or (b) passing different values via ovrTexture in the case of SDK rendered distortion.
383 struct ovrEyeRenderDesc_
384 {
385     ovrEyeType Eye;
386     ovrFovPort Fov;
387     ovrRecti DistortedViewport;
388     ovrVector2f PixelsPerTanAngleAtCenter;
389     ovrVector3f ViewAdjust;
390 }
391 alias ovrEyeRenderDesc_ ovrEyeRenderDesc;
392 
393 
394 //-----------------------------------------------------------------------------------
395 // ***** Platform-independent Rendering Configuration
396 
397 /// These types are used to hide platform-specific details when passing
398 /// render device, OS, and texture data to the API.
399 ///
400 /// The benefit of having these wrappers versus platform-specific API functions is
401 /// that they allow game glue code to be portable. A typical example is an
402 /// engine that has multiple back ends, say GL and D3D. Portable code that calls
403 /// these back ends may also use LibOVR. To do this, back ends can be modified
404 /// to return portable types such as ovrTexture and ovrRenderAPIConfig.
405 enum
406 {
407     ovrRenderAPI_None,
408     ovrRenderAPI_OpenGL,
409     ovrRenderAPI_Android_GLES,
410     ovrRenderAPI_D3D9,
411     ovrRenderAPI_D3D10,
412     ovrRenderAPI_D3D11,
413     ovrRenderAPI_Count,
414 }
415 alias int ovrRenderAPIType;
416 
417 /// Platform-independent part of rendering API-configuration data.
418 /// It is a part of ovrRenderAPIConfig, passed to ovrHmd_Configure.
419 struct ovrRenderAPIConfigHeader_
420 {
421     ovrRenderAPIType API;
422     ovrSizei RTSize;
423     int Multisample;
424 }
425 alias ovrRenderAPIConfigHeader_ ovrRenderAPIConfigHeader;
426 
427 /// Contains platform-specific information for rendering.
428 struct ovrRenderAPIConfig_
429 {
430     ovrRenderAPIConfigHeader Header;
431     uintptr_t [8]PlatformData;
432 }
433 alias ovrRenderAPIConfig_ ovrRenderAPIConfig;
434 
435 /// Platform-independent part of the eye texture descriptor.
436 /// It is a part of ovrTexture, passed to ovrHmd_EndFrame.
437 /// If RenderViewport is all zeros then the full texture will be used.
438 struct ovrTextureHeader_
439 {
440     ovrRenderAPIType API;
441     ovrSizei TextureSize;
442     ovrRecti RenderViewport;
443 }
444 alias ovrTextureHeader_ ovrTextureHeader;
445 
446 /// Contains platform-specific information about a texture.
447 struct ovrTexture_
448 {
449     ovrTextureHeader Header;
450     uintptr_t [8]PlatformData;
451 }
452 alias ovrTexture_ ovrTexture;
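// Illustrative sketch only (assumptions: 'renderTargetSize' is an ovrSizei describing the
// allocated eye texture, and the actual GL/D3D texture handle is written into PlatformData
// by API-specific wrapper code that this portable module does not declare):
//
//     ovrTexture tex;
//     tex.Header.API            = ovrRenderAPI_OpenGL;
//     tex.Header.TextureSize    = renderTargetSize;
//     tex.Header.RenderViewport = ovrRecti(ovrVector2i(0, 0), renderTargetSize);
//     // tex.PlatformData[0] = ... platform-specific texture handle ...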
453 
454 
455 // -----------------------------------------------------------------------------------
456 // ***** API Interfaces
457 
// Basic steps to use the API:
//
// Setup:
//  1. ovr_Initialize()
//  2. ovrHmd hmd = ovrHmd_Create(0)
//  3. Use hmd members and ovrHmd_GetFovTextureSize() to determine graphics configuration.
//  4. Call ovrHmd_ConfigureTracking() to configure and initialize tracking.
//  5. Call ovrHmd_ConfigureRendering() to setup graphics for SDK rendering,
//     which is the preferred approach.
//     Please refer to "Client Distortion Rendering" below if you prefer to do that instead.
//  6. If the ovrHmdCap_ExtendDesktop flag is not set, then use ovrHmd_AttachToWindow to
//     associate the relevant application window with the hmd.
//  7. Allocate render target textures as needed.
//
// Game Loop:
//  8. Call ovrHmd_BeginFrame() to get the current frame timing information.
//  9. Render each eye using ovrHmd_GetEyePose to get the predicted head pose.
//  10. Call ovrHmd_EndFrame() to render the distorted textures to the back buffer
//      and present them on the hmd.
//
// Shutdown:
//  11. ovrHmd_Destroy(hmd)
//  12. ovr_Shutdown()
//
// A setup/shutdown sketch in D follows this list.
//
482 
483 
// Library init/shutdown, must be called around all other OVR code.
// No other function calls are allowed before ovr_Initialize succeeds or after ovr_Shutdown.
486 /// Initializes all Oculus functionality.
487 ovrBool  ovr_Initialize();
488 /// Shuts down all Oculus functionality.
489 void  ovr_Shutdown();
490 
/// Returns a version string representing the libOVR version. Static, so the
/// string remains valid for the application's lifespan.
493 char * ovr_GetVersionString();
494 
495 
496 
497 /// Detects or re-detects HMDs and reports the total number detected.
498 /// Users can get information about each HMD by calling ovrHmd_Create with an index.
499 int  ovrHmd_Detect();
500 
501 
/// Creates a handle to an HMD which doubles as a description structure.
/// Index can be in [0 .. ovrHmd_Detect()-1]. Index mappings can change after each ovrHmd_Detect call.
/// If not null, then the returned handle must be freed with ovrHmd_Destroy.
505 ovrHmd  ovrHmd_Create(int index);
506 void  ovrHmd_Destroy(ovrHmd hmd);
507 
508 /// Creates a 'fake' HMD used for debugging only. This is not tied to specific hardware,
509 /// but may be used to debug some of the related rendering.
510 ovrHmd  ovrHmd_CreateDebug(ovrHmdType type);
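// Illustrative enumeration sketch (assumes ovr_Initialize has already been called):
//
//     int detected = ovrHmd_Detect();
//     foreach (i; 0 .. detected)
//     {
//         ovrHmd h = ovrHmd_Create(i);
//         if (h !is null)
//         {
//             // inspect h.ProductName, h.Type, h.Resolution, ...
//             ovrHmd_Destroy(h);
//         }
//     }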
511 
512 
/// Returns the last error for the HMD state. Returns null for no error.
/// The string is valid until the next call to GetLastError or until the HMD is destroyed.
/// Pass a null hmd to get global errors (during create, etc).
516 char * ovrHmd_GetLastError(ovrHmd hmd);
517 
518 /// Platform specific function to specify the application window whose output will be
519 /// displayed on the HMD. Only used if the ovrHmdCap_ExtendDesktop flag is false.
520 ///   Windows: SwapChain associated with this window will be displayed on the HMD.
521 ///            Specify 'destMirrorRect' in window coordinates to indicate an area
522 ///            of the render target output that will be mirrored from 'sourceRenderTargetRect'.
523 ///            Null pointers mean "full size".
524 /// @note Source and dest mirror rects are not yet implemented.
525 ovrBool  ovrHmd_AttachToWindow(ovrHmd hmd, void *window, ovrRecti *destMirrorRect, ovrRecti *sourceRenderTargetRect);
526 
527 //-------------------------------------------------------------------------------------
528 
/// Returns capability bits that are enabled at this time as described by ovrHmdCaps.
/// Note that this value is different from ovrHmdDesc::HmdCaps, which describes what
/// capabilities are available for that HMD.
532 uint  ovrHmd_GetEnabledCaps(ovrHmd hmd);
533 
/// Modifies capability bits described by ovrHmdCaps that can be modified,
/// such as ovrHmdCap_LowPersistence.
536 void  ovrHmd_SetEnabledCaps(ovrHmd hmd, uint hmdCaps);
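// Illustrative sketch (assumes 'hmd' was obtained from ovrHmd_Create):
//
//     // e.g. enable low persistence and dynamic prediction in addition to the current caps
//     ovrHmd_SetEnabledCaps(hmd, ovrHmd_GetEnabledCaps(hmd)
//                                | ovrHmdCap_LowPersistence
//                                | ovrHmdCap_DynamicPrediction);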
537 
538 
539 //-------------------------------------------------------------------------------------
540 // ***** Tracking Interface
541 
542 /// All tracking interface functions are thread-safe, allowing tracking state to be sampled
543 /// from different threads.
544 /// ConfigureTracking starts sensor sampling, enabling specified capabilities,
545 ///    described by ovrTrackingCaps.
///  - supportedTrackingCaps specifies support that is requested. The function will succeed
///    even if these caps are not available (i.e. sensor or camera is unplugged). Support
///    will automatically be enabled if such a device is plugged in later. Software should
///    check ovrTrackingState.StatusFlags for real-time status.
///  - requiredTrackingCaps specifies sensor capabilities required at the time of the call.
///    If they are not available, the function will fail. Pass 0 if only specifying
///    supportedTrackingCaps.
553 ///  - Pass 0 for both supportedTrackingCaps and requiredTrackingCaps to disable tracking.
554 ovrBool  ovrHmd_ConfigureTracking(ovrHmd hmd, uint supportedTrackingCaps, uint requiredTrackingCaps);
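// Illustrative sketch (assumption: the application insists on positional tracking and
// therefore lists it in requiredTrackingCaps):
//
//     ovrBool ok = ovrHmd_ConfigureTracking(hmd,
//         ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position,
//         ovrTrackingCap_Position);   // fails if the positional tracker is absent right now
//     // Passing 0 for both arguments disables tracking again.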
555 
556 /// Re-centers the sensor orientation.
557 /// Normally this will recenter the (x,y,z) translational components and the yaw
558 /// component of orientation.
559 void  ovrHmd_RecenterPose(ovrHmd hmd);
560 
561 /// Returns tracking state reading based on the specified absolute system time.
562 /// Pass an absTime value of 0.0 to request the most recent sensor reading. In this case
563 /// both PredictedPose and SamplePose will have the same value.
564 /// ovrHmd_GetEyePose relies on this internally.
565 /// This may also be used for more refined timing of FrontBuffer rendering logic, etc.
566 ovrTrackingState  ovrHmd_GetTrackingState(ovrHmd hmd, double absTime);
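// Illustrative sketch of polling the tracking state and checking its status bits:
//
//     ovrTrackingState ts = ovrHmd_GetTrackingState(hmd, ovr_GetTimeInSeconds());
//     if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked))
//     {
//         ovrPosef headPose = ts.HeadPose.ThePose;
//         // ... use headPose.Orientation / headPose.Position ...
//     }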
567 
568 
569 //-------------------------------------------------------------------------------------
570 // ***** Graphics Setup
571 
572 
573 /// Calculates the recommended texture size for rendering a given eye within the HMD
574 /// with a given FOV cone. Higher FOV will generally require larger textures to
575 /// maintain quality.
576 ///  - pixelsPerDisplayPixel specifies the ratio of the number of render target pixels
577 ///    to display pixels at the center of distortion. 1.0 is the default value. Lower
578 ///    values can improve performance.
579 ovrSizei  ovrHmd_GetFovTextureSize(ovrHmd hmd, ovrEyeType eye, ovrFovPort fov, float pixelsPerDisplayPixel);
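// Illustrative sketch using the HMD's default FOV and a 1:1 pixel density:
//
//     ovrSizei texSizeLeft  = ovrHmd_GetFovTextureSize(hmd, ovrEye_Left,  hmd.DefaultEyeFov[0], 1.0f);
//     ovrSizei texSizeRight = ovrHmd_GetFovTextureSize(hmd, ovrEye_Right, hmd.DefaultEyeFov[1], 1.0f);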
580 
581 
582 
583 //-------------------------------------------------------------------------------------
584 // *****  Rendering API Thread Safety
585 
//  All of the rendering functions, including the configure and frame functions,
//  are *NOT thread safe*. It is OK to use ConfigureRendering on one thread and handle
//  frames on another thread, but explicit synchronization must be done since
//  functions that depend on configured state are not reentrant.
590 //
591 //  As an extra requirement, any of the following calls must be done on
592 //  the render thread, which is the same thread that calls ovrHmd_BeginFrame
593 //  or ovrHmd_BeginFrameTiming.
594 //    - ovrHmd_EndFrame
595 //    - ovrHmd_GetEyePose
596 //    - ovrHmd_GetEyeTimewarpMatrices
597 
598 
599 //-------------------------------------------------------------------------------------
600 // *****  SDK Distortion Rendering Functions
601 
602 // These functions support rendering of distortion by the SDK through direct
603 // access to the underlying rendering API, such as D3D or GL.
604 // This is the recommended approach since it allows better support for future
605 // Oculus hardware, and enables a range of low-level optimizations.
606 
607 
608 /// Configures rendering and fills in computed render parameters.
609 /// This function can be called multiple times to change rendering settings.
610 /// eyeRenderDescOut is a pointer to an array of two ovrEyeRenderDesc structs
611 /// that are used to return complete rendering information for each eye.
612 ///
613 ///  - apiConfig provides D3D/OpenGL specific parameters. Pass null
614 ///    to shutdown rendering and release all resources.
615 ///  - distortionCaps describe desired distortion settings.
616 ///
617 ovrBool  ovrHmd_ConfigureRendering(ovrHmd hmd, ovrRenderAPIConfig *apiConfig, uint distortionCaps, ovrFovPort *eyeFovIn, ovrEyeRenderDesc *eyeRenderDescOut);
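// Illustrative sketch (assumptions: the platform-specific fields of 'cfg' would normally be
// filled through an API-specific wrapper such as the GL/D3D headers of the C SDK, which this
// portable module does not declare):
//
//     ovrRenderAPIConfig cfg;
//     cfg.Header.API         = ovrRenderAPI_OpenGL;
//     cfg.Header.RTSize      = hmd.Resolution;
//     cfg.Header.Multisample = 1;
//
//     ovrEyeRenderDesc[2] eyeRenderDesc;
//     ovrBool ok = ovrHmd_ConfigureRendering(hmd, &cfg,
//         ovrDistortionCap_Chromatic | ovrDistortionCap_TimeWarp | ovrDistortionCap_Vignette,
//         hmd.DefaultEyeFov.ptr, eyeRenderDesc.ptr);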
618 
619 
620 /// Begins a frame, returning timing information.
621 /// This should be called at the beginning of the game rendering loop (on the render thread).
622 /// Pass 0 for the frame index if not using ovrHmd_GetFrameTiming.
623 ovrFrameTiming  ovrHmd_BeginFrame(ovrHmd hmd, uint frameIndex);
624 
625 /// Ends a frame, submitting the rendered textures to the frame buffer.
626 /// - RenderViewport within each eyeTexture can change per frame if necessary.
627 /// - 'renderPose' will typically be the value returned from ovrHmd_GetEyePose,
628 ///   but can be different if a different head pose was used for rendering.
/// - This may perform distortion and scaling internally, assuming it is not
///   delegated to another thread.
631 /// - Must be called on the same thread as BeginFrame.
632 /// - *** This Function will call Present/SwapBuffers and potentially wait for GPU Sync ***.
633 void  ovrHmd_EndFrame(ovrHmd hmd, ovrPosef *renderPose, ovrTexture *eyeTexture);
634 
635 
636 /// Returns the predicted head pose to use when rendering the specified eye.
637 /// - Must be called between ovrHmd_BeginFrameTiming and ovrHmd_EndFrameTiming.
638 /// - If the pose is used for rendering the eye, it should be passed to ovrHmd_EndFrame.
639 ovrPosef  ovrHmd_GetEyePose(ovrHmd hmd, ovrEyeType eye);
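// Illustrative per-frame sketch for SDK distortion rendering (assumption: 'eyeTextures' is an
// ovrTexture[2] filled in by the application as described for ovrTexture above):
//
//     ovrFrameTiming timing = ovrHmd_BeginFrame(hmd, 0);
//     ovrPosef[2] eyePoses;
//     foreach (i; 0 .. ovrEye_Count)
//     {
//         ovrEyeType eye = hmd.EyeRenderOrder[i];
//         eyePoses[eye] = ovrHmd_GetEyePose(hmd, eye);
//         // ... render the scene for 'eye' into eyeTextures[eye] using eyePoses[eye] ...
//     }
//     ovrHmd_EndFrame(hmd, eyePoses.ptr, eyeTextures.ptr);  // presents and may wait for GPU sync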
640 
641 
642 
643 //-------------------------------------------------------------------------------------
644 // *****  Client Distortion Rendering Functions
645 
646 // These functions provide the distortion data and render timing support necessary to allow
647 // client rendering of distortion. Client-side rendering involves the following steps:
648 //
649 //  1. Setup ovrEyeDesc based on the desired texture size and FOV.
650 //     Call ovrHmd_GetRenderDesc to get the necessary rendering parameters for each eye.
651 //
652 //  2. Use ovrHmd_CreateDistortionMesh to generate the distortion mesh.
653 //
//  3. Use ovrHmd_BeginFrameTiming, ovrHmd_GetEyePose, and ovrHmd_EndFrameTiming
//     in the rendering loop to obtain timing and predicted head orientation when
//     rendering each eye.
//      - When using timewarp, use ovr_WaitTillTime after the rendering and GPU flush, followed
//        by ovrHmd_GetEyeTimewarpMatrices to obtain the timewarp matrices used
//        by the distortion pixel shader. This will minimize latency.
//
// A sketch of steps 1 and 2 follows this list.
//
661 
662 /// Computes the distortion viewport, view adjust, and other rendering parameters for
663 /// the specified eye. This can be used instead of ovrHmd_ConfigureRendering to do
664 /// setup for client rendered distortion.
665 ovrEyeRenderDesc  ovrHmd_GetRenderDesc(ovrHmd hmd, ovrEyeType eyeType, ovrFovPort fov);
666 
667 
668 /// Describes a vertex used by the distortion mesh. This is intended to be converted into
669 /// the engine-specific format. Some fields may be unused based on the ovrDistortionCaps
670 /// flags selected. TexG and TexB, for example, are not used if chromatic correction is
671 /// not requested.
672 struct ovrDistortionVertex_
673 {
674     ovrVector2f ScreenPosNDC;
675     float TimeWarpFactor;
676     float VignetteFactor;
677     ovrVector2f TanEyeAnglesR;
678     ovrVector2f TanEyeAnglesG;
679     ovrVector2f TanEyeAnglesB;
680 }
681 alias ovrDistortionVertex_ ovrDistortionVertex;
682 
683 /// Describes a full set of distortion mesh data, filled in by ovrHmd_CreateDistortionMesh.
684 /// Contents of this data structure, if not null, should be freed by ovrHmd_DestroyDistortionMesh.
685 struct ovrDistortionMesh_
686 {
687     ovrDistortionVertex *pVertexData;
688     ushort *pIndexData;
689     uint VertexCount;
690     uint IndexCount;
691 }
692 alias ovrDistortionMesh_ ovrDistortionMesh;
693 
694 /// Generate distortion mesh per eye.
695 /// Distortion capabilities will depend on 'distortionCaps' flags. Users should
696 /// render using the appropriate shaders based on their settings.
697 /// Distortion mesh data will be allocated and written into the ovrDistortionMesh data structure,
698 /// which should be explicitly freed with ovrHmd_DestroyDistortionMesh.
699 /// Users should call ovrHmd_GetRenderScaleAndOffset to get uvScale and Offset values for rendering.
/// The function shouldn't fail unless there is a configuration or memory error, in which case
/// ovrDistortionMesh values will be set to null.
/// This is the only function in the SDK reliant on eye relief, currently imported from profiles,
/// or overridden here.
704 ovrBool  ovrHmd_CreateDistortionMesh(ovrHmd hmd, ovrEyeType eyeType, ovrFovPort fov, uint distortionCaps, ovrDistortionMesh *meshData);
705 
/// Used to free the distortion mesh allocated by ovrHmd_CreateDistortionMesh. meshData elements
/// are set to null and zeroes after the call.
708 void  ovrHmd_DestroyDistortionMesh(ovrDistortionMesh *meshData);
709 
710 /// Computes updated 'uvScaleOffsetOut' to be used with a distortion if render target size or
711 /// viewport changes after the fact. This can be used to adjust render size every frame if desired.
712 void  ovrHmd_GetRenderScaleAndOffset(ovrFovPort fov, ovrSizei textureSize, ovrRecti renderViewport, ovrVector2f *uvScaleOffsetOut);
713 
714 
715 /// Thread-safe timing function for the main thread. Caller should increment frameIndex
716 /// with every frame and pass the index where applicable to functions called on the
717 /// rendering thread.
718 ovrFrameTiming  ovrHmd_GetFrameTiming(ovrHmd hmd, uint frameIndex);
719 
720 /// Called at the beginning of the frame on the rendering thread.
721 /// Pass frameIndex == 0 if ovrHmd_GetFrameTiming isn't being used. Otherwise,
722 /// pass the same frame index as was used for GetFrameTiming on the main thread.
723 ovrFrameTiming  ovrHmd_BeginFrameTiming(ovrHmd hmd, uint frameIndex);
724 
725 /// Marks the end of client distortion rendered frame, tracking the necessary timing information.
726 /// This function must be called immediately after Present/SwapBuffers + GPU sync. GPU sync is
727 /// important before this call to reduce latency and ensure proper timing.
728 void  ovrHmd_EndFrameTiming(ovrHmd hmd);
729 
730 /// Initializes and resets frame time tracking. This is typically not necessary, but
731 /// is helpful if game changes vsync state or video mode. vsync is assumed to be on if this
732 /// isn't called. Resets internal frame index to the specified number.
733 void  ovrHmd_ResetFrameTiming(ovrHmd hmd, uint frameIndex);
734 
735 
/// Computes timewarp matrices used by the distortion mesh shader; these are used to adjust
/// for head orientation change since the last call to ovrHmd_GetEyePose when rendering
/// this eye. The ovrDistortionVertex::TimeWarpFactor is used to blend between the
/// matrices, usually representing two different sides of the screen.
/// Must be called on the same thread as ovrHmd_BeginFrameTiming.
741 void  ovrHmd_GetEyeTimewarpMatrices(ovrHmd hmd, ovrEyeType eye, ovrPosef renderPose, ovrMatrix4f *twmOut);
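// Illustrative sketch for client-rendered timewarp (assumptions: 'timing' comes from
// ovrHmd_BeginFrameTiming and 'renderPose' is the pose previously returned by ovrHmd_GetEyePose
// for this eye):
//
//     ovr_WaitTillTime(timing.TimewarpPointSeconds);   // after rendering and GPU flush
//     ovrMatrix4f[2] timewarpMatrices;
//     ovrHmd_GetEyeTimewarpMatrices(hmd, ovrEye_Left, renderPose, timewarpMatrices.ptr);
//     // ... feed timewarpMatrices to the distortion pixel shader ...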
742 
743 
744 //-------------------------------------------------------------------------------------
745 // ***** Stateless math setup functions
746 
747 /// Used to generate projection from ovrEyeDesc::Fov.
748 ovrMatrix4f  ovrMatrix4f_Projection(ovrFovPort fov, float znear, float zfar, ovrBool rightHanded);
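// Illustrative sketch (assumptions: 'renderDesc' is the ovrEyeRenderDesc for this eye; the
// near/far plane values are application choices):
//
//     ovrMatrix4f proj = ovrMatrix4f_Projection(renderDesc.Fov, 0.01f, 1000.0f, 1 /* right-handed */);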
749 
750 /// Used for 2D rendering, Y is down
751 /// orthoScale = 1.0f / pixelsPerTanAngleAtCenter
752 /// orthoDistance = distance from camera, such as 0.8m
753 ovrMatrix4f  ovrMatrix4f_OrthoSubProjection(ovrMatrix4f projection, ovrVector2f orthoScale, float orthoDistance, float eyeViewAdjustX);
754 
755 /// Returns global, absolute high-resolution time in seconds. This is the same
756 /// value as used in sensor messages.
757 double  ovr_GetTimeInSeconds();
758 
759 /// Waits until the specified absolute time.
760 double  ovr_WaitTillTime(double absTime);
761 
762 
763 
764 // -----------------------------------------------------------------------------------
765 // ***** Latency Test interface
766 
/// Does latency test processing and returns 'TRUE' if the specified RGB color should
/// be used to clear the screen.
769 ovrBool  ovrHmd_ProcessLatencyTest(ovrHmd hmd, ubyte *rgbColorOut);
770 
771 /// Returns non-null string once with latency test result, when it is available.
772 /// Buffer is valid until next call.
773 char * ovrHmd_GetLatencyTestResult(ovrHmd hmd);
774 
775 
776 
777 //-------------------------------------------------------------------------------------
778 // ***** Health and Safety Warning Display interface
779 //
780 
/// Used by ovrhmd_GetHSWDisplayState to report the current display state.
struct ovrHSWDisplayState_
{
    /// If true then the warning should be currently visible
    /// and the following variables have meaning. Else there is no
    /// warning being displayed for this application on the given HMD.
    ovrBool Displayed;
    double StartTime;        /// Absolute time when the warning was first displayed.
    double DismissibleTime;  /// Earliest absolute time when the warning can be dismissed.
}
791 alias ovrHSWDisplayState_ ovrHSWDisplayState;
792 
793 /// Returns the current state of the HSW display. If the application is doing the rendering of
/// the HSW display then this function serves to indicate that the warning should be
795 /// currently displayed. If the application is using SDK-based eye rendering then the SDK by
796 /// default automatically handles the drawing of the HSW display. An application that uses
797 /// application-based eye rendering should use this function to know when to start drawing the
798 /// HSW display itself and can optionally use it in conjunction with ovrhmd_DismissHSWDisplay
799 /// as described below.
800 ///
801 /// Example usage for application-based rendering:
802 ///    bool HSWDisplayCurrentlyDisplayed = false; // global or class member variable
803 ///    ovrHSWDisplayState hswDisplayState;
804 ///    ovrhmd_GetHSWDisplayState(Hmd, &hswDisplayState);
805 ///
806 ///    if (hswDisplayState.Displayed && !HSWDisplayCurrentlyDisplayed) {
807 ///        <insert model into the scene that stays in front of the user>
808 ///        HSWDisplayCurrentlyDisplayed = true;
809 ///    }
810 void  ovrHmd_GetHSWDisplayState(ovrHmd hmd, ovrHSWDisplayState *hasWarningState);
811 
812 /// Dismisses the HSW display if the warning is dismissible and the earliest dismissal time
813 /// has occurred. Returns true if the display is valid and could be dismissed. The application
814 /// should recognize that the HSW display is being displayed (via ovrhmd_GetHSWDisplayState)
815 /// and if so then call this function when the appropriate user input to dismiss the warning
816 /// occurs.
817 ///
818 /// Example usage :
819 ///    void ProcessEvent(int key) {
820 ///        if(key == escape) {
821 ///            ovrHSWDisplayState hswDisplayState;
822 ///            ovrhmd_GetHSWDisplayState(hmd, &hswDisplayState);
823 ///
824 ///            if(hswDisplayState.Displayed && ovrhmd_DismissHSWDisplay(hmd)) {
825 ///                <remove model from the scene>
826 ///                HSWDisplayCurrentlyDisplayed = false;
827 ///            }
828 ///        }
829 ///    }
830 ovrBool  ovrHmd_DismissHSWDisplay(ovrHmd hmd);
831 
832 
833 
834 
835 
836 // -----------------------------------------------------------------------------------
837 // ***** Property Access
838 
// NOTICE: This is an experimental part of the API that is likely to go away or change.
840 
841 // These allow accessing different properties of the HMD and profile.
842 // Some of the properties may go away with profile/HMD versions, so software should
843 // use defaults and/or proper fallbacks.
844 //
845 
846 // For now, access profile entries; this will change.
847 
848 
849 	// TODO: remove this duplication with OVR_Profile.h
850 	// Default measurements empirically determined at Oculus to make us happy
851 	// The neck model numbers were derived as an average of the male and female averages from ANSUR-88
852 	// NECK_TO_EYE_HORIZONTAL = H22 - H43 = INFRAORBITALE_BACK_OF_HEAD - TRAGION_BACK_OF_HEAD
853 	// NECK_TO_EYE_VERTICAL = H21 - H15 = GONION_TOP_OF_HEAD - ECTOORBITALE_TOP_OF_HEAD
854 	// These were determined to be the best in a small user study, clearly beating out the previous default values
855 const OVR_DEFAULT_PLAYER_HEIGHT = 1.778f;
856 const OVR_DEFAULT_EYE_HEIGHT = 1.675f;
857 const OVR_DEFAULT_IPD = 0.064f;
858 const OVR_DEFAULT_NECK_TO_EYE_HORIZONTAL = 0.0805f;
859 const OVR_DEFAULT_NECK_TO_EYE_VERTICAL = 0.075f;
860 const OVR_DEFAULT_EYE_RELIEF_DIAL = 3;
861 
862 /// Get boolean property. Returns first element if property is a boolean array.
863 /// Returns defaultValue if property doesn't exist.
864 ovrBool  ovrHmd_GetBool(ovrHmd hmd, char *propertyName, ovrBool defaultVal);
865 
866 /// Modify bool property; false if property doesn't exist or is readonly.
867 ovrBool  ovrHmd_SetBool(ovrHmd hmd, char *propertyName, ovrBool value);
868 
869 /// Get integer property. Returns first element if property is an integer array.
870 /// Returns defaultValue if property doesn't exist.
871 int  ovrHmd_GetInt(ovrHmd hmd, char *propertyName, int defaultVal);
872 
873 /// Modify integer property; false if property doesn't exist or is readonly.
874 ovrBool  ovrHmd_SetInt(ovrHmd hmd, char *propertyName, int value);
875 
876 /// Get float property. Returns first element if property is a float array.
877 /// Returns defaultValue if property doesn't exist.
878 float  ovrHmd_GetFloat(ovrHmd hmd, char *propertyName, float defaultVal);
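// Illustrative sketch (assumption: the property key string "IPD" matches the profile key used
// by the C SDK; key strings are not declared in this module, so treat the name as hypothetical):
//
//     float ipd = ovrHmd_GetFloat(hmd, cast(char*)"IPD".ptr, OVR_DEFAULT_IPD);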
879 
880 /// Modify float property; false if property doesn't exist or is readonly.
881 ovrBool  ovrHmd_SetFloat(ovrHmd hmd, char *propertyName, float value);
882 
883 /// Get float[] property. Returns the number of elements filled in, 0 if property doesn't exist.
884 /// Maximum of arraySize elements will be written.
885 uint  ovrHmd_GetFloatArray(ovrHmd hmd, char *propertyName, float *values, uint arraySize);
886 
887 /// Modify float[] property; false if property doesn't exist or is readonly.
888 ovrBool  ovrHmd_SetFloatArray(ovrHmd hmd, char *propertyName, float *values, uint arraySize);
889 
890 /// Get string property. Returns first element if property is a string array.
891 /// Returns defaultValue if property doesn't exist.
892 /// String memory is guaranteed to exist until next call to GetString or GetStringArray, or HMD is destroyed.
893 char * ovrHmd_GetString(ovrHmd hmd, char *propertyName, char *defaultVal);
894 
895 /// Set string property
896 ovrBool  ovrHmd_SetString(ovrHmd hmddesc, char *propertyName, char *value);