Added VR libraries

Chris Midkiff
2018-10-08 23:54:11 -04:00
parent d9eb2a9763
commit 7ce1036e39
1037 changed files with 195630 additions and 348 deletions

View File

@@ -0,0 +1,56 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Allows you to toggle chromatic aberration correction with a gamepad button press.
/// </summary>
public class OVRChromaticAberration : MonoBehaviour
{
/// <summary>
/// The button that will toggle chromatic aberration correction.
/// </summary>
public OVRInput.RawButton toggleButton = OVRInput.RawButton.X;
private bool chromatic = false;
void Start ()
{
// Enable/Disable Chromatic Aberration Correction.
// NOTE: Enabling Chromatic Aberration for mobile has a large performance cost.
OVRManager.instance.chromatic = chromatic;
}
void Update()
{
// NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android gamepad controller
if (OVRInput.GetDown(toggleButton))
{
//*************************
// toggle chromatic aberration correction
//*************************
chromatic = !chromatic;
OVRManager.instance.chromatic = chromatic;
}
}
}
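
A minimal usage sketch (not part of the diff above): the same OVRManager.instance.chromatic flag can be driven from any script, for example off a keyboard key while testing in the editor. The class name and key binding below are illustrative, not part of the SDK.

using UnityEngine;

// Illustrative sketch only: toggles chromatic aberration correction from a keyboard key.
// Assumes an OVRManager instance exists in the scene (e.g. via the OVRCameraRig prefab).
public class ChromaticToggleExample : MonoBehaviour
{
    private bool chromatic = false;

    void Update()
    {
        if (Input.GetKeyDown(KeyCode.C))
        {
            chromatic = !chromatic;
            OVRManager.instance.chromatic = chromatic;
        }
    }
}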

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 3b56515a831f2fb44bc7ae02679aeebc
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,268 @@
using UnityEngine;
using System.Collections;
using System.IO;
/// <summary>
/// Helper script for capturing a cubemap and saving it to a PNG or JPG file
/// </summary>
/// <description>
/// How it works:
/// 1) This script can be attached to an existing game object; you can also use the prefab Assets\OVR\Prefabs\OVRCubemapCaptureProbe
/// There are two ways to trigger a capture once this script is attached to a game object:
/// * Automatic capture: if [autoTriggerAfterLaunch] is true, a capture is triggered automatically after [autoTriggerDelay] seconds.
/// * Keyboard trigger: pressing [triggeredByKey] triggers a capture.
/// 2) To trigger the capture from your own code, call the static function [OVRCubemapCapture.TriggerCubemapCapture] with the appropriate arguments.
/// </description>
public class OVRCubemapCapture : MonoBehaviour
{
/// <summary>
/// Enable the automatic screenshot trigger, which will capture a cubemap after autoTriggerDelay (seconds)
/// </summary>
public bool autoTriggerAfterLaunch = true;
public float autoTriggerDelay = 1.0f;
private float autoTriggerElapse = 0.0f;
/// <summary>
/// Trigger a cubemap screenshot when the user presses triggeredByKey
/// </summary>
public KeyCode triggeredByKey = KeyCode.F8;
/// <summary>
/// The complete file path for saving the cubemap screenshot, including the filename and extension.
/// If pathName is blank, screenshots are saved under [Application.persistentDataPath]/OVR_ScreenShot360/
/// </summary>
public string pathName;
/// <summary>
/// The cube face resolution
/// </summary>
public int cubemapSize = 2048;
// Update is called once per frame
void Update()
{
// Trigger after autoTriggerDelay
if (autoTriggerAfterLaunch)
{
autoTriggerElapse += Time.deltaTime;
if (autoTriggerElapse >= autoTriggerDelay)
{
autoTriggerAfterLaunch = false;
TriggerCubemapCapture(transform.position, cubemapSize, pathName);
}
}
// Trigger by press triggeredByKey
if ( Input.GetKeyDown( triggeredByKey ) )
{
TriggerCubemapCapture(transform.position, cubemapSize, pathName);
}
}
/// <summary>
/// Generate a Unity cubemap at a specific location and save it as a JPG/PNG file
/// </summary>
/// <description>
/// Default save folder: your app's persistentDataPath
/// Default file name: based on the current time, e.g. OVR_hh_mm_ss.png
/// Note 1: this will take a few seconds to finish
/// Note 2: to specify only the path and not the filename, end [pathName] with "/"
/// </description>
public static void TriggerCubemapCapture(Vector3 capturePos, int cubemapSize = 2048, string pathName = null)
{
GameObject ownerObj = new GameObject("CubemapCamera", typeof(Camera));
ownerObj.hideFlags = HideFlags.HideAndDontSave;
ownerObj.transform.position = capturePos;
ownerObj.transform.rotation = Quaternion.identity;
Camera camComponent = ownerObj.GetComponent<Camera>();
camComponent.farClipPlane = 10000.0f;
camComponent.enabled = false;
Cubemap cubemap = new Cubemap(cubemapSize, TextureFormat.RGB24, false);
RenderIntoCubemap(camComponent, cubemap);
SaveCubemapCapture(cubemap, pathName);
DestroyImmediate(cubemap);
DestroyImmediate(ownerObj);
}
public static void RenderIntoCubemap(Camera ownerCamera, Cubemap outCubemap)
{
int width = (int)outCubemap.width;
int height = (int)outCubemap.height;
CubemapFace[] faces = new CubemapFace[] { CubemapFace.PositiveX, CubemapFace.NegativeX, CubemapFace.PositiveY, CubemapFace.NegativeY, CubemapFace.PositiveZ, CubemapFace.NegativeZ };
Vector3[] faceAngles = new Vector3[] { new Vector3(0.0f, 90.0f, 0.0f), new Vector3(0.0f, -90.0f, 0.0f), new Vector3(-90.0f, 0.0f, 0.0f), new Vector3(90.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 180.0f, 0.0f) };
// Backup states
RenderTexture backupRenderTex = RenderTexture.active;
float backupFieldOfView = ownerCamera.fieldOfView;
float backupAspect = ownerCamera.aspect;
Quaternion backupRot = ownerCamera.transform.rotation;
RenderTexture backupRT = ownerCamera.targetTexture;
// Enable 8X MSAA
RenderTexture faceTexture = new RenderTexture(width, height, 24);
faceTexture.antiAliasing = 8;
faceTexture.dimension = UnityEngine.Rendering.TextureDimension.Tex2D;
faceTexture.hideFlags = HideFlags.HideAndDontSave;
// For intermediate saving
Texture2D swapTex = new Texture2D(width, height, TextureFormat.RGB24, false);
swapTex.hideFlags = HideFlags.HideAndDontSave;
// Capture 6 Directions
ownerCamera.targetTexture = faceTexture;
ownerCamera.fieldOfView = 90;
ownerCamera.aspect = 1.0f;
Color[] mirroredPixels = new Color[swapTex.height * swapTex.width];
for (int i = 0; i < faces.Length; i++)
{
ownerCamera.transform.eulerAngles = faceAngles[i];
ownerCamera.Render();
RenderTexture.active = faceTexture;
swapTex.ReadPixels(new Rect(0, 0, width, height), 0, 0);
// Mirror vertically to match Unity's cubemap pixel ordering
Color[] originalPixels = swapTex.GetPixels();
for (int y1 = 0; y1 < height; y1++)
{
for (int x1 = 0; x1 < width; x1++)
{
mirroredPixels[y1 * width + x1] = originalPixels[((height - 1 - y1) * width) + x1];
}
}
outCubemap.SetPixels(mirroredPixels, faces[i]);
}
outCubemap.SmoothEdges();
// Restore states
RenderTexture.active = backupRenderTex;
ownerCamera.fieldOfView = backupFieldOfView;
ownerCamera.aspect = backupAspect;
ownerCamera.transform.rotation = backupRot;
ownerCamera.targetTexture = backupRT;
DestroyImmediate(swapTex);
DestroyImmediate(faceTexture);
}
/// <summary>
/// Save a Unity cubemap into an NPOT 6x1 texture atlas, with faces in the order PX NX PY NY PZ NZ
/// </summary>
/// <description>
/// Supported format: PNG/JPG
/// Default file name: based on the current time, e.g. OVR_hh_mm_ss.png
/// </description>
public static bool SaveCubemapCapture(Cubemap cubemap, string pathName = null)
{
string fileName;
string dirName;
int width = cubemap.width;
int height = cubemap.height;
int x = 0;
int y = 0;
bool saveToPNG = true;
if (string.IsNullOrEmpty(pathName))
{
dirName = Application.persistentDataPath + "/OVR_ScreenShot360/";
fileName = null;
}
else
{
dirName = Path.GetDirectoryName(pathName);
fileName = Path.GetFileName(pathName);
// Append a trailing separator only if one is not already present
if (dirName[dirName.Length - 1] != '/' && dirName[dirName.Length - 1] != '\\')
dirName += "/";
}
if (string.IsNullOrEmpty(fileName))
fileName = "OVR_" + System.DateTime.Now.ToString("hh_mm_ss") + ".png";
string extName = Path.GetExtension(fileName);
if (extName == ".png")
{
saveToPNG = true;
}
else if (extName == ".jpg")
{
saveToPNG = false;
}
else
{
Debug.LogError("Unsupported file format" + extName);
return false;
}
// Validate path
try
{
System.IO.Directory.CreateDirectory(dirName);
}
catch (System.Exception e)
{
Debug.LogError("Failed to create path " + dirName + " since " + e.ToString());
return false;
}
// Create the new texture
Texture2D tex = new Texture2D(width * 6, height, TextureFormat.RGB24, false);
if (tex == null)
{
Debug.LogError("[OVRScreenshotWizard] Failed creating the texture!");
return false;
}
// Merge all the cubemap faces into the texture
// Reference cubemap format: http://docs.unity3d.com/Manual/class-Cubemap.html
CubemapFace[] faces = new CubemapFace[] { CubemapFace.PositiveX, CubemapFace.NegativeX, CubemapFace.PositiveY, CubemapFace.NegativeY, CubemapFace.PositiveZ, CubemapFace.NegativeZ };
for (int i = 0; i < faces.Length; i++)
{
// get the pixels from the cubemap
Color[] srcPixels = null;
Color[] pixels = cubemap.GetPixels(faces[i]);
// flip them vertically, since cubemap face pixels are ordered left to right, bottom to top
srcPixels = new Color[pixels.Length];
for (int y1 = 0; y1 < height; y1++)
{
for (int x1 = 0; x1 < width; x1++)
{
srcPixels[y1 * width + x1] = pixels[((height - 1 - y1) * width) + x1];
}
}
// Copy them to the dest texture
tex.SetPixels(x, y, width, height, srcPixels);
x += width;
}
try
{
// Encode the texture and save it to disk
byte[] bytes = saveToPNG ? tex.EncodeToPNG() : tex.EncodeToJPG();
System.IO.File.WriteAllBytes(dirName + fileName, bytes);
Debug.Log("Cubemap file created " + dirName + fileName);
}
catch (System.Exception e)
{
Debug.LogError("Failed to save cubemap file since " + e.ToString());
return false;
}
DestroyImmediate(tex);
return true;
}
}
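
A minimal sketch of the code-driven path described above (not part of the diff): calling the static TriggerCubemapCapture from game logic. The key, resolution, and output folder below are illustrative values.

using UnityEngine;

// Illustrative sketch only: triggers a cubemap capture from game logic.
// A pathName ending in "/" keeps the default time-stamped file name.
public class CubemapCaptureExample : MonoBehaviour
{
    void Update()
    {
        if (Input.GetKeyDown(KeyCode.F9))
        {
            OVRCubemapCapture.TriggerCubemapCapture(
                transform.position, 1024, Application.persistentDataPath + "/Captures/");
        }
    }
}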

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7a489178b0acf0147846b3873447beaf
timeCreated: 1464728890
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,438 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using UnityEngine.UI;
//-------------------------------------------------------------------------------------
/// <summary>
/// Shows debug information on a heads-up display.
/// </summary>
public class OVRDebugInfo : MonoBehaviour
{
#region GameObjects for Debug Information UIs
GameObject debugUIManager;
GameObject debugUIObject;
GameObject riftPresent;
GameObject fps;
GameObject ipd;
GameObject fov;
GameObject height;
GameObject depth;
GameObject resolutionEyeTexture;
GameObject latencies;
GameObject texts;
#endregion
#region Debug strings
string strRiftPresent = null; // "VR DISABLED"
string strFPS = null; // "FPS: 0";
string strIPD = null; // "IPD: 0.000";
string strFOV = null; // "FOV: 0.0f";
string strHeight = null; // "Height: 0.0f";
string strDepth = null; // "Depth: 0.0f";
string strResolutionEyeTexture = null; // "Resolution : {0} x {1}"
string strLatencies = null; // "R: {0:F3} TW: {1:F3} PP: {2:F3} RE: {3:F3} TWE: {4:F3}"
#endregion
/// <summary>
/// Variables for FPS
/// </summary>
float updateInterval = 0.5f;
float accum = 0.0f;
int frames = 0;
float timeLeft = 0.0f;
/// <summary>
/// Manages UI initialization
/// </summary>
bool initUIComponent = false;
bool isInited = false;
/// <summary>
/// Vertical (Y) offset between UI elements
/// </summary>
float offsetY = 55.0f;
/// <summary>
/// Manages the Rift-detection UI timeout
/// </summary>
float riftPresentTimeout = 0.0f;
/// <summary>
/// Toggles display of the VR variables
/// </summary>
bool showVRVars = false;
#region MonoBehaviour handler
/// <summary>
/// Initialization
/// </summary>
void Awake()
{
// Create canvas for using new GUI
debugUIManager = new GameObject();
debugUIManager.name = "DebugUIManager";
debugUIManager.transform.parent = GameObject.Find("LeftEyeAnchor").transform;
RectTransform rectTransform = debugUIManager.AddComponent<RectTransform>();
rectTransform.sizeDelta = new Vector2(100f, 100f);
rectTransform.localScale = new Vector3(0.001f, 0.001f, 0.001f);
rectTransform.localPosition = new Vector3(0.01f, 0.17f, 0.53f);
rectTransform.localEulerAngles = Vector3.zero;
Canvas canvas = debugUIManager.AddComponent<Canvas>();
canvas.renderMode = RenderMode.WorldSpace;
canvas.pixelPerfect = false;
}
/// <summary>
/// Updating VR variables and managing UI present
/// </summary>
void Update()
{
if (initUIComponent && !isInited)
{
InitUIComponents();
}
if (Input.GetKeyDown(KeyCode.Space) && riftPresentTimeout < 0.0f)
{
initUIComponent = true;
showVRVars ^= true;
}
UpdateDeviceDetection();
// Presenting VR variables
if (showVRVars)
{
debugUIManager.SetActive(true);
UpdateVariable();
UpdateStrings();
}
else
{
debugUIManager.SetActive(false);
}
}
/// <summary>
/// Initialize isInited value on OnDestroy
/// </summary>
void OnDestroy()
{
isInited = false;
}
#endregion
#region Private Functions
/// <summary>
/// Initialize UI GameObjects
/// </summary>
void InitUIComponents()
{
float posY = 0.0f;
int fontSize = 20;
debugUIObject = new GameObject();
debugUIObject.name = "DebugInfo";
debugUIObject.transform.parent = GameObject.Find("DebugUIManager").transform;
debugUIObject.transform.localPosition = new Vector3(0.0f, 100.0f, 0.0f);
debugUIObject.transform.localEulerAngles = Vector3.zero;
debugUIObject.transform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
// Print out for FPS
if (!string.IsNullOrEmpty(strFPS))
{
fps = VariableObjectManager(fps, "FPS", posY -= offsetY, strFPS, fontSize);
}
// Print out for IPD
if (!string.IsNullOrEmpty(strIPD))
{
ipd = VariableObjectManager(ipd, "IPD", posY -= offsetY, strIPD, fontSize);
}
// Print out for FOV
if (!string.IsNullOrEmpty(strFOV))
{
fov = VariableObjectManager(fov, "FOV", posY -= offsetY, strFOV, fontSize);
}
// Print out for Height
if (!string.IsNullOrEmpty(strHeight))
{
height = VariableObjectManager(height, "Height", posY -= offsetY, strHeight, fontSize);
}
// Print out for Depth
if (!string.IsNullOrEmpty(strDepth))
{
depth = VariableObjectManager(depth, "Depth", posY -= offsetY, strDepth, fontSize);
}
// Print out for Resolution of Eye Texture
if (!string.IsNullOrEmpty(strResolutionEyeTexture))
{
resolutionEyeTexture = VariableObjectManager(resolutionEyeTexture, "Resolution", posY -= offsetY, strResolutionEyeTexture, fontSize);
}
// Print out for Latency
if (!string.IsNullOrEmpty(strLatencies))
{
latencies = VariableObjectManager(latencies, "Latency", posY -= offsetY, strLatencies, 17);
posY = 0.0f;
}
initUIComponent = false;
isInited = true;
}
/// <summary>
/// Update VR Variables
/// </summary>
void UpdateVariable()
{
UpdateIPD();
UpdateEyeHeightOffset();
UpdateEyeDepthOffset();
UpdateFOV();
UpdateResolutionEyeTexture();
UpdateLatencyValues();
UpdateFPS();
}
/// <summary>
/// Update Strings
/// </summary>
void UpdateStrings()
{
if (debugUIObject == null)
return;
if (!string.IsNullOrEmpty(strFPS))
fps.GetComponentInChildren<Text>().text = strFPS;
if (!string.IsNullOrEmpty(strIPD))
ipd.GetComponentInChildren<Text>().text = strIPD;
if (!string.IsNullOrEmpty(strFOV))
fov.GetComponentInChildren<Text>().text = strFOV;
if (!string.IsNullOrEmpty(strResolutionEyeTexture))
resolutionEyeTexture.GetComponentInChildren<Text>().text = strResolutionEyeTexture;
if (!string.IsNullOrEmpty(strLatencies))
{
latencies.GetComponentInChildren<Text>().text = strLatencies;
latencies.GetComponentInChildren<Text>().fontSize = 14;
}
if (!string.IsNullOrEmpty(strHeight))
height.GetComponentInChildren<Text>().text = strHeight;
if (!string.IsNullOrEmpty(strDepth))
depth.GetComponentInChildren<Text>().text = strDepth;
}
/// <summary>
/// Displays the Rift-present GUI
/// </summary>
void RiftPresentGUI(GameObject guiMainOBj)
{
riftPresent = ComponentComposition(riftPresent);
riftPresent.transform.SetParent(guiMainOBj.transform);
riftPresent.name = "RiftPresent";
RectTransform rectTransform = riftPresent.GetComponent<RectTransform>();
rectTransform.localPosition = new Vector3(0.0f, 0.0f, 0.0f);
rectTransform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
rectTransform.localEulerAngles = Vector3.zero;
Text text = riftPresent.GetComponentInChildren<Text>();
text.text = strRiftPresent;
text.fontSize = 20;
}
/// <summary>
/// Updates the device detection.
/// </summary>
void UpdateDeviceDetection()
{
if (riftPresentTimeout >= 0.0f)
{
riftPresentTimeout -= Time.deltaTime;
}
}
/// <summary>
/// Object Manager for Variables
/// </summary>
/// <returns> gameobject for each Variable </returns>
GameObject VariableObjectManager(GameObject gameObject, string name, float posY, string str, int fontSize)
{
gameObject = ComponentComposition(gameObject);
gameObject.name = name;
gameObject.transform.SetParent(debugUIObject.transform);
RectTransform rectTransform = gameObject.GetComponent<RectTransform>();
rectTransform.localPosition = new Vector3(0.0f, posY -= offsetY, 0.0f);
Text text = gameObject.GetComponentInChildren<Text>();
text.text = str;
text.fontSize = fontSize;
gameObject.transform.localEulerAngles = Vector3.zero;
rectTransform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
return gameObject;
}
/// <summary>
/// Component composition
/// </summary>
/// <returns> Composed gameobject. </returns>
GameObject ComponentComposition(GameObject GO)
{
GO = new GameObject();
GO.AddComponent<RectTransform>();
GO.AddComponent<CanvasRenderer>();
GO.AddComponent<Image>();
GO.GetComponent<RectTransform>().sizeDelta = new Vector2(350f, 50f);
GO.GetComponent<Image>().color = new Color(7f / 255f, 45f / 255f, 71f / 255f, 200f / 255f);
texts = new GameObject();
texts.AddComponent<RectTransform>();
texts.AddComponent<CanvasRenderer>();
texts.AddComponent<Text>();
texts.GetComponent<RectTransform>().sizeDelta = new Vector2(350f, 50f);
texts.GetComponent<Text>().font = Resources.GetBuiltinResource(typeof(Font), "Arial.ttf") as Font;
texts.GetComponent<Text>().alignment = TextAnchor.MiddleCenter;
texts.transform.SetParent(GO.transform);
texts.name = "TextBox";
return GO;
}
#endregion
#region Debugging variables handler
/// <summary>
/// Updates the IPD.
/// </summary>
void UpdateIPD()
{
strIPD = System.String.Format("IPD (mm): {0:F4}", OVRManager.profile.ipd * 1000.0f);
}
/// <summary>
/// Updates the eye height offset.
/// </summary>
void UpdateEyeHeightOffset()
{
float eyeHeight = OVRManager.profile.eyeHeight;
strHeight = System.String.Format("Eye Height (m): {0:F3}", eyeHeight);
}
/// <summary>
/// Updates the eye depth offset.
/// </summary>
void UpdateEyeDepthOffset()
{
float eyeDepth = OVRManager.profile.eyeDepth;
strDepth = System.String.Format("Eye Depth (m): {0:F3}", eyeDepth);
}
/// <summary>
/// Updates the FOV.
/// </summary>
void UpdateFOV()
{
#if UNITY_2017_2_OR_NEWER
OVRDisplay.EyeRenderDesc eyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.LeftEye);
#else
OVRDisplay.EyeRenderDesc eyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.VR.VRNode.LeftEye);
#endif
strFOV = System.String.Format("FOV (deg): {0:F3}", eyeDesc.fov.y);
}
/// <summary>
/// Updates resolution of eye texture
/// </summary>
void UpdateResolutionEyeTexture()
{
#if UNITY_2017_2_OR_NEWER
OVRDisplay.EyeRenderDesc leftEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.LeftEye);
OVRDisplay.EyeRenderDesc rightEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.RightEye);
float scale = UnityEngine.XR.XRSettings.renderViewportScale;
#else
OVRDisplay.EyeRenderDesc leftEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.VR.VRNode.LeftEye);
OVRDisplay.EyeRenderDesc rightEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.VR.VRNode.RightEye);
float scale = UnityEngine.VR.VRSettings.renderViewportScale;
#endif
float w = (int)(scale * (float)(leftEyeDesc.resolution.x + rightEyeDesc.resolution.x));
float h = (int)(scale * (float)Mathf.Max(leftEyeDesc.resolution.y, rightEyeDesc.resolution.y));
strResolutionEyeTexture = System.String.Format("Resolution : {0} x {1}", w, h);
}
/// <summary>
/// Updates latency values
/// </summary>
void UpdateLatencyValues()
{
#if !UNITY_ANDROID || UNITY_EDITOR
OVRDisplay.LatencyData latency = OVRManager.display.latency;
if (latency.render < 0.000001f && latency.timeWarp < 0.000001f && latency.postPresent < 0.000001f)
strLatencies = System.String.Format("Latency values are not available.");
else
strLatencies = System.String.Format("Render: {0:F3} TimeWarp: {1:F3} Post-Present: {2:F3}\nRender Error: {3:F3} TimeWarp Error: {4:F3}",
latency.render,
latency.timeWarp,
latency.postPresent,
latency.renderError,
latency.timeWarpError);
#endif
}
/// <summary>
/// Updates the FPS.
/// </summary>
void UpdateFPS()
{
timeLeft -= Time.unscaledDeltaTime;
accum += Time.unscaledDeltaTime;
++frames;
// Interval ended - update GUI text and start new interval
if (timeLeft <= 0.0)
{
// display two fractional digits (f2 format)
float fps = frames / accum;
strFPS = System.String.Format("FPS: {0:F2}", fps);
timeLeft += updateInterval;
accum = 0.0f;
frames = 0;
}
}
#endregion
}
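
A minimal sketch (not part of the diff): OVRDebugInfo looks up a child named "LeftEyeAnchor" in Awake, so it should only be added to a scene that already contains an OVRCameraRig. The bootstrap component below is illustrative.

using UnityEngine;

// Illustrative sketch only: adds the debug HUD at runtime once the expected anchor exists.
public class DebugInfoBootstrapExample : MonoBehaviour
{
    void Start()
    {
        // OVRDebugInfo.Awake() finds "LeftEyeAnchor" by name, so only add the HUD when the rig is present.
        if (GameObject.Find("LeftEyeAnchor") != null)
        {
            gameObject.AddComponent<OVRDebugInfo>();
        }
    }
}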

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: b71d1996d67004241a3b69960856ffcb
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,289 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using UnityEngine.EventSystems;
using UnityEngine.UI;
/// <summary>
/// UI pointer driven by gaze input.
/// </summary>
public class OVRGazePointer : MonoBehaviour {
private Transform gazeIcon; //the transform that rotates according to our movement
[Tooltip("Should the pointer be hidden when not over interactive objects.")]
public bool hideByDefault = true;
[Tooltip("Time after leaving interactive object before pointer fades.")]
public float showTimeoutPeriod = 1;
[Tooltip("Time after mouse pointer becoming inactive before pointer unfades.")]
public float hideTimeoutPeriod = 0.1f;
[Tooltip("Keep a faint version of the pointer visible while using a mouse")]
public bool dimOnHideRequest = true;
[Tooltip("Angular scale of pointer")]
public float depthScaleMultiplier = 0.03f;
/// <summary>
/// The gaze ray.
/// </summary>
public Transform rayTransform;
/// <summary>
/// Is the gaze pointer currently visible
/// </summary>
public bool hidden { get; private set; }
/// <summary>
/// Current scale applied to pointer
/// </summary>
public float currentScale { get; private set; }
/// <summary>
/// Current depth of pointer from camera
/// </summary>
private float depth;
private float hideUntilTime;
/// <summary>
/// How many times position has been set this frame. Used to detect when there are no position sets in a frame.
/// </summary>
private int positionSetsThisFrame = 0;
/// <summary>
/// Last time code requested the pointer be shown. Usually when pointer passes over interactive elements.
/// </summary>
private float lastShowRequestTime;
/// <summary>
/// Last time pointer was requested to be hidden. Usually mouse pointer activity.
/// </summary>
private float lastHideRequestTime;
[Tooltip("Radius of the cursor. Used for preventing geometry intersections.")]
public float cursorRadius = 1f;
// Optionally present GUI element displaying progress when using gaze-to-select mechanics
private OVRProgressIndicator progressIndicator;
private static OVRGazePointer _instance;
public static OVRGazePointer instance
{
// If there's no GazePointer already in the scene, instantiate one now.
get
{
if (_instance == null)
{
Debug.Log("Instantiating GazePointer");
_instance = (OVRGazePointer)GameObject.Instantiate((OVRGazePointer)Resources.Load("Prefabs/GazePointerRing", typeof(OVRGazePointer)));
}
return _instance;
}
}
/// <summary>
/// Used to determine alpha level of gaze cursor. Could also be used to determine cursor size, for example, as the cursor fades out.
/// </summary>
public float visibilityStrength
{
get
{
// It's possible there are reasons to show the cursor - such as it hovering over some UI - and reasons to hide
// the cursor - such as another input method (e.g. mouse) being used. We take both of these into account.
float strengthFromShowRequest;
if (hideByDefault)
{
// fade the cursor out with time
strengthFromShowRequest = Mathf.Clamp01(1 - (Time.time - lastShowRequestTime) / showTimeoutPeriod);
}
else
{
// keep it fully visible
strengthFromShowRequest = 1;
}
// Now consider factors requesting pointer to be hidden
float strengthFromHideRequest;
strengthFromHideRequest = (lastHideRequestTime + hideTimeoutPeriod > Time.time) ? (dimOnHideRequest ? 0.1f : 0) : 1;
// Hide requests take priority
return Mathf.Min(strengthFromShowRequest, strengthFromHideRequest);
}
}
public float SelectionProgress
{
get
{
return progressIndicator ? progressIndicator.currentProgress : 0;
}
set
{
if (progressIndicator)
progressIndicator.currentProgress = value;
}
}
public void Awake()
{
currentScale = 1;
// Only allow one instance at runtime.
if (_instance != null && _instance != this)
{
enabled = false;
DestroyImmediate(this);
return;
}
_instance = this;
gazeIcon = transform.Find("GazeIcon");
progressIndicator = transform.GetComponent<OVRProgressIndicator>();
}
void Update ()
{
if (rayTransform == null && Camera.main != null)
rayTransform = Camera.main.transform;
// Move the gaze cursor to keep it in the middle of the view
transform.position = rayTransform.position + rayTransform.forward * depth;
// Should we show or hide the gaze cursor?
if (visibilityStrength == 0 && !hidden)
{
Hide();
}
else if (visibilityStrength > 0 && hidden)
{
Show();
}
}
/// <summary>
/// Set position and orientation of pointer
/// </summary>
/// <param name="pos"></param>
/// <param name="normal"></param>
public void SetPosition(Vector3 pos, Vector3 normal)
{
transform.position = pos;
// Set the rotation to match the normal of the surface it's on.
Quaternion newRot = transform.rotation;
newRot.SetLookRotation(normal, rayTransform.up);
transform.rotation = newRot;
// record depth so that distance doesn't pop when pointer leaves an object
depth = (rayTransform.position - pos).magnitude;
//set scale based on depth
currentScale = depth * depthScaleMultiplier;
transform.localScale = new Vector3(currentScale, currentScale, currentScale);
positionSetsThisFrame++;
}
/// <summary>
/// SetPosition overload without normal. Just makes cursor face user
/// </summary>
/// <param name="pos"></param>
public void SetPosition(Vector3 pos)
{
SetPosition(pos, rayTransform.forward);
}
public float GetCurrentRadius()
{
return cursorRadius * currentScale;
}
void LateUpdate()
{
// This happens after all Updates so we know that if positionSetsThisFrame is zero then nothing set the position this frame
if (positionSetsThisFrame == 0)
{
// No geometry intersections, so gazing into space. Make the cursor face directly at the camera
Quaternion newRot = transform.rotation;
newRot.SetLookRotation(rayTransform.forward, rayTransform.up);
transform.rotation = newRot;
}
Quaternion iconRotation = gazeIcon.rotation;
iconRotation.SetLookRotation(transform.rotation * new Vector3(0, 0, 1));
gazeIcon.rotation = iconRotation;
positionSetsThisFrame = 0;
}
/// <summary>
/// Request the pointer be hidden
/// </summary>
public void RequestHide()
{
if (!dimOnHideRequest)
{
Hide();
}
lastHideRequestTime = Time.time;
}
/// <summary>
/// Request the pointer be shown. Hide requests take priority
/// </summary>
public void RequestShow()
{
Show();
lastShowRequestTime = Time.time;
}
// Disable/Enable child elements when we show/hide the cursor. For performance reasons.
void Hide()
{
foreach (Transform child in transform)
{
child.gameObject.SetActive(false);
}
if (GetComponent<Renderer>())
GetComponent<Renderer>().enabled = false;
hidden = true;
}
void Show()
{
foreach (Transform child in transform)
{
child.gameObject.SetActive(true);
}
if (GetComponent<Renderer>())
GetComponent<Renderer>().enabled = true;
hidden = false;
}
}
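
A minimal sketch (not part of the diff) of the usual caller side: a gaze raycaster that keeps the pointer on whatever surface the gaze ray hits, using SetPosition and RequestShow. The ray length and field names below are illustrative, and the sketch assumes the pointer prefab's rayTransform is set (or that a main camera exists for it to fall back on).

using UnityEngine;

// Illustrative sketch only: drives OVRGazePointer from a simple physics raycast.
public class GazePointerDriverExample : MonoBehaviour
{
    public Transform gazeOrigin;       // e.g. the rig's CenterEyeAnchor
    public float maxDistance = 10f;    // example value

    void Update()
    {
        RaycastHit hit;
        if (gazeOrigin != null &&
            Physics.Raycast(gazeOrigin.position, gazeOrigin.forward, out hit, maxDistance))
        {
            OVRGazePointer.instance.RequestShow();
            OVRGazePointer.instance.SetPosition(hit.point, hit.normal);
        }
    }
}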

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 30530ad0e40d0a64ea26d753ee4996ea
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,217 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
using System.Collections.Generic;
using System.Text;
public class OVRGearVrControllerTest : MonoBehaviour
{
public class BoolMonitor
{
public delegate bool BoolGenerator();
private string m_name = "";
private BoolGenerator m_generator;
private bool m_prevValue = false;
private bool m_currentValue = false;
private bool m_currentValueRecentlyChanged = false;
private float m_displayTimeout = 0.0f;
private float m_displayTimer = 0.0f;
public BoolMonitor(string name, BoolGenerator generator, float displayTimeout = 0.5f)
{
m_name = name;
m_generator = generator;
m_displayTimeout = displayTimeout;
}
public void Update()
{
m_prevValue = m_currentValue;
m_currentValue = m_generator();
if (m_currentValue != m_prevValue)
{
m_currentValueRecentlyChanged = true;
m_displayTimer = m_displayTimeout;
}
if (m_displayTimer > 0.0f)
{
m_displayTimer -= Time.deltaTime;
if (m_displayTimer <= 0.0f)
{
m_currentValueRecentlyChanged = false;
m_displayTimer = 0.0f;
}
}
}
public void AppendToStringBuilder(ref StringBuilder sb)
{
sb.Append(m_name);
if (m_currentValue && m_currentValueRecentlyChanged)
sb.Append(": *True*\n");
else if (m_currentValue)
sb.Append(": True \n");
else if (!m_currentValue && m_currentValueRecentlyChanged)
sb.Append(": *False*\n");
else if (!m_currentValue)
sb.Append(": False \n");
}
}
public Text uiText;
private List<BoolMonitor> monitors;
private StringBuilder data;
void Start()
{
if (uiText != null)
{
uiText.supportRichText = false;
}
data = new StringBuilder(2048);
monitors = new List<BoolMonitor>()
{
// virtual
new BoolMonitor("WasRecentered", () => OVRInput.GetControllerWasRecentered()),
new BoolMonitor("One", () => OVRInput.Get(OVRInput.Button.One)),
new BoolMonitor("OneDown", () => OVRInput.GetDown(OVRInput.Button.One)),
new BoolMonitor("OneUp", () => OVRInput.GetUp(OVRInput.Button.One)),
new BoolMonitor("One (Touch)", () => OVRInput.Get(OVRInput.Touch.One)),
new BoolMonitor("OneDown (Touch)", () => OVRInput.GetDown(OVRInput.Touch.One)),
new BoolMonitor("OneUp (Touch)", () => OVRInput.GetUp(OVRInput.Touch.One)),
new BoolMonitor("Two", () => OVRInput.Get(OVRInput.Button.Two)),
new BoolMonitor("TwoDown", () => OVRInput.GetDown(OVRInput.Button.Two)),
new BoolMonitor("TwoUp", () => OVRInput.GetUp(OVRInput.Button.Two)),
new BoolMonitor("PrimaryIndexTrigger", () => OVRInput.Get(OVRInput.Button.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTriggerDown", () => OVRInput.GetDown(OVRInput.Button.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTriggerUp", () => OVRInput.GetUp(OVRInput.Button.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTrigger (Touch)", () => OVRInput.Get(OVRInput.Touch.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTriggerDown (Touch)", () => OVRInput.GetDown(OVRInput.Touch.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTriggerUp (Touch)", () => OVRInput.GetUp(OVRInput.Touch.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryHandTrigger", () => OVRInput.Get(OVRInput.Button.PrimaryHandTrigger)),
new BoolMonitor("PrimaryHandTriggerDown", () => OVRInput.GetDown(OVRInput.Button.PrimaryHandTrigger)),
new BoolMonitor("PrimaryHandTriggerUp", () => OVRInput.GetUp(OVRInput.Button.PrimaryHandTrigger)),
new BoolMonitor("Up", () => OVRInput.Get(OVRInput.Button.Up)),
new BoolMonitor("Down", () => OVRInput.Get(OVRInput.Button.Down)),
new BoolMonitor("Left", () => OVRInput.Get(OVRInput.Button.Left)),
new BoolMonitor("Right", () => OVRInput.Get(OVRInput.Button.Right)),
new BoolMonitor("Touchpad (Click)", () => OVRInput.Get(OVRInput.Button.PrimaryTouchpad)),
new BoolMonitor("TouchpadDown (Click)", () => OVRInput.GetDown(OVRInput.Button.PrimaryTouchpad)),
new BoolMonitor("TouchpadUp (Click)", () => OVRInput.GetUp(OVRInput.Button.PrimaryTouchpad)),
new BoolMonitor("Touchpad (Touch)", () => OVRInput.Get(OVRInput.Touch.PrimaryTouchpad)),
new BoolMonitor("TouchpadDown (Touch)", () => OVRInput.GetDown(OVRInput.Touch.PrimaryTouchpad)),
new BoolMonitor("TouchpadUp (Touch)", () => OVRInput.GetUp(OVRInput.Touch.PrimaryTouchpad)),
// raw
new BoolMonitor("Start", () => OVRInput.Get(OVRInput.RawButton.Start)),
new BoolMonitor("StartDown", () => OVRInput.GetDown(OVRInput.RawButton.Start)),
new BoolMonitor("StartUp", () => OVRInput.GetUp(OVRInput.RawButton.Start)),
new BoolMonitor("Back", () => OVRInput.Get(OVRInput.RawButton.Back)),
new BoolMonitor("BackDown", () => OVRInput.GetDown(OVRInput.RawButton.Back)),
new BoolMonitor("BackUp", () => OVRInput.GetUp(OVRInput.RawButton.Back)),
new BoolMonitor("A", () => OVRInput.Get(OVRInput.RawButton.A)),
new BoolMonitor("ADown", () => OVRInput.GetDown(OVRInput.RawButton.A)),
new BoolMonitor("AUp", () => OVRInput.GetUp(OVRInput.RawButton.A)),
};
}
static string prevConnected = "";
static BoolMonitor controllers = new BoolMonitor("Controllers Changed", () => { return OVRInput.GetConnectedControllers().ToString() != prevConnected; });
void Update()
{
OVRInput.Controller activeController = OVRInput.GetActiveController();
data.Length = 0;
byte recenterCount = OVRInput.GetControllerRecenterCount();
data.AppendFormat("RecenterCount: {0}\n", recenterCount);
byte battery = OVRInput.GetControllerBatteryPercentRemaining();
data.AppendFormat("Battery: {0}\n", battery);
float framerate = OVRPlugin.GetAppFramerate();
data.AppendFormat("Framerate: {0:F2}\n", framerate);
string activeControllerName = activeController.ToString();
data.AppendFormat("Active: {0}\n", activeControllerName);
string connectedControllerNames = OVRInput.GetConnectedControllers().ToString();
data.AppendFormat("Connected: {0}\n", connectedControllerNames);
data.AppendFormat("PrevConnected: {0}\n", prevConnected);
controllers.Update();
controllers.AppendToStringBuilder(ref data);
prevConnected = connectedControllerNames;
Quaternion rot = OVRInput.GetLocalControllerRotation(activeController);
data.AppendFormat("Orientation: ({0:F2}, {1:F2}, {2:F2}, {3:F2})\n", rot.x, rot.y, rot.z, rot.w);
Vector3 angVel = OVRInput.GetLocalControllerAngularVelocity(activeController);
data.AppendFormat("AngVel: ({0:F2}, {1:F2}, {2:F2})\n", angVel.x, angVel.y, angVel.z);
Vector3 angAcc = OVRInput.GetLocalControllerAngularAcceleration(activeController);
data.AppendFormat("AngAcc: ({0:F2}, {1:F2}, {2:F2})\n", angAcc.x, angAcc.y, angAcc.z);
Vector3 pos = OVRInput.GetLocalControllerPosition(activeController);
data.AppendFormat("Position: ({0:F2}, {1:F2}, {2:F2})\n", pos.x, pos.y, pos.z);
Vector3 vel = OVRInput.GetLocalControllerVelocity(activeController);
data.AppendFormat("Vel: ({0:F2}, {1:F2}, {2:F2})\n", vel.x, vel.y, vel.z);
Vector3 acc = OVRInput.GetLocalControllerAcceleration(activeController);
data.AppendFormat("Acc: ({0:F2}, {1:F2}, {2:F2})\n", acc.x, acc.y, acc.z);
Vector2 primaryTouchpad = OVRInput.Get(OVRInput.Axis2D.PrimaryTouchpad);
data.AppendFormat("PrimaryTouchpad: ({0:F2}, {1:F2})\n", primaryTouchpad.x, primaryTouchpad.y);
Vector2 secondaryTouchpad = OVRInput.Get(OVRInput.Axis2D.SecondaryTouchpad);
data.AppendFormat("SecondaryTouchpad: ({0:F2}, {1:F2})\n", secondaryTouchpad.x, secondaryTouchpad.y);
float indexTrigger = OVRInput.Get(OVRInput.Axis1D.PrimaryIndexTrigger);
data.AppendFormat("PrimaryIndexTriggerAxis1D: ({0:F2})\n", indexTrigger);
float handTrigger = OVRInput.Get(OVRInput.Axis1D.PrimaryHandTrigger);
data.AppendFormat("PrimaryHandTriggerAxis1D: ({0:F2})\n", handTrigger);
for (int i = 0; i < monitors.Count; i++)
{
monitors[i].Update();
monitors[i].AppendToStringBuilder(ref data);
}
if (uiText != null)
{
uiText.text = data.ToString();
}
}
}
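
A minimal sketch (not part of the diff): BoolMonitor is a public nested class, so it can watch any boolean source, not just OVRInput queries. The component below is illustrative and logs every frame purely for demonstration.

using System.Text;
using UnityEngine;

// Illustrative sketch only: reuses BoolMonitor to watch an arbitrary boolean and report it.
public class BoolMonitorExample : MonoBehaviour
{
    private OVRGearVrControllerTest.BoolMonitor m_spaceHeld;

    void Start()
    {
        m_spaceHeld = new OVRGearVrControllerTest.BoolMonitor("SpaceHeld", () => Input.GetKey(KeyCode.Space));
    }

    void Update()
    {
        m_spaceHeld.Update();
        StringBuilder sb = new StringBuilder();
        m_spaceHeld.AppendToStringBuilder(ref sb);
        Debug.Log(sb.ToString());
    }
}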

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7acc4619d4cb5e64e9ed05e5a7a8099f
timeCreated: 1486173066
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,169 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using UnityEngine;
/// <summary>
/// An object that can be grabbed and thrown by OVRGrabber.
/// </summary>
public class OVRGrabbable : MonoBehaviour
{
[SerializeField]
protected bool m_allowOffhandGrab = true;
[SerializeField]
protected bool m_snapPosition = false;
[SerializeField]
protected bool m_snapOrientation = false;
[SerializeField]
protected Transform m_snapOffset;
[SerializeField]
protected Collider[] m_grabPoints = null;
protected bool m_grabbedKinematic = false;
protected Collider m_grabbedCollider = null;
protected OVRGrabber m_grabbedBy = null;
/// <summary>
/// If true, the object can currently be grabbed.
/// </summary>
public bool allowOffhandGrab
{
get { return m_allowOffhandGrab; }
}
/// <summary>
/// If true, the object is currently grabbed.
/// </summary>
public bool isGrabbed
{
get { return m_grabbedBy != null; }
}
/// <summary>
/// If true, the object's position will snap to match snapOffset when grabbed.
/// </summary>
public bool snapPosition
{
get { return m_snapPosition; }
}
/// <summary>
/// If true, the object's orientation will snap to match snapOffset when grabbed.
/// </summary>
public bool snapOrientation
{
get { return m_snapOrientation; }
}
/// <summary>
/// An offset relative to the OVRGrabber where this object can snap when grabbed.
/// </summary>
public Transform snapOffset
{
get { return m_snapOffset; }
}
/// <summary>
/// Returns the OVRGrabber currently grabbing this object.
/// </summary>
public OVRGrabber grabbedBy
{
get { return m_grabbedBy; }
}
/// <summary>
/// The transform at which this object was grabbed.
/// </summary>
public Transform grabbedTransform
{
get { return m_grabbedCollider.transform; }
}
/// <summary>
/// The Rigidbody of the collider that was used to grab this object.
/// </summary>
public Rigidbody grabbedRigidbody
{
get { return m_grabbedCollider.attachedRigidbody; }
}
/// <summary>
/// The contact point(s) where the object was grabbed.
/// </summary>
public Collider[] grabPoints
{
get { return m_grabPoints; }
}
/// <summary>
/// Notifies the object that it has been grabbed.
/// </summary>
virtual public void GrabBegin(OVRGrabber hand, Collider grabPoint)
{
m_grabbedBy = hand;
m_grabbedCollider = grabPoint;
gameObject.GetComponent<Rigidbody>().isKinematic = true;
}
/// <summary>
/// Notifies the object that it has been released.
/// </summary>
virtual public void GrabEnd(Vector3 linearVelocity, Vector3 angularVelocity)
{
Rigidbody rb = gameObject.GetComponent<Rigidbody>();
rb.isKinematic = m_grabbedKinematic;
rb.velocity = linearVelocity;
rb.angularVelocity = angularVelocity;
m_grabbedBy = null;
m_grabbedCollider = null;
}
void Awake()
{
if (m_grabPoints.Length == 0)
{
// Get the collider from the grabbable
Collider collider = this.GetComponent<Collider>();
if (collider == null)
{
throw new ArgumentException("Grabbables cannot have zero grab points and no collider -- please add a grab point or collider.");
}
// Create a default grab point
m_grabPoints = new Collider[1] { collider };
}
}
protected virtual void Start()
{
m_grabbedKinematic = GetComponent<Rigidbody>().isKinematic;
}
void OnDestroy()
{
if (m_grabbedBy != null)
{
// Notify the hand to release destroyed grabbables
m_grabbedBy.ForceRelease(this);
}
}
}
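
A minimal sketch (not part of the diff) of the intended extension point: Start, GrabBegin, and GrabEnd are virtual, so a subclass can add behaviour while keeping the base bookkeeping. The highlight behaviour and names below are illustrative.

using UnityEngine;

// Illustrative sketch only: a grabbable that tints its material while held.
public class HighlightGrabbableExample : OVRGrabbable
{
    public Color heldColor = Color.yellow;   // example value
    private Color m_restColor;
    private Renderer m_renderer;

    protected override void Start()
    {
        base.Start();                        // keeps the base m_grabbedKinematic bookkeeping intact
        m_renderer = GetComponent<Renderer>();
        if (m_renderer != null) m_restColor = m_renderer.material.color;
    }

    public override void GrabBegin(OVRGrabber hand, Collider grabPoint)
    {
        base.GrabBegin(hand, grabPoint);
        if (m_renderer != null) m_renderer.material.color = heldColor;
    }

    public override void GrabEnd(Vector3 linearVelocity, Vector3 angularVelocity)
    {
        base.GrabEnd(linearVelocity, angularVelocity);
        if (m_renderer != null) m_renderer.material.color = m_restColor;
    }
}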

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 02d61468f8b77ae4b92c344bc9a600fb
timeCreated: 1481833527
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,379 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Allows grabbing and throwing of objects with the OVRGrabbable component on them.
/// </summary>
[RequireComponent(typeof(Rigidbody))]
public class OVRGrabber : MonoBehaviour
{
// Grip trigger thresholds for picking up objects, with some hysteresis.
public float grabBegin = 0.55f;
public float grabEnd = 0.35f;
// Demonstrates parenting the held object to the hand's transform when grabbed.
// When false, the grabbed object is moved every FixedUpdate using MovePosition.
// Note that MovePosition is required for proper physics simulation. If you set this to true, you can
// easily observe broken physics simulation by, for example, moving the bottom cube of a stacked
// tower and noting a complete loss of friction.
[SerializeField]
protected bool m_parentHeldObject = false;
// Child/attached transforms of the grabber, indicating where to snap held objects to (if you snap them).
// Also used for ranking grab targets in case of multiple candidates.
[SerializeField]
protected Transform m_gripTransform = null;
// Child/attached Colliders to detect candidate grabbable objects.
[SerializeField]
protected Collider[] m_grabVolumes = null;
// Should be OVRInput.Controller.LTouch or OVRInput.Controller.RTouch.
[SerializeField]
protected OVRInput.Controller m_controller;
[SerializeField]
protected Transform m_parentTransform;
protected bool m_grabVolumeEnabled = true;
protected Vector3 m_lastPos;
protected Quaternion m_lastRot;
protected Quaternion m_anchorOffsetRotation;
protected Vector3 m_anchorOffsetPosition;
protected float m_prevFlex;
protected OVRGrabbable m_grabbedObj = null;
protected Vector3 m_grabbedObjectPosOff;
protected Quaternion m_grabbedObjectRotOff;
protected Dictionary<OVRGrabbable, int> m_grabCandidates = new Dictionary<OVRGrabbable, int>();
protected bool operatingWithoutOVRCameraRig = true;
/// <summary>
/// The currently grabbed object.
/// </summary>
public OVRGrabbable grabbedObject
{
get { return m_grabbedObj; }
}
public void ForceRelease(OVRGrabbable grabbable)
{
bool canRelease = (
(m_grabbedObj != null) &&
(m_grabbedObj == grabbable)
);
if (canRelease)
{
GrabEnd();
}
}
protected virtual void Awake()
{
m_anchorOffsetPosition = transform.localPosition;
m_anchorOffsetRotation = transform.localRotation;
// If we are being used with an OVRCameraRig, let it drive input updates, which may come from Update or FixedUpdate.
OVRCameraRig rig = null;
if (transform.parent != null && transform.parent.parent != null)
rig = transform.parent.parent.GetComponent<OVRCameraRig>();
if (rig != null)
{
rig.UpdatedAnchors += (r) => {OnUpdatedAnchors();};
operatingWithoutOVRCameraRig = false;
}
}
protected virtual void Start()
{
m_lastPos = transform.position;
m_lastRot = transform.rotation;
if(m_parentTransform == null)
{
if(gameObject.transform.parent != null)
{
m_parentTransform = gameObject.transform.parent.transform;
}
else
{
m_parentTransform = new GameObject().transform;
m_parentTransform.position = Vector3.zero;
m_parentTransform.rotation = Quaternion.identity;
}
}
}
void FixedUpdate()
{
if (operatingWithoutOVRCameraRig)
OnUpdatedAnchors();
}
// Hands follow the touch anchors by calling MovePosition each frame to reach the anchor.
// This is done instead of parenting to achieve workable physics. If you don't require physics on
// your hands or held objects, you may wish to switch to parenting.
void OnUpdatedAnchors()
{
Vector3 handPos = OVRInput.GetLocalControllerPosition(m_controller);
Quaternion handRot = OVRInput.GetLocalControllerRotation(m_controller);
Vector3 destPos = m_parentTransform.TransformPoint(m_anchorOffsetPosition + handPos);
Quaternion destRot = m_parentTransform.rotation * handRot * m_anchorOffsetRotation;
GetComponent<Rigidbody>().MovePosition(destPos);
GetComponent<Rigidbody>().MoveRotation(destRot);
if (!m_parentHeldObject)
{
MoveGrabbedObject(destPos, destRot);
}
m_lastPos = transform.position;
m_lastRot = transform.rotation;
float prevFlex = m_prevFlex;
// Update values from inputs
m_prevFlex = OVRInput.Get(OVRInput.Axis1D.PrimaryHandTrigger, m_controller);
CheckForGrabOrRelease(prevFlex);
}
void OnDestroy()
{
if (m_grabbedObj != null)
{
GrabEnd();
}
}
void OnTriggerEnter(Collider otherCollider)
{
// Get the grab trigger
OVRGrabbable grabbable = otherCollider.GetComponent<OVRGrabbable>() ?? otherCollider.GetComponentInParent<OVRGrabbable>();
if (grabbable == null) return;
// Add the grabbable
int refCount = 0;
m_grabCandidates.TryGetValue(grabbable, out refCount);
m_grabCandidates[grabbable] = refCount + 1;
}
void OnTriggerExit(Collider otherCollider)
{
OVRGrabbable grabbable = otherCollider.GetComponent<OVRGrabbable>() ?? otherCollider.GetComponentInParent<OVRGrabbable>();
if (grabbable == null) return;
// Remove the grabbable
int refCount = 0;
bool found = m_grabCandidates.TryGetValue(grabbable, out refCount);
if (!found)
{
return;
}
if (refCount > 1)
{
m_grabCandidates[grabbable] = refCount - 1;
}
else
{
m_grabCandidates.Remove(grabbable);
}
}
protected void CheckForGrabOrRelease(float prevFlex)
{
if ((m_prevFlex >= grabBegin) && (prevFlex < grabBegin))
{
GrabBegin();
}
else if ((m_prevFlex <= grabEnd) && (prevFlex > grabEnd))
{
GrabEnd();
}
}
protected virtual void GrabBegin()
{
float closestMagSq = float.MaxValue;
OVRGrabbable closestGrabbable = null;
Collider closestGrabbableCollider = null;
// Iterate grab candidates and find the closest grabbable candidate
foreach (OVRGrabbable grabbable in m_grabCandidates.Keys)
{
bool canGrab = !(grabbable.isGrabbed && !grabbable.allowOffhandGrab);
if (!canGrab)
{
continue;
}
for (int j = 0; j < grabbable.grabPoints.Length; ++j)
{
Collider grabbableCollider = grabbable.grabPoints[j];
// Store the closest grabbable
Vector3 closestPointOnBounds = grabbableCollider.ClosestPointOnBounds(m_gripTransform.position);
float grabbableMagSq = (m_gripTransform.position - closestPointOnBounds).sqrMagnitude;
if (grabbableMagSq < closestMagSq)
{
closestMagSq = grabbableMagSq;
closestGrabbable = grabbable;
closestGrabbableCollider = grabbableCollider;
}
}
}
// Disable grab volumes to prevent overlaps
GrabVolumeEnable(false);
if (closestGrabbable != null)
{
if (closestGrabbable.isGrabbed)
{
closestGrabbable.grabbedBy.OffhandGrabbed(closestGrabbable);
}
m_grabbedObj = closestGrabbable;
m_grabbedObj.GrabBegin(this, closestGrabbableCollider);
m_lastPos = transform.position;
m_lastRot = transform.rotation;
// Set up offsets for grabbed object desired position relative to hand.
if(m_grabbedObj.snapPosition)
{
m_grabbedObjectPosOff = m_gripTransform.localPosition;
if(m_grabbedObj.snapOffset)
{
Vector3 snapOffset = m_grabbedObj.snapOffset.position;
if (m_controller == OVRInput.Controller.LTouch) snapOffset.x = -snapOffset.x;
m_grabbedObjectPosOff += snapOffset;
}
}
else
{
Vector3 relPos = m_grabbedObj.transform.position - transform.position;
relPos = Quaternion.Inverse(transform.rotation) * relPos;
m_grabbedObjectPosOff = relPos;
}
if (m_grabbedObj.snapOrientation)
{
m_grabbedObjectRotOff = m_gripTransform.localRotation;
if(m_grabbedObj.snapOffset)
{
m_grabbedObjectRotOff = m_grabbedObj.snapOffset.rotation * m_grabbedObjectRotOff;
}
}
else
{
Quaternion relOri = Quaternion.Inverse(transform.rotation) * m_grabbedObj.transform.rotation;
m_grabbedObjectRotOff = relOri;
}
// Note: force teleport on grab, to avoid high-speed travel to dest which hits a lot of other objects at high
// speed and sends them flying. The grabbed object may still teleport inside of other objects, but fixing that
// is beyond the scope of this demo.
MoveGrabbedObject(m_lastPos, m_lastRot, true);
if(m_parentHeldObject)
{
m_grabbedObj.transform.parent = transform;
}
}
}
protected virtual void MoveGrabbedObject(Vector3 pos, Quaternion rot, bool forceTeleport = false)
{
if (m_grabbedObj == null)
{
return;
}
Rigidbody grabbedRigidbody = m_grabbedObj.grabbedRigidbody;
Vector3 grabbablePosition = pos + rot * m_grabbedObjectPosOff;
Quaternion grabbableRotation = rot * m_grabbedObjectRotOff;
if (forceTeleport)
{
grabbedRigidbody.transform.position = grabbablePosition;
grabbedRigidbody.transform.rotation = grabbableRotation;
}
else
{
grabbedRigidbody.MovePosition(grabbablePosition);
grabbedRigidbody.MoveRotation(grabbableRotation);
}
}
protected void GrabEnd()
{
if (m_grabbedObj != null)
{
OVRPose localPose = new OVRPose { position = OVRInput.GetLocalControllerPosition(m_controller), orientation = OVRInput.GetLocalControllerRotation(m_controller) };
OVRPose offsetPose = new OVRPose { position = m_anchorOffsetPosition, orientation = m_anchorOffsetRotation };
localPose = localPose * offsetPose;
OVRPose trackingSpace = transform.ToOVRPose() * localPose.Inverse();
Vector3 linearVelocity = trackingSpace.orientation * OVRInput.GetLocalControllerVelocity(m_controller);
Vector3 angularVelocity = trackingSpace.orientation * OVRInput.GetLocalControllerAngularVelocity(m_controller);
GrabbableRelease(linearVelocity, angularVelocity);
}
// Re-enable grab volumes to allow overlap events
GrabVolumeEnable(true);
}
protected void GrabbableRelease(Vector3 linearVelocity, Vector3 angularVelocity)
{
m_grabbedObj.GrabEnd(linearVelocity, angularVelocity);
if(m_parentHeldObject) m_grabbedObj.transform.parent = null;
m_grabbedObj = null;
}
protected virtual void GrabVolumeEnable(bool enabled)
{
if (m_grabVolumeEnabled == enabled)
{
return;
}
m_grabVolumeEnabled = enabled;
for (int i = 0; i < m_grabVolumes.Length; ++i)
{
Collider grabVolume = m_grabVolumes[i];
grabVolume.enabled = m_grabVolumeEnabled;
}
if (!m_grabVolumeEnabled)
{
m_grabCandidates.Clear();
}
}
protected virtual void OffhandGrabbed(OVRGrabbable grabbable)
{
if (m_grabbedObj == grabbable)
{
GrabbableRelease(Vector3.zero, Vector3.zero);
}
}
}
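
A minimal sketch (not part of the diff): ForceRelease and grabbedObject give outside code a way to make a hand drop what it is holding, for example when it enters a restricted zone. The zone component below is illustrative and assumes it sits on a GameObject with a trigger Collider.

using UnityEngine;

// Illustrative sketch only: a trigger volume that forces any entering OVRGrabber to drop its held object.
public class DropZoneExample : MonoBehaviour
{
    void OnTriggerEnter(Collider other)
    {
        OVRGrabber hand = other.GetComponentInParent<OVRGrabber>();
        if (hand != null && hand.grabbedObject != null)
        {
            hand.ForceRelease(hand.grabbedObject);
        }
    }
}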

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: fd425c2d06f39bf4899d07c05d0f10eb
timeCreated: 1481832436
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,195 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
/// <summary>
/// Diagnostic display with a regular grid of cubes for visual testing of
/// tracking and distortion.
/// </summary>
public class OVRGridCube : MonoBehaviour
{
/// <summary>
/// The key that toggles the grid of cubes.
/// </summary>
public KeyCode GridKey = KeyCode.G;
private GameObject CubeGrid = null;
private bool CubeGridOn = false;
private bool CubeSwitchColorOld = false;
private bool CubeSwitchColor = false;
private int gridSizeX = 6;
private int gridSizeY = 4;
private int gridSizeZ = 6;
private float gridScale = 0.3f;
private float cubeScale = 0.03f;
// Handle to OVRCameraRig
private OVRCameraRig CameraController = null;
/// <summary>
/// Update this instance.
/// </summary>
void Update ()
{
UpdateCubeGrid();
}
/// <summary>
/// Sets the OVR camera controller.
/// </summary>
/// <param name="cameraController">Camera controller.</param>
public void SetOVRCameraController(ref OVRCameraRig cameraController)
{
CameraController = cameraController;
}
void UpdateCubeGrid()
{
// Toggle the grid cube display on 'G'
if(Input.GetKeyDown(GridKey))
{
if(CubeGridOn == false)
{
CubeGridOn = true;
Debug.LogWarning("CubeGrid ON");
if(CubeGrid != null)
CubeGrid.SetActive(true);
else
CreateCubeGrid();
}
else
{
CubeGridOn = false;
Debug.LogWarning("CubeGrid OFF");
if(CubeGrid != null)
CubeGrid.SetActive(false);
}
}
if(CubeGrid != null)
{
// Set cube colors to let user know if camera is tracking
CubeSwitchColor = !OVRManager.tracker.isPositionTracked;
if(CubeSwitchColor != CubeSwitchColorOld)
CubeGridSwitchColor(CubeSwitchColor);
CubeSwitchColorOld = CubeSwitchColor;
}
}
void CreateCubeGrid()
{
Debug.LogWarning("Create CubeGrid");
// Create the visual cube grid
CubeGrid = new GameObject("CubeGrid");
// Set a layer to target a specific camera
CubeGrid.layer = CameraController.gameObject.layer;
for (int x = -gridSizeX; x <= gridSizeX; x++)
for (int y = -gridSizeY; y <= gridSizeY; y++)
for (int z = -gridSizeZ; z <= gridSizeZ; z++)
{
// Set the cube type:
// 0 = non-axis cube
// 1 = axis cube
// 2 = center cube
int CubeType = 0;
if ((x == 0 && y == 0) || (x == 0 && z == 0) || (y == 0 && z == 0))
{
if((x == 0) && (y == 0) && (z == 0))
CubeType = 2;
else
CubeType = 1;
}
GameObject cube = GameObject.CreatePrimitive(PrimitiveType.Cube);
BoxCollider bc = cube.GetComponent<BoxCollider>();
bc.enabled = false;
cube.layer = CameraController.gameObject.layer;
// No shadows
Renderer r = cube.GetComponent<Renderer>();
#if UNITY_4_0 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3 || UNITY_4_5 || UNITY_4_6
// Renderer.castShadows was deprecated starting in Unity 5.0
r.castShadows = false;
#else
r.shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
#endif
r.receiveShadows = false;
// Cube line is white down the middle
if (CubeType == 0)
r.material.color = Color.red;
else if (CubeType == 1)
r.material.color = Color.white;
else
r.material.color = Color.yellow;
cube.transform.position =
new Vector3(((float)x * gridScale),
((float)y * gridScale),
((float)z * gridScale));
float s = 0.7f;
// Axis cubes are bigger
if(CubeType == 1)
s = 1.0f;
// Center cube is the largest
if(CubeType == 2)
s = 2.0f;
cube.transform.localScale =
new Vector3(cubeScale * s, cubeScale * s, cubeScale * s);
cube.transform.parent = CubeGrid.transform;
}
}
/// <summary>
/// Switch the Cube grid color.
/// </summary>
/// <param name="CubeSwitchColor">If set to <c>true</c> cube switch color.</param>
void CubeGridSwitchColor(bool CubeSwitchColor)
{
Color c = Color.red;
if(CubeSwitchColor == true)
c = Color.blue;
foreach(Transform child in CubeGrid.transform)
{
Material m = child.GetComponent<Renderer>().material;
// Cube line is white down the middle
if(m.color == Color.red || m.color == Color.blue)
m.color = c;
}
}
}
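For context, a minimal usage sketch (not part of this commit): OVRGridCube only needs the scene's OVRCameraRig handed to it via SetOVRCameraController before the grid can be built. The component and field names come from the file above; the helper class itself is illustrative.
// Illustrative helper, not part of the SDK: wires OVRGridCube to the camera rig at startup.
using UnityEngine;
public class GridCubeSetupExample : MonoBehaviour
{
    void Start()
    {
        OVRCameraRig rig = FindObjectOfType<OVRCameraRig>();
        if (rig == null)
            return; // no rig in the scene, nothing to diagnose
        // OVRGridCube uses the rig's layer for the cubes it creates.
        OVRGridCube grid = gameObject.AddComponent<OVRGridCube>();
        grid.SetOVRCameraController(ref rig);
        // Optional: rebind the toggle key (defaults to KeyCode.G).
        grid.GridKey = KeyCode.F2;
    }
}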

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 4988596c8a187f94f8e6a345ebb4254b
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,913 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using System.Collections.Generic;
namespace UnityEngine.EventSystems
{
/// <summary>
/// VR extension of PointerInputModule which supports gaze and controller pointing.
/// </summary>
public class OVRInputModule : PointerInputModule
{
[Tooltip("Object which points with Z axis. E.g. CentreEyeAnchor from OVRCameraRig")]
public Transform rayTransform;
[Tooltip("Gamepad button to act as gaze click")]
public OVRInput.Button joyPadClickButton = OVRInput.Button.One;
[Tooltip("Keyboard button to act as gaze click")]
public KeyCode gazeClickKey = KeyCode.Space;
[Header("Physics")]
[Tooltip("Perform an sphere cast to determine correct depth for gaze pointer")]
public bool performSphereCastForGazepointer;
[Tooltip("Match the gaze pointer normal to geometry normal for physics colliders")]
public bool matchNormalOnPhysicsColliders;
[Header("Gamepad Stick Scroll")]
[Tooltip("Enable scrolling with the right stick on a gamepad")]
public bool useRightStickScroll = true;
[Tooltip("Deadzone for right stick to prevent accidental scrolling")]
public float rightStickDeadZone = 0.15f;
[Header("Touchpad Swipe Scroll")]
[Tooltip("Enable scrolling by swiping the GearVR touchpad")]
public bool useSwipeScroll = true;
[Tooltip("Minimum trackpad movement in pixels to start swiping")]
public float swipeDragThreshold = 2;
[Tooltip("Distance scrolled when swipe scroll occurs")]
public float swipeDragScale = 1f;
/* It's debatable which way left and right are on the Gear VR touchpad, since it faces away from you;
 * the following bool allows the swipe X axis to be swapped. */
[Tooltip("Invert X axis on touchpad")]
public bool InvertSwipeXAxis = false;
// The raycaster that gets to do pointer interaction (e.g. with a mouse), gaze interaction always works
[NonSerialized]
public OVRRaycaster activeGraphicRaycaster;
[Header("Dragging")]
[Tooltip("Minimum pointer movement in degrees to start dragging")]
public float angleDragThreshold = 1;
// The following region contains code exactly the same as the implementation
// of StandaloneInputModule. It is copied here rather than inheriting from StandaloneInputModule
// because most of StandaloneInputModule is private, so it isn't possible to derive from it cleanly.
// Future changes from Unity to StandaloneInputModule will make it possible for this class to
// derive from StandaloneInputModule instead of PointerInputModule.
//
// The following functions are not present in the following region since they have modified
// versions in the next region:
// Process
// ProcessMouseEvent
// UseMouse
#region StandaloneInputModule code
private float m_NextAction;
private Vector2 m_LastMousePosition;
private Vector2 m_MousePosition;
protected OVRInputModule()
{}
#if UNITY_EDITOR
protected override void Reset()
{
allowActivationOnMobileDevice = true;
}
#endif
[Obsolete("Mode is no longer needed on input module as it handles both mouse and keyboard simultaneously.", false)]
public enum InputMode
{
Mouse,
Buttons
}
[Obsolete("Mode is no longer needed on input module as it handles both mouse and keyboard simultaneously.", false)]
public InputMode inputMode
{
get { return InputMode.Mouse; }
}
[Header("Standalone Input Module")]
[SerializeField]
private string m_HorizontalAxis = "Horizontal";
/// <summary>
/// Name of the vertical axis for movement (if axis events are used).
/// </summary>
[SerializeField]
private string m_VerticalAxis = "Vertical";
/// <summary>
/// Name of the submit button.
/// </summary>
[SerializeField]
private string m_SubmitButton = "Submit";
/// <summary>
/// Name of the cancel button.
/// </summary>
[SerializeField]
private string m_CancelButton = "Cancel";
[SerializeField]
private float m_InputActionsPerSecond = 10;
[SerializeField]
private bool m_AllowActivationOnMobileDevice;
public bool allowActivationOnMobileDevice
{
get { return m_AllowActivationOnMobileDevice; }
set { m_AllowActivationOnMobileDevice = value; }
}
public float inputActionsPerSecond
{
get { return m_InputActionsPerSecond; }
set { m_InputActionsPerSecond = value; }
}
/// <summary>
/// Name of the horizontal axis for movement (if axis events are used).
/// </summary>
public string horizontalAxis
{
get { return m_HorizontalAxis; }
set { m_HorizontalAxis = value; }
}
/// <summary>
/// Name of the vertical axis for movement (if axis events are used).
/// </summary>
public string verticalAxis
{
get { return m_VerticalAxis; }
set { m_VerticalAxis = value; }
}
public string submitButton
{
get { return m_SubmitButton; }
set { m_SubmitButton = value; }
}
public string cancelButton
{
get { return m_CancelButton; }
set { m_CancelButton = value; }
}
public override void UpdateModule()
{
m_LastMousePosition = m_MousePosition;
m_MousePosition = Input.mousePosition;
}
public override bool IsModuleSupported()
{
// Check for mouse presence instead of whether touch is supported,
// as you can connect mouse to a tablet and in that case we'd want
// to use StandaloneInputModule for non-touch input events.
return m_AllowActivationOnMobileDevice || Input.mousePresent;
}
public override bool ShouldActivateModule()
{
if (!base.ShouldActivateModule())
return false;
var shouldActivate = Input.GetButtonDown(m_SubmitButton);
shouldActivate |= Input.GetButtonDown(m_CancelButton);
shouldActivate |= !Mathf.Approximately(Input.GetAxisRaw(m_HorizontalAxis), 0.0f);
shouldActivate |= !Mathf.Approximately(Input.GetAxisRaw(m_VerticalAxis), 0.0f);
shouldActivate |= (m_MousePosition - m_LastMousePosition).sqrMagnitude > 0.0f;
shouldActivate |= Input.GetMouseButtonDown(0);
return shouldActivate;
}
public override void ActivateModule()
{
base.ActivateModule();
m_MousePosition = Input.mousePosition;
m_LastMousePosition = Input.mousePosition;
var toSelect = eventSystem.currentSelectedGameObject;
if (toSelect == null)
toSelect = eventSystem.firstSelectedGameObject;
eventSystem.SetSelectedGameObject(toSelect, GetBaseEventData());
}
public override void DeactivateModule()
{
base.DeactivateModule();
ClearSelection();
}
/// <summary>
/// Process submit keys.
/// </summary>
private bool SendSubmitEventToSelectedObject()
{
if (eventSystem.currentSelectedGameObject == null)
return false;
var data = GetBaseEventData();
if (Input.GetButtonDown(m_SubmitButton))
ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, data, ExecuteEvents.submitHandler);
if (Input.GetButtonDown(m_CancelButton))
ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, data, ExecuteEvents.cancelHandler);
return data.used;
}
private bool AllowMoveEventProcessing(float time)
{
bool allow = Input.GetButtonDown(m_HorizontalAxis);
allow |= Input.GetButtonDown(m_VerticalAxis);
allow |= (time > m_NextAction);
return allow;
}
private Vector2 GetRawMoveVector()
{
Vector2 move = Vector2.zero;
move.x = Input.GetAxisRaw(m_HorizontalAxis);
move.y = Input.GetAxisRaw(m_VerticalAxis);
if (Input.GetButtonDown(m_HorizontalAxis))
{
if (move.x < 0)
move.x = -1f;
if (move.x > 0)
move.x = 1f;
}
if (Input.GetButtonDown(m_VerticalAxis))
{
if (move.y < 0)
move.y = -1f;
if (move.y > 0)
move.y = 1f;
}
return move;
}
/// <summary>
/// Process keyboard events.
/// </summary>
private bool SendMoveEventToSelectedObject()
{
float time = Time.unscaledTime;
if (!AllowMoveEventProcessing(time))
return false;
Vector2 movement = GetRawMoveVector();
// Debug.Log(m_ProcessingEvent.rawType + " axis:" + m_AllowAxisEvents + " value:" + "(" + x + "," + y + ")");
var axisEventData = GetAxisEventData(movement.x, movement.y, 0.6f);
if (!Mathf.Approximately(axisEventData.moveVector.x, 0f)
|| !Mathf.Approximately(axisEventData.moveVector.y, 0f))
{
ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, axisEventData, ExecuteEvents.moveHandler);
}
m_NextAction = time + 1f / m_InputActionsPerSecond;
return axisEventData.used;
}
private bool SendUpdateEventToSelectedObject()
{
if (eventSystem.currentSelectedGameObject == null)
return false;
var data = GetBaseEventData();
ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, data, ExecuteEvents.updateSelectedHandler);
return data.used;
}
/// <summary>
/// Process the current mouse press.
/// </summary>
private void ProcessMousePress(MouseButtonEventData data)
{
var pointerEvent = data.buttonData;
var currentOverGo = pointerEvent.pointerCurrentRaycast.gameObject;
// PointerDown notification
if (data.PressedThisFrame())
{
pointerEvent.eligibleForClick = true;
pointerEvent.delta = Vector2.zero;
pointerEvent.dragging = false;
pointerEvent.useDragThreshold = true;
pointerEvent.pressPosition = pointerEvent.position;
if (pointerEvent.IsVRPointer())
{
pointerEvent.SetSwipeStart(Input.mousePosition);
}
pointerEvent.pointerPressRaycast = pointerEvent.pointerCurrentRaycast;
DeselectIfSelectionChanged(currentOverGo, pointerEvent);
// search for the control that will receive the press
// if we can't find a press handler set the press
// handler to be what would receive a click.
var newPressed = ExecuteEvents.ExecuteHierarchy(currentOverGo, pointerEvent, ExecuteEvents.pointerDownHandler);
// didn't find a press handler... search for a click handler
if (newPressed == null)
newPressed = ExecuteEvents.GetEventHandler<IPointerClickHandler>(currentOverGo);
// Debug.Log("Pressed: " + newPressed);
float time = Time.unscaledTime;
if (newPressed == pointerEvent.lastPress)
{
var diffTime = time - pointerEvent.clickTime;
if (diffTime < 0.3f)
++pointerEvent.clickCount;
else
pointerEvent.clickCount = 1;
pointerEvent.clickTime = time;
}
else
{
pointerEvent.clickCount = 1;
}
pointerEvent.pointerPress = newPressed;
pointerEvent.rawPointerPress = currentOverGo;
pointerEvent.clickTime = time;
// Save the drag handler as well
pointerEvent.pointerDrag = ExecuteEvents.GetEventHandler<IDragHandler>(currentOverGo);
if (pointerEvent.pointerDrag != null)
ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.initializePotentialDrag);
}
// PointerUp notification
if (data.ReleasedThisFrame())
{
// Debug.Log("Executing pressup on: " + pointer.pointerPress);
ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerUpHandler);
// Debug.Log("KeyCode: " + pointer.eventData.keyCode);
// see if we mouse up on the same element that we clicked on...
var pointerUpHandler = ExecuteEvents.GetEventHandler<IPointerClickHandler>(currentOverGo);
// PointerClick and Drop events
if (pointerEvent.pointerPress == pointerUpHandler && pointerEvent.eligibleForClick)
{
ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerClickHandler);
}
else if (pointerEvent.pointerDrag != null)
{
ExecuteEvents.ExecuteHierarchy(currentOverGo, pointerEvent, ExecuteEvents.dropHandler);
}
pointerEvent.eligibleForClick = false;
pointerEvent.pointerPress = null;
pointerEvent.rawPointerPress = null;
if (pointerEvent.pointerDrag != null && pointerEvent.dragging)
ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.endDragHandler);
pointerEvent.dragging = false;
pointerEvent.pointerDrag = null;
// redo pointer enter / exit to refresh state
// so that if we moused over something that ignored it before
// due to having pressed on something else
// it now gets it.
if (currentOverGo != pointerEvent.pointerEnter)
{
HandlePointerExitAndEnter(pointerEvent, null);
HandlePointerExitAndEnter(pointerEvent, currentOverGo);
}
}
}
#endregion
#region Modified StandaloneInputModule methods
/// <summary>
/// Process all mouse events. This is the same as the StandaloneInputModule version except that
/// it takes MouseState as a parameter, allowing it to be used for both Gaze and Mouse
/// pointers.
/// </summary>
private void ProcessMouseEvent(MouseState mouseData)
{
var pressed = mouseData.AnyPressesThisFrame();
var released = mouseData.AnyReleasesThisFrame();
var leftButtonData = mouseData.GetButtonState(PointerEventData.InputButton.Left).eventData;
if (!UseMouse(pressed, released, leftButtonData.buttonData))
return;
// Process the first mouse button fully
ProcessMousePress(leftButtonData);
ProcessMove(leftButtonData.buttonData);
ProcessDrag(leftButtonData.buttonData);
// Now process right / middle clicks
ProcessMousePress(mouseData.GetButtonState(PointerEventData.InputButton.Right).eventData);
ProcessDrag(mouseData.GetButtonState(PointerEventData.InputButton.Right).eventData.buttonData);
ProcessMousePress(mouseData.GetButtonState(PointerEventData.InputButton.Middle).eventData);
ProcessDrag(mouseData.GetButtonState(PointerEventData.InputButton.Middle).eventData.buttonData);
if (!Mathf.Approximately(leftButtonData.buttonData.scrollDelta.sqrMagnitude, 0.0f))
{
var scrollHandler = ExecuteEvents.GetEventHandler<IScrollHandler>(leftButtonData.buttonData.pointerCurrentRaycast.gameObject);
ExecuteEvents.ExecuteHierarchy(scrollHandler, leftButtonData.buttonData, ExecuteEvents.scrollHandler);
}
}
/// <summary>
/// Process this InputModule. Same as the StandaloneInputModule version, except that it calls
/// ProcessMouseEvent twice, once for gaze pointers, and once for mouse pointers.
/// </summary>
public override void Process()
{
bool usedEvent = SendUpdateEventToSelectedObject();
if (eventSystem.sendNavigationEvents)
{
if (!usedEvent)
usedEvent |= SendMoveEventToSelectedObject();
if (!usedEvent)
SendSubmitEventToSelectedObject();
}
ProcessMouseEvent(GetGazePointerData());
#if !UNITY_ANDROID
ProcessMouseEvent(GetCanvasPointerData());
#endif
}
/// <summary>
/// Decide if mouse events need to be processed this frame. Same as StandaloneInputModule except
/// that the IsPointerMoving method from this class is used, instead of the method on PointerEventData
/// </summary>
private static bool UseMouse(bool pressed, bool released, PointerEventData pointerData)
{
if (pressed || released || IsPointerMoving(pointerData) || pointerData.IsScrolling())
return true;
return false;
}
#endregion
/// <summary>
/// Convenience function for cloning PointerEventData
/// </summary>
/// <param name="from">Copy this value</param>
/// <param name="to">to this object</param>
protected void CopyFromTo(OVRPointerEventData @from, OVRPointerEventData @to)
{
@to.position = @from.position;
@to.delta = @from.delta;
@to.scrollDelta = @from.scrollDelta;
@to.pointerCurrentRaycast = @from.pointerCurrentRaycast;
@to.pointerEnter = @from.pointerEnter;
@to.worldSpaceRay = @from.worldSpaceRay;
}
/// <summary>
/// Convenience function for cloning PointerEventData
/// </summary>
/// <param name="from">Copy this value</param>
/// <param name="to">to this object</param>
protected new void CopyFromTo(PointerEventData @from, PointerEventData @to)
{
@to.position = @from.position;
@to.delta = @from.delta;
@to.scrollDelta = @from.scrollDelta;
@to.pointerCurrentRaycast = @from.pointerCurrentRaycast;
@to.pointerEnter = @from.pointerEnter;
}
// In the following region we extend the PointerEventData system implemented in PointerInputModule
// We define an additional dictionary for ray-based (e.g. gaze) pointers. Mouse pointers still use the dictionary
// in PointerInputModule
#region PointerEventData pool
// Pool for OVRRayPointerEventData for ray based pointers
protected Dictionary<int, OVRPointerEventData> m_VRRayPointerData = new Dictionary<int, OVRPointerEventData>();
protected bool GetPointerData(int id, out OVRPointerEventData data, bool create)
{
if (!m_VRRayPointerData.TryGetValue(id, out data) && create)
{
data = new OVRPointerEventData(eventSystem)
{
pointerId = id,
};
m_VRRayPointerData.Add(id, data);
return true;
}
return false;
}
/// <summary>
/// Clear pointer state for both types of pointer
/// </summary>
protected new void ClearSelection()
{
var baseEventData = GetBaseEventData();
foreach (var pointer in m_PointerData.Values)
{
// clear all selection
HandlePointerExitAndEnter(pointer, null);
}
foreach (var pointer in m_VRRayPointerData.Values)
{
// clear all selection
HandlePointerExitAndEnter(pointer, null);
}
m_PointerData.Clear();
eventSystem.SetSelectedGameObject(null, baseEventData);
}
#endregion
/// <summary>
/// For a RectTransform, calculate its normal in world space
/// </summary>
static Vector3 GetRectTransformNormal(RectTransform rectTransform)
{
Vector3[] corners = new Vector3[4];
rectTransform.GetWorldCorners(corners);
Vector3 BottomEdge = corners[3] - corners[0];
Vector3 LeftEdge = corners[1] - corners[0];
return Vector3.Cross(BottomEdge, LeftEdge).normalized;
}
private readonly MouseState m_MouseState = new MouseState();
// The following 2 functions are equivalent to PointerInputModule.GetMousePointerEventData but are customized to
// get data for ray pointers and canvas mouse pointers.
/// <summary>
/// State for a pointer controlled by a world space ray. E.g. gaze pointer
/// </summary>
/// <returns></returns>
virtual protected MouseState GetGazePointerData()
{
// Get the OVRRayPointerEventData reference
OVRPointerEventData leftData;
GetPointerData(kMouseLeftId, out leftData, true );
leftData.Reset();
//Now set the world space ray. This ray is what the user uses to point at UI elements
leftData.worldSpaceRay = new Ray(rayTransform.position, rayTransform.forward);
leftData.scrollDelta = GetExtraScrollDelta();
//Populate some default values
leftData.button = PointerEventData.InputButton.Left;
leftData.useDragThreshold = true;
// Perform raycast to find intersections with world
eventSystem.RaycastAll(leftData, m_RaycastResultCache);
var raycast = FindFirstRaycast(m_RaycastResultCache);
leftData.pointerCurrentRaycast = raycast;
m_RaycastResultCache.Clear();
OVRRaycaster ovrRaycaster = raycast.module as OVRRaycaster;
// We're only interested in intersections from OVRRaycasters
if (ovrRaycaster)
{
// The Unity UI system expects event data to have a screen position
// so even though this raycast came from a world space ray we must get a screen
// space position for the camera attached to this raycaster for compatibility
leftData.position = ovrRaycaster.GetScreenPosition(raycast);
// Find the world position and normal the Graphic the ray intersected
RectTransform graphicRect = raycast.gameObject.GetComponent<RectTransform>();
if (graphicRect != null)
{
// Set our gaze indicator to this world position and normal
Vector3 worldPos = raycast.worldPosition;
Vector3 normal = GetRectTransformNormal(graphicRect);
OVRGazePointer.instance.SetPosition(worldPos, normal);
// Make sure it's being shown
OVRGazePointer.instance.RequestShow();
}
}
// Now process physical raycast intersections
OVRPhysicsRaycaster physicsRaycaster = raycast.module as OVRPhysicsRaycaster;
if (physicsRaycaster)
{
Vector3 position = raycast.worldPosition;
if (performSphereCastForGazepointer)
{
// Here we cast a sphere into the scene rather than a ray. This gives a more accurate depth
// for positioning a circular gaze pointer
List<RaycastResult> results = new List<RaycastResult>();
physicsRaycaster.Spherecast(leftData, results, OVRGazePointer.instance.GetCurrentRadius());
if (results.Count > 0 && results[0].distance < raycast.distance)
{
position = results[0].worldPosition;
}
}
leftData.position = physicsRaycaster.GetScreenPos(raycast.worldPosition);
// Show the cursor while pointing at an interactable object
OVRGazePointer.instance.RequestShow();
if (matchNormalOnPhysicsColliders)
{
OVRGazePointer.instance.SetPosition(position, raycast.worldNormal);
}
else
{
OVRGazePointer.instance.SetPosition(position);
}
}
// Stick default data values in right and middle slots for compatibility
// copy the appropriate data into right and middle slots
OVRPointerEventData rightData;
GetPointerData(kMouseRightId, out rightData, true );
CopyFromTo(leftData, rightData);
rightData.button = PointerEventData.InputButton.Right;
OVRPointerEventData middleData;
GetPointerData(kMouseMiddleId, out middleData, true );
CopyFromTo(leftData, middleData);
middleData.button = PointerEventData.InputButton.Middle;
m_MouseState.SetButtonState(PointerEventData.InputButton.Left, GetGazeButtonState(), leftData);
m_MouseState.SetButtonState(PointerEventData.InputButton.Right, PointerEventData.FramePressState.NotChanged, rightData);
m_MouseState.SetButtonState(PointerEventData.InputButton.Middle, PointerEventData.FramePressState.NotChanged, middleData);
return m_MouseState;
}
/// <summary>
/// Get state for a pointer that moves in world space across the surface of a world space canvas.
/// </summary>
/// <returns></returns>
protected MouseState GetCanvasPointerData()
{
// Get the OVRRayPointerEventData reference
PointerEventData leftData;
GetPointerData(kMouseLeftId, out leftData, true );
leftData.Reset();
// Setup default values here. Set position to zero because we don't actually know the pointer
// positions. Each canvas knows the position of its canvas pointer.
leftData.position = Vector2.zero;
leftData.scrollDelta = Input.mouseScrollDelta;
leftData.button = PointerEventData.InputButton.Left;
if (activeGraphicRaycaster)
{
// Let the active raycaster find intersections on its canvas
activeGraphicRaycaster.RaycastPointer(leftData, m_RaycastResultCache);
var raycast = FindFirstRaycast(m_RaycastResultCache);
leftData.pointerCurrentRaycast = raycast;
m_RaycastResultCache.Clear();
OVRRaycaster ovrRaycaster = raycast.module as OVRRaycaster;
if (ovrRaycaster) // raycast may not actually contain a result
{
// The Unity UI system expects event data to have a screen position
// so even though this raycast came from a world space ray we must get a screen
// space position for the camera attached to this raycaster for compatibility
Vector2 position = ovrRaycaster.GetScreenPosition(raycast);
leftData.delta = position - leftData.position;
leftData.position = position;
}
}
// copy the appropriate data into right and middle slots
PointerEventData rightData;
GetPointerData(kMouseRightId, out rightData, true );
CopyFromTo(leftData, rightData);
rightData.button = PointerEventData.InputButton.Right;
PointerEventData middleData;
GetPointerData(kMouseMiddleId, out middleData, true );
CopyFromTo(leftData, middleData);
middleData.button = PointerEventData.InputButton.Middle;
m_MouseState.SetButtonState(PointerEventData.InputButton.Left, StateForMouseButton(0), leftData);
m_MouseState.SetButtonState(PointerEventData.InputButton.Right, StateForMouseButton(1), rightData);
m_MouseState.SetButtonState(PointerEventData.InputButton.Middle, StateForMouseButton(2), middleData);
return m_MouseState;
}
/// <summary>
/// New version of ShouldStartDrag implemented first in PointerInputModule. This version differs in that
/// for ray based pointers it makes a decision about whether a drag should start based on the angular change
/// the pointer has made so far, as seen from the camera. This also works when the world space ray is
/// translated rather than rotated, since the beginning and end of the movement are considered as angle from
/// the same point.
/// </summary>
private bool ShouldStartDrag(PointerEventData pointerEvent)
{
if (!pointerEvent.useDragThreshold)
return true;
if (!pointerEvent.IsVRPointer())
{
// Same as original behaviour for canvas based pointers
return (pointerEvent.pressPosition - pointerEvent.position).sqrMagnitude >= eventSystem.pixelDragThreshold * eventSystem.pixelDragThreshold;
}
else
{
#if UNITY_ANDROID && !UNITY_EDITOR // On android allow swiping to start drag
if (useSwipeScroll && ((Vector3)pointerEvent.GetSwipeStart() - Input.mousePosition).magnitude > swipeDragThreshold)
{
return true;
}
#endif
// When it's not a screen space pointer we have to look at the angle it has moved rather than the pixel distance
// For gaze-based pointing, the screen-space distance moved will always be near 0
Vector3 cameraPos = pointerEvent.pressEventCamera.transform.position;
Vector3 pressDir = (pointerEvent.pointerPressRaycast.worldPosition - cameraPos).normalized;
Vector3 currentDir = (pointerEvent.pointerCurrentRaycast.worldPosition - cameraPos).normalized;
return Vector3.Dot(pressDir, currentDir) < Mathf.Cos(Mathf.Deg2Rad * (angleDragThreshold));
}
}
/// <summary>
/// The purpose of this function is to use the standard IsPointerMoving method for mouse-driven
/// pointers, but to always return true when it's a ray based pointer.
/// All real-world ray-based input devices are always moving so for simplicity we just return true
/// for them.
///
/// If PointerEventData.IsPointerMoving was virtual we could just override that in
/// OVRRayPointerEventData.
/// </summary>
/// <param name="pointerEvent"></param>
/// <returns></returns>
static bool IsPointerMoving(PointerEventData pointerEvent)
{
if (pointerEvent.IsVRPointer())
return true;
else
return pointerEvent.IsPointerMoving();
}
protected Vector2 SwipeAdjustedPosition(Vector2 originalPosition, PointerEventData pointerEvent)
{
#if UNITY_ANDROID && !UNITY_EDITOR
// On android we use the touchpad position (accessed through Input.mousePosition) to modify
// the effective cursor position for events related to dragging. This allows the user to
// use the touchpad to drag draggable UI elements
if (useSwipeScroll)
{
Vector2 delta = (Vector2)Input.mousePosition - pointerEvent.GetSwipeStart();
if (InvertSwipeXAxis)
delta.x *= -1;
return originalPosition + delta * swipeDragScale;
}
#endif
// If not Gear VR or swipe scroll isn't enabled just return original position
return originalPosition;
}
/// <summary>
/// Exactly the same as the code from PointerInputModule, except that we call our own
/// IsPointerMoving.
///
/// This would also not be necessary if PointerEventData.IsPointerMoving was virtual
/// </summary>
/// <param name="pointerEvent"></param>
protected override void ProcessDrag(PointerEventData pointerEvent)
{
Vector2 originalPosition = pointerEvent.position;
bool moving = IsPointerMoving(pointerEvent);
if (moving && pointerEvent.pointerDrag != null
&& !pointerEvent.dragging
&& ShouldStartDrag(pointerEvent))
{
if (pointerEvent.IsVRPointer())
{
// Adjust the position used based on the swiping action, allowing the user to
// drag items by swiping on the GearVR touchpad
pointerEvent.position = SwipeAdjustedPosition (originalPosition, pointerEvent);
}
ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.beginDragHandler);
pointerEvent.dragging = true;
}
// Drag notification
if (pointerEvent.dragging && moving && pointerEvent.pointerDrag != null)
{
if (pointerEvent.IsVRPointer())
{
pointerEvent.position = SwipeAdjustedPosition(originalPosition, pointerEvent);
}
// Before doing drag we should cancel any pointer down state
// And clear selection!
if (pointerEvent.pointerPress != pointerEvent.pointerDrag)
{
ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerUpHandler);
pointerEvent.eligibleForClick = false;
pointerEvent.pointerPress = null;
pointerEvent.rawPointerPress = null;
}
ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.dragHandler);
}
}
/// <summary>
/// Get state of button corresponding to gaze pointer
/// </summary>
/// <returns></returns>
virtual protected PointerEventData.FramePressState GetGazeButtonState()
{
var pressed = Input.GetKeyDown(gazeClickKey) || OVRInput.GetDown(joyPadClickButton);
var released = Input.GetKeyUp(gazeClickKey) || OVRInput.GetUp(joyPadClickButton);
#if UNITY_ANDROID && !UNITY_EDITOR
// On Gear VR the mouse button events correspond to touch pad events. We only use these as gaze pointer clicks
// on Gear VR because on PC the mouse clicks are used for actual mouse pointer interactions.
pressed |= Input.GetMouseButtonDown(0);
released |= Input.GetMouseButtonUp(0);
#endif
if (pressed && released)
return PointerEventData.FramePressState.PressedAndReleased;
if (pressed)
return PointerEventData.FramePressState.Pressed;
if (released)
return PointerEventData.FramePressState.Released;
return PointerEventData.FramePressState.NotChanged;
}
/// <summary>
/// Get extra scroll delta from gamepad
/// </summary>
protected Vector2 GetExtraScrollDelta()
{
Vector2 scrollDelta = new Vector2();
if (useRightStickScroll)
{
Vector2 s = OVRInput.Get(OVRInput.Axis2D.SecondaryThumbstick);
if (Mathf.Abs(s.x) < rightStickDeadZone) s.x = 0;
if (Mathf.Abs(s.y) < rightStickDeadZone) s.y = 0;
scrollDelta = s;
}
return scrollDelta;
}
};
}
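A minimal setup sketch, assuming a scene that already contains an OVRCameraRig, an OVRGazePointer, and world-space canvases with OVRRaycaster components: the EventSystem uses OVRInputModule in place of StandaloneInputModule, with rayTransform pointed at the eye anchor. centerEyeAnchor is the OVRCameraRig transform the tooltip above refers to; the bootstrap class itself is illustrative.
// Illustrative bootstrap, not part of the SDK: configures an EventSystem for gaze/controller UI input.
using UnityEngine;
using UnityEngine.EventSystems;
public class VRInputSetupExample : MonoBehaviour
{
    void Awake()
    {
        OVRCameraRig rig = FindObjectOfType<OVRCameraRig>();
        if (rig == null)
            return;
        // Create the EventSystem with the VR input module instead of StandaloneInputModule.
        var eventSystemGO = new GameObject("EventSystem", typeof(EventSystem));
        var inputModule = eventSystemGO.AddComponent<OVRInputModule>();
        // The module raycasts along this transform's Z axis (see the rayTransform tooltip above).
        inputModule.rayTransform = rig.centerEyeAnchor;
        // Defaults shown explicitly: click with the gamepad "One" button or the space bar.
        inputModule.joyPadClickButton = OVRInput.Button.One;
        inputModule.gazeClickKey = KeyCode.Space;
    }
}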

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 8f1a9a1d119a5944aacfb87d1ec283a2
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,122 @@
using UnityEngine;
using System;
using System.IO;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
public class OVRMixedRealityCaptureSettings : ScriptableObject
{
public bool enableMixedReality = false;
public LayerMask extraHiddenLayers;
public OVRManager.CompositionMethod compositionMethod = OVRManager.CompositionMethod.External;
public OVRManager.CameraDevice capturingCameraDevice = OVRManager.CameraDevice.WebCamera0;
public bool flipCameraFrameHorizontally = false;
public bool flipCameraFrameVertically = false;
public float handPoseStateLatency = 0.0f;
public float sandwichCompositionRenderLatency = 0.0f;
public int sandwichCompositionBufferedFrames = 8;
public Color chromaKeyColor = Color.green;
public float chromaKeySimilarity = 0.6f;
public float chromaKeySmoothRange = 0.03f;
public float chromaKeySpillRange = 0.04f;
public bool useDynamicLighting = false;
public OVRManager.DepthQuality depthQuality = OVRManager.DepthQuality.Medium;
public float dynamicLightingSmoothFactor = 8.0f;
public float dynamicLightingDepthVariationClampingValue = 0.001f;
public OVRManager.VirtualGreenScreenType virtualGreenScreenType = OVRManager.VirtualGreenScreenType.Off;
public float virtualGreenScreenTopY;
public float virtualGreenScreenBottomY;
public bool virtualGreenScreenApplyDepthCulling = false;
public float virtualGreenScreenDepthTolerance = 0.2f;
public void ReadFrom(OVRManager manager)
{
enableMixedReality = manager.enableMixedReality;
compositionMethod = manager.compositionMethod;
extraHiddenLayers = manager.extraHiddenLayers;
capturingCameraDevice = manager.capturingCameraDevice;
flipCameraFrameHorizontally = manager.flipCameraFrameHorizontally;
flipCameraFrameVertically = manager.flipCameraFrameVertically;
handPoseStateLatency = manager.handPoseStateLatency;
sandwichCompositionRenderLatency = manager.sandwichCompositionRenderLatency;
sandwichCompositionBufferedFrames = manager.sandwichCompositionBufferedFrames;
chromaKeyColor = manager.chromaKeyColor;
chromaKeySimilarity = manager.chromaKeySimilarity;
chromaKeySmoothRange = manager.chromaKeySmoothRange;
chromaKeySpillRange = manager.chromaKeySpillRange;
useDynamicLighting = manager.useDynamicLighting;
depthQuality = manager.depthQuality;
dynamicLightingSmoothFactor = manager.dynamicLightingSmoothFactor;
dynamicLightingDepthVariationClampingValue = manager.dynamicLightingDepthVariationClampingValue;
virtualGreenScreenType = manager.virtualGreenScreenType;
virtualGreenScreenTopY = manager.virtualGreenScreenTopY;
virtualGreenScreenBottomY = manager.virtualGreenScreenBottomY;
virtualGreenScreenApplyDepthCulling = manager.virtualGreenScreenApplyDepthCulling;
virtualGreenScreenDepthTolerance = manager.virtualGreenScreenDepthTolerance;
}
public void ApplyTo(OVRManager manager)
{
manager.enableMixedReality = enableMixedReality;
manager.compositionMethod = compositionMethod;
manager.extraHiddenLayers = extraHiddenLayers;
manager.capturingCameraDevice = capturingCameraDevice;
manager.flipCameraFrameHorizontally = flipCameraFrameHorizontally;
manager.flipCameraFrameVertically = flipCameraFrameVertically;
manager.handPoseStateLatency = handPoseStateLatency;
manager.sandwichCompositionRenderLatency = sandwichCompositionRenderLatency;
manager.sandwichCompositionBufferedFrames = sandwichCompositionBufferedFrames;
manager.chromaKeyColor = chromaKeyColor;
manager.chromaKeySimilarity = chromaKeySimilarity;
manager.chromaKeySmoothRange = chromaKeySmoothRange;
manager.chromaKeySpillRange = chromaKeySpillRange;
manager.useDynamicLighting = useDynamicLighting;
manager.depthQuality = depthQuality;
manager.dynamicLightingSmoothFactor = dynamicLightingSmoothFactor;
manager.dynamicLightingDepthVariationClampingValue = dynamicLightingDepthVariationClampingValue;
manager.virtualGreenScreenType = virtualGreenScreenType;
manager.virtualGreenScreenTopY = virtualGreenScreenTopY;
manager.virtualGreenScreenBottomY = virtualGreenScreenBottomY;
manager.virtualGreenScreenApplyDepthCulling = virtualGreenScreenApplyDepthCulling;
manager.virtualGreenScreenDepthTolerance = virtualGreenScreenDepthTolerance;
}
const string configFileName = "mrc.config";
public void WriteToConfigurationFile()
{
string text = JsonUtility.ToJson(this, true);
try
{
string configPath = Path.Combine(Application.dataPath, configFileName);
Debug.Log("Write OVRMixedRealityCaptureSettings to " + configPath);
File.WriteAllText(configPath, text);
}
catch(Exception e)
{
Debug.LogWarning("Exception caught " + e.Message);
}
}
public void CombineWithConfigurationFile()
{
try
{
string configPath = Path.Combine(Application.dataPath, configFileName);
if (File.Exists(configPath))
{
Debug.Log("MixedRealityCapture configuration file found at " + configPath);
string text = File.ReadAllText(configPath);
Debug.Log("Apply MixedRealityCapture configuration");
JsonUtility.FromJsonOverwrite(text, this);
}
else
{
Debug.Log("MixedRealityCapture configuration file doesn't exist at " + configPath);
}
}
catch(Exception e)
{
Debug.LogWarning("Exception caught " + e.Message);
}
}
}
#endif
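A short sketch of how these settings could be round-tripped at runtime, using only the public methods defined above; the wrapper component and the moment it runs are illustrative, and like the settings class it is Windows-only.
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
// Illustrative example, not part of the SDK: snapshot, merge, and re-apply mixed reality capture settings.
using UnityEngine;
public class MrcSettingsExample : MonoBehaviour
{
    void Start()
    {
        var settings = ScriptableObject.CreateInstance<OVRMixedRealityCaptureSettings>();
        // Take a snapshot of the current OVRManager configuration...
        settings.ReadFrom(OVRManager.instance);
        // ...merge any overrides found in mrc.config under Application.dataPath...
        settings.CombineWithConfigurationFile();
        // ...and push the merged values back onto the manager.
        settings.ApplyTo(OVRManager.instance);
    }
}
#endif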

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 99bbd170d56da4248941de890e6d7af5
timeCreated: 1501004238
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,86 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Logs when the application enters power save mode and allows you to switch to a low-power CPU/GPU level with a button press.
/// </summary>
public class OVRModeParms : MonoBehaviour
{
#region Member Variables
/// <summary>
/// The gamepad button that will switch the application to CPU level 0 and GPU level 1.
/// </summary>
public OVRInput.RawButton resetButton = OVRInput.RawButton.X;
#endregion
/// <summary>
/// Invoke power state mode test.
/// </summary>
void Start()
{
if (!OVRManager.isHmdPresent)
{
enabled = false;
return;
}
// Call TestPowerStateMode after 10 seconds
// and repeat it every 10 seconds.
InvokeRepeating ( "TestPowerStateMode", 10, 10.0f );
}
/// <summary>
/// Change default vr mode parms dynamically.
/// </summary>
void Update()
{
// NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
if ( OVRInput.GetDown(resetButton))
{
//*************************
// Dynamically change VrModeParms cpu and gpu level.
// NOTE: Reset will cause 1 frame of flicker as it leaves
// and re-enters Vr mode.
//*************************
OVRPlugin.cpuLevel = 0;
OVRPlugin.gpuLevel = 1;
}
}
/// <summary>
/// Check current power state mode.
/// </summary>
void TestPowerStateMode()
{
//*************************
// Check power-level state mode
//*************************
if (OVRPlugin.powerSaving)
{
// The device has been throttled
Debug.Log("POWER SAVE MODE ACTIVATED");
}
}
}
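The same CPU/GPU level change can be applied directly from game code without the button handler; the minimal sketch below uses only the OVRPlugin properties referenced above, while the idle flag and component name are illustrative.
// Illustrative snippet, not part of the SDK: drop to a low-power clock state when the scene is idle.
using UnityEngine;
public class PowerLevelExample : MonoBehaviour
{
    public bool sceneIsIdle;   // illustrative flag, set by game logic elsewhere
    private bool lowPowerApplied;
    void Update()
    {
        if (sceneIsIdle && !lowPowerApplied)
        {
            // Same levels OVRModeParms applies when its reset button is pressed.
            OVRPlugin.cpuLevel = 0;
            OVRPlugin.gpuLevel = 1;
            lowPowerApplied = true;
        }
    }
}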

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 6a6ae8e8def81df429a8fdfc00f63e5c
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,53 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Allows you to toggle monoscopic rendering with a gamepad button press.
/// </summary>
public class OVRMonoscopic : MonoBehaviour
{
/// <summary>
/// The gamepad button that will toggle monoscopic rendering.
/// </summary>
public OVRInput.RawButton toggleButton = OVRInput.RawButton.B;
private bool monoscopic = false;
/// <summary>
/// Check input and toggle monoscopic rendering mode if necessary
/// See the input mapping setup in the Unity Integration guide
/// </summary>
void Update()
{
// NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
if (OVRInput.GetDown(toggleButton))
{
//*************************
// toggle monoscopic rendering mode
//*************************
monoscopic = !monoscopic;
OVRManager.instance.monoscopic = monoscopic;
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 06ef2a389c534554c848533f88dbb32c
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,185 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System.Collections.Generic;
namespace UnityEngine.EventSystems
{
/// <summary>
/// Simple event system using physics raycasts. Very closely based on UnityEngine.EventSystems.PhysicsRaycaster
/// </summary>
[RequireComponent(typeof(OVRCameraRig))]
public class OVRPhysicsRaycaster : BaseRaycaster
{
/// <summary>
/// Const to use for clarity when no event mask is set
/// </summary>
protected const int kNoEventMaskSet = -1;
/// <summary>
/// Layer mask used to filter events. Always combined with the camera's culling mask if a camera is used.
/// </summary>
[SerializeField]
protected LayerMask m_EventMask = kNoEventMaskSet;
protected OVRPhysicsRaycaster()
{ }
public override Camera eventCamera
{
get
{
return GetComponent<OVRCameraRig>().leftEyeCamera;
}
}
/// <summary>
/// Depth used to determine the order of event processing.
/// </summary>
public virtual int depth
{
get { return (eventCamera != null) ? (int)eventCamera.depth : 0xFFFFFF; }
}
public int sortOrder = 20;
public override int sortOrderPriority
{
get
{
return sortOrder;
}
}
/// <summary>
/// Event mask used to determine which objects will receive events.
/// </summary>
public int finalEventMask
{
get { return (eventCamera != null) ? eventCamera.cullingMask & m_EventMask : kNoEventMaskSet; }
}
/// <summary>
/// Layer mask used to filter events. Always combined with the camera's culling mask if a camera is used.
/// </summary>
public LayerMask eventMask
{
get { return m_EventMask; }
set { m_EventMask = value; }
}
/// <summary>
/// Perform a raycast using the worldSpaceRay in eventData.
/// </summary>
/// <param name="eventData"></param>
/// <param name="resultAppendList"></param>
public override void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
{
// This function is closely based on PhysicsRaycaster.Raycast
if (eventCamera == null)
return;
if (!eventData.IsVRPointer())
return;
var ray = eventData.GetRay();
float dist = eventCamera.farClipPlane - eventCamera.nearClipPlane;
var hits = Physics.RaycastAll(ray, dist, finalEventMask);
if (hits.Length > 1)
System.Array.Sort(hits, (r1, r2) => r1.distance.CompareTo(r2.distance));
if (hits.Length != 0)
{
for (int b = 0, bmax = hits.Length; b < bmax; ++b)
{
var result = new RaycastResult
{
gameObject = hits[b].collider.gameObject,
module = this,
distance = hits[b].distance,
index = resultAppendList.Count,
worldPosition = hits[0].point,
worldNormal = hits[0].normal,
};
resultAppendList.Add(result);
}
}
}
/// <summary>
/// Perform a Spherecast using the worldSpaceRay in eventData.
/// </summary>
/// <param name="eventData"></param>
/// <param name="resultAppendList"></param>
/// <param name="radius">Radius of the sphere</param>
public void Spherecast(PointerEventData eventData, List<RaycastResult> resultAppendList, float radius)
{
if (eventCamera == null)
return;
if (!eventData.IsVRPointer())
return;
var ray = eventData.GetRay();
float dist = eventCamera.farClipPlane - eventCamera.nearClipPlane;
var hits = Physics.SphereCastAll(ray, radius, dist, finalEventMask);
if (hits.Length > 1)
System.Array.Sort(hits, (r1, r2) => r1.distance.CompareTo(r2.distance));
if (hits.Length != 0)
{
for (int b = 0, bmax = hits.Length; b < bmax; ++b)
{
var result = new RaycastResult
{
gameObject = hits[b].collider.gameObject,
module = this,
distance = hits[b].distance,
index = resultAppendList.Count,
worldPosition = hits[0].point,
worldNormal = hits[0].normal,
};
resultAppendList.Add(result);
}
}
}
/// <summary>
/// Get screen position of this world position as seen by the event camera of this OVRPhysicsRaycaster
/// </summary>
/// <param name="worldPosition"></param>
/// <returns></returns>
public Vector2 GetScreenPos(Vector3 worldPosition)
{
// In future versions of Unity, RaycastResult will contain screenPosition so this will not be necessary
return eventCamera.WorldToScreenPoint(worldPosition);
}
}
}
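A short setup sketch: because of the RequireComponent attribute above, OVRPhysicsRaycaster has to live on the same GameObject as the OVRCameraRig, and world objects only need colliders on a layer included in eventMask to receive pointer events. The layer name used here is illustrative; the raycaster members are the ones declared above.
// Illustrative setup, not part of the SDK: enable pointer events against physics colliders in the world.
using UnityEngine;
public class PhysicsRaycasterSetupExample : MonoBehaviour
{
    void Start()
    {
        OVRCameraRig rig = FindObjectOfType<OVRCameraRig>();
        if (rig == null)
            return;
        // RequireComponent(typeof(OVRCameraRig)) means the raycaster goes on the rig itself.
        var raycaster = rig.gameObject.AddComponent<OVRPhysicsRaycaster>();
        // Only colliders on the (illustrative) "Interactable" layer will receive pointer events.
        raycaster.eventMask = LayerMask.GetMask("Interactable");
        // sortOrderPriority returns this value; adjust it to control ordering relative to other raycasters.
        raycaster.sortOrder = 20;
    }
}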

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: f8e7ff1cdf4c4e74db00c3684108bc9a
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,593 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using UnityEngine;
/// <summary>
/// Controls the player's movement in virtual reality.
/// </summary>
[RequireComponent(typeof(CharacterController))]
public class OVRPlayerController : MonoBehaviour
{
/// <summary>
/// The rate acceleration during movement.
/// </summary>
public float Acceleration = 0.1f;
/// <summary>
/// The rate of damping on movement.
/// </summary>
public float Damping = 0.3f;
/// <summary>
/// The rate of additional damping when moving sideways or backwards.
/// </summary>
public float BackAndSideDampen = 0.5f;
/// <summary>
/// The force applied to the character when jumping.
/// </summary>
public float JumpForce = 0.3f;
/// <summary>
/// The rate of rotation when using a gamepad.
/// </summary>
public float RotationAmount = 1.5f;
/// <summary>
/// The rate of rotation when using the keyboard.
/// </summary>
public float RotationRatchet = 45.0f;
/// <summary>
/// The player will rotate in fixed steps if Snap Rotation is enabled.
/// </summary>
[Tooltip("The player will rotate in fixed steps if Snap Rotation is enabled.")]
public bool SnapRotation = true;
/// <summary>
/// How many fixed speeds to use with linear movement? 0=linear control
/// </summary>
[Tooltip("How many fixed speeds to use with linear movement? 0=linear control")]
public int FixedSpeedSteps;
/// <summary>
/// If true, reset the initial yaw of the player controller when the Hmd pose is recentered.
/// </summary>
public bool HmdResetsY = true;
/// <summary>
/// If true, tracking data from a child OVRCameraRig will update the direction of movement.
/// </summary>
public bool HmdRotatesY = true;
/// <summary>
/// Modifies the strength of gravity.
/// </summary>
public float GravityModifier = 0.379f;
/// <summary>
/// If true, each OVRPlayerController will use the player's physical height.
/// </summary>
public bool useProfileData = true;
/// <summary>
/// The CameraHeight is the actual height of the HMD and can be used to adjust the height of the character controller, which will affect the
/// ability of the character to move into areas with a low ceiling.
/// </summary>
[NonSerialized]
public float CameraHeight;
/// <summary>
/// This event is raised after the character controller is moved. This is used by the OVRAvatarLocomotion script to keep the avatar transform synchronized
/// with the OVRPlayerController.
/// </summary>
public event Action<Transform> TransformUpdated;
/// <summary>
/// This bool is set to true whenever the player controller has been teleported. It is reset after every frame. Some systems, such as
/// CharacterCameraConstraint, test this boolean in order to disable logic that moves the character controller immediately
/// following the teleport.
/// </summary>
[NonSerialized] // This doesn't need to be visible in the inspector.
public bool Teleported;
/// <summary>
/// This event is raised immediately after the camera transform has been updated, but before movement is updated.
/// </summary>
public event Action CameraUpdated;
/// <summary>
/// This event is raised right before the character controller is actually moved in order to provide other systems the opportunity to
/// move the character controller in response to things other than user input, such as movement of the HMD. See CharacterCameraConstraint.cs
/// for an example of this.
/// </summary>
public event Action PreCharacterMove;
/// <summary>
/// When true, user input will be applied to linear movement. Set this to false whenever the player controller needs to ignore input for
/// linear movement.
/// </summary>
public bool EnableLinearMovement = true;
/// <summary>
/// When true, user input will be applied to rotation. Set this to false whenever the player controller needs to ignore input for rotation.
/// </summary>
public bool EnableRotation = true;
protected CharacterController Controller = null;
protected OVRCameraRig CameraRig = null;
private float MoveScale = 1.0f;
private Vector3 MoveThrottle = Vector3.zero;
private float FallSpeed = 0.0f;
private OVRPose? InitialPose;
public float InitialYRotation { get; private set; }
private float MoveScaleMultiplier = 1.0f;
private float RotationScaleMultiplier = 1.0f;
private bool SkipMouseRotation = true; // It is rare to want to use mouse movement in VR, so ignore the mouse by default.
private bool HaltUpdateMovement = false;
private bool prevHatLeft = false;
private bool prevHatRight = false;
private float SimulationRate = 60f;
private float buttonRotation = 0f;
private bool ReadyToSnapTurn; // Set to false after a snap turn occurs; requires one frame of a centered thumbstick before another snap turn is allowed.
void Start()
{
// Add eye-depth as a camera offset from the player controller
var p = CameraRig.transform.localPosition;
p.z = OVRManager.profile.eyeDepth;
CameraRig.transform.localPosition = p;
}
void Awake()
{
Controller = gameObject.GetComponent<CharacterController>();
if(Controller == null)
Debug.LogWarning("OVRPlayerController: No CharacterController attached.");
// We use OVRCameraRig to set rotations to cameras,
// and to be influenced by rotation
OVRCameraRig[] CameraRigs = gameObject.GetComponentsInChildren<OVRCameraRig>();
if(CameraRigs.Length == 0)
Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
else if (CameraRigs.Length > 1)
Debug.LogWarning("OVRPlayerController: More then 1 OVRCameraRig attached.");
else
CameraRig = CameraRigs[0];
InitialYRotation = transform.rotation.eulerAngles.y;
}
void OnEnable()
{
OVRManager.display.RecenteredPose += ResetOrientation;
if (CameraRig != null)
{
CameraRig.UpdatedAnchors += UpdateTransform;
}
}
void OnDisable()
{
OVRManager.display.RecenteredPose -= ResetOrientation;
if (CameraRig != null)
{
CameraRig.UpdatedAnchors -= UpdateTransform;
}
}
void Update()
{
//Use keys to ratchet rotation
if (Input.GetKeyDown(KeyCode.Q))
buttonRotation -= RotationRatchet;
if (Input.GetKeyDown(KeyCode.E))
buttonRotation += RotationRatchet;
}
protected virtual void UpdateController()
{
if (useProfileData)
{
if (InitialPose == null)
{
// Save the initial pose so it can be recovered if useProfileData
// is turned off later.
InitialPose = new OVRPose()
{
position = CameraRig.transform.localPosition,
orientation = CameraRig.transform.localRotation
};
}
var p = CameraRig.transform.localPosition;
if (OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel)
{
p.y = OVRManager.profile.eyeHeight - (0.5f * Controller.height) + Controller.center.y;
}
else if (OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.FloorLevel)
{
p.y = - (0.5f * Controller.height) + Controller.center.y;
}
CameraRig.transform.localPosition = p;
}
else if (InitialPose != null)
{
// Return to the initial pose if useProfileData was turned off at runtime
CameraRig.transform.localPosition = InitialPose.Value.position;
CameraRig.transform.localRotation = InitialPose.Value.orientation;
InitialPose = null;
}
CameraHeight = CameraRig.centerEyeAnchor.localPosition.y;
if (CameraUpdated != null)
{
CameraUpdated();
}
UpdateMovement();
Vector3 moveDirection = Vector3.zero;
float motorDamp = (1.0f + (Damping * SimulationRate * Time.deltaTime));
MoveThrottle.x /= motorDamp;
MoveThrottle.y = (MoveThrottle.y > 0.0f) ? (MoveThrottle.y / motorDamp) : MoveThrottle.y;
MoveThrottle.z /= motorDamp;
moveDirection += MoveThrottle * SimulationRate * Time.deltaTime;
// Gravity
if (Controller.isGrounded && FallSpeed <= 0)
FallSpeed = ((Physics.gravity.y * (GravityModifier * 0.002f)));
else
FallSpeed += ((Physics.gravity.y * (GravityModifier * 0.002f)) * SimulationRate * Time.deltaTime);
moveDirection.y += FallSpeed * SimulationRate * Time.deltaTime;
if (Controller.isGrounded && MoveThrottle.y <= transform.lossyScale.y * 0.001f)
{
// Offset correction for uneven ground
float bumpUpOffset = Mathf.Max(Controller.stepOffset, new Vector3(moveDirection.x, 0, moveDirection.z).magnitude);
moveDirection -= bumpUpOffset * Vector3.up;
}
if (PreCharacterMove != null)
{
PreCharacterMove();
Teleported = false;
}
Vector3 predictedXZ = Vector3.Scale((Controller.transform.localPosition + moveDirection), new Vector3(1, 0, 1));
// Move controller
Controller.Move(moveDirection);
Vector3 actualXZ = Vector3.Scale(Controller.transform.localPosition, new Vector3(1, 0, 1));
if (predictedXZ != actualXZ)
MoveThrottle += (actualXZ - predictedXZ) / (SimulationRate * Time.deltaTime);
}
public virtual void UpdateMovement()
{
if (HaltUpdateMovement)
return;
if (EnableLinearMovement)
{
bool moveForward = Input.GetKey(KeyCode.W) || Input.GetKey(KeyCode.UpArrow);
bool moveLeft = Input.GetKey(KeyCode.A) || Input.GetKey(KeyCode.LeftArrow);
bool moveRight = Input.GetKey(KeyCode.D) || Input.GetKey(KeyCode.RightArrow);
bool moveBack = Input.GetKey(KeyCode.S) || Input.GetKey(KeyCode.DownArrow);
bool dpad_move = false;
if (OVRInput.Get(OVRInput.Button.DpadUp))
{
moveForward = true;
dpad_move = true;
}
if (OVRInput.Get(OVRInput.Button.DpadDown))
{
moveBack = true;
dpad_move = true;
}
MoveScale = 1.0f;
if ((moveForward && moveLeft) || (moveForward && moveRight) ||
(moveBack && moveLeft) || (moveBack && moveRight))
MoveScale = 0.70710678f;
// No positional movement if we are in the air
if (!Controller.isGrounded)
MoveScale = 0.0f;
MoveScale *= SimulationRate * Time.deltaTime;
// Compute this for key movement
float moveInfluence = Acceleration * 0.1f * MoveScale * MoveScaleMultiplier;
// Run!
if (dpad_move || Input.GetKey(KeyCode.LeftShift) || Input.GetKey(KeyCode.RightShift))
moveInfluence *= 2.0f;
Quaternion ort = transform.rotation;
Vector3 ortEuler = ort.eulerAngles;
ortEuler.z = ortEuler.x = 0f;
ort = Quaternion.Euler(ortEuler);
if (moveForward)
MoveThrottle += ort * (transform.lossyScale.z * moveInfluence * Vector3.forward);
if (moveBack)
MoveThrottle += ort * (transform.lossyScale.z * moveInfluence * BackAndSideDampen * Vector3.back);
if (moveLeft)
MoveThrottle += ort * (transform.lossyScale.x * moveInfluence * BackAndSideDampen * Vector3.left);
if (moveRight)
MoveThrottle += ort * (transform.lossyScale.x * moveInfluence * BackAndSideDampen * Vector3.right);
moveInfluence = Acceleration * 0.1f * MoveScale * MoveScaleMultiplier;
#if !UNITY_ANDROID // LeftTrigger not avail on Android game pad
moveInfluence *= 1.0f + OVRInput.Get(OVRInput.Axis1D.PrimaryIndexTrigger);
#endif
Vector2 primaryAxis = OVRInput.Get(OVRInput.Axis2D.PrimaryThumbstick);
// If speed quantization is enabled, adjust the input to the number of fixed speed steps.
if (FixedSpeedSteps > 0)
{
primaryAxis.y = Mathf.Round(primaryAxis.y * FixedSpeedSteps) / FixedSpeedSteps;
primaryAxis.x = Mathf.Round(primaryAxis.x * FixedSpeedSteps) / FixedSpeedSteps;
}
if (primaryAxis.y > 0.0f)
MoveThrottle += ort * (primaryAxis.y * transform.lossyScale.z * moveInfluence * Vector3.forward);
if (primaryAxis.y < 0.0f)
MoveThrottle += ort * (Mathf.Abs(primaryAxis.y) * transform.lossyScale.z * moveInfluence *
BackAndSideDampen * Vector3.back);
if (primaryAxis.x < 0.0f)
MoveThrottle += ort * (Mathf.Abs(primaryAxis.x) * transform.lossyScale.x * moveInfluence *
BackAndSideDampen * Vector3.left);
if (primaryAxis.x > 0.0f)
MoveThrottle += ort * (primaryAxis.x * transform.lossyScale.x * moveInfluence * BackAndSideDampen *
Vector3.right);
}
if (EnableRotation)
{
Vector3 euler = transform.rotation.eulerAngles;
float rotateInfluence = SimulationRate * Time.deltaTime * RotationAmount * RotationScaleMultiplier;
bool curHatLeft = OVRInput.Get(OVRInput.Button.PrimaryShoulder);
if (curHatLeft && !prevHatLeft)
euler.y -= RotationRatchet;
prevHatLeft = curHatLeft;
bool curHatRight = OVRInput.Get(OVRInput.Button.SecondaryShoulder);
if (curHatRight && !prevHatRight)
euler.y += RotationRatchet;
prevHatRight = curHatRight;
euler.y += buttonRotation;
buttonRotation = 0f;
#if !UNITY_ANDROID || UNITY_EDITOR
if (!SkipMouseRotation)
euler.y += Input.GetAxis("Mouse X") * rotateInfluence * 3.25f;
#endif
if (SnapRotation)
{
if (OVRInput.Get(OVRInput.Button.SecondaryThumbstickLeft))
{
if (ReadyToSnapTurn)
{
euler.y -= RotationRatchet;
ReadyToSnapTurn = false;
}
}
else if (OVRInput.Get(OVRInput.Button.SecondaryThumbstickRight))
{
if (ReadyToSnapTurn)
{
euler.y += RotationRatchet;
ReadyToSnapTurn = false;
}
}
else
{
ReadyToSnapTurn = true;
}
}
else
{
Vector2 secondaryAxis = OVRInput.Get(OVRInput.Axis2D.SecondaryThumbstick);
euler.y += secondaryAxis.x * rotateInfluence;
}
transform.rotation = Quaternion.Euler(euler);
}
}
/// <summary>
/// Invoked by OVRCameraRig's UpdatedAnchors callback. Allows the Hmd rotation to update the facing direction of the player.
/// </summary>
public void UpdateTransform(OVRCameraRig rig)
{
Transform root = CameraRig.trackingSpace;
Transform centerEye = CameraRig.centerEyeAnchor;
if (HmdRotatesY && !Teleported)
{
Vector3 prevPos = root.position;
Quaternion prevRot = root.rotation;
transform.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y, 0.0f);
root.position = prevPos;
root.rotation = prevRot;
}
UpdateController();
if (TransformUpdated != null)
{
TransformUpdated(root);
}
}
/// <summary>
/// Jump! Must be enabled manually.
/// </summary>
public bool Jump()
{
if (!Controller.isGrounded)
return false;
MoveThrottle += new Vector3(0, transform.lossyScale.y * JumpForce, 0);
return true;
}
/// <summary>
/// Stop this instance.
/// </summary>
public void Stop()
{
Controller.Move(Vector3.zero);
MoveThrottle = Vector3.zero;
FallSpeed = 0.0f;
}
/// <summary>
/// Gets the move scale multiplier.
/// </summary>
/// <param name="moveScaleMultiplier">Move scale multiplier.</param>
public void GetMoveScaleMultiplier(ref float moveScaleMultiplier)
{
moveScaleMultiplier = MoveScaleMultiplier;
}
/// <summary>
/// Sets the move scale multiplier.
/// </summary>
/// <param name="moveScaleMultiplier">Move scale multiplier.</param>
public void SetMoveScaleMultiplier(float moveScaleMultiplier)
{
MoveScaleMultiplier = moveScaleMultiplier;
}
/// <summary>
/// Gets the rotation scale multiplier.
/// </summary>
/// <param name="rotationScaleMultiplier">Rotation scale multiplier.</param>
public void GetRotationScaleMultiplier(ref float rotationScaleMultiplier)
{
rotationScaleMultiplier = RotationScaleMultiplier;
}
/// <summary>
/// Sets the rotation scale multiplier.
/// </summary>
/// <param name="rotationScaleMultiplier">Rotation scale multiplier.</param>
public void SetRotationScaleMultiplier(float rotationScaleMultiplier)
{
RotationScaleMultiplier = rotationScaleMultiplier;
}
/// <summary>
/// Gets the allow mouse rotation.
/// </summary>
/// <param name="skipMouseRotation">Allow mouse rotation.</param>
public void GetSkipMouseRotation(ref bool skipMouseRotation)
{
skipMouseRotation = SkipMouseRotation;
}
/// <summary>
/// Sets the allow mouse rotation.
/// </summary>
/// <param name="skipMouseRotation">If set to <c>true</c> allow mouse rotation.</param>
public void SetSkipMouseRotation(bool skipMouseRotation)
{
SkipMouseRotation = skipMouseRotation;
}
/// <summary>
/// Gets the halt update movement.
/// </summary>
/// <param name="haltUpdateMovement">Halt update movement.</param>
public void GetHaltUpdateMovement(ref bool haltUpdateMovement)
{
haltUpdateMovement = HaltUpdateMovement;
}
/// <summary>
/// Sets the halt update movement.
/// </summary>
/// <param name="haltUpdateMovement">If set to <c>true</c> halt update movement.</param>
public void SetHaltUpdateMovement(bool haltUpdateMovement)
{
HaltUpdateMovement = haltUpdateMovement;
}
/// <summary>
/// Resets the player look rotation when the device orientation is reset.
/// </summary>
public void ResetOrientation()
{
if (HmdResetsY && !HmdRotatesY)
{
Vector3 euler = transform.rotation.eulerAngles;
euler.y = InitialYRotation;
transform.rotation = Quaternion.Euler(euler);
}
}
}
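// --------------------------------------------------------------------------------
// Hypothetical usage sketch (not part of the SDK class above): a separate
// MonoBehaviour that drives OVRPlayerController through the public API shown
// here (SetMoveScaleMultiplier, SetHaltUpdateMovement, Jump). The key bindings
// and the pause-menu flag are illustrative assumptions.
public class OVRPlayerControllerDriver : MonoBehaviour
{
    public OVRPlayerController player;
    public bool pauseMenuOpen = false;

    void Update()
    {
        if (player == null)
            return;

        // Hold Left Control to double the keyboard/thumbstick movement scale.
        player.SetMoveScaleMultiplier(Input.GetKey(KeyCode.LeftControl) ? 2.0f : 1.0f);

        // Space triggers a jump; Jump() returns false while the controller is airborne.
        if (Input.GetKeyDown(KeyCode.Space))
            player.Jump();

        // Freeze controller-driven movement while the (hypothetical) pause menu is open.
        player.SetHaltUpdateMovement(pauseMenuOpen);
    }
}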

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 0950df82e7936c84983497630bde5b54
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,99 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using System.Text;
using UnityEngine;
using UnityEngine.Assertions;
namespace UnityEngine.EventSystems
{
/// <summary>
/// Extension of Unity's PointerEventData to support ray-based pointing and touchpad swiping
/// </summary>
public class OVRPointerEventData : PointerEventData
{
public OVRPointerEventData(EventSystem eventSystem)
: base(eventSystem)
{
}
public Ray worldSpaceRay;
public Vector2 swipeStart;
public override string ToString()
{
var sb = new StringBuilder();
sb.AppendLine("<b>Position</b>: " + position);
sb.AppendLine("<b>delta</b>: " + delta);
sb.AppendLine("<b>eligibleForClick</b>: " + eligibleForClick);
sb.AppendLine("<b>pointerEnter</b>: " + pointerEnter);
sb.AppendLine("<b>pointerPress</b>: " + pointerPress);
sb.AppendLine("<b>lastPointerPress</b>: " + lastPress);
sb.AppendLine("<b>pointerDrag</b>: " + pointerDrag);
sb.AppendLine("<b>worldSpaceRay</b>: " + worldSpaceRay);
sb.AppendLine("<b>swipeStart</b>: " + swipeStart);
sb.AppendLine("<b>Use Drag Threshold</b>: " + useDragThreshold);
return sb.ToString();
}
}
/// <summary>
/// Static helpers for OVRPointerEventData.
/// </summary>
public static class PointerEventDataExtension
{
public static bool IsVRPointer(this PointerEventData pointerEventData)
{
return (pointerEventData is OVRPointerEventData);
}
public static Ray GetRay(this PointerEventData pointerEventData)
{
OVRPointerEventData vrPointerEventData = pointerEventData as OVRPointerEventData;
Assert.IsNotNull(vrPointerEventData);
return vrPointerEventData.worldSpaceRay;
}
public static Vector2 GetSwipeStart(this PointerEventData pointerEventData)
{
OVRPointerEventData vrPointerEventData = pointerEventData as OVRPointerEventData;
Assert.IsNotNull(vrPointerEventData);
return vrPointerEventData.swipeStart;
}
public static void SetSwipeStart(this PointerEventData pointerEventData, Vector2 start)
{
OVRPointerEventData vrPointerEventData = pointerEventData as OVRPointerEventData;
Assert.IsNotNull(vrPointerEventData);
vrPointerEventData.swipeStart = start;
}
}
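// Hypothetical usage sketch (not part of the SDK file above): building a VR
// pointer event from a controller transform and reading it back through the
// extension helpers defined above. "rayOrigin" is an illustrative field, not
// part of the SDK.
public class OVRPointerEventDataExample : MonoBehaviour
{
    public Transform rayOrigin;

    void Update()
    {
        var eventData = new OVRPointerEventData(EventSystem.current)
        {
            worldSpaceRay = new Ray(rayOrigin.position, rayOrigin.forward)
        };

        if (eventData.IsVRPointer())        // true, since this is an OVRPointerEventData
        {
            Ray ray = eventData.GetRay();   // returns the ray assigned above
            Debug.DrawRay(ray.origin, ray.direction * 10.0f, Color.green);
        }
    }
}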
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 646c937ce12610744adc2b5e487f77ac
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,163 @@
#if UNITY_EDITOR
using UnityEngine;
using UnityEditor;
using System.Collections.Generic;
using Assets.OVR.Scripts;
public class OVRProfiler : EditorWindow
{
enum TargetPlatform
{
OculusGo,
GearVR,
SantaCruz,
OculusRift
};
private static List<RangedRecord> mRecords = new List<RangedRecord>();
private Vector2 mScrollPosition;
static private TargetPlatform mTargetPlatform;
[MenuItem("Tools/Oculus/OVR Profiler")]
static void Init()
{
// Get existing open window or if none, make a new one:
EditorWindow.GetWindow(typeof(OVRProfiler));
#if UNITY_ANDROID
mTargetPlatform = TargetPlatform.OculusGo;
#else
mTargetPlatform = TargetPlatform.OculusRift;
#endif
}
void OnGUI()
{
GUILayout.Label("OVR Profiler", EditorStyles.boldLabel);
string[] options = new string[]
{
"Oculus Go", "Gear VR", "Santa Cruz", "Oculus Rift",
};
mTargetPlatform = (TargetPlatform)EditorGUILayout.Popup("Target Oculus Platform", (int)mTargetPlatform, options);
if (EditorApplication.isPlaying)
{
UpdateRecords();
DrawResults();
}
else
{
ShowCenterAlignedMessageLabel("Click Play in Unity to view stats.");
}
}
void OnInspectorUpdate()
{
Repaint();
}
void DrawResults()
{
string lastCategory = "";
mScrollPosition = EditorGUILayout.BeginScrollView(mScrollPosition);
foreach (RangedRecord record in mRecords)
{
// Add separator and label for new category
if (!record.category.Equals(lastCategory))
{
lastCategory = record.category;
EditorGUILayout.Separator();
EditorGUILayout.BeginHorizontal();
GUILayout.Label(lastCategory, EditorStyles.label, GUILayout.Width(200));
EditorGUILayout.EndHorizontal();
}
// Draw records
EditorGUILayout.BeginHorizontal();
Rect r = EditorGUILayout.BeginVertical();
EditorGUI.ProgressBar(r, record.value / (record.max * 2), record.category + " " + record.value.ToString());
GUILayout.Space(16);
EditorGUILayout.EndVertical();
EditorGUILayout.EndHorizontal();
EditorGUILayout.BeginHorizontal();
GUILayout.Label(record.message);
EditorGUILayout.EndHorizontal();
GUI.enabled = true;
}
EditorGUILayout.EndScrollView();
}
private void UpdateRecords()
{
mRecords.Clear();
if (mTargetPlatform == TargetPlatform.OculusRift)
{
AddRecord("Client Frame CPU Time (ms)", "", UnityStats.frameTime * 1000, 0, 11);
AddRecord("Render Frame CPU Time (ms)", "", UnityStats.renderTime * 1000, 0, 11);
}
else
{
// Graphics memory
long memSizeByte = UnityStats.usedTextureMemorySize + UnityStats.vboTotalBytes;
AddRecord("Graphics Memory (MB)", "Please use less than 1024 MB of vertex and texture memory.", ConvertBytes(memSizeByte, "MB"), 0, 1024);
}
// Recommended triangle/vertex budget depends on the target platform.
float triVertRec = mTargetPlatform == TargetPlatform.OculusRift ? 1000000 : 100000;
// Triangle count
AddRecord("Triangles", "Please use less than " + triVertRec + " triangles.", UnityStats.triangles, 0, triVertRec);
// Vertices count
AddRecord("Vertices", "Please use less than " + triVertRec + " vertices.", UnityStats.vertices, 0, triVertRec);
// Recommended draw call budget also depends on the target platform.
float dcRec = mTargetPlatform == TargetPlatform.OculusRift ? 1000 : 100;
// Draw call count
AddRecord("Draw Call", "Please use less than " + dcRec + " draw calls.", UnityStats.drawCalls, 0, dcRec);
}
private string FormatBytes(long bytes, string target)
{
return System.String.Format("{0:0.##} {1}", ConvertBytes(bytes, target), target);
}
private float ConvertBytes(long bytes, string target)
{
string[] Suffix = { "B", "KB", "MB", "GB", "TB" };
int i;
double dblSByte = bytes;
for (i = 0; i < Suffix.Length; i++, bytes /= 1024)
{
if (Suffix[i] == target)
return (float)dblSByte;
dblSByte = bytes / 1024.0;
}
return 0;
}
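// Illustrative examples of ConvertBytes (not in the original source):
//   ConvertBytes(3 * 1024 * 1024, "MB") returns 3.0f
//   ConvertBytes(512, "KB")             returns 0.5f
//   An unrecognized target string falls through the loop and returns 0.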
private void ShowCenterAlignedMessageLabel(string message)
{
GUILayout.BeginVertical();
GUILayout.FlexibleSpace();
GUILayout.BeginHorizontal();
GUILayout.FlexibleSpace();
GUILayout.Label(message, EditorStyles.boldLabel);
GUILayout.FlexibleSpace();
GUILayout.EndHorizontal();
GUILayout.FlexibleSpace();
GUILayout.EndVertical();
}
private void AddRecord(string category, string message, float value, float min, float max)
{
RangedRecord record = new RangedRecord(category, message, value, min, max);
mRecords.Add(record);
}
}
#endif

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 3303d4232ee59ac40a9fdc223870fbbc
timeCreated: 1520636357
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,49 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
/// <summary>
/// Visualizes progress for operations such as loading.
/// </summary>
public class OVRProgressIndicator : MonoBehaviour
{
public MeshRenderer progressImage;
[Range(0, 1)]
public float currentProgress = 0.7f;
void Awake()
{
progressImage.sortingOrder = 150;
}
// Update is called once per frame
void Update()
{
progressImage.sharedMaterial.SetFloat("_AlphaCutoff", 1-currentProgress);
}
}
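// Hypothetical usage sketch (not part of the SDK class above): driving
// currentProgress from an async scene load. The scene name "Main" and the
// field wiring are illustrative assumptions.
public class OVRProgressIndicatorDriver : MonoBehaviour
{
    public OVRProgressIndicator indicator;

    IEnumerator Start()
    {
        AsyncOperation load = UnityEngine.SceneManagement.SceneManager.LoadSceneAsync("Main");
        load.allowSceneActivation = false;

        // AsyncOperation.progress stops at 0.9 until scene activation is allowed,
        // so rescale it to the 0..1 range the indicator expects.
        while (load.progress < 0.9f)
        {
            indicator.currentProgress = load.progress / 0.9f;
            yield return null;
        }

        indicator.currentProgress = 1.0f;
        load.allowSceneActivation = true;
    }
}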

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: f14ece5575e2b1e4d80619901d65b428
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,320 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.EventSystems;
using UnityEngine.Serialization;
/// <summary>
/// Extension of GraphicRaycaster to support ray casting with world space rays instead of just screen-space
/// pointer positions
/// </summary>
[RequireComponent(typeof(Canvas))]
public class OVRRaycaster : GraphicRaycaster, IPointerEnterHandler
{
[Tooltip("A world space pointer for this canvas")]
public GameObject pointer;
public int sortOrder = 0;
protected OVRRaycaster()
{ }
[NonSerialized]
private Canvas m_Canvas;
private Canvas canvas
{
get
{
if (m_Canvas != null)
return m_Canvas;
m_Canvas = GetComponent<Canvas>();
return m_Canvas;
}
}
public override Camera eventCamera
{
get
{
return canvas.worldCamera;
}
}
public override int sortOrderPriority
{
get
{
return sortOrder;
}
}
/// <summary>
/// For the given ray, find graphics on this canvas which it intersects and are not blocked by other
/// world objects
/// </summary>
[NonSerialized]
private List<RaycastHit> m_RaycastResults = new List<RaycastHit>();
private void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList, Ray ray, bool checkForBlocking)
{
//This function is closely based on
//void GraphicRaycaster.Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
if (canvas == null)
return;
float hitDistance = float.MaxValue;
if (checkForBlocking && blockingObjects != BlockingObjects.None)
{
float dist = eventCamera.farClipPlane;
if (blockingObjects == BlockingObjects.ThreeD || blockingObjects == BlockingObjects.All)
{
var hits = Physics.RaycastAll(ray, dist, m_BlockingMask);
if (hits.Length > 0 && hits[0].distance < hitDistance)
{
hitDistance = hits[0].distance;
}
}
if (blockingObjects == BlockingObjects.TwoD || blockingObjects == BlockingObjects.All)
{
var hits = Physics2D.GetRayIntersectionAll(ray, dist, m_BlockingMask);
if (hits.Length > 0 && hits[0].fraction * dist < hitDistance)
{
hitDistance = hits[0].fraction * dist;
}
}
}
m_RaycastResults.Clear();
GraphicRaycast(canvas, ray, m_RaycastResults);
for (var index = 0; index < m_RaycastResults.Count; index++)
{
var go = m_RaycastResults[index].graphic.gameObject;
bool appendGraphic = true;
if (ignoreReversedGraphics)
{
// If we have a camera, compare the ray direction against the camera's forward.
var cameraForward = ray.direction;
var dir = go.transform.rotation * Vector3.forward;
appendGraphic = Vector3.Dot(cameraForward, dir) > 0;
}
// Ignore points behind us (can happen with a canvas pointer)
if (eventCamera.transform.InverseTransformPoint(m_RaycastResults[index].worldPos).z <= 0)
{
appendGraphic = false;
}
if (appendGraphic)
{
float distance = Vector3.Distance(ray.origin, m_RaycastResults[index].worldPos);
if (distance >= hitDistance)
{
continue;
}
var castResult = new RaycastResult
{
gameObject = go,
module = this,
distance = distance,
index = resultAppendList.Count,
depth = m_RaycastResults[index].graphic.depth,
worldPosition = m_RaycastResults[index].worldPos
};
resultAppendList.Add(castResult);
}
}
}
/// <summary>
/// Performs a raycast using eventData.worldSpaceRay
/// </summary>
/// <param name="eventData"></param>
/// <param name="resultAppendList"></param>
public override void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
{
if (eventData.IsVRPointer())
{
Raycast(eventData, resultAppendList, eventData.GetRay(), true);
}
}
/// <summary>
/// Performs a raycast using the pointer object attached to this OVRRaycaster
/// </summary>
/// <param name="eventData"></param>
/// <param name="resultAppendList"></param>
public void RaycastPointer(PointerEventData eventData, List<RaycastResult> resultAppendList)
{
if (pointer != null && pointer.activeInHierarchy)
{
Raycast(eventData, resultAppendList, new Ray(eventCamera.transform.position, (pointer.transform.position - eventCamera.transform.position).normalized), false);
}
}
/// <summary>
/// Perform a raycast into the screen and collect all graphics underneath it.
/// </summary>
[NonSerialized]
static readonly List<RaycastHit> s_SortedGraphics = new List<RaycastHit>();
private void GraphicRaycast(Canvas canvas, Ray ray, List<RaycastHit> results)
{
//This function is based closely on :
// void GraphicRaycaster.Raycast(Canvas canvas, Camera eventCamera, Vector2 pointerPosition, List<Graphic> results)
// But modified to take a Ray instead of a canvas pointer, and also to explicitly ignore
// the graphic associated with the pointer
// Necessary for the event system
var foundGraphics = GraphicRegistry.GetGraphicsForCanvas(canvas);
s_SortedGraphics.Clear();
for (int i = 0; i < foundGraphics.Count; ++i)
{
Graphic graphic = foundGraphics[i];
// -1 means it hasn't been processed by the canvas, which means it isn't actually drawn
if (graphic.depth == -1 || (pointer == graphic.gameObject))
continue;
Vector3 worldPos;
if (RayIntersectsRectTransform(graphic.rectTransform, ray, out worldPos))
{
//Work out where this is on the screen for compatibility with existing Unity UI code
Vector2 screenPos = eventCamera.WorldToScreenPoint(worldPos);
// mask/image intersection - See Unity docs on eventAlphaThreshold for when this does anything
if (graphic.Raycast(screenPos, eventCamera))
{
RaycastHit hit;
hit.graphic = graphic;
hit.worldPos = worldPos;
hit.fromMouse = false;
s_SortedGraphics.Add(hit);
}
}
}
s_SortedGraphics.Sort((g1, g2) => g2.graphic.depth.CompareTo(g1.graphic.depth));
for (int i = 0; i < s_SortedGraphics.Count; ++i)
{
results.Add(s_SortedGraphics[i]);
}
}
/// <summary>
/// Get screen position of worldPosition contained in this RaycastResult
/// </summary>
/// <param name="worldPosition"></param>
/// <returns></returns>
public Vector2 GetScreenPosition(RaycastResult raycastResult)
{
// In future versions of Unity, RaycastResult will contain screenPosition, so this will not be necessary
return eventCamera.WorldToScreenPoint(raycastResult.worldPosition);
}
/// <summary>
/// Detects whether a ray intersects a RectTransform and if it does also
/// returns the world position of the intersection.
/// </summary>
/// <param name="rectTransform"></param>
/// <param name="ray"></param>
/// <param name="worldPos"></param>
/// <returns></returns>
static bool RayIntersectsRectTransform(RectTransform rectTransform, Ray ray, out Vector3 worldPos)
{
Vector3[] corners = new Vector3[4];
rectTransform.GetWorldCorners(corners);
Plane plane = new Plane(corners[0], corners[1], corners[2]);
float enter;
if (!plane.Raycast(ray, out enter))
{
worldPos = Vector3.zero;
return false;
}
Vector3 intersection = ray.GetPoint(enter);
Vector3 BottomEdge = corners[3] - corners[0];
Vector3 LeftEdge = corners[1] - corners[0];
float BottomDot = Vector3.Dot(intersection - corners[0], BottomEdge);
float LeftDot = Vector3.Dot(intersection - corners[0], LeftEdge);
if (BottomDot < BottomEdge.sqrMagnitude && // Can use sqrMag because BottomEdge is not normalized
LeftDot < LeftEdge.sqrMagnitude &&
BottomDot >= 0 &&
LeftDot >= 0)
{
worldPos = corners[0] + LeftDot * LeftEdge / LeftEdge.sqrMagnitude + BottomDot * BottomEdge / BottomEdge.sqrMagnitude;
return true;
}
else
{
worldPos = Vector3.zero;
return false;
}
}
struct RaycastHit
{
public Graphic graphic;
public Vector3 worldPos;
public bool fromMouse;
};
/// <summary>
/// Is this the currently focussed Raycaster according to the InputModule
/// </summary>
/// <returns></returns>
public bool IsFocussed()
{
OVRInputModule inputModule = EventSystem.current.currentInputModule as OVRInputModule;
return inputModule && inputModule.activeGraphicRaycaster == this;
}
public void OnPointerEnter(PointerEventData e)
{
if (e.IsVRPointer())
{
// Gaze has entered this canvas. We'll make it the active one so that canvas-mouse pointer can be used.
OVRInputModule inputModule = EventSystem.current.currentInputModule as OVRInputModule;
inputModule.activeGraphicRaycaster = this;
}
}
}
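// Hypothetical usage sketch (not part of the SDK class above): casting a
// controller ray directly against a canvas's OVRRaycaster. "laserOrigin" and
// the component wiring are illustrative; in a normal setup OVRInputModule
// builds the pointer event data for you.
public class OVRRaycasterExample : MonoBehaviour
{
    public OVRRaycaster canvasRaycaster;
    public Transform laserOrigin;

    private readonly List<RaycastResult> results = new List<RaycastResult>();

    void Update()
    {
        var eventData = new OVRPointerEventData(EventSystem.current)
        {
            worldSpaceRay = new Ray(laserOrigin.position, laserOrigin.forward)
        };

        results.Clear();
        canvasRaycaster.Raycast(eventData, results);   // only raycasts for VR pointer events

        if (results.Count > 0)
            Debug.Log("UI hit: " + results[0].gameObject.name);
    }
}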

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7aaf960227867044282d921171d2d7ac
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,51 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Assets.OVR.Scripts
{
public class Record
{
public string category;
public string message;
public Record(string cat, string msg)
{
category = cat;
message = msg;
}
}
public class RangedRecord : Record
{
public float value;
public float min;
public float max;
public RangedRecord(string cat, string msg, float val, float minVal, float maxVal)
: base(cat, msg)
{
value = val;
min = minVal;
max = maxVal;
}
}
public delegate void FixMethodDelegate(UnityEngine.Object obj, bool isLastInSet, int selectedIndex);
public class FixRecord : Record
{
public FixMethodDelegate fixMethod;
public UnityEngine.Object targetObject;
public string[] buttonNames;
public bool complete;
public FixRecord(string cat, string msg, FixMethodDelegate fix, UnityEngine.Object target, string[] buttons)
: base(cat, msg)
{
buttonNames = buttons;
fixMethod = fix;
targetObject = target;
complete = false;
}
}
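// Illustrative examples (not part of the SDK file): how a profiler or lint
// window might populate these record types. "someTexture" is a placeholder.
//
//   var ranged = new RangedRecord("Draw Call",
//       "Please use less than 100 draw calls.", 250, 0, 100);
//
//   var fix = new FixRecord("Optimize Texture", "Texture is uncompressed.",
//       (obj, isLastInSet, selectedIndex) => { /* apply the fix to obj */ },
//       someTexture, new string[] { "Fix" });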
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 63f0fe0d60ddeb54f9f43d701286af2d
timeCreated: 1520636357
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,49 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Allows you to reset VR input tracking with a gamepad button press.
/// </summary>
public class OVRResetOrientation : MonoBehaviour
{
/// <summary>
/// The gamepad button that will reset VR input tracking.
/// </summary>
public OVRInput.RawButton resetButton = OVRInput.RawButton.Y;
/// <summary>
/// Check input and reset orientation if necessary
/// See the input mapping setup in the Unity Integration guide
/// </summary>
void Update()
{
// NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
if (OVRInput.GetDown(resetButton))
{
//*************************
// reset orientation
//*************************
OVRManager.display.RecenterPose();
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 09bb0a17b6a704298b65be4fb08ef480
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,218 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
/// <summary>
/// Sample that allows you to play with various VR settings.
/// </summary>
public class OVRSceneSampleController : MonoBehaviour
{
/// <summary>
/// The key that quits the application.
/// </summary>
public KeyCode quitKey = KeyCode.Escape;
/// <summary>
/// An optional texture that appears before the menu fades in.
/// </summary>
public Texture fadeInTexture = null;
/// <summary>
/// Controls how quickly the player's speed and rotation change based on input.
/// </summary>
public float speedRotationIncrement = 0.05f;
private OVRPlayerController playerController = null;
// Handle to OVRCameraRig
private OVRCameraRig cameraController = null;
/// <summary>
/// We can set the layer to anything we want, which allows
/// a specific camera to render it.
/// </summary>
public string layerName = "Default";
// Vision mode on/off
private bool visionMode = true;
// We want to hold onto GridCube, for potential sharing
// of the menu RenderTarget
OVRGridCube gridCube = null;
#if SHOW_DK2_VARIABLES
private string strVisionMode = "Vision Enabled: ON";
#endif
#region MonoBehaviour Message Handlers
/// <summary>
/// Awake this instance.
/// </summary>
void Awake()
{
// Find camera controller
OVRCameraRig[] cameraControllers;
cameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();
if (cameraControllers.Length == 0)
{
Debug.LogWarning("OVRMainMenu: No OVRCameraRig attached.");
}
else if (cameraControllers.Length > 1)
{
Debug.LogWarning("OVRMainMenu: More then 1 OVRCameraRig attached.");
}
else
{
cameraController = cameraControllers[0];
}
// Find player controller
OVRPlayerController[] playerControllers;
playerControllers = gameObject.GetComponentsInChildren<OVRPlayerController>();
if (playerControllers.Length == 0)
{
Debug.LogWarning("OVRMainMenu: No OVRPlayerController attached.");
}
else if (playerControllers.Length > 1)
{
Debug.LogWarning("OVRMainMenu: More then 1 OVRPlayerController attached.");
}
else
{
playerController = playerControllers[0];
}
}
/// <summary>
/// Start this instance.
/// </summary>
void Start()
{
// Make sure to hide cursor
if (Application.isEditor == false)
{
Cursor.visible = false;
Cursor.lockState = CursorLockMode.Locked;
}
// CameraController updates
if (cameraController != null)
{
// Add a GridCube component to this object
gridCube = gameObject.AddComponent<OVRGridCube>();
gridCube.SetOVRCameraController(ref cameraController);
}
}
/// <summary>
/// Update this instance.
/// </summary>
void Update()
{
// Recenter pose
UpdateRecenterPose();
// Turn On/Off Vision Mode
UpdateVisionMode();
// Update Speed and Rotation Scale
if (playerController != null)
UpdateSpeedAndRotationScaleMultiplier();
// Toggle Fullscreen
if (Input.GetKeyDown(KeyCode.F11))
Screen.fullScreen = !Screen.fullScreen;
if (Input.GetKeyDown(KeyCode.M))
#if UNITY_2017_2_OR_NEWER
UnityEngine.XR.XRSettings.showDeviceView = !UnityEngine.XR.XRSettings.showDeviceView;
#else
UnityEngine.VR.VRSettings.showDeviceView = !UnityEngine.VR.VRSettings.showDeviceView;
#endif
#if !UNITY_ANDROID || UNITY_EDITOR
// Escape Application
if (Input.GetKeyDown(quitKey))
Application.Quit();
#endif
}
#endregion
/// <summary>
/// Updates the vision mode.
/// </summary>
void UpdateVisionMode()
{
if (Input.GetKeyDown(KeyCode.F2))
{
// Toggle positional tracking (vision) on and off.
visionMode = !visionMode;
OVRManager.tracker.isEnabled = visionMode;
}
}
/// <summary>
/// Updates the speed and rotation scale multiplier.
/// </summary>
void UpdateSpeedAndRotationScaleMultiplier()
{
float moveScaleMultiplier = 0.0f;
playerController.GetMoveScaleMultiplier(ref moveScaleMultiplier);
if (Input.GetKeyDown(KeyCode.Alpha7))
{
moveScaleMultiplier -= speedRotationIncrement;
}
else if (Input.GetKeyDown(KeyCode.Alpha8))
{
moveScaleMultiplier += speedRotationIncrement;
}
playerController.SetMoveScaleMultiplier(moveScaleMultiplier);
float rotationScaleMultiplier = 0.0f;
playerController.GetRotationScaleMultiplier(ref rotationScaleMultiplier);
if (Input.GetKeyDown(KeyCode.Alpha9))
{
rotationScaleMultiplier -= speedRotationIncrement;
}
else if (Input.GetKeyDown(KeyCode.Alpha0))
{
rotationScaleMultiplier += speedRotationIncrement;
}
playerController.SetRotationScaleMultiplier(rotationScaleMultiplier);
}
/// <summary>
/// Recenter pose
/// </summary>
void UpdateRecenterPose()
{
if (Input.GetKeyDown(KeyCode.R))
OVRManager.display.RecenterPose();
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 4f07515ada089df47868559a20dd6783
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,209 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections; // required for Coroutines
/// <summary>
/// Fades the screen from black after a new scene is loaded. Fade can also be controlled mid-scene using SetUIFade and SetFadeLevel
/// </summary>
public class OVRScreenFade : MonoBehaviour
{
[Tooltip("Fade duration")]
public float fadeTime = 2.0f;
[Tooltip("Screen color at maximum fade")]
public Color fadeColor = new Color(0.01f, 0.01f, 0.01f, 1.0f);
public bool fadeOnStart = true;
/// <summary>
/// The render queue used by the fade mesh. Reduce this if you need to render on top of it.
/// </summary>
public int renderQueue = 5000;
private float uiFadeAlpha = 0;
private MeshRenderer fadeRenderer;
private MeshFilter fadeMesh;
private Material fadeMaterial = null;
private bool isFading = false;
public float currentAlpha { get; private set; }
void Awake()
{
// create the fade material
fadeMaterial = new Material(Shader.Find("Oculus/Unlit Transparent Color"));
fadeMesh = gameObject.AddComponent<MeshFilter>();
fadeRenderer = gameObject.AddComponent<MeshRenderer>();
var mesh = new Mesh();
fadeMesh.mesh = mesh;
Vector3[] vertices = new Vector3[4];
float width = 2f;
float height = 2f;
float depth = 1f;
vertices[0] = new Vector3(-width, -height, depth);
vertices[1] = new Vector3(width, -height, depth);
vertices[2] = new Vector3(-width, height, depth);
vertices[3] = new Vector3(width, height, depth);
mesh.vertices = vertices;
int[] tri = new int[6];
tri[0] = 0;
tri[1] = 2;
tri[2] = 1;
tri[3] = 2;
tri[4] = 3;
tri[5] = 1;
mesh.triangles = tri;
Vector3[] normals = new Vector3[4];
normals[0] = -Vector3.forward;
normals[1] = -Vector3.forward;
normals[2] = -Vector3.forward;
normals[3] = -Vector3.forward;
mesh.normals = normals;
Vector2[] uv = new Vector2[4];
uv[0] = new Vector2(0, 0);
uv[1] = new Vector2(1, 0);
uv[2] = new Vector2(0, 1);
uv[3] = new Vector2(1, 1);
mesh.uv = uv;
SetFadeLevel(0);
}
/// <summary>
/// Start a fade out
/// </summary>
public void FadeOut()
{
StartCoroutine(Fade(0,1));
}
/// <summary>
/// Starts a fade in when a new level is loaded
/// </summary>
void OnLevelFinishedLoading(int level)
{
StartCoroutine(Fade(1,0));
}
/// <summary>
/// Automatically starts a fade in
/// </summary>
void Start()
{
if (fadeOnStart)
{
StartCoroutine(Fade(1,0));
}
}
void OnEnable()
{
if (!fadeOnStart)
{
SetFadeLevel(0);
}
}
/// <summary>
/// Cleans up the fade material
/// </summary>
void OnDestroy()
{
if (fadeRenderer != null)
Destroy(fadeRenderer);
if (fadeMaterial != null)
Destroy(fadeMaterial);
if (fadeMesh != null)
Destroy(fadeMesh);
}
/// <summary>
/// Set the UI fade level - fade due to UI in foreground
/// </summary>
public void SetUIFade(float level)
{
uiFadeAlpha = Mathf.Clamp01(level);
SetMaterialAlpha();
}
/// <summary>
/// Override current fade level
/// </summary>
/// <param name="level"></param>
public void SetFadeLevel(float level)
{
currentAlpha = level;
SetMaterialAlpha();
}
/// <summary>
/// Fades alpha from startAlpha to endAlpha over fadeTime seconds.
/// </summary>
IEnumerator Fade(float startAlpha, float endAlpha)
{
float elapsedTime = 0.0f;
while (elapsedTime < fadeTime)
{
elapsedTime += Time.deltaTime;
currentAlpha = Mathf.Lerp(startAlpha, endAlpha, Mathf.Clamp01(elapsedTime / fadeTime));
SetMaterialAlpha();
yield return new WaitForEndOfFrame();
}
}
/// <summary>
/// Update material alpha. UI fade and the current fade due to fade in/out animations (or explicit control)
/// both affect the fade. (The max is taken)
/// </summary>
private void SetMaterialAlpha()
{
Color color = fadeColor;
color.a = Mathf.Max(currentAlpha, uiFadeAlpha);
isFading = color.a > 0;
if (fadeMaterial != null)
{
fadeMaterial.color = color;
fadeMaterial.renderQueue = renderQueue;
fadeRenderer.material = fadeMaterial;
fadeRenderer.enabled = isFading;
}
}
}
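// Hypothetical usage sketch (not part of the SDK class above): driving the fade
// from another script, e.g. before a teleport or while a menu is in the
// foreground. The method names and the 0.5 UI dim level are illustrative.
public class OVRScreenFadeExample : MonoBehaviour
{
    public OVRScreenFade screenFade;

    public void OnTeleportRequested()
    {
        // Fade to fadeColor over fadeTime seconds.
        screenFade.FadeOut();
    }

    public void OnMenuToggled(bool open)
    {
        // Dim the scene while UI is up: 0 = no fade, 1 = fully faded.
        screenFade.SetUIFade(open ? 0.5f : 0.0f);
    }
}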

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: df8e1d778abf442e4bec449c360e9e1c
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: -100
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,73 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
/// <summary>
/// Simple helper script that conditionally enables rendering of a controller if it is connected.
/// </summary>
public class OVRTrackedRemote : MonoBehaviour
{
/// <summary>
/// The root GameObject that represents the GearVr Controller model.
/// </summary>
public GameObject m_modelGearVrController;
/// <summary>
/// The root GameObject that represents the Oculus Go Controller model.
/// </summary>
public GameObject m_modelOculusGoController;
/// <summary>
/// The controller that determines whether or not to enable rendering of the controller model.
/// </summary>
public OVRInput.Controller m_controller;
private bool m_isOculusGo;
private bool m_prevControllerConnected = false;
private bool m_prevControllerConnectedCached = false;
void Start()
{
m_isOculusGo = (OVRPlugin.productName == "Oculus Go");
}
void Update()
{
bool controllerConnected = OVRInput.IsControllerConnected(m_controller);
if ((controllerConnected != m_prevControllerConnected) || !m_prevControllerConnectedCached)
{
m_modelOculusGoController.SetActive(controllerConnected && m_isOculusGo);
m_modelGearVrController.SetActive(controllerConnected && !m_isOculusGo);
m_prevControllerConnected = controllerConnected;
m_prevControllerConnectedCached = true;
}
if (!controllerConnected)
{
return;
}
}
}
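// Typical setup (illustrative, not part of the SDK file): this component lives
// on the left or right hand anchor of the OVRCameraRig, with m_controller set to
// OVRInput.Controller.LTrackedRemote or OVRInput.Controller.RTrackedRemote and
// the two controller model GameObjects assigned as children.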

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: aed62bf3ae2456c408f247f96808ce96
timeCreated: 1486166271
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,38 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Rotates this GameObject at a given speed.
/// </summary>
public class OVRWaitCursor : MonoBehaviour
{
public Vector3 rotateSpeeds = new Vector3(0.0f, 0.0f, -60.0f);
/// <summary>
/// Auto rotates the attached cursor.
/// </summary>
void Update()
{
transform.Rotate(rotateSpeeds * Time.smoothDeltaTime);
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: d313011a8bc474fe49260bde01cffcd3
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData: