Added VR libraries

Chris Midkiff
2018-10-08 23:54:11 -04:00
parent d9eb2a9763
commit 7ce1036e39
1037 changed files with 195630 additions and 348 deletions

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 1f92828e69374384b8cb197653871a6e
folderAsset: yes
timeCreated: 1502989983
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,294 @@
using UnityEngine;
using System.Collections;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
public abstract class OVRCameraComposition : OVRComposition {
protected GameObject cameraFramePlaneObject;
protected float cameraFramePlaneDistance;
protected readonly bool hasCameraDeviceOpened = false;
protected readonly bool useDynamicLighting = false;
internal readonly OVRPlugin.CameraDevice cameraDevice = OVRPlugin.CameraDevice.WebCamera0;
private OVRCameraRig cameraRig;
private Mesh boundaryMesh = null;
private float boundaryMeshTopY = 0.0f;
private float boundaryMeshBottomY = 0.0f;
private OVRManager.VirtualGreenScreenType boundaryMeshType = OVRManager.VirtualGreenScreenType.Off;
protected OVRCameraComposition(OVRManager.CameraDevice inCameraDevice, bool inUseDynamicLighting, OVRManager.DepthQuality depthQuality)
{
cameraDevice = OVRCompositionUtil.ConvertCameraDevice(inCameraDevice);
Debug.Assert(!hasCameraDeviceOpened);
Debug.Assert(!OVRPlugin.IsCameraDeviceAvailable(cameraDevice) || !OVRPlugin.HasCameraDeviceOpened(cameraDevice));
hasCameraDeviceOpened = false;
useDynamicLighting = inUseDynamicLighting;
bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
if (useDynamicLighting && !cameraSupportsDepth)
{
Debug.LogWarning("The camera device doesn't support depth. The result of dynamic lighting might not be correct");
}
if (OVRPlugin.IsCameraDeviceAvailable(cameraDevice))
{
OVRPlugin.CameraExtrinsics extrinsics;
OVRPlugin.CameraIntrinsics intrinsics;
if (OVRPlugin.GetExternalCameraCount() > 0 && OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
{
OVRPlugin.SetCameraDevicePreferredColorFrameSize(cameraDevice, intrinsics.ImageSensorPixelResolution.w, intrinsics.ImageSensorPixelResolution.h);
}
if (useDynamicLighting)
{
OVRPlugin.SetCameraDeviceDepthSensingMode(cameraDevice, OVRPlugin.CameraDeviceDepthSensingMode.Fill);
OVRPlugin.CameraDeviceDepthQuality quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
if (depthQuality == OVRManager.DepthQuality.Low)
{
quality = OVRPlugin.CameraDeviceDepthQuality.Low;
}
else if (depthQuality == OVRManager.DepthQuality.Medium)
{
quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
}
else if (depthQuality == OVRManager.DepthQuality.High)
{
quality = OVRPlugin.CameraDeviceDepthQuality.High;
}
else
{
Debug.LogWarning("Unknown depth quality");
}
OVRPlugin.SetCameraDevicePreferredDepthQuality(cameraDevice, quality);
}
OVRPlugin.OpenCameraDevice(cameraDevice);
if (OVRPlugin.HasCameraDeviceOpened(cameraDevice))
{
hasCameraDeviceOpened = true;
}
}
}
public override void Cleanup()
{
OVRCompositionUtil.SafeDestroy(ref cameraFramePlaneObject);
if (hasCameraDeviceOpened)
{
OVRPlugin.CloseCameraDevice(cameraDevice);
}
}
public override void RecenterPose()
{
boundaryMesh = null;
}
protected void CreateCameraFramePlaneObject(GameObject parentObject, Camera mixedRealityCamera, bool useDynamicLighting)
{
Debug.Assert(cameraFramePlaneObject == null);
cameraFramePlaneObject = GameObject.CreatePrimitive(PrimitiveType.Quad);
cameraFramePlaneObject.name = "MRCameraFrame";
cameraFramePlaneObject.transform.parent = parentObject.transform;
cameraFramePlaneObject.GetComponent<Collider>().enabled = false;
cameraFramePlaneObject.GetComponent<MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
Material cameraFrameMaterial = new Material(Shader.Find(useDynamicLighting ? "Oculus/OVRMRCameraFrameLit" : "Oculus/OVRMRCameraFrame"));
cameraFramePlaneObject.GetComponent<MeshRenderer>().material = cameraFrameMaterial;
cameraFrameMaterial.SetColor("_Color", Color.white);
cameraFrameMaterial.SetFloat("_Visible", 0.0f);
cameraFramePlaneObject.transform.localScale = new Vector3(4, 4, 4);
cameraFramePlaneObject.SetActive(true);
OVRCameraFrameCompositionManager cameraFrameCompositionManager = mixedRealityCamera.gameObject.AddComponent<OVRCameraFrameCompositionManager>();
cameraFrameCompositionManager.cameraFrameGameObj = cameraFramePlaneObject;
cameraFrameCompositionManager.composition = this;
}
private bool nullcameraRigWarningDisplayed = false;
protected void UpdateCameraFramePlaneObject(Camera mainCamera, Camera mixedRealityCamera, RenderTexture boundaryMeshMaskTexture)
{
bool hasError = false;
Material cameraFrameMaterial = cameraFramePlaneObject.GetComponent<MeshRenderer>().material;
Texture2D colorTexture = Texture2D.blackTexture;
Texture2D depthTexture = Texture2D.whiteTexture;
if (OVRPlugin.IsCameraDeviceColorFrameAvailable(cameraDevice))
{
colorTexture = OVRPlugin.GetCameraDeviceColorFrameTexture(cameraDevice);
}
else
{
Debug.LogWarning("Camera: color frame not ready");
hasError = true;
}
bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
if (useDynamicLighting && cameraSupportsDepth)
{
if (OVRPlugin.IsCameraDeviceDepthFrameAvailable(cameraDevice))
{
depthTexture = OVRPlugin.GetCameraDeviceDepthFrameTexture(cameraDevice);
}
else
{
Debug.LogWarning("Camera: depth frame not ready");
hasError = true;
}
}
if (!hasError)
{
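// Place the camera frame plane at the headset's depth along the MR camera's forward axis,
// then size it to fill the camera frustum exactly at that distance
// (height = 2 * distance * tan(fovY / 2), width = height * aspect).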
Vector3 offset = mainCamera.transform.position - mixedRealityCamera.transform.position;
float distance = Vector3.Dot(mixedRealityCamera.transform.forward, offset);
cameraFramePlaneDistance = distance;
cameraFramePlaneObject.transform.position = mixedRealityCamera.transform.position + mixedRealityCamera.transform.forward * distance;
cameraFramePlaneObject.transform.rotation = mixedRealityCamera.transform.rotation;
float tanFov = Mathf.Tan(mixedRealityCamera.fieldOfView * Mathf.Deg2Rad * 0.5f);
cameraFramePlaneObject.transform.localScale = new Vector3(distance * mixedRealityCamera.aspect * tanFov * 2.0f, distance * tanFov * 2.0f, 1.0f);
float worldHeight = distance * tanFov * 2.0f;
float worldWidth = worldHeight * mixedRealityCamera.aspect;
float cullingDistance = float.MaxValue;
cameraRig = null;
if (OVRManager.instance.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off)
{
cameraRig = mainCamera.GetComponentInParent<OVRCameraRig>();
if (cameraRig != null)
{
if (cameraRig.centerEyeAnchor == null)
{
cameraRig = null;
}
}
RefreshBoundaryMesh(mixedRealityCamera, out cullingDistance);
}
cameraFrameMaterial.mainTexture = colorTexture;
cameraFrameMaterial.SetTexture("_DepthTex", depthTexture);
cameraFrameMaterial.SetVector("_FlipParams", new Vector4((OVRManager.instance.flipCameraFrameHorizontally ? 1.0f : 0.0f), (OVRManager.instance.flipCameraFrameVertically ? 1.0f : 0.0f), 0.0f, 0.0f));
cameraFrameMaterial.SetColor("_ChromaKeyColor", OVRManager.instance.chromaKeyColor);
cameraFrameMaterial.SetFloat("_ChromaKeySimilarity", OVRManager.instance.chromaKeySimilarity);
cameraFrameMaterial.SetFloat("_ChromaKeySmoothRange", OVRManager.instance.chromaKeySmoothRange);
cameraFrameMaterial.SetFloat("_ChromaKeySpillRange", OVRManager.instance.chromaKeySpillRange);
cameraFrameMaterial.SetVector("_TextureDimension", new Vector4(colorTexture.width, colorTexture.height, 1.0f / colorTexture.width, 1.0f / colorTexture.height));
cameraFrameMaterial.SetVector("_TextureWorldSize", new Vector4(worldWidth, worldHeight, 0, 0));
cameraFrameMaterial.SetFloat("_SmoothFactor", OVRManager.instance.dynamicLightingSmoothFactor);
cameraFrameMaterial.SetFloat("_DepthVariationClamp", OVRManager.instance.dynamicLightingDepthVariationClampingValue);
cameraFrameMaterial.SetFloat("_CullingDistance", cullingDistance);
if (OVRManager.instance.virtualGreenScreenType == OVRManager.VirtualGreenScreenType.Off || boundaryMesh == null || boundaryMeshMaskTexture == null)
{
cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
}
else
{
if (cameraRig == null)
{
if (!nullcameraRigWarningDisplayed)
{
Debug.LogWarning("Could not find the OVRCameraRig/CenterEyeAnchor object. Please check if the OVRCameraRig has been setup properly. The virtual green screen has been temporarily disabled");
nullcameraRigWarningDisplayed = true;
}
cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
}
else
{
if (nullcameraRigWarningDisplayed)
{
Debug.Log("OVRCameraRig/CenterEyeAnchor object found. Virtual green screen is activated");
nullcameraRigWarningDisplayed = false;
}
cameraFrameMaterial.SetTexture("_MaskTex", boundaryMeshMaskTexture);
}
}
}
}
protected void RefreshBoundaryMesh(Camera camera, out float cullingDistance)
{
float depthTolerance = OVRManager.instance.virtualGreenScreenApplyDepthCulling ? OVRManager.instance.virtualGreenScreenDepthTolerance : float.PositiveInfinity;
cullingDistance = OVRCompositionUtil.GetMaximumBoundaryDistance(camera, OVRCompositionUtil.ToBoundaryType(OVRManager.instance.virtualGreenScreenType)) + depthTolerance;
if (boundaryMesh == null || boundaryMeshType != OVRManager.instance.virtualGreenScreenType || boundaryMeshTopY != OVRManager.instance.virtualGreenScreenTopY || boundaryMeshBottomY != OVRManager.instance.virtualGreenScreenBottomY)
{
boundaryMeshTopY = OVRManager.instance.virtualGreenScreenTopY;
boundaryMeshBottomY = OVRManager.instance.virtualGreenScreenBottomY;
boundaryMesh = OVRCompositionUtil.BuildBoundaryMesh(OVRCompositionUtil.ToBoundaryType(OVRManager.instance.virtualGreenScreenType), boundaryMeshTopY, boundaryMeshBottomY);
boundaryMeshType = OVRManager.instance.virtualGreenScreenType;
// Creates a GameObject for testing purposes only
//GameObject boundaryMeshObject = new GameObject("BoundaryMeshObject");
//boundaryMeshObject.AddComponent<MeshFilter>().mesh = boundaryMesh;
//boundaryMeshObject.AddComponent<MeshRenderer>();
}
}
public class OVRCameraFrameCompositionManager : MonoBehaviour
{
public GameObject cameraFrameGameObj;
public OVRCameraComposition composition;
public RenderTexture boundaryMeshMaskTexture;
private Material cameraFrameMaterial;
private Material whiteMaterial;
void Start()
{
Shader shader = Shader.Find("Oculus/Unlit");
if (!shader)
{
Debug.LogError("Oculus/Unlit shader does not exist");
return;
}
whiteMaterial = new Material(shader);
whiteMaterial.color = Color.white;
}
void OnPreRender()
{
if (OVRManager.instance.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off && boundaryMeshMaskTexture != null && composition.boundaryMesh != null)
{
RenderTexture oldRT = RenderTexture.active;
RenderTexture.active = boundaryMeshMaskTexture;
// The camera matrices haven't been set up when OnPreRender() is executed, so load the projection matrix manually
GL.PushMatrix();
GL.LoadProjectionMatrix(GetComponent<Camera>().projectionMatrix);
GL.Clear(false, true, Color.black);
for (int i = 0; i < whiteMaterial.passCount; ++i)
{
if (whiteMaterial.SetPass(i))
{
Graphics.DrawMeshNow(composition.boundaryMesh, composition.cameraRig.ComputeTrackReferenceMatrix());
}
}
GL.PopMatrix();
RenderTexture.active = oldRT;
}
if (cameraFrameGameObj)
{
if (cameraFrameMaterial == null)
cameraFrameMaterial = cameraFrameGameObj.GetComponent<MeshRenderer>().material;
cameraFrameMaterial.SetFloat("_Visible", 1.0f);
}
}
void OnPostRender()
{
if (cameraFrameGameObj)
{
Debug.Assert(cameraFrameMaterial);
cameraFrameMaterial.SetFloat("_Visible", 0.0f);
}
}
}
}
#endif

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 70818bad1fe6859439b190a61dfb6eb8
timeCreated: 1503089686
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,56 @@
using UnityEngine;
using System.Collections;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
public abstract class OVRComposition {
public abstract OVRManager.CompositionMethod CompositionMethod();
public abstract void Update(Camera mainCamera);
public abstract void Cleanup();
public virtual void RecenterPose() { }
protected bool usingLastAttachedNodePose = false;
protected OVRPose lastAttachedNodePose = new OVRPose(); // Sometimes the attached node pose is not readable (lost tracking, low battery, etc.). Use the last known pose when that happens
internal OVRPose ComputeCameraWorldSpacePose(OVRPlugin.CameraExtrinsics extrinsics)
{
OVRPose worldSpacePose = new OVRPose();
OVRPose trackingSpacePose = new OVRPose();
OVRPose cameraTrackingSpacePose = extrinsics.RelativePose.ToOVRPose();
trackingSpacePose = cameraTrackingSpacePose;
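// If the extrinsics are relative to a tracked node (e.g. a controller mounted on the
// physical camera), compose with that node's current pose; otherwise the relative pose
// is already expressed in tracking space.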
if (extrinsics.AttachedToNode != OVRPlugin.Node.None && OVRPlugin.GetNodePresent(extrinsics.AttachedToNode))
{
if (usingLastAttachedNodePose)
{
Debug.Log("The camera attached node get tracked");
usingLastAttachedNodePose = false;
}
OVRPose attachedNodePose = OVRPlugin.GetNodePose(extrinsics.AttachedToNode, OVRPlugin.Step.Render).ToOVRPose();
lastAttachedNodePose = attachedNodePose;
trackingSpacePose = attachedNodePose * trackingSpacePose;
}
else
{
if (extrinsics.AttachedToNode != OVRPlugin.Node.None)
{
if (!usingLastAttachedNodePose)
{
Debug.LogWarning("The camera attached node could not be tracked, using the last pose");
usingLastAttachedNodePose = true;
}
trackingSpacePose = lastAttachedNodePose * trackingSpacePose;
}
}
worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
return worldSpacePose;
}
}
#endif

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 829a382f3380d4b46ad9670463232a0b
timeCreated: 1502990005
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,149 @@
using UnityEngine;
using System.Collections.Generic;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
internal class OVRCompositionUtil {
public static void SafeDestroy(GameObject obj)
{
if (Application.isPlaying)
{
GameObject.Destroy(obj);
}
else
{
GameObject.DestroyImmediate(obj);
}
}
public static void SafeDestroy(ref GameObject obj)
{
SafeDestroy(obj);
obj = null;
}
public static OVRPlugin.CameraDevice ConvertCameraDevice(OVRManager.CameraDevice cameraDevice)
{
if (cameraDevice == OVRManager.CameraDevice.WebCamera0)
{
return OVRPlugin.CameraDevice.WebCamera0;
}
else if (cameraDevice == OVRManager.CameraDevice.WebCamera1)
{
return OVRPlugin.CameraDevice.WebCamera1;
}
else if (cameraDevice == OVRManager.CameraDevice.ZEDCamera)
{
return OVRPlugin.CameraDevice.ZEDCamera;
}
else
{
return OVRPlugin.CameraDevice.None;
}
}
public static OVRBoundary.BoundaryType ToBoundaryType(OVRManager.VirtualGreenScreenType type)
{
if (type == OVRManager.VirtualGreenScreenType.OuterBoundary)
{
return OVRBoundary.BoundaryType.OuterBoundary;
}
else if (type == OVRManager.VirtualGreenScreenType.PlayArea)
{
return OVRBoundary.BoundaryType.PlayArea;
}
else
{
Debug.LogWarning("Unmatched VirtualGreenScreenType");
return OVRBoundary.BoundaryType.OuterBoundary;
}
}
public static Vector3 GetWorldPosition(Vector3 trackingSpacePosition)
{
OVRPose tsPose;
tsPose.position = trackingSpacePosition;
tsPose.orientation = Quaternion.identity;
OVRPose wsPose = OVRExtensions.ToWorldSpacePose(tsPose);
Vector3 pos = wsPose.position;
return pos;
}
public static float GetMaximumBoundaryDistance(Camera camera, OVRBoundary.BoundaryType boundaryType)
{
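// Project each boundary point onto the camera's forward axis and return the largest
// projection; callers use this as a far culling distance for the virtual green screen.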
if (!OVRManager.boundary.GetConfigured())
{
return float.MaxValue;
}
Vector3[] geometry = OVRManager.boundary.GetGeometry(boundaryType);
if (geometry.Length == 0)
{
return float.MaxValue;
}
float maxDistance = -float.MaxValue;
foreach (Vector3 v in geometry)
{
Vector3 pos = GetWorldPosition(v);
float distance = Vector3.Dot(camera.transform.forward, pos);
if (maxDistance < distance)
{
maxDistance = distance;
}
}
return maxDistance;
}
public static Mesh BuildBoundaryMesh(OVRBoundary.BoundaryType boundaryType, float topY, float bottomY)
{
if (!OVRManager.boundary.GetConfigured())
{
return null;
}
List<Vector3> geometry = new List<Vector3>(OVRManager.boundary.GetGeometry(boundaryType));
if (geometry.Count == 0)
{
return null;
}
geometry.Add(geometry[0]);
int numPoints = geometry.Count;
Vector3[] vertices = new Vector3[numPoints * 2];
Vector2[] uvs = new Vector2[numPoints * 2];
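// Vertex layout: indices [0, numPoints) form the bottom ring and [numPoints, 2 * numPoints)
// the top ring; the first point was appended again above so the wall closes into a loop.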
for (int i = 0; i < numPoints; ++i)
{
Vector3 v = geometry[i];
vertices[i] = new Vector3(v.x, bottomY, v.z);
vertices[i + numPoints] = new Vector3(v.x, topY, v.z);
uvs[i] = new Vector2((float)i / (numPoints - 1), 0.0f);
uvs[i + numPoints] = new Vector2(uvs[i].x, 1.0f);
}
int[] triangles = new int[(numPoints - 1) * 2 * 3];
for (int i = 0; i < numPoints - 1; ++i)
{
// The geometry is built clockwise; only the back faces should be rendered into the camera frame mask
triangles[i * 6 + 0] = i;
triangles[i * 6 + 1] = i + numPoints;
triangles[i * 6 + 2] = i + 1 + numPoints;
triangles[i * 6 + 3] = i;
triangles[i * 6 + 4] = i + 1 + numPoints;
triangles[i * 6 + 5] = i + 1;
}
Mesh mesh = new Mesh();
mesh.vertices = vertices;
mesh.uv = uvs;
mesh.triangles = triangles;
return mesh;
}
}
#endif

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 43bf91d46b2eb874a842be95aee2cc9a
timeCreated: 1502992822
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,118 @@
using UnityEngine;
using System.Collections;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
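// Direct composition: a single full-screen camera renders the virtual scene over the live
// physical-camera frame (drawn on the camera frame plane), so the mixed-reality image is
// composited entirely in-engine.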
public class OVRDirectComposition : OVRCameraComposition
{
public GameObject directCompositionCameraGameObject;
public Camera directCompositionCamera;
public RenderTexture boundaryMeshMaskTexture = null;
public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.Direct; }
public OVRDirectComposition(GameObject parentObject, Camera mainCamera, OVRManager.CameraDevice cameraDevice, bool useDynamicLighting, OVRManager.DepthQuality depthQuality)
: base(cameraDevice, useDynamicLighting, depthQuality)
{
Debug.Assert(directCompositionCameraGameObject == null);
directCompositionCameraGameObject = new GameObject();
directCompositionCameraGameObject.name = "MRDirectCompositionCamera";
directCompositionCameraGameObject.transform.parent = parentObject.transform;
directCompositionCamera = directCompositionCameraGameObject.AddComponent<Camera>();
directCompositionCamera.stereoTargetEye = StereoTargetEyeMask.None;
directCompositionCamera.depth = float.MaxValue;
directCompositionCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
directCompositionCamera.clearFlags = mainCamera.clearFlags;
directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
directCompositionCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
directCompositionCamera.nearClipPlane = mainCamera.nearClipPlane;
directCompositionCamera.farClipPlane = mainCamera.farClipPlane;
if (!hasCameraDeviceOpened)
{
Debug.LogError("Unable to open camera device " + cameraDevice);
}
else
{
Debug.Log("DirectComposition activated : useDynamicLighting " + (useDynamicLighting ? "ON" : "OFF"));
CreateCameraFramePlaneObject(parentObject, directCompositionCamera, useDynamicLighting);
}
}
public override void Update(Camera mainCamera)
{
if (!hasCameraDeviceOpened)
{
return;
}
if (!OVRPlugin.SetHandNodePoseStateLatency(OVRManager.instance.handPoseStateLatency))
{
Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
}
directCompositionCamera.clearFlags = mainCamera.clearFlags;
directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
directCompositionCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
directCompositionCamera.nearClipPlane = mainCamera.nearClipPlane;
directCompositionCamera.farClipPlane = mainCamera.farClipPlane;
if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
{
OVRPose worldSpacePose = new OVRPose();
OVRPose trackingSpacePose = new OVRPose();
trackingSpacePose.position = OVRMixedReality.fakeCameraPositon;
trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
directCompositionCamera.aspect = OVRMixedReality.fakeCameraAspect;
directCompositionCamera.transform.FromOVRPose(worldSpacePose);
}
else
{
OVRPlugin.CameraExtrinsics extrinsics;
OVRPlugin.CameraIntrinsics intrinsics;
// So far, only one camera is supported for MR; always use camera index 0
if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
{
OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
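// Convert the half-angle tangents from the camera intrinsics into Unity's vertical
// field of view (in degrees) and aspect ratio.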
float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
directCompositionCamera.fieldOfView = fovY;
directCompositionCamera.aspect = aspect;
directCompositionCamera.transform.FromOVRPose(worldSpacePose);
}
else
{
Debug.LogWarning("Failed to get external camera information");
}
}
if (hasCameraDeviceOpened)
{
if (boundaryMeshMaskTexture == null || boundaryMeshMaskTexture.width != Screen.width || boundaryMeshMaskTexture.height != Screen.height)
{
boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
boundaryMeshMaskTexture.Create();
}
UpdateCameraFramePlaneObject(mainCamera, directCompositionCamera, boundaryMeshMaskTexture);
directCompositionCamera.GetComponent<OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = boundaryMeshMaskTexture;
}
}
public override void Cleanup()
{
base.Cleanup();
OVRCompositionUtil.SafeDestroy(ref directCompositionCameraGameObject);
directCompositionCamera = null;
Debug.Log("DirectComposition deactivated");
}
}
#endif

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 8e9d1c62d6c68c7429ce265558cfd2b2
timeCreated: 1502990248
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,165 @@
using UnityEngine;
using System.Collections;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
public class OVRExternalComposition : OVRComposition
{
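// External composition renders the background view into the left half of the output and the
// foreground (cleared to the chroma-key color) into the right half, so an external tool can
// composite both halves with the physical camera footage.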
private GameObject foregroundCameraGameObject;
private Camera foregroundCamera;
private GameObject backgroundCameraGameObject;
private Camera backgroundCamera;
private GameObject cameraProxyPlane;
public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.External; }
public OVRExternalComposition(GameObject parentObject, Camera mainCamera)
{
Debug.Assert(backgroundCameraGameObject == null);
backgroundCameraGameObject = new GameObject();
backgroundCameraGameObject.name = "MRBackgroundCamera";
backgroundCameraGameObject.transform.parent = parentObject.transform;
backgroundCamera = backgroundCameraGameObject.AddComponent<Camera>();
backgroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
backgroundCamera.depth = float.MaxValue;
backgroundCamera.rect = new Rect(0.0f, 0.0f, 0.5f, 1.0f);
backgroundCamera.clearFlags = mainCamera.clearFlags;
backgroundCamera.backgroundColor = mainCamera.backgroundColor;
backgroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
backgroundCamera.nearClipPlane = mainCamera.nearClipPlane;
backgroundCamera.farClipPlane = mainCamera.farClipPlane;
Debug.Assert(foregroundCameraGameObject == null);
foregroundCameraGameObject = new GameObject();
foregroundCameraGameObject.name = "MRForegroundCamera";
foregroundCameraGameObject.transform.parent = parentObject.transform;
foregroundCamera = foregroundCameraGameObject.AddComponent<Camera>();
foregroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
foregroundCamera.depth = float.MaxValue;
foregroundCamera.rect = new Rect(0.5f, 0.0f, 0.5f, 1.0f);
foregroundCamera.clearFlags = CameraClearFlags.Color;
foregroundCamera.backgroundColor = OVRMixedReality.chromaKeyColor;
foregroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
foregroundCamera.farClipPlane = mainCamera.farClipPlane;
// Create cameraProxyPlane for clipping
Debug.Assert(cameraProxyPlane == null);
cameraProxyPlane = GameObject.CreatePrimitive(PrimitiveType.Quad);
cameraProxyPlane.name = "MRProxyClipPlane";
cameraProxyPlane.transform.parent = parentObject.transform;
cameraProxyPlane.GetComponent<Collider>().enabled = false;
cameraProxyPlane.GetComponent<MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
Material clipMaterial = new Material(Shader.Find("Oculus/OVRMRClipPlane"));
cameraProxyPlane.GetComponent<MeshRenderer>().material = clipMaterial;
clipMaterial.SetColor("_Color", OVRMixedReality.chromaKeyColor);
clipMaterial.SetFloat("_Visible", 0.0f);
cameraProxyPlane.transform.localScale = new Vector3(1000, 1000, 1000);
cameraProxyPlane.SetActive(true);
OVRMRForegroundCameraManager foregroundCameraManager = foregroundCameraGameObject.AddComponent<OVRMRForegroundCameraManager>();
foregroundCameraManager.clipPlaneGameObj = cameraProxyPlane;
}
public override void Update(Camera mainCamera)
{
OVRPlugin.SetHandNodePoseStateLatency(0.0); // HandNodePoseStateLatency doesn't apply to external composition, so always force it to 0.0
backgroundCamera.clearFlags = mainCamera.clearFlags;
backgroundCamera.backgroundColor = mainCamera.backgroundColor;
backgroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
backgroundCamera.nearClipPlane = mainCamera.nearClipPlane;
backgroundCamera.farClipPlane = mainCamera.farClipPlane;
foregroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
foregroundCamera.farClipPlane = mainCamera.farClipPlane;
if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
{
OVRPose worldSpacePose = new OVRPose();
OVRPose trackingSpacePose = new OVRPose();
trackingSpacePose.position = OVRMixedReality.fakeCameraPositon;
trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
backgroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
backgroundCamera.transform.FromOVRPose(worldSpacePose);
foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
foregroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
foregroundCamera.transform.FromOVRPose(worldSpacePose);
}
else
{
OVRPlugin.CameraExtrinsics extrinsics;
OVRPlugin.CameraIntrinsics intrinsics;
// So far, only one camera is supported for MR; always use camera index 0
if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
{
OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
backgroundCamera.fieldOfView = fovY;
backgroundCamera.aspect = aspect;
backgroundCamera.transform.FromOVRPose(worldSpacePose);
foregroundCamera.fieldOfView = fovY;
foregroundCamera.aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
foregroundCamera.transform.FromOVRPose(worldSpacePose);
}
else
{
Debug.LogError("Failed to get external camera information");
return;
}
}
// Assume the player is always standing upright
Vector3 externalCameraToHeadXZ = mainCamera.transform.position - foregroundCamera.transform.position;
externalCameraToHeadXZ.y = 0;
cameraProxyPlane.transform.position = mainCamera.transform.position;
cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + externalCameraToHeadXZ);
}
public override void Cleanup()
{
OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
backgroundCamera = null;
OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
foregroundCamera = null;
OVRCompositionUtil.SafeDestroy(ref cameraProxyPlane);
Debug.Log("ExternalComposition deactivated");
}
}
/// <summary>
/// Internal helper class for the foreground camera; not intended for use outside this file.
/// </summary>
internal class OVRMRForegroundCameraManager : MonoBehaviour
{
public GameObject clipPlaneGameObj;
private Material clipPlaneMaterial;
void OnPreRender()
{
// The clipPlaneGameObj should only be visible to the foreground camera
if (clipPlaneGameObj)
{
if (clipPlaneMaterial == null)
clipPlaneMaterial = clipPlaneGameObj.GetComponent<MeshRenderer>().material;
clipPlaneGameObj.GetComponent<MeshRenderer>().material.SetFloat("_Visible", 1.0f);
}
}
void OnPostRender()
{
if (clipPlaneGameObj)
{
Debug.Assert(clipPlaneMaterial);
clipPlaneGameObj.GetComponent<MeshRenderer>().material.SetFloat("_Visible", 0.0f);
}
}
}
#endif

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 2c109ff55176f71418ec2c06d1b5d28e
timeCreated: 1502990231
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,373 @@
using UnityEngine;
using UnityEngine.Rendering;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
public class OVRSandwichComposition : OVRCameraComposition
{
public float frameRealtime;
public Camera fgCamera;
public Camera bgCamera;
public class HistoryRecord
{
public float timestamp = float.MinValue;
public RenderTexture fgRenderTexture;
public RenderTexture bgRenderTexture;
public RenderTexture boundaryMeshMaskTexture;
}
public readonly int historyRecordCount = 8; // enough to compensate for ~88 ms of latency at 90 fps
public readonly HistoryRecord[] historyRecordArray;
public int historyRecordCursorIndex = 0;
public GameObject cameraProxyPlane;
public Camera compositionCamera;
public OVRSandwichCompositionManager compositionManager;
private int _cameraFramePlaneLayer = -1;
// find an unnamed layer between 24..29
public int cameraFramePlaneLayer
{
get
{
if (_cameraFramePlaneLayer < 0)
{
for (int i=24; i<=29; ++i)
{
string layerName = LayerMask.LayerToName(i);
if (layerName == null || layerName.Length == 0)
{
_cameraFramePlaneLayer = i;
break;
}
}
if (_cameraFramePlaneLayer == -1)
{
Debug.LogWarning("Unable to find an unnamed layer between 24 and 29.");
_cameraFramePlaneLayer = 25;
}
Debug.LogFormat("Set the CameraFramePlaneLayer in SandwichComposition to {0}. Please do NOT put any other gameobject in this layer.", _cameraFramePlaneLayer);
}
return _cameraFramePlaneLayer;
}
}
public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.Sandwich; }
public OVRSandwichComposition(GameObject parentObject, Camera mainCamera, OVRManager.CameraDevice cameraDevice, bool useDynamicLighting, OVRManager.DepthQuality depthQuality)
: base(cameraDevice, useDynamicLighting, depthQuality)
{
frameRealtime = Time.realtimeSinceStartup;
historyRecordCount = OVRManager.instance.sandwichCompositionBufferedFrames;
if (historyRecordCount < 1)
{
Debug.LogWarning("Invalid sandwichCompositionBufferedFrames in OVRManager. It should be at least 1");
historyRecordCount = 1;
}
if (historyRecordCount > 16)
{
Debug.LogWarning("The value of sandwichCompositionBufferedFrames in OVRManager is too big. It would consume a lot of memory. It has been override to 16");
historyRecordCount = 16;
}
historyRecordArray = new HistoryRecord[historyRecordCount];
for (int i=0; i<historyRecordCount; ++i)
{
historyRecordArray[i] = new HistoryRecord();
}
historyRecordCursorIndex = 0;
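// Three cameras build the "sandwich": the foreground (depth 200) and background (depth 100)
// cameras render the scene into buffered history textures, while the composition camera
// (maximum depth) renders only the camera frame plane and blits the buffered background and
// foreground layers beneath and on top of it.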
GameObject fgObject = new GameObject("MRSandwichForegroundCamera");
fgObject.transform.parent = parentObject.transform;
fgCamera = fgObject.AddComponent<Camera>();
fgCamera.depth = 200;
fgCamera.clearFlags = CameraClearFlags.SolidColor;
fgCamera.backgroundColor = Color.clear;
fgCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
fgCamera.nearClipPlane = mainCamera.nearClipPlane;
fgCamera.farClipPlane = mainCamera.farClipPlane;
GameObject bgObject = new GameObject("MRSandwichBackgroundCamera");
bgObject.transform.parent = parentObject.transform;
bgCamera = bgObject.AddComponent<Camera>();
bgCamera.depth = 100;
bgCamera.clearFlags = mainCamera.clearFlags;
bgCamera.backgroundColor = mainCamera.backgroundColor;
bgCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
bgCamera.nearClipPlane = mainCamera.nearClipPlane;
bgCamera.farClipPlane = mainCamera.farClipPlane;
// Create cameraProxyPlane for clipping
Debug.Assert(cameraProxyPlane == null);
cameraProxyPlane = GameObject.CreatePrimitive(PrimitiveType.Quad);
cameraProxyPlane.name = "MRProxyClipPlane";
cameraProxyPlane.transform.parent = parentObject.transform;
cameraProxyPlane.GetComponent<Collider>().enabled = false;
cameraProxyPlane.GetComponent<MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
Material clipMaterial = new Material(Shader.Find("Oculus/OVRMRClipPlane"));
cameraProxyPlane.GetComponent<MeshRenderer>().material = clipMaterial;
clipMaterial.SetColor("_Color", Color.clear);
clipMaterial.SetFloat("_Visible", 0.0f);
cameraProxyPlane.transform.localScale = new Vector3(1000, 1000, 1000);
cameraProxyPlane.SetActive(true);
OVRMRForegroundCameraManager foregroundCameraManager = fgCamera.gameObject.AddComponent<OVRMRForegroundCameraManager>();
foregroundCameraManager.clipPlaneGameObj = cameraProxyPlane;
GameObject compositionCameraObject = new GameObject("MRSandwichCaptureCamera");
compositionCameraObject.transform.parent = parentObject.transform;
compositionCamera = compositionCameraObject.AddComponent<Camera>();
compositionCamera.stereoTargetEye = StereoTargetEyeMask.None;
compositionCamera.depth = float.MaxValue;
compositionCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
compositionCamera.clearFlags = CameraClearFlags.Depth;
compositionCamera.backgroundColor = mainCamera.backgroundColor;
compositionCamera.cullingMask = 1 << cameraFramePlaneLayer;
compositionCamera.nearClipPlane = mainCamera.nearClipPlane;
compositionCamera.farClipPlane = mainCamera.farClipPlane;
if (!hasCameraDeviceOpened)
{
Debug.LogError("Unable to open camera device " + cameraDevice);
}
else
{
Debug.Log("SandwichComposition activated : useDynamicLighting " + (useDynamicLighting ? "ON" : "OFF"));
CreateCameraFramePlaneObject(parentObject, compositionCamera, useDynamicLighting);
cameraFramePlaneObject.layer = cameraFramePlaneLayer;
RefreshRenderTextures(mainCamera);
compositionManager = compositionCamera.gameObject.AddComponent<OVRSandwichCompositionManager>();
compositionManager.fgTexture = historyRecordArray[historyRecordCursorIndex].fgRenderTexture;
compositionManager.bgTexture = historyRecordArray[historyRecordCursorIndex].bgRenderTexture;
}
}
public override void Update(Camera mainCamera)
{
if (!hasCameraDeviceOpened)
{
return;
}
frameRealtime = Time.realtimeSinceStartup;
++historyRecordCursorIndex;
if (historyRecordCursorIndex >= historyRecordCount)
{
historyRecordCursorIndex = 0;
}
if (!OVRPlugin.SetHandNodePoseStateLatency(OVRManager.instance.handPoseStateLatency))
{
Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
}
RefreshRenderTextures(mainCamera);
bgCamera.clearFlags = mainCamera.clearFlags;
bgCamera.backgroundColor = mainCamera.backgroundColor;
bgCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
fgCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
{
OVRPose worldSpacePose = new OVRPose();
OVRPose trackingSpacePose = new OVRPose();
trackingSpacePose.position = OVRMixedReality.fakeCameraPositon;
trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
RefreshCameraPoses(OVRMixedReality.fakeCameraFov, OVRMixedReality.fakeCameraAspect, worldSpacePose);
}
else
{
OVRPlugin.CameraExtrinsics extrinsics;
OVRPlugin.CameraIntrinsics intrinsics;
// So far, only one camera is supported for MR; always use camera index 0
if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
{
OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);
float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
RefreshCameraPoses(fovY, aspect, worldSpacePose);
}
else
{
Debug.LogWarning("Failed to get external camera information");
}
}
compositionCamera.GetComponent<OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = historyRecordArray[historyRecordCursorIndex].boundaryMeshMaskTexture;
HistoryRecord record = GetHistoryRecordForComposition();
UpdateCameraFramePlaneObject(mainCamera, compositionCamera, record.boundaryMeshMaskTexture);
OVRSandwichCompositionManager compositionManager = compositionCamera.gameObject.GetComponent<OVRSandwichCompositionManager>();
compositionManager.fgTexture = record.fgRenderTexture;
compositionManager.bgTexture = record.bgRenderTexture;
cameraProxyPlane.transform.position = fgCamera.transform.position + fgCamera.transform.forward * cameraFramePlaneDistance;
cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + fgCamera.transform.forward);
}
public override void Cleanup()
{
base.Cleanup();
Camera[] cameras = { fgCamera, bgCamera, compositionCamera };
foreach (Camera c in cameras)
{
OVRCompositionUtil.SafeDestroy(c.gameObject);
}
fgCamera = null;
bgCamera = null;
compositionCamera = null;
Debug.Log("SandwichComposition deactivated");
}
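// The foreground layer is alpha-blended over the background, so render texture formats
// without an alpha channel are promoted to close equivalents that include one.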
private RenderTextureFormat DesiredRenderTextureFormat(RenderTextureFormat originalFormat)
{
if (originalFormat == RenderTextureFormat.RGB565)
{
return RenderTextureFormat.ARGB1555;
}
else if (originalFormat == RenderTextureFormat.RGB111110Float)
{
return RenderTextureFormat.ARGBHalf;
}
else
{
return originalFormat;
}
}
protected void RefreshRenderTextures(Camera mainCamera)
{
int width = Screen.width;
int height = Screen.height;
RenderTextureFormat format = mainCamera.targetTexture ? DesiredRenderTextureFormat(mainCamera.targetTexture.format) : RenderTextureFormat.ARGB32;
int depth = mainCamera.targetTexture ? mainCamera.targetTexture.depth : 24;
Debug.Assert(fgCamera != null && bgCamera != null);
HistoryRecord record = historyRecordArray[historyRecordCursorIndex];
record.timestamp = frameRealtime;
if (record.fgRenderTexture == null || record.fgRenderTexture.width != width || record.fgRenderTexture.height != height || record.fgRenderTexture.format != format || record.fgRenderTexture.depth != depth)
{
record.fgRenderTexture = new RenderTexture(width, height, depth, format);
record.fgRenderTexture.name = "Sandwich FG " + historyRecordCursorIndex.ToString();
}
fgCamera.targetTexture = record.fgRenderTexture;
if (record.bgRenderTexture == null || record.bgRenderTexture.width != width || record.bgRenderTexture.height != height || record.bgRenderTexture.format != format || record.bgRenderTexture.depth != depth)
{
record.bgRenderTexture = new RenderTexture(width, height, depth, format);
record.bgRenderTexture.name = "Sandwich BG " + historyRecordCursorIndex.ToString();
}
bgCamera.targetTexture = record.bgRenderTexture;
if (OVRManager.instance.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off)
{
if (record.boundaryMeshMaskTexture == null || record.boundaryMeshMaskTexture.width != width || record.boundaryMeshMaskTexture.height != height)
{
record.boundaryMeshMaskTexture = new RenderTexture(width, height, 0, RenderTextureFormat.R8);
record.boundaryMeshMaskTexture.name = "Boundary Mask " + historyRecordCursorIndex.ToString();
record.boundaryMeshMaskTexture.Create();
}
}
else
{
record.boundaryMeshMaskTexture = null;
}
Debug.Assert(fgCamera.targetTexture != null && bgCamera.targetTexture != null && (OVRManager.instance.virtualGreenScreenType == OVRManager.VirtualGreenScreenType.Off || record.boundaryMeshMaskTexture != null));
}
protected HistoryRecord GetHistoryRecordForComposition()
{
float expectedTime = frameRealtime - OVRManager.instance.sandwichCompositionRenderLatency;
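// Walk backward through the ring buffer until a record at or before the expected time is
// found, then return whichever of the two bracketing records is closer to it.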
int currIndex = historyRecordCursorIndex;
int prevIndex = currIndex - 1;
if (prevIndex < 0)
{
prevIndex = historyRecordCount - 1;
}
while (prevIndex != historyRecordCursorIndex)
{
if (historyRecordArray[prevIndex].timestamp <= expectedTime)
{
float timeToCurrIndex = historyRecordArray[currIndex].timestamp - expectedTime;
float timeToPrevIndex = expectedTime - historyRecordArray[prevIndex].timestamp;
return timeToCurrIndex <= timeToPrevIndex ? historyRecordArray[currIndex] : historyRecordArray[prevIndex];
}
currIndex = prevIndex;
prevIndex = currIndex - 1;
if (prevIndex < 0) prevIndex = historyRecordCount - 1;
}
// return the earliest frame
return historyRecordArray[currIndex];
}
protected void RefreshCameraPoses(float fovY, float aspect, OVRPose pose)
{
Camera[] cameras = { fgCamera, bgCamera, compositionCamera };
foreach (Camera c in cameras)
{
c.fieldOfView = fovY;
c.aspect = aspect;
c.transform.FromOVRPose(pose);
}
}
public class OVRSandwichCompositionManager : MonoBehaviour
{
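// Blits the buffered background texture before the composition camera draws the camera
// frame plane, then alpha-blends the buffered foreground texture on top afterward.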
public RenderTexture fgTexture;
public RenderTexture bgTexture;
public Material alphaBlendMaterial;
void Start()
{
Shader alphaBlendShader = Shader.Find("Oculus/UnlitTransparent");
if (alphaBlendShader == null)
{
Debug.LogError("Unable to create transparent shader");
return;
}
alphaBlendMaterial = new Material(alphaBlendShader);
}
private void OnPreRender()
{
if (fgTexture == null || bgTexture == null || alphaBlendMaterial == null)
{
Debug.LogError("OVRSandwichCompositionManager has not setup properly");
return;
}
Graphics.Blit(bgTexture, RenderTexture.active);
}
void OnPostRender()
{
if (fgTexture == null || bgTexture == null || alphaBlendMaterial == null)
{
Debug.LogError("OVRSandwichCompositionManager has not setup properly");
return;
}
Graphics.Blit(fgTexture, RenderTexture.active, alphaBlendMaterial);
}
}
}
#endif

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 3c02efcdd3fb2aa4e9c641b0c2a54b9a
timeCreated: 1502990248
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: e87d4bbdfc8d17445b4a41760b401026
folderAsset: yes
timeCreated: 1510282190
licenseType: Pro
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,204 @@
using UnityEngine;
using UnityEditor;
using System.Collections;
using System.Reflection;
[CustomEditor(typeof(OVRManager))]
public class OVRManagerEditor : Editor
{
override public void OnInspectorGUI()
{
DrawDefaultInspector();
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
OVRManager manager = (OVRManager)target;
EditorGUILayout.Space();
EditorGUILayout.LabelField("Mixed Reality Capture", EditorStyles.boldLabel);
SetupBoolField("Show Properties", ref manager.expandMixedRealityCapturePropertySheet);
if (manager.expandMixedRealityCapturePropertySheet)
{
string[] layerMaskOptions = new string[32];
for (int i=0; i<32; ++i)
{
layerMaskOptions[i] = LayerMask.LayerToName(i);
if (layerMaskOptions[i].Length == 0)
{
layerMaskOptions[i] = "<Layer " + i.ToString() + ">";
}
}
EditorGUI.indentLevel++;
EditorGUILayout.Space();
SetupBoolField("enableMixedReality", ref manager.enableMixedReality);
SetupCompositionMethodField("compositionMethod", ref manager.compositionMethod);
SetupLayerMaskField("extraHiddenLayers", ref manager.extraHiddenLayers, layerMaskOptions);
if (manager.compositionMethod == OVRManager.CompositionMethod.Direct || manager.compositionMethod == OVRManager.CompositionMethod.Sandwich)
{
EditorGUILayout.Space();
if (manager.compositionMethod == OVRManager.CompositionMethod.Direct)
{
EditorGUILayout.LabelField("Direct Composition", EditorStyles.boldLabel);
}
else
{
EditorGUILayout.LabelField("Sandwich Composition", EditorStyles.boldLabel);
}
EditorGUI.indentLevel++;
EditorGUILayout.Space();
EditorGUILayout.LabelField("Camera", EditorStyles.boldLabel);
SetupCameraDeviceField("capturingCameraDevice", ref manager.capturingCameraDevice);
SetupBoolField("flipCameraFrameHorizontally", ref manager.flipCameraFrameHorizontally);
SetupBoolField("flipCameraFrameVertically", ref manager.flipCameraFrameVertically);
EditorGUILayout.Space();
EditorGUILayout.LabelField("Chroma Key", EditorStyles.boldLabel);
SetupColorField("chromaKeyColor", ref manager.chromaKeyColor);
SetupFloatField("chromaKeySimilarity", ref manager.chromaKeySimilarity);
SetupFloatField("chromaKeySmoothRange", ref manager.chromaKeySmoothRange);
SetupFloatField("chromaKeySpillRange", ref manager.chromaKeySpillRange);
EditorGUILayout.Space();
EditorGUILayout.LabelField("Dynamic Lighting", EditorStyles.boldLabel);
SetupBoolField("useDynamicLighting", ref manager.useDynamicLighting);
SetupDepthQualityField("depthQuality", ref manager.depthQuality);
SetupFloatField("dynamicLightingSmoothFactor", ref manager.dynamicLightingSmoothFactor);
SetupFloatField("dynamicLightingDepthVariationClampingValue", ref manager.dynamicLightingDepthVariationClampingValue);
EditorGUILayout.Space();
EditorGUILayout.LabelField("Virtual Green Screen", EditorStyles.boldLabel);
SetupVirtualGreenTypeField("virtualGreenScreenType", ref manager.virtualGreenScreenType);
SetupFloatField("virtualGreenScreenTopY", ref manager.virtualGreenScreenTopY);
SetupFloatField("virtualGreenScreenBottomY", ref manager.virtualGreenScreenBottomY);
SetupBoolField("virtualGreenScreenApplyDepthCulling", ref manager.virtualGreenScreenApplyDepthCulling);
SetupFloatField("virtualGreenScreenDepthTolerance", ref manager.virtualGreenScreenDepthTolerance);
EditorGUILayout.Space();
EditorGUILayout.LabelField("Latency Control", EditorStyles.boldLabel);
SetupFloatField("handPoseStateLatency", ref manager.handPoseStateLatency);
if (manager.compositionMethod == OVRManager.CompositionMethod.Sandwich)
{
SetupFloatField("sandwichCompositionRenderLatency", ref manager.sandwichCompositionRenderLatency);
SetupIntField("sandwichCompositionBufferedFrames", ref manager.sandwichCompositionBufferedFrames);
}
EditorGUI.indentLevel--;
}
EditorGUI.indentLevel--;
}
#endif
}
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
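// Each Setup*Field helper below wraps an EditorGUILayout control in a
// BeginChangeCheck/EndChangeCheck pair and records an Undo step before writing the new
// value back to the inspected OVRManager.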
void SetupBoolField(string name, ref bool member)
{
EditorGUI.BeginChangeCheck();
bool value = EditorGUILayout.Toggle(name, member);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
member = value;
}
}
void SetupIntField(string name, ref int member)
{
EditorGUI.BeginChangeCheck();
int value = EditorGUILayout.IntField(name, member);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
member = value;
}
}
void SetupFloatField(string name, ref float member)
{
EditorGUI.BeginChangeCheck();
float value = EditorGUILayout.FloatField(name, member);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
member = value;
}
}
void SetupDoubleField(string name, ref double member)
{
EditorGUI.BeginChangeCheck();
double value = EditorGUILayout.DoubleField(name, member);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
member = value;
}
}
void SetupColorField(string name, ref Color member)
{
EditorGUI.BeginChangeCheck();
Color value = EditorGUILayout.ColorField(name, member);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
member = value;
}
}
void SetupLayerMaskField(string name, ref LayerMask layerMask, string[] layerMaskOptions)
{
EditorGUI.BeginChangeCheck();
int value = EditorGUILayout.MaskField(name, layerMask, layerMaskOptions);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
layerMask = value;
}
}
void SetupCompositionMethodField(string name, ref OVRManager.CompositionMethod method)
{
EditorGUI.BeginChangeCheck();
OVRManager.CompositionMethod value = (OVRManager.CompositionMethod)EditorGUILayout.EnumPopup(name, method);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
method = value;
}
}
void SetupCameraDeviceField(string name, ref OVRManager.CameraDevice device)
{
EditorGUI.BeginChangeCheck();
OVRManager.CameraDevice value = (OVRManager.CameraDevice)EditorGUILayout.EnumPopup(name, device);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
device = value;
}
}
void SetupDepthQualityField(string name, ref OVRManager.DepthQuality depthQuality)
{
EditorGUI.BeginChangeCheck();
OVRManager.DepthQuality value = (OVRManager.DepthQuality)EditorGUILayout.EnumPopup(name, depthQuality);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
depthQuality = value;
}
}
void SetupVirtualGreenTypeField(string name, ref OVRManager.VirtualGreenScreenType virtualGreenScreenType)
{
EditorGUI.BeginChangeCheck();
OVRManager.VirtualGreenScreenType value = (OVRManager.VirtualGreenScreenType)EditorGUILayout.EnumPopup(name, virtualGreenScreenType);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(target, "Changed " + name);
virtualGreenScreenType = value;
}
}
#endif
}

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 9b07d18088099f94fa00fc15e64b2b17
timeCreated: 1502747851
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,188 @@
using System;
using System.Collections.Generic;
using UnityEngine;
using VR = UnityEngine.VR;
using System.Runtime.InteropServices;
/// <summary>
/// Provides access to the Oculus boundary system.
/// </summary>
public class OVRBoundary
{
/// <summary>
/// Specifies a tracked node that can be queried through the boundary system.
/// </summary>
public enum Node
{
HandLeft = OVRPlugin.Node.HandLeft, ///< Tracks the left hand node.
HandRight = OVRPlugin.Node.HandRight, ///< Tracks the right hand node.
Head = OVRPlugin.Node.Head, ///< Tracks the head node.
}
/// <summary>
/// Specifies a boundary type surface.
/// </summary>
public enum BoundaryType
{
OuterBoundary = OVRPlugin.BoundaryType.OuterBoundary, ///< Outer boundary that closely matches the user's configured walls.
PlayArea = OVRPlugin.BoundaryType.PlayArea, ///< Smaller convex area inset within the outer boundary.
}
/// <summary>
/// Provides test results of boundary system queries.
/// </summary>
public struct BoundaryTestResult
{
public bool IsTriggering; ///< Returns true if the queried test would violate and/or trigger the tested boundary types.
public float ClosestDistance; ///< Returns the distance between the queried test object and the closest tested boundary type.
public Vector3 ClosestPoint; ///< Returns the closest point to the queried test object.
public Vector3 ClosestPointNormal; ///< Returns the normal of the closest point to the queried test object.
}
/// <summary>
/// Specifies the boundary system parameters that can be configured. Can be overridden by the system or user.
/// </summary>
public struct BoundaryLookAndFeel
{
public Color Color;
}
/// <summary>
/// Returns true if the boundary system is currently configured with valid boundary data.
/// </summary>
public bool GetConfigured()
{
return OVRPlugin.GetBoundaryConfigured();
}
/// <summary>
/// Returns the results of testing a tracked node against the specified boundary type.
/// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
/// </summary>
public OVRBoundary.BoundaryTestResult TestNode(OVRBoundary.Node node, OVRBoundary.BoundaryType boundaryType)
{
OVRPlugin.BoundaryTestResult ovrpRes = OVRPlugin.TestBoundaryNode((OVRPlugin.Node)node, (OVRPlugin.BoundaryType)boundaryType);
OVRBoundary.BoundaryTestResult res = new OVRBoundary.BoundaryTestResult()
{
IsTriggering = (ovrpRes.IsTriggering == OVRPlugin.Bool.True),
ClosestDistance = ovrpRes.ClosestDistance,
ClosestPoint = ovrpRes.ClosestPoint.FromFlippedZVector3f(),
ClosestPointNormal = ovrpRes.ClosestPointNormal.FromFlippedZVector3f(),
};
return res;
}
/// <summary>
/// Returns the results of testing a 3d point against the specified boundary type.
/// The test point is expected in local tracking space.
/// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
/// </summary>
public OVRBoundary.BoundaryTestResult TestPoint(Vector3 point, OVRBoundary.BoundaryType boundaryType)
{
OVRPlugin.BoundaryTestResult ovrpRes = OVRPlugin.TestBoundaryPoint(point.ToFlippedZVector3f(), (OVRPlugin.BoundaryType)boundaryType);
OVRBoundary.BoundaryTestResult res = new OVRBoundary.BoundaryTestResult()
{
IsTriggering = (ovrpRes.IsTriggering == OVRPlugin.Bool.True),
ClosestDistance = ovrpRes.ClosestDistance,
ClosestPoint = ovrpRes.ClosestPoint.FromFlippedZVector3f(),
ClosestPointNormal = ovrpRes.ClosestPointNormal.FromFlippedZVector3f(),
};
return res;
}
/// <summary>
/// Requests that the visual look and feel of the boundary system be changed as specified. Can be overridden by the system or user.
/// </summary>
public void SetLookAndFeel(OVRBoundary.BoundaryLookAndFeel lookAndFeel)
{
OVRPlugin.BoundaryLookAndFeel lf = new OVRPlugin.BoundaryLookAndFeel()
{
Color = lookAndFeel.Color.ToColorf()
};
OVRPlugin.SetBoundaryLookAndFeel(lf);
}
/// <summary>
/// Resets the visual look and feel of the boundary system to the initial system settings.
/// </summary>
public void ResetLookAndFeel()
{
OVRPlugin.ResetBoundaryLookAndFeel();
}
private static int cachedVector3fSize = Marshal.SizeOf(typeof(OVRPlugin.Vector3f));
private static OVRNativeBuffer cachedGeometryNativeBuffer = new OVRNativeBuffer(0);
private static float[] cachedGeometryManagedBuffer = new float[0];
/// <summary>
/// Returns an array of 3d points (in clockwise order) that define the specified boundary type.
/// All points are returned in local tracking space shared by tracked nodes and accessible through OVRCameraRig's trackingSpace anchor.
/// </summary>
public Vector3[] GetGeometry(OVRBoundary.BoundaryType boundaryType)
{
int pointsCount = 0;
if (OVRPlugin.GetBoundaryGeometry2((OVRPlugin.BoundaryType)boundaryType, IntPtr.Zero, ref pointsCount))
{
if (pointsCount > 0)
{
int requiredNativeBufferCapacity = pointsCount * cachedVector3fSize;
if (cachedGeometryNativeBuffer.GetCapacity() < requiredNativeBufferCapacity)
cachedGeometryNativeBuffer.Reset(requiredNativeBufferCapacity);
int requiredManagedBufferCapacity = pointsCount * 3;
if (cachedGeometryManagedBuffer.Length < requiredManagedBufferCapacity)
cachedGeometryManagedBuffer = new float[requiredManagedBufferCapacity];
if (OVRPlugin.GetBoundaryGeometry2((OVRPlugin.BoundaryType)boundaryType, cachedGeometryNativeBuffer.GetPointer(), ref pointsCount))
{
Marshal.Copy(cachedGeometryNativeBuffer.GetPointer(), cachedGeometryManagedBuffer, 0, requiredManagedBufferCapacity);
Vector3[] points = new Vector3[pointsCount];
for (int i = 0; i < pointsCount; i++)
{
points[i] = new OVRPlugin.Vector3f()
{
x = cachedGeometryManagedBuffer[3 * i + 0],
y = cachedGeometryManagedBuffer[3 * i + 1],
z = cachedGeometryManagedBuffer[3 * i + 2],
}.FromFlippedZVector3f();
}
return points;
}
}
}
return new Vector3[0];
}
/// <summary>
/// Returns a vector that indicates the spatial dimensions of the specified boundary type. (x = width, y = height, z = depth)
/// </summary>
public Vector3 GetDimensions(OVRBoundary.BoundaryType boundaryType)
{
return OVRPlugin.GetBoundaryDimensions((OVRPlugin.BoundaryType)boundaryType).FromVector3f();
}
/// <summary>
/// Returns true if the boundary system is currently visible.
/// </summary>
public bool GetVisible()
{
return OVRPlugin.GetBoundaryVisible();
}
/// <summary>
/// Requests that the boundary system visibility be set to the specified value.
/// The actual visibility can be overridden by the system (e.g., proximity trigger) or by the user (boundary system disabled).
/// </summary>
public void SetVisible(bool value)
{
OVRPlugin.SetBoundaryVisible(value);
}
}
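// Usage sketch (illustrative only, not part of this commit): OVRBoundary is normally
// reached through OVRManager.boundary rather than constructed directly.
//
// if (OVRManager.boundary.GetConfigured())
// {
//     Vector3 dims = OVRManager.boundary.GetDimensions(OVRBoundary.BoundaryType.PlayArea);
//     Vector3[] points = OVRManager.boundary.GetGeometry(OVRBoundary.BoundaryType.PlayArea);
//     Debug.LogFormat("Play area is {0:F1}m x {1:F1}m with {2} perimeter points", dims.x, dims.z, points.Length);
// }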

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 332b8e08854932543ba356eec601c0ef
timeCreated: 1470352252
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@@ -0,0 +1,311 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// A head-tracked stereoscopic virtual reality camera rig.
/// </summary>
[ExecuteInEditMode]
public class OVRCameraRig : MonoBehaviour
{
/// <summary>
/// The left eye camera.
/// </summary>
public Camera leftEyeCamera { get { return (usePerEyeCameras) ? _leftEyeCamera : _centerEyeCamera; } }
/// <summary>
/// The right eye camera.
/// </summary>
public Camera rightEyeCamera { get { return (usePerEyeCameras) ? _rightEyeCamera : _centerEyeCamera; } }
/// <summary>
/// Provides a root transform for all anchors in tracking space.
/// </summary>
public Transform trackingSpace { get; private set; }
/// <summary>
/// Always coincides with the pose of the left eye.
/// </summary>
public Transform leftEyeAnchor { get; private set; }
/// <summary>
/// Always coincides with average of the left and right eye poses.
/// </summary>
public Transform centerEyeAnchor { get; private set; }
/// <summary>
/// Always coincides with the pose of the right eye.
/// </summary>
public Transform rightEyeAnchor { get; private set; }
/// <summary>
/// Always coincides with the pose of the left hand.
/// </summary>
public Transform leftHandAnchor { get; private set; }
/// <summary>
/// Always coincides with the pose of the right hand.
/// </summary>
public Transform rightHandAnchor { get; private set; }
/// <summary>
/// Always coincides with the pose of the sensor.
/// </summary>
public Transform trackerAnchor { get; private set; }
/// <summary>
/// Occurs when the eye pose anchors have been set.
/// </summary>
public event System.Action<OVRCameraRig> UpdatedAnchors;
/// <summary>
/// If true, separate cameras will be used for the left and right eyes.
/// </summary>
public bool usePerEyeCameras = false;
/// <summary>
/// If true, all tracked anchors are updated in FixedUpdate instead of Update to favor physics fidelity.
/// \note: If the fixed update rate doesn't match the rendering framerate (OVRManager.display.appFramerate), the anchors will visibly judder.
/// </summary>
public bool useFixedUpdateForTracking = false;
protected bool _skipUpdate = false;
protected readonly string trackingSpaceName = "TrackingSpace";
protected readonly string trackerAnchorName = "TrackerAnchor";
protected readonly string leftEyeAnchorName = "LeftEyeAnchor";
protected readonly string centerEyeAnchorName = "CenterEyeAnchor";
protected readonly string rightEyeAnchorName = "RightEyeAnchor";
protected readonly string leftHandAnchorName = "LeftHandAnchor";
protected readonly string rightHandAnchorName = "RightHandAnchor";
protected Camera _centerEyeCamera;
protected Camera _leftEyeCamera;
protected Camera _rightEyeCamera;
#region Unity Messages
protected virtual void Awake()
{
_skipUpdate = true;
EnsureGameObjectIntegrity();
}
protected virtual void Start()
{
UpdateAnchors();
}
protected virtual void FixedUpdate()
{
if (useFixedUpdateForTracking)
UpdateAnchors();
}
protected virtual void Update()
{
_skipUpdate = false;
if (!useFixedUpdateForTracking)
UpdateAnchors();
}
#endregion
protected virtual void UpdateAnchors()
{
EnsureGameObjectIntegrity();
if (!Application.isPlaying)
return;
if (_skipUpdate)
{
centerEyeAnchor.FromOVRPose(OVRPose.identity, true);
leftEyeAnchor.FromOVRPose(OVRPose.identity, true);
rightEyeAnchor.FromOVRPose(OVRPose.identity, true);
return;
}
bool monoscopic = OVRManager.instance.monoscopic;
OVRPose tracker = OVRManager.tracker.GetPose();
trackerAnchor.localRotation = tracker.orientation;
#if UNITY_2017_2_OR_NEWER
centerEyeAnchor.localRotation = UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.CenterEye);
leftEyeAnchor.localRotation = monoscopic ? centerEyeAnchor.localRotation : UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.LeftEye);
rightEyeAnchor.localRotation = monoscopic ? centerEyeAnchor.localRotation : UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.RightEye);
#else
centerEyeAnchor.localRotation = UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.CenterEye);
leftEyeAnchor.localRotation = monoscopic ? centerEyeAnchor.localRotation : UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.LeftEye);
rightEyeAnchor.localRotation = monoscopic ? centerEyeAnchor.localRotation : UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.RightEye);
#endif
leftHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.LTouch);
rightHandAnchor.localRotation = OVRInput.GetLocalControllerRotation(OVRInput.Controller.RTouch);
trackerAnchor.localPosition = tracker.position;
#if UNITY_2017_2_OR_NEWER
centerEyeAnchor.localPosition = UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.CenterEye);
leftEyeAnchor.localPosition = monoscopic ? centerEyeAnchor.localPosition : UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.LeftEye);
rightEyeAnchor.localPosition = monoscopic ? centerEyeAnchor.localPosition : UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.RightEye);
#else
centerEyeAnchor.localPosition = UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.CenterEye);
leftEyeAnchor.localPosition = monoscopic ? centerEyeAnchor.localPosition : UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.LeftEye);
rightEyeAnchor.localPosition = monoscopic ? centerEyeAnchor.localPosition : UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.RightEye);
#endif
leftHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.LTouch);
rightHandAnchor.localPosition = OVRInput.GetLocalControllerPosition(OVRInput.Controller.RTouch);
RaiseUpdatedAnchorsEvent();
}
protected virtual void RaiseUpdatedAnchorsEvent()
{
if (UpdatedAnchors != null)
{
UpdatedAnchors(this);
}
}
public virtual void EnsureGameObjectIntegrity()
{
bool monoscopic = OVRManager.instance != null ? OVRManager.instance.monoscopic : false;
if (trackingSpace == null)
trackingSpace = ConfigureAnchor(null, trackingSpaceName);
if (leftEyeAnchor == null)
leftEyeAnchor = ConfigureAnchor(trackingSpace, leftEyeAnchorName);
if (centerEyeAnchor == null)
centerEyeAnchor = ConfigureAnchor(trackingSpace, centerEyeAnchorName);
if (rightEyeAnchor == null)
rightEyeAnchor = ConfigureAnchor(trackingSpace, rightEyeAnchorName);
if (leftHandAnchor == null)
leftHandAnchor = ConfigureAnchor(trackingSpace, leftHandAnchorName);
if (rightHandAnchor == null)
rightHandAnchor = ConfigureAnchor(trackingSpace, rightHandAnchorName);
if (trackerAnchor == null)
trackerAnchor = ConfigureAnchor(trackingSpace, trackerAnchorName);
if (_centerEyeCamera == null || _leftEyeCamera == null || _rightEyeCamera == null)
{
_centerEyeCamera = centerEyeAnchor.GetComponent<Camera>();
_leftEyeCamera = leftEyeAnchor.GetComponent<Camera>();
_rightEyeCamera = rightEyeAnchor.GetComponent<Camera>();
if (_centerEyeCamera == null)
{
_centerEyeCamera = centerEyeAnchor.gameObject.AddComponent<Camera>();
_centerEyeCamera.tag = "MainCamera";
}
if (_leftEyeCamera == null)
{
_leftEyeCamera = leftEyeAnchor.gameObject.AddComponent<Camera>();
_leftEyeCamera.tag = "MainCamera";
}
if (_rightEyeCamera == null)
{
_rightEyeCamera = rightEyeAnchor.gameObject.AddComponent<Camera>();
_rightEyeCamera.tag = "MainCamera";
}
_centerEyeCamera.stereoTargetEye = StereoTargetEyeMask.Both;
_leftEyeCamera.stereoTargetEye = StereoTargetEyeMask.Left;
_rightEyeCamera.stereoTargetEye = StereoTargetEyeMask.Right;
}
if (monoscopic && !OVRPlugin.EyeTextureArrayEnabled)
{
// Output to left eye only when in monoscopic mode
if (_centerEyeCamera.stereoTargetEye != StereoTargetEyeMask.Left)
{
_centerEyeCamera.stereoTargetEye = StereoTargetEyeMask.Left;
}
}
else
{
if (_centerEyeCamera.stereoTargetEye != StereoTargetEyeMask.Both)
{
_centerEyeCamera.stereoTargetEye = StereoTargetEyeMask.Both;
}
}
// disable the right eye camera when in monoscopic mode
if (_centerEyeCamera.enabled == usePerEyeCameras ||
_leftEyeCamera.enabled == !usePerEyeCameras ||
_rightEyeCamera.enabled == !(usePerEyeCameras && (!monoscopic || OVRPlugin.EyeTextureArrayEnabled)))
{
_skipUpdate = true;
}
_centerEyeCamera.enabled = !usePerEyeCameras;
_leftEyeCamera.enabled = usePerEyeCameras;
_rightEyeCamera.enabled = (usePerEyeCameras && (!monoscopic || OVRPlugin.EyeTextureArrayEnabled));
}
protected virtual Transform ConfigureAnchor(Transform root, string name)
{
Transform anchor = (root != null) ? transform.Find(root.name + "/" + name) : null;
if (anchor == null)
{
anchor = transform.Find(name);
}
if (anchor == null)
{
anchor = new GameObject(name).transform;
}
anchor.name = name;
anchor.parent = (root != null) ? root : transform;
anchor.localScale = Vector3.one;
anchor.localPosition = Vector3.zero;
anchor.localRotation = Quaternion.identity;
return anchor;
}
public virtual Matrix4x4 ComputeTrackReferenceMatrix()
{
if (centerEyeAnchor == null)
{
Debug.LogError("centerEyeAnchor is required");
return Matrix4x4.identity;
}
// The ideal approach would be using UnityEngine.VR.VRNode.TrackingReference, then we would not have to depend on the OVRCameraRig. Unfortunately, it is not available in Unity 5.4.3
OVRPose headPose;
#if UNITY_2017_2_OR_NEWER
headPose.position = UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.Head);
headPose.orientation = UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.Head);
#else
headPose.position = UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.Head);
headPose.orientation = UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.Head);
#endif
OVRPose invHeadPose = headPose.Inverse();
Matrix4x4 invHeadMatrix = Matrix4x4.TRS(invHeadPose.position, invHeadPose.orientation, Vector3.one);
Matrix4x4 ret = centerEyeAnchor.localToWorldMatrix * invHeadMatrix;
return ret;
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: df9f338034892c44ebb62d97894772f1
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,380 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
/// <summary>
/// Miscellaneous extension methods that any script can use.
/// </summary>
public static class OVRExtensions
{
/// <summary>
/// Converts the given world-space transform to an OVRPose in tracking space.
/// </summary>
public static OVRPose ToTrackingSpacePose(this Transform transform, Camera camera)
{
OVRPose headPose;
#if UNITY_2017_2_OR_NEWER
headPose.position = UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.Head);
headPose.orientation = UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.Head);
#else
headPose.position = UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.Head);
headPose.orientation = UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.Head);
#endif
var ret = headPose * transform.ToHeadSpacePose(camera);
return ret;
}
/// <summary>
/// Converts the given pose from tracking-space to world-space.
/// </summary>
public static OVRPose ToWorldSpacePose(OVRPose trackingSpacePose)
{
OVRPose headPose;
#if UNITY_2017_2_OR_NEWER
headPose.position = UnityEngine.XR.InputTracking.GetLocalPosition(UnityEngine.XR.XRNode.Head);
headPose.orientation = UnityEngine.XR.InputTracking.GetLocalRotation(UnityEngine.XR.XRNode.Head);
#else
headPose.position = UnityEngine.VR.InputTracking.GetLocalPosition(UnityEngine.VR.VRNode.Head);
headPose.orientation = UnityEngine.VR.InputTracking.GetLocalRotation(UnityEngine.VR.VRNode.Head);
#endif
// Transform from tracking-Space to head-Space
OVRPose poseInHeadSpace = headPose.Inverse() * trackingSpacePose;
// Transform from head space to world space
OVRPose ret = Camera.main.transform.ToOVRPose() * poseInHeadSpace;
return ret;
}
/// <summary>
/// Converts the given world-space transform to an OVRPose in head space.
/// </summary>
public static OVRPose ToHeadSpacePose(this Transform transform, Camera camera)
{
return camera.transform.ToOVRPose().Inverse() * transform.ToOVRPose();
}
internal static OVRPose ToOVRPose(this Transform t, bool isLocal = false)
{
OVRPose pose;
pose.orientation = (isLocal) ? t.localRotation : t.rotation;
pose.position = (isLocal) ? t.localPosition : t.position;
return pose;
}
internal static void FromOVRPose(this Transform t, OVRPose pose, bool isLocal = false)
{
if (isLocal)
{
t.localRotation = pose.orientation;
t.localPosition = pose.position;
}
else
{
t.rotation = pose.orientation;
t.position = pose.position;
}
}
internal static OVRPose ToOVRPose(this OVRPlugin.Posef p)
{
return new OVRPose()
{
position = new Vector3(p.Position.x, p.Position.y, -p.Position.z),
orientation = new Quaternion(-p.Orientation.x, -p.Orientation.y, p.Orientation.z, p.Orientation.w)
};
}
internal static OVRTracker.Frustum ToFrustum(this OVRPlugin.Frustumf f)
{
return new OVRTracker.Frustum()
{
nearZ = f.zNear,
farZ = f.zFar,
fov = new Vector2()
{
x = Mathf.Rad2Deg * f.fovX,
y = Mathf.Rad2Deg * f.fovY
}
};
}
internal static Color FromColorf(this OVRPlugin.Colorf c)
{
return new Color() { r = c.r, g = c.g, b = c.b, a = c.a };
}
internal static OVRPlugin.Colorf ToColorf(this Color c)
{
return new OVRPlugin.Colorf() { r = c.r, g = c.g, b = c.b, a = c.a };
}
internal static Vector3 FromVector3f(this OVRPlugin.Vector3f v)
{
return new Vector3() { x = v.x, y = v.y, z = v.z };
}
internal static Vector3 FromFlippedZVector3f(this OVRPlugin.Vector3f v)
{
return new Vector3() { x = v.x, y = v.y, z = -v.z };
}
internal static OVRPlugin.Vector3f ToVector3f(this Vector3 v)
{
return new OVRPlugin.Vector3f() { x = v.x, y = v.y, z = v.z };
}
internal static OVRPlugin.Vector3f ToFlippedZVector3f(this Vector3 v)
{
return new OVRPlugin.Vector3f() { x = v.x, y = v.y, z = -v.z };
}
internal static Quaternion FromQuatf(this OVRPlugin.Quatf q)
{
return new Quaternion() { x = q.x, y = q.y, z = q.z, w = q.w };
}
internal static Quaternion FromFlippedZQuatf(this OVRPlugin.Quatf q)
{
return new Quaternion() { x = -q.x, y = -q.y, z = q.z, w = q.w };
}
internal static OVRPlugin.Quatf ToQuatf(this Quaternion q)
{
return new OVRPlugin.Quatf() { x = q.x, y = q.y, z = q.z, w = q.w };
}
internal static OVRPlugin.Quatf ToFlippedZQuatf(this Quaternion q)
{
return new OVRPlugin.Quatf() { x = -q.x, y = -q.y, z = q.z, w = q.w };
}
}
/// <summary>
/// An affine transformation built from a Unity position and orientation.
/// </summary>
[System.Serializable]
public struct OVRPose
{
/// <summary>
/// A pose with no translation or rotation.
/// </summary>
public static OVRPose identity
{
get {
return new OVRPose()
{
position = Vector3.zero,
orientation = Quaternion.identity
};
}
}
public override bool Equals(System.Object obj)
{
return obj is OVRPose && this == (OVRPose)obj;
}
public override int GetHashCode()
{
return position.GetHashCode() ^ orientation.GetHashCode();
}
public static bool operator ==(OVRPose x, OVRPose y)
{
return x.position == y.position && x.orientation == y.orientation;
}
public static bool operator !=(OVRPose x, OVRPose y)
{
return !(x == y);
}
/// <summary>
/// The position.
/// </summary>
public Vector3 position;
/// <summary>
/// The orientation.
/// </summary>
public Quaternion orientation;
/// <summary>
/// Multiplies two poses.
/// </summary>
public static OVRPose operator*(OVRPose lhs, OVRPose rhs)
{
var ret = new OVRPose();
ret.position = lhs.position + lhs.orientation * rhs.position;
ret.orientation = lhs.orientation * rhs.orientation;
return ret;
}
/// <summary>
/// Computes the inverse of the given pose.
/// </summary>
public OVRPose Inverse()
{
OVRPose ret;
ret.orientation = Quaternion.Inverse(orientation);
ret.position = ret.orientation * -position;
return ret;
}
/// <summary>
/// Converts the pose from left- to right-handed or vice-versa.
/// </summary>
internal OVRPose flipZ()
{
var ret = this;
ret.position.z = -ret.position.z;
ret.orientation.z = -ret.orientation.z;
ret.orientation.w = -ret.orientation.w;
return ret;
}
internal OVRPlugin.Posef ToPosef()
{
return new OVRPlugin.Posef()
{
Position = position.ToVector3f(),
Orientation = orientation.ToQuatf()
};
}
}
/// <summary>
/// Encapsulates an 8-byte-aligned of unmanaged memory.
/// </summary>
public class OVRNativeBuffer : IDisposable
{
private bool disposed = false;
private int m_numBytes = 0;
private IntPtr m_ptr = IntPtr.Zero;
/// <summary>
/// Creates a buffer of the specified size.
/// </summary>
public OVRNativeBuffer(int numBytes)
{
Reallocate(numBytes);
}
/// <summary>
/// Releases unmanaged resources and performs other cleanup operations before the <see cref="OVRNativeBuffer"/> is
/// reclaimed by garbage collection.
/// </summary>
~OVRNativeBuffer()
{
Dispose(false);
}
/// <summary>
/// Reallocates the buffer with the specified new size.
/// </summary>
public void Reset(int numBytes)
{
Reallocate(numBytes);
}
/// <summary>
/// The current number of bytes in the buffer.
/// </summary>
public int GetCapacity()
{
return m_numBytes;
}
/// <summary>
/// A pointer to the unmanaged memory in the buffer, starting at the given offset in bytes.
/// </summary>
public IntPtr GetPointer(int byteOffset = 0)
{
if (byteOffset < 0 || byteOffset >= m_numBytes)
return IntPtr.Zero;
return (byteOffset == 0) ? m_ptr : new IntPtr(m_ptr.ToInt64() + byteOffset);
}
/// <summary>
/// Releases all resource used by the <see cref="OVRNativeBuffer"/> object.
/// </summary>
/// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="OVRNativeBuffer"/>. The <see cref="Dispose"/>
/// method leaves the <see cref="OVRNativeBuffer"/> in an unusable state. After calling <see cref="Dispose"/>, you must
/// release all references to the <see cref="OVRNativeBuffer"/> so the garbage collector can reclaim the memory that
/// the <see cref="OVRNativeBuffer"/> was occupying.</remarks>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
private void Dispose(bool disposing)
{
if (disposed)
return;
if (disposing)
{
// dispose managed resources
}
// dispose unmanaged resources
Release();
disposed = true;
}
private void Reallocate(int numBytes)
{
Release();
if (numBytes > 0)
{
m_ptr = Marshal.AllocHGlobal(numBytes);
m_numBytes = numBytes;
}
else
{
m_ptr = IntPtr.Zero;
m_numBytes = 0;
}
}
private void Release()
{
if (m_ptr != IntPtr.Zero)
{
Marshal.FreeHGlobal(m_ptr);
m_ptr = IntPtr.Zero;
m_numBytes = 0;
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 176f8d665b1d78048b1e87956698df6b
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,124 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
/// <summary>
/// This is a simple behavior that can be attached to a parent of the CameraRig in order
/// to provide movement via the gamepad. This is useful when testing an application in
/// the Unity editor without the HMD.
/// To use it, create a game object in your scene and drag your CameraRig to be a child
/// of the game object. Then, add the OVRDebugHeadController behavior to the game object.
/// Alternatively, this behavior can be placed directly on the OVRCameraRig object, but
/// that is not guaranteed to work if OVRCameraRig functionality changes in the future.
/// In the parent case, the object with OVRDebugHeadController can be thougt of as a
/// platform that your camera is attached to. When the platform moves or rotates, the
/// camera moves or rotates, but the camera can still move independently while "on" the
/// platform.
/// In general, this behavior should be disabled when not debugging.
/// </summary>
public class OVRDebugHeadController : MonoBehaviour
{
[SerializeField]
public bool AllowPitchLook = false;
[SerializeField]
public bool AllowYawLook = true;
[SerializeField]
public bool InvertPitch = false;
[SerializeField]
public float GamePad_PitchDegreesPerSec = 90.0f;
[SerializeField]
public float GamePad_YawDegreesPerSec = 90.0f;
[SerializeField]
public bool AllowMovement = false;
[SerializeField]
public float ForwardSpeed = 2.0f;
[SerializeField]
public float StrafeSpeed = 2.0f;
protected OVRCameraRig CameraRig = null;
void Awake()
{
// locate the camera rig so we can use it to get the current camera transform each frame
OVRCameraRig[] CameraRigs = gameObject.GetComponentsInChildren<OVRCameraRig>();
if( CameraRigs.Length == 0 )
Debug.LogWarning("OVRCamParent: No OVRCameraRig attached.");
else if (CameraRigs.Length > 1)
Debug.LogWarning("OVRCamParent: More then 1 OVRCameraRig attached.");
else
CameraRig = CameraRigs[0];
}
// Use this for initialization
void Start ()
{
}
// Update is called once per frame
void Update ()
{
if ( AllowMovement )
{
float gamePad_FwdAxis = OVRInput.Get(OVRInput.RawAxis2D.LThumbstick).y;
float gamePad_StrafeAxis = OVRInput.Get(OVRInput.RawAxis2D.LThumbstick).x;
Vector3 fwdMove = ( CameraRig.centerEyeAnchor.rotation * Vector3.forward ) * gamePad_FwdAxis * Time.deltaTime * ForwardSpeed;
Vector3 strafeMove = ( CameraRig.centerEyeAnchor.rotation * Vector3.right ) * gamePad_StrafeAxis * Time.deltaTime * StrafeSpeed;
transform.position += fwdMove + strafeMove;
}
#if UNITY_2017_2_OR_NEWER
if ( !UnityEngine.XR.XRDevice.isPresent && ( AllowYawLook || AllowPitchLook ) )
#else
if ( !UnityEngine.VR.VRDevice.isPresent && ( AllowYawLook || AllowPitchLook ) )
#endif
{
Quaternion r = transform.rotation;
if ( AllowYawLook )
{
float gamePadYaw = OVRInput.Get(OVRInput.RawAxis2D.RThumbstick).x;
float yawAmount = gamePadYaw * Time.deltaTime * GamePad_YawDegreesPerSec;
Quaternion yawRot = Quaternion.AngleAxis( yawAmount, Vector3.up );
r = yawRot * r;
}
if ( AllowPitchLook )
{
float gamePadPitch = OVRInput.Get(OVRInput.RawAxis2D.RThumbstick).y;
if ( Mathf.Abs( gamePadPitch ) > 0.0001f )
{
if ( InvertPitch )
{
gamePadPitch *= -1.0f;
}
float pitchAmount = gamePadPitch * Time.deltaTime * GamePad_PitchDegreesPerSec;
Quaternion pitchRot = Quaternion.AngleAxis( pitchAmount, Vector3.left );
r = r * pitchRot;
}
}
transform.rotation = r;
}
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 563681618daa71b4c89f979b1fd7170b
timeCreated: 1433450365
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,355 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using UnityEngine;
/// <summary>
/// Manages an Oculus Rift head-mounted display (HMD).
/// </summary>
public class OVRDisplay
{
/// <summary>
/// Contains full fov information per eye
/// Under Symmetric Fov mode, UpFov == DownFov and LeftFov == RightFov.
/// </summary>
public struct EyeFov
{
public float UpFov;
public float DownFov;
public float LeftFov;
public float RightFov;
}
/// <summary>
/// Specifies the size and field-of-view for one eye texture.
/// </summary>
public struct EyeRenderDesc
{
/// <summary>
/// The horizontal and vertical size of the texture.
/// </summary>
public Vector2 resolution;
/// <summary>
/// The angle of the horizontal and vertical field of view in degrees.
/// For Symmetric FOV interface compatibility
/// Note this includes the fov angle from both sides
/// </summary>
public Vector2 fov;
/// <summary>
/// The full information of field of view in degrees.
/// When Asymmetric FOV isn't enabled, this returns the maximum fov angle
/// </summary>
public EyeFov fullFov;
}
/// <summary>
/// Contains latency measurements for a single frame of rendering.
/// </summary>
public struct LatencyData
{
/// <summary>
/// The time it took to render both eyes in seconds.
/// </summary>
public float render;
/// <summary>
/// The time it took to perform TimeWarp in seconds.
/// </summary>
public float timeWarp;
/// <summary>
/// The time between the end of TimeWarp and scan-out in seconds.
/// </summary>
public float postPresent;
public float renderError;
public float timeWarpError;
}
private bool needsConfigureTexture;
private EyeRenderDesc[] eyeDescs = new EyeRenderDesc[2];
private bool recenterRequested = false;
private int recenterRequestedFrameCount = int.MaxValue;
/// <summary>
/// Creates an instance of OVRDisplay. Called by OVRManager.
/// </summary>
public OVRDisplay()
{
UpdateTextures();
}
/// <summary>
/// Updates the internal state of the OVRDisplay. Called by OVRManager.
/// </summary>
public void Update()
{
UpdateTextures();
if (recenterRequested && Time.frameCount > recenterRequestedFrameCount)
{
if (RecenteredPose != null)
{
RecenteredPose();
}
recenterRequested = false;
recenterRequestedFrameCount = int.MaxValue;
}
}
/// <summary>
/// Occurs when the head pose is reset.
/// </summary>
public event System.Action RecenteredPose;
/// <summary>
/// Recenters the head pose.
/// </summary>
public void RecenterPose()
{
#if UNITY_2017_2_OR_NEWER
UnityEngine.XR.InputTracking.Recenter();
#else
UnityEngine.VR.InputTracking.Recenter();
#endif
// The current poses are cached for the current frame and won't be updated immediately
// after UnityEngine.VR.InputTracking.Recenter(). So we need to wait until next frame
// to trigger the RecenteredPose delegate. The application could expect the correct pose
// when the RecenteredPose delegate get called.
recenterRequested = true;
recenterRequestedFrameCount = Time.frameCount;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
OVRMixedReality.RecenterPose();
#endif
}
/// <summary>
/// Gets the current linear acceleration of the head.
/// </summary>
public Vector3 acceleration
{
get {
if (!OVRManager.isHmdPresent)
return Vector3.zero;
return OVRPlugin.GetNodeAcceleration(OVRPlugin.Node.Head, OVRPlugin.Step.Render).FromFlippedZVector3f();
}
}
/// <summary>
/// Gets the current angular acceleration of the head.
/// </summary>
public Vector3 angularAcceleration
{
get
{
if (!OVRManager.isHmdPresent)
return Vector3.zero;
return OVRPlugin.GetNodeAngularAcceleration(OVRPlugin.Node.Head, OVRPlugin.Step.Render).FromFlippedZVector3f() * Mathf.Rad2Deg;
}
}
/// <summary>
/// Gets the current linear velocity of the head.
/// </summary>
public Vector3 velocity
{
get
{
if (!OVRManager.isHmdPresent)
return Vector3.zero;
return OVRPlugin.GetNodeVelocity(OVRPlugin.Node.Head, OVRPlugin.Step.Render).FromFlippedZVector3f();
}
}
/// <summary>
/// Gets the current angular velocity of the head.
/// </summary>
public Vector3 angularVelocity
{
get {
if (!OVRManager.isHmdPresent)
return Vector3.zero;
return OVRPlugin.GetNodeAngularVelocity(OVRPlugin.Node.Head, OVRPlugin.Step.Render).FromFlippedZVector3f() * Mathf.Rad2Deg;
}
}
/// <summary>
/// Gets the resolution and field of view for the given eye.
/// </summary>
#if UNITY_2017_2_OR_NEWER
public EyeRenderDesc GetEyeRenderDesc(UnityEngine.XR.XRNode eye)
#else
public EyeRenderDesc GetEyeRenderDesc(UnityEngine.VR.VRNode eye)
#endif
{
return eyeDescs[(int)eye];
}
/// <summary>
/// Gets the current measured latency values.
/// </summary>
public LatencyData latency
{
get {
if (!OVRManager.isHmdPresent)
return new LatencyData();
string latency = OVRPlugin.latency;
var r = new Regex("Render: ([0-9]+[.][0-9]+)ms, TimeWarp: ([0-9]+[.][0-9]+)ms, PostPresent: ([0-9]+[.][0-9]+)ms", RegexOptions.None);
var ret = new LatencyData();
Match match = r.Match(latency);
if (match.Success)
{
ret.render = float.Parse(match.Groups[1].Value);
ret.timeWarp = float.Parse(match.Groups[2].Value);
ret.postPresent = float.Parse(match.Groups[3].Value);
}
return ret;
}
}
/// <summary>
/// Gets application's frame rate reported by oculus plugin
/// </summary>
public float appFramerate
{
get
{
if (!OVRManager.isHmdPresent)
return 0;
return OVRPlugin.GetAppFramerate();
}
}
/// <summary>
/// Gets the recommended MSAA level for optimal quality/performance the current device.
/// </summary>
public int recommendedMSAALevel
{
get
{
int result = OVRPlugin.recommendedMSAALevel;
if (result == 1)
result = 0;
return result;
}
}
/// <summary>
/// Gets the list of available display frequencies supported by this hardware.
/// </summary>
public float[] displayFrequenciesAvailable
{
get { return OVRPlugin.systemDisplayFrequenciesAvailable; }
}
/// <summary>
/// Gets and sets the current display frequency.
/// </summary>
public float displayFrequency
{
get
{
return OVRPlugin.systemDisplayFrequency;
}
set
{
OVRPlugin.systemDisplayFrequency = value;
}
}
private void UpdateTextures()
{
#if UNITY_2017_2_OR_NEWER
ConfigureEyeDesc(UnityEngine.XR.XRNode.LeftEye);
ConfigureEyeDesc(UnityEngine.XR.XRNode.RightEye);
#else
ConfigureEyeDesc(UnityEngine.VR.VRNode.LeftEye);
ConfigureEyeDesc(UnityEngine.VR.VRNode.RightEye);
#endif
}
#if UNITY_2017_2_OR_NEWER
private void ConfigureEyeDesc(UnityEngine.XR.XRNode eye)
#else
private void ConfigureEyeDesc(UnityEngine.VR.VRNode eye)
#endif
{
if (!OVRManager.isHmdPresent)
return;
OVRPlugin.Sizei size = OVRPlugin.GetEyeTextureSize((OVRPlugin.Eye)eye);
eyeDescs[(int)eye] = new EyeRenderDesc();
eyeDescs[(int)eye].resolution = new Vector2(size.w, size.h);
OVRPlugin.Frustumf2 frust;
if (OVRPlugin.GetNodeFrustum2((OVRPlugin.Node)eye, out frust))
{
eyeDescs[(int)eye].fullFov.LeftFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.LeftTan);
eyeDescs[(int)eye].fullFov.RightFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.RightTan);
eyeDescs[(int)eye].fullFov.UpFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.UpTan);
eyeDescs[(int)eye].fullFov.DownFov = Mathf.Rad2Deg * Mathf.Atan(frust.Fov.DownTan);
}
else
{
OVRPlugin.Frustumf frustOld = OVRPlugin.GetEyeFrustum((OVRPlugin.Eye)eye);
eyeDescs[(int)eye].fullFov.LeftFov = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
eyeDescs[(int)eye].fullFov.RightFov = Mathf.Rad2Deg * frustOld.fovX * 0.5f;
eyeDescs[(int)eye].fullFov.UpFov = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
eyeDescs[(int)eye].fullFov.DownFov = Mathf.Rad2Deg * frustOld.fovY * 0.5f;
}
// Symmetric Fov uses the maximum fov angle
float maxFovX = Mathf.Max(eyeDescs[(int)eye].fullFov.LeftFov, eyeDescs[(int)eye].fullFov.RightFov);
float maxFovY = Mathf.Max(eyeDescs[(int)eye].fullFov.UpFov, eyeDescs[(int)eye].fullFov.DownFov);
eyeDescs[(int)eye].fov.x = maxFovX * 2.0f;
eyeDescs[(int)eye].fov.y = maxFovY * 2.0f;
if (!OVRPlugin.AsymmetricFovEnabled)
{
eyeDescs[(int)eye].fullFov.LeftFov = maxFovX;
eyeDescs[(int)eye].fullFov.RightFov = maxFovX;
eyeDescs[(int)eye].fullFov.UpFov = maxFovY;
eyeDescs[(int)eye].fullFov.DownFov = maxFovY;
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: bb365ebe8e821fc4e81e9dca9d704357
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,373 @@
using UnityEngine;
using System;
using System.IO;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
/// <summary>
/// Plays tactile effects on a tracked VR controller.
/// </summary>
public static class OVRHaptics
{
public readonly static OVRHapticsChannel[] Channels;
public readonly static OVRHapticsChannel LeftChannel;
public readonly static OVRHapticsChannel RightChannel;
private readonly static OVRHapticsOutput[] m_outputs;
static OVRHaptics()
{
Config.Load();
m_outputs = new OVRHapticsOutput[]
{
new OVRHapticsOutput((uint)OVRPlugin.Controller.LTouch),
new OVRHapticsOutput((uint)OVRPlugin.Controller.RTouch),
};
Channels = new OVRHapticsChannel[]
{
LeftChannel = new OVRHapticsChannel(0),
RightChannel = new OVRHapticsChannel(1),
};
}
/// <summary>
/// Determines the target format for haptics data on a specific device.
/// </summary>
public static class Config
{
public static int SampleRateHz { get; private set; }
public static int SampleSizeInBytes { get; private set; }
public static int MinimumSafeSamplesQueued { get; private set; }
public static int MinimumBufferSamplesCount { get; private set; }
public static int OptimalBufferSamplesCount { get; private set; }
public static int MaximumBufferSamplesCount { get; private set; }
static Config()
{
Load();
}
public static void Load()
{
OVRPlugin.HapticsDesc desc = OVRPlugin.GetControllerHapticsDesc((uint)OVRPlugin.Controller.RTouch);
SampleRateHz = desc.SampleRateHz;
SampleSizeInBytes = desc.SampleSizeInBytes;
MinimumSafeSamplesQueued = desc.MinimumSafeSamplesQueued;
MinimumBufferSamplesCount = desc.MinimumBufferSamplesCount;
OptimalBufferSamplesCount = desc.OptimalBufferSamplesCount;
MaximumBufferSamplesCount = desc.MaximumBufferSamplesCount;
}
}
/// <summary>
/// A track of haptics data that can be mixed or sequenced with another track.
/// </summary>
public class OVRHapticsChannel
{
private OVRHapticsOutput m_output;
/// <summary>
/// Constructs a channel targeting the specified output.
/// </summary>
public OVRHapticsChannel(uint outputIndex)
{
m_output = m_outputs[outputIndex];
}
/// <summary>
/// Cancels any currently-playing clips and immediatly plays the specified clip instead.
/// </summary>
public void Preempt(OVRHapticsClip clip)
{
m_output.Preempt(clip);
}
/// <summary>
/// Enqueues the specified clip to play after any currently-playing clips finish.
/// </summary>
public void Queue(OVRHapticsClip clip)
{
m_output.Queue(clip);
}
/// <summary>
/// Adds the specified clip to play simultaneously to the currently-playing clip(s).
/// </summary>
public void Mix(OVRHapticsClip clip)
{
m_output.Mix(clip);
}
/// <summary>
/// Cancels any currently-playing clips.
/// </summary>
public void Clear()
{
m_output.Clear();
}
}
private class OVRHapticsOutput
{
private class ClipPlaybackTracker
{
public int ReadCount { get; set; }
public OVRHapticsClip Clip { get; set; }
public ClipPlaybackTracker(OVRHapticsClip clip)
{
Clip = clip;
}
}
private bool m_lowLatencyMode = true;
private bool m_paddingEnabled = true;
private int m_prevSamplesQueued = 0;
private float m_prevSamplesQueuedTime = 0;
private int m_numPredictionHits = 0;
private int m_numPredictionMisses = 0;
private int m_numUnderruns = 0;
private List<ClipPlaybackTracker> m_pendingClips = new List<ClipPlaybackTracker>();
private uint m_controller = 0;
private OVRNativeBuffer m_nativeBuffer = new OVRNativeBuffer(OVRHaptics.Config.MaximumBufferSamplesCount * OVRHaptics.Config.SampleSizeInBytes);
private OVRHapticsClip m_paddingClip = new OVRHapticsClip();
public OVRHapticsOutput(uint controller)
{
#if UNITY_ANDROID
m_paddingEnabled = false;
#endif
m_controller = controller;
}
/// <summary>
/// The system calls this each frame to update haptics playback.
/// </summary>
public void Process()
{
var hapticsState = OVRPlugin.GetControllerHapticsState(m_controller);
float elapsedTime = Time.realtimeSinceStartup - m_prevSamplesQueuedTime;
if (m_prevSamplesQueued > 0)
{
int expectedSamples = m_prevSamplesQueued - (int)(elapsedTime * OVRHaptics.Config.SampleRateHz + 0.5f);
if (expectedSamples < 0)
expectedSamples = 0;
if ((hapticsState.SamplesQueued - expectedSamples) == 0)
m_numPredictionHits++;
else
m_numPredictionMisses++;
//Debug.Log(hapticsState.SamplesAvailable + "a " + hapticsState.SamplesQueued + "q " + expectedSamples + "e "
//+ "Prediction Accuracy: " + m_numPredictionHits / (float)(m_numPredictionMisses + m_numPredictionHits));
if ((expectedSamples > 0) && (hapticsState.SamplesQueued == 0))
{
m_numUnderruns++;
//Debug.LogError("Samples Underrun (" + m_controller + " #" + m_numUnderruns + ") -"
// + " Expected: " + expectedSamples
// + " Actual: " + hapticsState.SamplesQueued);
}
m_prevSamplesQueued = hapticsState.SamplesQueued;
m_prevSamplesQueuedTime = Time.realtimeSinceStartup;
}
int desiredSamplesCount = OVRHaptics.Config.OptimalBufferSamplesCount;
if (m_lowLatencyMode)
{
float sampleRateMs = 1000.0f / (float)OVRHaptics.Config.SampleRateHz;
float elapsedMs = elapsedTime * 1000.0f;
int samplesNeededPerFrame = (int)Mathf.Ceil(elapsedMs / sampleRateMs);
int lowLatencySamplesCount = OVRHaptics.Config.MinimumSafeSamplesQueued + samplesNeededPerFrame;
if (lowLatencySamplesCount < desiredSamplesCount)
desiredSamplesCount = lowLatencySamplesCount;
}
if (hapticsState.SamplesQueued > desiredSamplesCount)
return;
if (desiredSamplesCount > OVRHaptics.Config.MaximumBufferSamplesCount)
desiredSamplesCount = OVRHaptics.Config.MaximumBufferSamplesCount;
if (desiredSamplesCount > hapticsState.SamplesAvailable)
desiredSamplesCount = hapticsState.SamplesAvailable;
int acquiredSamplesCount = 0;
int clipIndex = 0;
while(acquiredSamplesCount < desiredSamplesCount && clipIndex < m_pendingClips.Count)
{
int numSamplesToCopy = desiredSamplesCount - acquiredSamplesCount;
int remainingSamplesInClip = m_pendingClips[clipIndex].Clip.Count - m_pendingClips[clipIndex].ReadCount;
if (numSamplesToCopy > remainingSamplesInClip)
numSamplesToCopy = remainingSamplesInClip;
if (numSamplesToCopy > 0)
{
int numBytes = numSamplesToCopy * OVRHaptics.Config.SampleSizeInBytes;
int dstOffset = acquiredSamplesCount * OVRHaptics.Config.SampleSizeInBytes;
int srcOffset = m_pendingClips[clipIndex].ReadCount * OVRHaptics.Config.SampleSizeInBytes;
Marshal.Copy(m_pendingClips[clipIndex].Clip.Samples, srcOffset, m_nativeBuffer.GetPointer(dstOffset), numBytes);
m_pendingClips[clipIndex].ReadCount += numSamplesToCopy;
acquiredSamplesCount += numSamplesToCopy;
}
clipIndex++;
}
for (int i = m_pendingClips.Count - 1; i >= 0 && m_pendingClips.Count > 0; i--)
{
if (m_pendingClips[i].ReadCount >= m_pendingClips[i].Clip.Count)
m_pendingClips.RemoveAt(i);
}
if (m_paddingEnabled)
{
int desiredPadding = desiredSamplesCount - (hapticsState.SamplesQueued + acquiredSamplesCount);
if (desiredPadding < (OVRHaptics.Config.MinimumBufferSamplesCount - acquiredSamplesCount))
desiredPadding = (OVRHaptics.Config.MinimumBufferSamplesCount - acquiredSamplesCount);
if (desiredPadding > hapticsState.SamplesAvailable)
desiredPadding = hapticsState.SamplesAvailable;
if (desiredPadding > 0)
{
int numBytes = desiredPadding * OVRHaptics.Config.SampleSizeInBytes;
int dstOffset = acquiredSamplesCount * OVRHaptics.Config.SampleSizeInBytes;
int srcOffset = 0;
Marshal.Copy(m_paddingClip.Samples, srcOffset, m_nativeBuffer.GetPointer(dstOffset), numBytes);
acquiredSamplesCount += desiredPadding;
}
}
if (acquiredSamplesCount > 0)
{
OVRPlugin.HapticsBuffer hapticsBuffer;
hapticsBuffer.Samples = m_nativeBuffer.GetPointer();
hapticsBuffer.SamplesCount = acquiredSamplesCount;
OVRPlugin.SetControllerHaptics(m_controller, hapticsBuffer);
hapticsState = OVRPlugin.GetControllerHapticsState(m_controller);
m_prevSamplesQueued = hapticsState.SamplesQueued;
m_prevSamplesQueuedTime = Time.realtimeSinceStartup;
}
}
/// <summary>
/// Immediately plays the specified clip without waiting for any currently-playing clip to finish.
/// </summary>
public void Preempt(OVRHapticsClip clip)
{
m_pendingClips.Clear();
m_pendingClips.Add(new ClipPlaybackTracker(clip));
}
/// <summary>
/// Enqueues the specified clip to play after any currently-playing clip finishes.
/// </summary>
public void Queue(OVRHapticsClip clip)
{
m_pendingClips.Add(new ClipPlaybackTracker(clip));
}
/// <summary>
/// Adds the samples from the specified clip to the ones in the currently-playing clip(s).
/// </summary>
public void Mix(OVRHapticsClip clip)
{
int numClipsToMix = 0;
int numSamplesToMix = 0;
int numSamplesRemaining = clip.Count;
while (numSamplesRemaining > 0 && numClipsToMix < m_pendingClips.Count)
{
int numSamplesRemainingInClip = m_pendingClips[numClipsToMix].Clip.Count - m_pendingClips[numClipsToMix].ReadCount;
numSamplesRemaining -= numSamplesRemainingInClip;
numSamplesToMix += numSamplesRemainingInClip;
numClipsToMix++;
}
if (numSamplesRemaining > 0)
{
numSamplesToMix += numSamplesRemaining;
numSamplesRemaining = 0;
}
if (numClipsToMix > 0)
{
OVRHapticsClip mixClip = new OVRHapticsClip(numSamplesToMix);
OVRHapticsClip a = clip;
int aReadCount = 0;
for (int i = 0; i < numClipsToMix; i++)
{
OVRHapticsClip b = m_pendingClips[i].Clip;
for(int bReadCount = m_pendingClips[i].ReadCount; bReadCount < b.Count; bReadCount++)
{
if (OVRHaptics.Config.SampleSizeInBytes == 1)
{
byte sample = 0; // TODO support multi-byte samples
if ((aReadCount < a.Count) && (bReadCount < b.Count))
{
sample = (byte)(Mathf.Clamp(a.Samples[aReadCount] + b.Samples[bReadCount], 0, System.Byte.MaxValue)); // TODO support multi-byte samples
aReadCount++;
}
else if (bReadCount < b.Count)
{
sample = b.Samples[bReadCount]; // TODO support multi-byte samples
}
mixClip.WriteSample(sample); // TODO support multi-byte samples
}
}
}
while (aReadCount < a.Count)
{
if (OVRHaptics.Config.SampleSizeInBytes == 1)
{
mixClip.WriteSample(a.Samples[aReadCount]); // TODO support multi-byte samples
}
aReadCount++;
}
m_pendingClips[0] = new ClipPlaybackTracker(mixClip);
for (int i = 1; i < numClipsToMix; i++)
{
m_pendingClips.RemoveAt(1);
}
}
else
{
m_pendingClips.Add(new ClipPlaybackTracker(clip));
}
}
public void Clear()
{
m_pendingClips.Clear();
}
}
/// <summary>
/// The system calls this each frame to update haptics playback.
/// </summary>
public static void Process()
{
Config.Load();
for (int i = 0; i < m_outputs.Length; i++)
{
m_outputs[i].Process();
}
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: d3b22b858e27329498781f145fa42610
timeCreated: 1463018541
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,149 @@
using UnityEngine;
using System.Collections;
/// <summary>
/// A PCM buffer of data for a haptics effect.
/// </summary>
public class OVRHapticsClip
{
/// <summary>
/// The current number of samples in the clip.
/// </summary>
public int Count { get; private set; }
/// <summary>
/// The maximum number of samples the clip can store.
/// </summary>
public int Capacity { get; private set; }
/// <summary>
/// The raw haptics data.
/// </summary>
public byte[] Samples { get; private set; }
public OVRHapticsClip()
{
Capacity = OVRHaptics.Config.MaximumBufferSamplesCount;
Samples = new byte[Capacity * OVRHaptics.Config.SampleSizeInBytes];
}
/// <summary>
/// Creates a clip with the specified capacity.
/// </summary>
public OVRHapticsClip(int capacity)
{
Capacity = (capacity >= 0) ? capacity : 0;
Samples = new byte[Capacity * OVRHaptics.Config.SampleSizeInBytes];
}
/// <summary>
/// Creates a clip with the specified data.
/// </summary>
public OVRHapticsClip(byte[] samples, int samplesCount)
{
Samples = samples;
Capacity = Samples.Length / OVRHaptics.Config.SampleSizeInBytes;
Count = (samplesCount >= 0) ? samplesCount : 0;
}
/// <summary>
/// Creates a clip by mixing the specified clips.
/// </summary>
public OVRHapticsClip(OVRHapticsClip a, OVRHapticsClip b)
{
int maxCount = a.Count;
if (b.Count > maxCount)
maxCount = b.Count;
Capacity = maxCount;
Samples = new byte[Capacity * OVRHaptics.Config.SampleSizeInBytes];
for (int i = 0; i < a.Count || i < b.Count; i++)
{
if (OVRHaptics.Config.SampleSizeInBytes == 1)
{
byte sample = 0; // TODO support multi-byte samples
if ((i < a.Count) && (i < b.Count))
sample = (byte)(Mathf.Clamp(a.Samples[i] + b.Samples[i], 0, System.Byte.MaxValue)); // TODO support multi-byte samples
else if (i < a.Count)
sample = a.Samples[i]; // TODO support multi-byte samples
else if (i < b.Count)
sample = b.Samples[i]; // TODO support multi-byte samples
WriteSample(sample); // TODO support multi-byte samples
}
}
}
/// <summary>
/// Creates a haptics clip from the specified audio clip.
/// </summary>
public OVRHapticsClip(AudioClip audioClip, int channel = 0)
{
float[] audioData = new float[audioClip.samples * audioClip.channels];
audioClip.GetData(audioData, 0);
InitializeFromAudioFloatTrack(audioData, audioClip.frequency, audioClip.channels, channel);
}
/// <summary>
/// Adds the specified sample to the end of the clip.
/// </summary>
public void WriteSample(byte sample) // TODO support multi-byte samples
{
if (Count >= Capacity)
{
//Debug.LogError("Attempted to write OVRHapticsClip sample out of range - Count:" + Count + " Capacity:" + Capacity);
return;
}
if (OVRHaptics.Config.SampleSizeInBytes == 1)
{
Samples[Count * OVRHaptics.Config.SampleSizeInBytes] = sample; // TODO support multi-byte samples
}
Count++;
}
/// <summary>
/// Clears the clip and resets its size to 0.
/// </summary>
public void Reset()
{
Count = 0;
}
private void InitializeFromAudioFloatTrack(float[] sourceData, double sourceFrequency, int sourceChannelCount, int sourceChannel)
{
double stepSizePrecise = (sourceFrequency + 1e-6) / OVRHaptics.Config.SampleRateHz;
if (stepSizePrecise < 1.0)
return;
int stepSize = (int)stepSizePrecise;
double stepSizeError = stepSizePrecise - stepSize;
double accumulatedStepSizeError = 0.0f;
int length = sourceData.Length;
Count = 0;
Capacity = length / sourceChannelCount / stepSize + 1;
Samples = new byte[Capacity * OVRHaptics.Config.SampleSizeInBytes];
int i = sourceChannel % sourceChannelCount;
while (i < length)
{
if (OVRHaptics.Config.SampleSizeInBytes == 1)
{
WriteSample((byte)(Mathf.Clamp01(Mathf.Abs(sourceData[i])) * System.Byte.MaxValue)); // TODO support multi-byte samples
}
i+= stepSize * sourceChannelCount;
accumulatedStepSizeError += stepSizeError;
if ((int)accumulatedStepSizeError > 0)
{
i+= (int)accumulatedStepSizeError * sourceChannelCount;
accumulatedStepSizeError = accumulatedStepSizeError - (int)accumulatedStepSizeError;
}
}
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: c123270a848515b458069b5242866451
timeCreated: 1467575852
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: d210caf8a50e1954c80690fa858572ad
timeCreated: 1438295094
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,7 @@
using UnityEngine;
/// <summary>
/// Dummy subtype of PropertyAttribute for custom inspector to use.
/// </summary>
public class OVRLayerAttribute : PropertyAttribute {
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 701bfebb60063334f994e36546c103d6
timeCreated: 1499749327
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,808 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
#if UNITY_EDITOR
using UnityEngine;
using UnityEditor;
using System.Collections.Generic;
using Assets.OVR.Scripts;
/// <summary>
///Scans the project and warns about the following conditions:
///Audio sources > 16
///Using MSAA levels other than recommended level
///Excessive pixel lights (>1 on Gear VR; >3 on Rift)
///Directional Lightmapping Modes (on Gear; use Non-Directional)
///Preload audio setting on individual audio clips
///Decompressing audio clips on load
///Disabling occlusion mesh
///Android target API level set to 21 or higher
///Unity skybox use (on by default, but if you can't see the skybox switching to Color is much faster on Gear)
///Lights marked as "baked" but that were not included in the last bake (and are therefore realtime).
///Lack of static batching and dynamic batching settings activated.
///Full screen image effects (Gear)
///Warn about large textures that are marked as uncompressed.
///32-bit depth buffer (use 16)
///Use of projectors (Gear; can be used carefully but slow enough to warrant a warning)
///Maybe in the future once quantified: Graphics jobs and IL2CPP on Gear.
///Real-time global illumination
///No texture compression, or non-ASTC texture compression as a global setting (Gear).
///Using deferred rendering
///Excessive texture resolution after LOD bias (>2k on Gear VR; >4k on Rift)
///Not using trilinear or aniso filtering and not generating mipmaps
///Excessive render scale (>1.2)
///Slow physics settings: Sleep Threshold < 0.005, Default Contact Offset < 0.01, Solver Iteration Count > 6
///Shadows on when approaching the geometry or draw call limits
///Non-static objects with colliders that are missing rigidbodies on themselves or in the parent chain.
///No initialization of GPU/CPU throttling settings, or init to dangerous values (-1 or > 3) (Gear)
///Using inefficient effects: SSAO, motion blur, global fog, parallax mapping, etc.
///Too many Overlay layers
///Use of Standard shader or Standard Specular shader on Gear. More generally, excessive use of multipass shaders (legacy specular, etc).
///Multiple cameras with clears (on Gear, potential for excessive fill cost)
///Excessive shader passes (>2)
///Material pointers that have been instanced in the editor (esp. if we could determine that the instance has no deltas from the original)
///Excessive draw calls (>150 on Gear VR; >2000 on Rift)
///Excessive tris or verts (>100k on Gear VR; >1M on Rift)
///Large textures, lots of prefabs in startup scene (for bootstrap optimization)
///GPU skinning: testing Android-only, as most Rift devs are GPU-bound.
/// </summary>
public class OVRLint : EditorWindow
{
//TODO: The following require reflection or static analysis.
///Use of ONSP reflections (Gear)
///Use of LoadLevelAsync / LoadLevelAdditiveAsync (on Gear, this kills frame rate so dramatically it's probably better to just go to black and load synchronously)
///Use of Linq in non-editor assemblies (common cause of GCs). Minor: use of foreach.
///Use of Unity WWW (exceptionally high overhead for large file downloads, but acceptable for tiny gets).
///Declared but empty Awake/Start/Update/OnCollisionEnter/OnCollisionExit/OnCollisionStay. Also OnCollision* star methods that declare the Collision argument but do not reference it (omitting it short-circuits the collision contact calculation).
private static List<FixRecord> mRecords = new List<FixRecord>();
private Vector2 mScrollPosition;
[MenuItem("Tools/Oculus/OVR Performance Lint Tool")]
static void Init()
{
// Get existing open window or if none, make a new one:
EditorWindow.GetWindow(typeof(OVRLint));
OVRLint.RunCheck();
}
void OnGUI()
{
GUILayout.Label("OVR Performance Lint Tool", EditorStyles.boldLabel);
if (GUILayout.Button("Refresh", EditorStyles.toolbarButton, GUILayout.ExpandWidth(false)))
{
RunCheck();
}
string lastCategory = "";
mScrollPosition = EditorGUILayout.BeginScrollView(mScrollPosition);
for (int x = 0; x < mRecords.Count; x++)
{
FixRecord record = mRecords[x];
if (!record.category.Equals(lastCategory)) // new category
{
lastCategory = record.category;
EditorGUILayout.Separator();
EditorGUILayout.BeginHorizontal();
GUILayout.Label(lastCategory, EditorStyles.label, GUILayout.Width(200));
bool moreThanOne = (x + 1 < mRecords.Count && mRecords[x + 1].category.Equals(lastCategory));
if (record.buttonNames != null && record.buttonNames.Length > 0)
{
if (moreThanOne)
{
GUILayout.Label("Apply to all:", EditorStyles.label, GUILayout.Width(75));
for (int y = 0; y < record.buttonNames.Length; y++)
{
if (GUILayout.Button(record.buttonNames[y], EditorStyles.toolbarButton, GUILayout.Width(200)))
{
List<FixRecord> recordsToProcess = new List<FixRecord>();
for (int z = x; z < mRecords.Count; z++)
{
FixRecord thisRecord = mRecords[z];
bool isLast = false;
if (z + 1 >= mRecords.Count || !mRecords[z + 1].category.Equals(lastCategory))
{
isLast = true;
}
if (!thisRecord.complete)
{
recordsToProcess.Add(thisRecord);
}
if (isLast)
{
break;
}
}
UnityEngine.Object[] undoObjects = new UnityEngine.Object[recordsToProcess.Count];
for (int z = 0; z < recordsToProcess.Count; z++)
{
undoObjects[z] = recordsToProcess[z].targetObject;
}
Undo.RecordObjects(undoObjects, record.category + " (Multiple)");
for (int z = 0; z < recordsToProcess.Count; z++)
{
FixRecord thisRecord = recordsToProcess[z];
thisRecord.fixMethod(thisRecord.targetObject, (z + 1 == recordsToProcess.Count), y);
thisRecord.complete = true;
}
}
}
}
}
EditorGUILayout.EndHorizontal();
if (moreThanOne || record.targetObject)
{
GUILayout.Label(record.message);
}
}
EditorGUILayout.BeginHorizontal();
GUI.enabled = !record.complete;
if (record.targetObject)
{
EditorGUILayout.ObjectField(record.targetObject, record.targetObject.GetType(), true);
}
else
{
GUILayout.Label(record.message);
}
if (record.buttonNames != null)
{
for (int y = 0; y < record.buttonNames.Length; y++)
{
if (GUILayout.Button(record.buttonNames[y], EditorStyles.toolbarButton, GUILayout.Width(200)))
{
if (record.targetObject != null)
{
Undo.RecordObject(record.targetObject, record.category);
}
record.fixMethod(record.targetObject, true, y);
record.complete = true;
}
}
}
GUI.enabled = true;
EditorGUILayout.EndHorizontal();
}
EditorGUILayout.EndScrollView();
}
static void RunCheck()
{
mRecords.Clear();
CheckStaticCommonIssues();
#if UNITY_ANDROID
CheckStaticAndroidIssues();
#endif
if (EditorApplication.isPlaying)
{
CheckRuntimeCommonIssues();
#if UNITY_ANDROID
CheckRuntimeAndroidIssues();
#endif
}
mRecords.Sort(delegate (FixRecord record1, FixRecord record2)
{
return record1.category.CompareTo(record2.category);
});
}
static void AddFix(string category, string message, FixMethodDelegate method, UnityEngine.Object target, params string[] buttons)
{
mRecords.Add(new FixRecord(category, message, method, target, buttons));
}
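// Illustrative sketch (hypothetical check, not part of the original file): registering a custom
// check is just a call to AddFix with a category, a message, an optional fix delegate, a target
// object, and button labels. A vsync check might look like:
//
//   static void CheckVSync()
//   {
//       if (QualitySettings.vSyncCount != 0)
//       {
//           AddFix("Optimize VSync", "The VR compositor drives the refresh rate; please set vSyncCount to 0.",
//               delegate (UnityEngine.Object obj, bool last, int selected)
//               {
//                   QualitySettings.vSyncCount = 0;
//               }, null, "Fix");
//       }
//   }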
static void CheckStaticCommonIssues()
{
if (OVRManager.IsUnityAlphaOrBetaVersion())
{
AddFix("General", OVRManager.UnityAlphaOrBetaVersionWarningMessage, null, null);
}
if (QualitySettings.anisotropicFiltering != AnisotropicFiltering.Enable && QualitySettings.anisotropicFiltering != AnisotropicFiltering.ForceEnable)
{
AddFix("Optimize Aniso", "Anisotropic filtering is recommended for optimal image sharpness and GPU performance.", delegate (UnityEngine.Object obj, bool last, int selected)
{
// Ideally this would be multi-option: offer Enable or ForceEnable.
QualitySettings.anisotropicFiltering = AnisotropicFiltering.Enable;
}, null, "Fix");
}
#if UNITY_ANDROID
int recommendedPixelLightCount = 1;
#else
int recommendedPixelLightCount = 3;
#endif
if (QualitySettings.pixelLightCount > recommendedPixelLightCount)
{
AddFix("Optimize Pixel Light Count", "For GPU performance set no more than " + recommendedPixelLightCount + " pixel lights in Quality Settings (currently " + QualitySettings.pixelLightCount + ").", delegate (UnityEngine.Object obj, bool last, int selected)
{
QualitySettings.pixelLightCount = recommendedPixelLightCount;
}, null, "Fix");
}
#if false
// Should we recommend this? Seems to be mutually exclusive w/ dynamic batching.
if (!PlayerSettings.graphicsJobs)
{
AddFix ("Optimize Graphics Jobs", "For CPU performance, please use graphics jobs.", delegate(UnityEngine.Object obj, bool last, int selected)
{
PlayerSettings.graphicsJobs = true;
}, null, "Fix");
}
#endif
#if UNITY_2017_2_OR_NEWER
if ((!PlayerSettings.MTRendering || !PlayerSettings.GetMobileMTRendering(BuildTargetGroup.Android)))
#else
if ((!PlayerSettings.MTRendering || !PlayerSettings.mobileMTRendering))
#endif
{
AddFix("Optimize MT Rendering", "For CPU performance, please enable multithreaded rendering.", delegate (UnityEngine.Object obj, bool last, int selected)
{
#if UNITY_2017_2_OR_NEWER
PlayerSettings.SetMobileMTRendering(BuildTargetGroup.Standalone, true);
PlayerSettings.SetMobileMTRendering(BuildTargetGroup.Android, true);
#else
PlayerSettings.MTRendering = PlayerSettings.mobileMTRendering = true;
#endif
}, null, "Fix");
}
#if UNITY_ANDROID
if (!PlayerSettings.use32BitDisplayBuffer)
{
AddFix("Optimize Display Buffer Format", "We recommend to enable use32BitDisplayBuffer.", delegate (UnityEngine.Object obj, bool last, int selected)
{
PlayerSettings.use32BitDisplayBuffer = true;
}, null, "Fix");
}
#endif
BuildTargetGroup target = EditorUserBuildSettings.selectedBuildTargetGroup;
var tier = UnityEngine.Rendering.GraphicsTier.Tier1;
var tierSettings = UnityEditor.Rendering.EditorGraphicsSettings.GetTierSettings(target, tier);
if ((tierSettings.renderingPath == RenderingPath.DeferredShading ||
tierSettings.renderingPath == RenderingPath.DeferredLighting))
{
AddFix("Optimize Rendering Path", "For CPU performance, please do not use deferred shading.", delegate (UnityEngine.Object obj, bool last, int selected)
{
tierSettings.renderingPath = RenderingPath.Forward;
UnityEditor.Rendering.EditorGraphicsSettings.SetTierSettings(target, tier, tierSettings);
}, null, "Use Forward");
}
if (PlayerSettings.stereoRenderingPath == StereoRenderingPath.MultiPass)
{
AddFix("Optimize Stereo Rendering", "For CPU performance, please enable single-pass or instanced stereo rendering.", delegate (UnityEngine.Object obj, bool last, int selected)
{
PlayerSettings.stereoRenderingPath = StereoRenderingPath.Instancing;
}, null, "Fix");
}
if (LightmapSettings.lightmaps.Length > 0 && LightmapSettings.lightmapsMode != LightmapsMode.NonDirectional)
{
AddFix("Optimize Lightmap Directionality", "Switching from directional lightmaps to non-directional lightmaps can save a small amount of GPU time.", delegate (UnityEngine.Object obj, bool last, int selected)
{
LightmapSettings.lightmapsMode = LightmapsMode.NonDirectional;
}, null, "Switch to non-directional lightmaps");
}
if (Lightmapping.realtimeGI)
{
AddFix("Disable Realtime GI", "Disabling real-time global illumination can improve GPU performance.", delegate (UnityEngine.Object obj, bool last, int selected)
{
Lightmapping.realtimeGI = false;
}, null, "Set Lightmapping.realtimeGI = false.");
}
var lights = GameObject.FindObjectsOfType<Light>();
for (int i = 0; i < lights.Length; ++i)
{
#if UNITY_2017_3_OR_NEWER
if (lights [i].type != LightType.Directional && !lights [i].bakingOutput.isBaked && IsLightBaked(lights[i]))
#else
if (lights[i].type != LightType.Directional && !lights[i].isBaked && IsLightBaked(lights[i]))
#endif
{
AddFix("Unbaked Lights", "The following lights in the scene are marked as Baked, but they don't have up to date lightmap data. Generate the lightmap data, or set it to auto-generate, in Window->Lighting->Settings.", null, lights[i], null);
}
if (lights[i].shadows != LightShadows.None && !IsLightBaked(lights[i]))
{
AddFix("Optimize Shadows", "For CPU performance, consider disabling shadows on realtime lights.", delegate (UnityEngine.Object obj, bool last, int selected)
{
Light thisLight = (Light)obj;
thisLight.shadows = LightShadows.None;
}, lights[i], "Set \"Shadow Type\" to \"No Shadows\"");
}
}
var sources = GameObject.FindObjectsOfType<AudioSource>();
if (sources.Length > 16)
{
List<AudioSource> playingAudioSources = new List<AudioSource>();
foreach (var audioSource in sources)
{
if (audioSource.isPlaying)
{
playingAudioSources.Add(audioSource);
}
}
if (playingAudioSources.Count > 16)
{
// Sort playing audio sources by priority
playingAudioSources.Sort(delegate (AudioSource x, AudioSource y)
{
return x.priority.CompareTo(y.priority);
});
for (int i = 16; i < playingAudioSources.Count; ++i)
{
AddFix("Optimize Audio Source Count", "For CPU performance, please disable all but the top 16 AudioSources.", delegate (UnityEngine.Object obj, bool last, int selected)
{
AudioSource audioSource = (AudioSource)obj;
audioSource.enabled = false;
}, playingAudioSources[i], "Disable");
}
}
}
var clips = GameObject.FindObjectsOfType<AudioClip>();
for (int i = 0; i < clips.Length; ++i)
{
if (clips[i].loadType == AudioClipLoadType.DecompressOnLoad)
{
AddFix("Audio Loading", "For fast loading, please don't use decompress on load for audio clips", delegate (UnityEngine.Object obj, bool last, int selected)
{
AudioClip thisClip = (AudioClip)obj;
if (selected == 0)
{
SetAudioLoadType(thisClip, AudioClipLoadType.CompressedInMemory, last);
}
else
{
SetAudioLoadType(thisClip, AudioClipLoadType.Streaming, last);
}
}, clips[i], "Change to Compressed in Memory", "Change to Streaming");
}
if (clips[i].preloadAudioData)
{
AddFix("Audio Preload", "For fast loading, please don't preload data for audio clips.", delegate (UnityEngine.Object obj, bool last, int selected)
{
SetAudioPreload((AudioClip)obj, false, last); // Use the delegate's target; capturing clips[i] closes over the loop variable and is stale when the fix runs.
}, clips[i], "Fix");
}
}
if (Physics.defaultContactOffset < 0.01f)
{
AddFix("Optimize Contact Offset", "For CPU performance, please don't use default contact offset below 0.01.", delegate (UnityEngine.Object obj, bool last, int selected)
{
Physics.defaultContactOffset = 0.01f;
}, null, "Fix");
}
if (Physics.sleepThreshold < 0.005f)
{
AddFix("Optimize Sleep Threshold", "For CPU performance, please don't use sleep threshold below 0.005.", delegate (UnityEngine.Object obj, bool last, int selected)
{
Physics.sleepThreshold = 0.005f;
}, null, "Fix");
}
if (Physics.defaultSolverIterations > 8)
{
AddFix("Optimize Solver Iterations", "For CPU performance, please don't use excessive solver iteration counts.", delegate (UnityEngine.Object obj, bool last, int selected)
{
Physics.defaultSolverIterations = 8;
}, null, "Fix");
}
var materials = Resources.FindObjectsOfTypeAll<Material>();
for (int i = 0; i < materials.Length; ++i)
{
if (materials[i].shader.name.Contains("Parallax") || materials[i].IsKeywordEnabled("_PARALLAXMAP"))
{
AddFix("Optimize Shading", "For GPU performance, please don't use parallax-mapped materials.", delegate (UnityEngine.Object obj, bool last, int selected)
{
Material thisMaterial = (Material)obj;
if (thisMaterial.IsKeywordEnabled("_PARALLAXMAP"))
{
thisMaterial.DisableKeyword("_PARALLAXMAP");
}
if (thisMaterial.shader.name.Contains("Parallax"))
{
var newName = thisMaterial.shader.name.Replace("-ParallaxSpec", "-BumpSpec");
newName = newName.Replace("-Parallax", "-Bump");
var newShader = Shader.Find(newName);
if (newShader)
{
thisMaterial.shader = newShader;
}
else
{
Debug.LogWarning("Unable to find a replacement for shader " + materials[i].shader.name);
}
}
}, materials[i], "Fix");
}
}
var renderers = GameObject.FindObjectsOfType<Renderer>();
for (int i = 0; i < renderers.Length; ++i)
{
if (renderers[i].sharedMaterial == null)
{
AddFix("Instanced Materials", "Please avoid instanced materials on renderers.", null, renderers[i]);
}
}
var overlays = GameObject.FindObjectsOfType<OVROverlay>();
if (overlays.Length > 4)
{
AddFix("Optimize VR Layer Count", "For GPU performance, please use 4 or fewer VR layers.", delegate (UnityEngine.Object obj, bool last, int selected)
{
for (int i = 4; i < OVROverlay.instances.Length; ++i)
{
OVROverlay.instances[i].enabled = false;
}
}, null, "Fix");
}
var splashScreen = PlayerSettings.virtualRealitySplashScreen;
if (splashScreen != null)
{
if (splashScreen.filterMode != FilterMode.Trilinear)
{
AddFix("Optimize VR Splash Filtering", "For visual quality, please use trilinear filtering on your VR splash screen.", delegate (UnityEngine.Object obj, bool last, int EditorSelectedRenderState)
{
var assetPath = AssetDatabase.GetAssetPath(splashScreen);
var importer = (TextureImporter)TextureImporter.GetAtPath(assetPath);
importer.filterMode = FilterMode.Trilinear;
AssetDatabase.ImportAsset(assetPath, ImportAssetOptions.ForceUpdate);
}, null, "Fix");
}
if (splashScreen.mipmapCount <= 1)
{
AddFix("Generate VR Splash Mipmaps", "For visual quality, please use mipmaps with your VR splash screen.", delegate (UnityEngine.Object obj, bool last, int EditorSelectedRenderState)
{
var assetPath = AssetDatabase.GetAssetPath(splashScreen);
var importer = (TextureImporter)TextureImporter.GetAtPath(assetPath);
importer.mipmapEnabled = true;
AssetDatabase.ImportAsset(assetPath, ImportAssetOptions.ForceUpdate);
}, null, "Fix");
}
}
}
static void CheckRuntimeCommonIssues()
{
if (!OVRPlugin.occlusionMesh)
{
AddFix("Occlusion Mesh", "Enabling the occlusion mesh saves substantial GPU resources, generally with no visual impact. Enable unless you have an exceptional use case.", delegate (UnityEngine.Object obj, bool last, int selected)
{
OVRPlugin.occlusionMesh = true;
}, null, "Set OVRPlugin.occlusionMesh = true");
}
if (OVRManager.instance != null && !OVRManager.instance.useRecommendedMSAALevel)
{
AddFix("Optimize MSAA", "OVRManager can select the optimal antialiasing for the installed hardware at runtime. Recommend enabling this.", delegate (UnityEngine.Object obj, bool last, int selected)
{
OVRManager.instance.useRecommendedMSAALevel = true;
}, null, "Set useRecommendedMSAALevel = true");
}
#if UNITY_2017_2_OR_NEWER
if (UnityEngine.XR.XRSettings.eyeTextureResolutionScale > 1.5)
#else
if (UnityEngine.VR.VRSettings.renderScale > 1.5)
#endif
{
AddFix("Optimize Render Scale", "Render scale above 1.5 is extremely expensive on the GPU, with little if any positive visual benefit.", delegate (UnityEngine.Object obj, bool last, int selected)
{
#if UNITY_2017_2_OR_NEWER
UnityEngine.XR.XRSettings.eyeTextureResolutionScale = 1.5f;
#else
UnityEngine.VR.VRSettings.renderScale = 1.5f;
#endif
}, null, "Fix");
}
}
static void CheckStaticAndroidIssues()
{
AndroidSdkVersions recommendedAndroidSdkVersion = AndroidSdkVersions.AndroidApiLevel21;
if ((int)PlayerSettings.Android.minSdkVersion < (int)recommendedAndroidSdkVersion)
{
AddFix("Optimize Android API Level", "To avoid legacy workarounds, please require at least API level " + (int)recommendedAndroidSdkVersion, delegate (UnityEngine.Object obj, bool last, int selected)
{
PlayerSettings.Android.minSdkVersion = recommendedAndroidSdkVersion;
}, null, "Fix");
}
if (!PlayerSettings.gpuSkinning)
{
AddFix("Optimize GPU Skinning", "If you are CPU-bound, consider using GPU skinning.", delegate (UnityEngine.Object obj, bool last, int selected)
{
PlayerSettings.gpuSkinning = true;
}, null, "Fix");
}
if (RenderSettings.skybox)
{
AddFix("Optimize Clearing", "For GPU performance, please don't use Unity's built-in Skybox.", delegate (UnityEngine.Object obj, bool last, int selected)
{
RenderSettings.skybox = null;
}, null, "Clear Skybox");
}
var materials = Resources.FindObjectsOfTypeAll<Material>();
for (int i = 0; i < materials.Length; ++i)
{
if (materials[i].IsKeywordEnabled("_SPECGLOSSMAP") || materials[i].IsKeywordEnabled("_METALLICGLOSSMAP"))
{
AddFix("Optimize Specular Material", "For GPU performance, please don't use specular shader on materials.", delegate (UnityEngine.Object obj, bool last, int selected)
{
Material thisMaterial = (Material)obj;
thisMaterial.DisableKeyword("_SPECGLOSSMAP");
thisMaterial.DisableKeyword("_METALLICGLOSSMAP");
}, materials[i], "Fix");
}
if (materials[i].passCount > 1)
{
AddFix("Material Passes", "Please use 2 or fewer passes in materials.", null, materials[i]);
}
}
ScriptingImplementation backend = PlayerSettings.GetScriptingBackend(UnityEditor.BuildTargetGroup.Android);
if (backend != UnityEditor.ScriptingImplementation.IL2CPP)
{
AddFix("Optimize Scripting Backend", "For CPU performance, please use IL2CPP.", delegate (UnityEngine.Object obj, bool last, int selected)
{
PlayerSettings.SetScriptingBackend(UnityEditor.BuildTargetGroup.Android, UnityEditor.ScriptingImplementation.IL2CPP);
}, null, "Fix");
}
var monoBehaviours = GameObject.FindObjectsOfType<MonoBehaviour>();
System.Type effectBaseType = System.Type.GetType("UnityStandardAssets.ImageEffects.PostEffectsBase");
if (effectBaseType != null)
{
for (int i = 0; i < monoBehaviours.Length; ++i)
{
if (monoBehaviours[i].GetType().IsSubclassOf(effectBaseType))
{
AddFix("Image Effects", "Please don't use image effects.", null, monoBehaviours[i]);
}
}
}
var textures = Resources.FindObjectsOfTypeAll<Texture2D>();
int maxTextureSize = 1024 * (1 << QualitySettings.masterTextureLimit);
maxTextureSize = maxTextureSize * maxTextureSize;
for (int i = 0; i < textures.Length; ++i)
{
if (textures[i].filterMode == FilterMode.Trilinear && textures[i].mipmapCount == 1)
{
AddFix("Optimize Texture Filtering", "For GPU performance, please generate mipmaps or disable trilinear filtering for textures.", delegate (UnityEngine.Object obj, bool last, int selected)
{
Texture2D thisTexture = (Texture2D)obj;
if (selected == 0)
{
thisTexture.filterMode = FilterMode.Bilinear;
}
else
{
SetTextureUseMips(thisTexture, true, last);
}
}, textures[i], "Switch to Bilinear", "Generate Mipmaps");
}
}
var projectors = GameObject.FindObjectsOfType<Projector>();
if (projectors.Length > 0)
{
AddFix("Optimize Projectors", "For GPU performance, please don't use projectors.", delegate (UnityEngine.Object obj, bool last, int selected)
{
Projector[] thisProjectors = GameObject.FindObjectsOfType<Projector>();
for (int i = 0; i < thisProjectors.Length; ++i)
{
thisProjectors[i].enabled = false;
}
}, null, "Disable Projectors");
}
if (EditorUserBuildSettings.androidBuildSubtarget != MobileTextureSubtarget.ASTC)
{
AddFix("Optimize Texture Compression", "For GPU performance, please use ASTC.", delegate (UnityEngine.Object obj, bool last, int selected)
{
EditorUserBuildSettings.androidBuildSubtarget = MobileTextureSubtarget.ASTC;
}, null, "Fix");
}
var cameras = GameObject.FindObjectsOfType<Camera>();
int clearCount = 0;
for (int i = 0; i < cameras.Length; ++i)
{
if (cameras[i].clearFlags != CameraClearFlags.Nothing && cameras[i].clearFlags != CameraClearFlags.Depth)
++clearCount;
}
if (clearCount > 2)
{
AddFix("Camera Clears", "Please use 2 or fewer clears.", null, null);
}
}
static void CheckRuntimeAndroidIssues()
{
if (UnityStats.usedTextureMemorySize + UnityStats.vboTotalBytes > 1024 * 1024 * 1024) // 1 GB, matching the message below; the original threshold of 1000000 bytes was only ~1 MB.
{
AddFix("Graphics Memory", "Please use less than 1GB of vertex and texture memory.", null, null);
}
if (OVRManager.cpuLevel < 0 || OVRManager.cpuLevel > 3)
{
AddFix("Optimize CPU level", "For battery life, please use a safe CPU level.", delegate (UnityEngine.Object obj, bool last, int selected)
{
OVRManager.cpuLevel = 2;
}, null, "Set to CPU2");
}
if (OVRManager.gpuLevel < 0 || OVRManager.gpuLevel > 3)
{
AddFix("Optimize GPU level", "For battery life, please use a safe GPU level.", delegate (UnityEngine.Object obj, bool last, int selected)
{
OVRManager.gpuLevel = 2;
}, null, "Set to GPU2");
}
if (UnityStats.triangles > 100000 || UnityStats.vertices > 100000)
{
AddFix("Triangles and Verts", "Please use less than 100000 triangles or vertices.", null, null);
}
// Warn for 50 if in non-VR mode?
if (UnityStats.drawCalls > 100)
{
AddFix("Draw Calls", "Please use less than 100 draw calls.", null, null);
}
}
enum LightmapType { Realtime = 4, Baked = 2, Mixed = 1 };
static bool IsLightBaked(Light light)
{
return light.lightmapBakeType == LightmapBakeType.Baked;
}
static void SetAudioPreload(AudioClip clip, bool preload, bool refreshImmediately)
{
if (clip != null)
{
string assetPath = AssetDatabase.GetAssetPath(clip);
AudioImporter importer = AssetImporter.GetAtPath(assetPath) as AudioImporter;
if (importer != null)
{
if (preload != importer.preloadAudioData)
{
importer.preloadAudioData = preload;
AssetDatabase.ImportAsset(assetPath);
if (refreshImmediately)
{
AssetDatabase.Refresh();
}
}
}
}
}
static void SetAudioLoadType(AudioClip clip, AudioClipLoadType loadType, bool refreshImmediately)
{
if (clip != null)
{
string assetPath = AssetDatabase.GetAssetPath(clip);
AudioImporter importer = AssetImporter.GetAtPath(assetPath) as AudioImporter;
if (importer != null)
{
if (loadType != importer.defaultSampleSettings.loadType)
{
AudioImporterSampleSettings settings = importer.defaultSampleSettings;
settings.loadType = loadType;
importer.defaultSampleSettings = settings;
AssetDatabase.ImportAsset(assetPath);
if (refreshImmediately)
{
AssetDatabase.Refresh();
}
}
}
}
}
public static void SetTextureUseMips(Texture texture, bool useMips, bool refreshImmediately)
{
if (texture != null)
{
string assetPath = AssetDatabase.GetAssetPath(texture);
TextureImporter tImporter = AssetImporter.GetAtPath(assetPath) as TextureImporter;
if (tImporter != null && tImporter.mipmapEnabled != useMips)
{
tImporter.mipmapEnabled = useMips;
AssetDatabase.ImportAsset(assetPath);
if (refreshImmediately)
{
AssetDatabase.Refresh();
}
}
}
}
static T FindComponentInParents<T>(GameObject obj) where T : Component
{
T component = null;
if (obj != null)
{
Transform parent = obj.transform.parent;
if (parent != null)
{
do
{
component = parent.GetComponent(typeof(T)) as T;
parent = parent.parent;
} while (parent != null && component == null);
}
}
return component;
}
}
#endif

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: f483b2ec6fdc344049771ed51034b4e4
timeCreated: 1484088598
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

File diff suppressed because it is too large

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7e933e81d3c20c74ea6fdc708a67e3a5
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: -100
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,134 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using System.Collections.Generic;
using UnityEngine;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
/// <summary>
/// Manages mixed reality elements
/// </summary>
internal static class OVRMixedReality
{
/// <summary>
/// Configurable parameters
/// </summary>
public static Color chromaKeyColor = Color.green;
/// <summary>
/// For debugging purposes, preset parameters can be used to fake a camera when an external camera is not available
/// </summary>
public static bool useFakeExternalCamera = false;
public static Vector3 fakeCameraPositon = new Vector3(3.0f, 0.0f, 3.0f);
public static Quaternion fakeCameraRotation = Quaternion.LookRotation((new Vector3(0.0f, 1.0f, 0.0f) - fakeCameraPositon).normalized, Vector3.up);
public static float fakeCameraFov = 60.0f;
public static float fakeCameraAspect = 16.0f / 9.0f;
/// <summary>
/// Composition object
/// </summary>
public static OVRComposition currentComposition = null;
/// <summary>
/// Updates the internal state of the Mixed Reality Camera. Called by OVRManager.
/// </summary>
public static void Update(GameObject parentObject, Camera mainCamera, OVRManager.CompositionMethod compositionMethod, bool useDynamicLighting, OVRManager.CameraDevice cameraDevice, OVRManager.DepthQuality depthQuality)
{
if (!OVRPlugin.initialized)
{
Debug.LogError("OVRPlugin not initialized");
return;
}
if (!OVRPlugin.IsMixedRealityInitialized())
OVRPlugin.InitializeMixedReality();
if (!OVRPlugin.IsMixedRealityInitialized())
{
Debug.LogError("Unable to initialize MixedReality");
return;
}
OVRPlugin.UpdateExternalCamera();
OVRPlugin.UpdateCameraDevices();
if (currentComposition != null && currentComposition.CompositionMethod() != compositionMethod)
{
currentComposition.Cleanup();
currentComposition = null;
}
if (compositionMethod == OVRManager.CompositionMethod.External)
{
if (currentComposition == null)
{
currentComposition = new OVRExternalComposition(parentObject, mainCamera);
}
}
else if (compositionMethod == OVRManager.CompositionMethod.Direct)
{
if (currentComposition == null)
{
currentComposition = new OVRDirectComposition(parentObject, mainCamera, cameraDevice, useDynamicLighting, depthQuality);
}
}
else if (compositionMethod == OVRManager.CompositionMethod.Sandwich)
{
if (currentComposition == null)
{
currentComposition = new OVRSandwichComposition(parentObject, mainCamera, cameraDevice, useDynamicLighting, depthQuality);
}
}
else
{
Debug.LogError("Unknown CompositionMethod : " + compositionMethod);
return;
}
currentComposition.Update(mainCamera);
}
public static void Cleanup()
{
if (currentComposition != null)
{
currentComposition.Cleanup();
currentComposition = null;
}
if (OVRPlugin.IsMixedRealityInitialized())
{
OVRPlugin.ShutdownMixedReality();
}
}
public static void RecenterPose()
{
if (currentComposition != null)
{
currentComposition.RecenterPose();
}
}
}
#endif

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 5daf6258e951ab84bb8b3e3b03386396
timeCreated: 1497574405
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,38 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
#if UNITY_ANDROID && !UNITY_EDITOR
public abstract class OVROnCompleteListener : AndroidJavaProxy
{
public OVROnCompleteListener() : base("com.oculus.svclib.OnCompleteListener")
{
}
public abstract void onSuccess();
public abstract void onFailure();
}
#endif

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 8730118d7f00f9b47b09be73f7e91d2b
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,668 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System;
using System.Collections;
using System.Runtime.InteropServices;
/// <summary>
/// Add the OVROverlay script to an object, with an optional mesh primitive, to have it
/// rendered as a TimeWarp overlay instead of being drawn into the eye buffer.
/// This takes full advantage of the display resolution and avoids double
/// resampling of the texture.
///
/// The following overlay shapes are currently supported:
/// 1. Quad: The most common overlay type; renders a quad in TimeWarp space.
/// 2. Cylinder: [Mobile Only][Experimental] Displays the overlay as a partial surface of a cylinder.
/// * The cylinder's center will be your game object's center.
/// * The cylinder's parameters are encoded in transform.scale:
/// **[scale.z] is the radius of the cylinder
/// **[scale.y] is the height of the cylinder
/// **[scale.x] is the arc length of the cylinder
/// * Limitations
/// **Only half of the cylinder can be displayed, so the arc angle has to be smaller than 180 degrees: [scale.x] / [scale.z] <= PI
/// **Your camera has to be inside the inscribed sphere of the cylinder; the overlay fades out automatically as the camera approaches the inscribed sphere's surface.
/// **Translation only works correctly with vrDriver 1.04 or above.
/// 3. Cubemap: Displays the overlay as a cube map.
/// 4. OffcenterCubemap: [Mobile Only] Displays the overlay as a cube map with a texture coordinate offset.
/// * Sampling looks like [color = texture(cubeLayerSampler, normalize(direction) + offset)] instead of [color = texture(cubeLayerSampler, direction)].
/// * The extra center offset is fed from transform.position.
/// * Note: if transform.position's magnitude is greater than 1, some cube map pixels will always be invisible.
/// That is usually not what people want, so the ability is not removed here, but a warning is issued.
/// 5. Equirect: Displays the overlay as a 360-degree equirectangular skybox.
/// </summary>
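// Illustrative sketch (hypothetical values, not part of the original file): encoding cylinder
// parameters into transform.localScale as described above. For a cylinder of radius 1 m,
// height 0.8 m and a 90-degree arc, the arc length is radius * angleInRadians:
//
//   float radius = 1.0f;                              // [scale.z]
//   float height = 0.8f;                              // [scale.y]
//   float arcLength = radius * 90.0f * Mathf.Deg2Rad; // [scale.x]; arcLength / radius <= PI must hold
//   overlayObject.transform.localScale = new Vector3(arcLength, height, radius);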
public class OVROverlay : MonoBehaviour
{
#region Interface
/// <summary>
/// Determines the on-screen appearance of a layer.
/// </summary>
public enum OverlayShape
{
Quad = OVRPlugin.OverlayShape.Quad,
Cylinder = OVRPlugin.OverlayShape.Cylinder,
Cubemap = OVRPlugin.OverlayShape.Cubemap,
OffcenterCubemap = OVRPlugin.OverlayShape.OffcenterCubemap,
Equirect = OVRPlugin.OverlayShape.Equirect,
}
/// <summary>
/// Whether the layer appears behind or in front of other content in the scene.
/// </summary>
public enum OverlayType
{
None,
Underlay,
Overlay,
};
/// <summary>
/// Specify overlay's type
/// </summary>
[Tooltip("Specify overlay's type")]
public OverlayType currentOverlayType = OverlayType.Overlay;
/// <summary>
/// If true, the texture's content is copied to the compositor each frame.
/// </summary>
[Tooltip("If true, the texture's content is copied to the compositor each frame.")]
public bool isDynamic = false;
/// <summary>
/// If true, the layer is used to present protected content (e.g. HDCP). The flag only takes effect on PC.
/// </summary>
[Tooltip("If true, the layer is used to present protected content (e.g. HDCP). The flag only takes effect on PC.")]
public bool isProtectedContent = false;
/// <summary>
/// compositionDepth defines the order of OVROverlays in composition. An overlay/underlay with a smaller compositionDepth is composited in front of one with a larger compositionDepth.
/// </summary>
[Tooltip("compositionDepth defines the order of OVROverlays in composition. An overlay/underlay with a smaller compositionDepth is composited in front of one with a larger compositionDepth.")]
public int compositionDepth = 0;
/// <summary>
/// Specify overlay's shape
/// </summary>
[Tooltip("Specify overlay's shape")]
public OverlayShape currentOverlayShape = OverlayShape.Quad;
private OverlayShape prevOverlayShape = OverlayShape.Quad;
/// <summary>
/// The left- and right-eye Textures to show in the layer.
/// \note If you need to change the texture on a per-frame basis, please use OverrideOverlayTextureInfo(..) to avoid caching issues.
/// </summary>
[Tooltip("The left- and right-eye Textures to show in the layer.")]
public Texture[] textures = new Texture[] { null, null };
protected IntPtr[] texturePtrs = new IntPtr[] { IntPtr.Zero, IntPtr.Zero };
/// <summary>
/// Use this function to set the texture and its native pointer (texNativePtr) while the app is running.
/// GetNativeTexturePtr is a slow operation, so the value should be pre-cached.
/// </summary>
#if UNITY_2017_2_OR_NEWER
public void OverrideOverlayTextureInfo(Texture srcTexture, IntPtr nativePtr, UnityEngine.XR.XRNode node)
#else
public void OverrideOverlayTextureInfo(Texture srcTexture, IntPtr nativePtr, UnityEngine.VR.VRNode node)
#endif
{
#if UNITY_2017_2_OR_NEWER
int index = (node == UnityEngine.XR.XRNode.RightEye) ? 1 : 0;
#else
int index = (node == UnityEngine.VR.VRNode.RightEye) ? 1 : 0;
#endif
if (textures.Length <= index)
return;
textures[index] = srcTexture;
texturePtrs[index] = nativePtr;
isOverridePending = true;
}
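// Illustrative usage sketch (hypothetical caller, not part of the original file): cache the native
// pointer once at startup and reuse it when overriding the texture per frame, since
// GetNativeTexturePtr() is expensive:
//
//   IntPtr cachedPtr = myTexture.GetNativeTexturePtr(); // do this once, e.g. in Start()
//   ...
//   overlay.OverrideOverlayTextureInfo(myTexture, cachedPtr, UnityEngine.XR.XRNode.LeftEye);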
protected bool isOverridePending;
internal const int maxInstances = 15;
internal static OVROverlay[] instances = new OVROverlay[maxInstances];
#endregion
private static Material tex2DMaterial;
private static Material cubeMaterial;
private OVRPlugin.LayerLayout layout {
get {
#if UNITY_ANDROID && !UNITY_EDITOR
if (textures.Length == 2 && textures[1] != null)
return OVRPlugin.LayerLayout.Stereo;
#endif
return OVRPlugin.LayerLayout.Mono;
}
}
private struct LayerTexture {
public Texture appTexture;
public IntPtr appTexturePtr;
public Texture[] swapChain;
public IntPtr[] swapChainPtr;
};
private LayerTexture[] layerTextures;
private OVRPlugin.LayerDesc layerDesc;
private int stageCount = -1;
private int layerIndex = -1; // Controls the composition order based on wake-up time.
private int layerId = 0; // The layer's internal handle in the compositor.
private GCHandle layerIdHandle;
private IntPtr layerIdPtr = IntPtr.Zero;
private int frameIndex = 0;
private int prevFrameIndex = -1;
private Renderer rend;
private int texturesPerStage { get { return (layout == OVRPlugin.LayerLayout.Stereo) ? 2 : 1; } }
private bool CreateLayer(int mipLevels, int sampleCount, OVRPlugin.EyeTextureFormat etFormat, int flags, OVRPlugin.Sizei size, OVRPlugin.OverlayShape shape)
{
if (!layerIdHandle.IsAllocated || layerIdPtr == IntPtr.Zero)
{
layerIdHandle = GCHandle.Alloc(layerId, GCHandleType.Pinned);
layerIdPtr = layerIdHandle.AddrOfPinnedObject();
}
if (layerIndex == -1)
{
for (int i = 0; i < maxInstances; ++i)
{
if (instances[i] == null || instances[i] == this)
{
layerIndex = i;
instances[i] = this;
break;
}
}
}
bool needsSetup = (
isOverridePending ||
layerDesc.MipLevels != mipLevels ||
layerDesc.SampleCount != sampleCount ||
layerDesc.Format != etFormat ||
layerDesc.Layout != layout ||
layerDesc.LayerFlags != flags ||
!layerDesc.TextureSize.Equals(size) ||
layerDesc.Shape != shape);
if (!needsSetup)
return false;
OVRPlugin.LayerDesc desc = OVRPlugin.CalculateLayerDesc(shape, layout, size, mipLevels, sampleCount, etFormat, flags);
OVRPlugin.EnqueueSetupLayer(desc, compositionDepth, layerIdPtr);
layerId = (int)layerIdHandle.Target;
if (layerId > 0)
{
layerDesc = desc;
stageCount = OVRPlugin.GetLayerTextureStageCount(layerId);
}
isOverridePending = false;
return true;
}
private bool CreateLayerTextures(bool useMipmaps, OVRPlugin.Sizei size, bool isHdr)
{
bool needsCopy = false;
if (stageCount <= 0)
return false;
// For newer SDKs, blit directly to the surface that will be used in compositing.
if (layerTextures == null)
layerTextures = new LayerTexture[texturesPerStage];
for (int eyeId = 0; eyeId < texturesPerStage; ++eyeId)
{
if (layerTextures[eyeId].swapChain == null)
layerTextures[eyeId].swapChain = new Texture[stageCount];
if (layerTextures[eyeId].swapChainPtr == null)
layerTextures[eyeId].swapChainPtr = new IntPtr[stageCount];
for (int stage = 0; stage < stageCount; ++stage)
{
Texture sc = layerTextures[eyeId].swapChain[stage];
IntPtr scPtr = layerTextures[eyeId].swapChainPtr[stage];
if (sc != null && scPtr != IntPtr.Zero)
continue;
if (scPtr == IntPtr.Zero)
scPtr = OVRPlugin.GetLayerTexture(layerId, stage, (OVRPlugin.Eye)eyeId);
if (scPtr == IntPtr.Zero)
continue;
var txFormat = (isHdr) ? TextureFormat.RGBAHalf : TextureFormat.RGBA32;
if (currentOverlayShape != OverlayShape.Cubemap && currentOverlayShape != OverlayShape.OffcenterCubemap)
sc = Texture2D.CreateExternalTexture(size.w, size.h, txFormat, useMipmaps, true, scPtr);
#if UNITY_2017_1_OR_NEWER
else
sc = Cubemap.CreateExternalTexture(size.w, txFormat, useMipmaps, scPtr);
#endif
layerTextures[eyeId].swapChain[stage] = sc;
layerTextures[eyeId].swapChainPtr[stage] = scPtr;
needsCopy = true;
}
}
return needsCopy;
}
private void DestroyLayerTextures()
{
for (int eyeId = 0; layerTextures != null && eyeId < texturesPerStage; ++eyeId)
{
if (layerTextures[eyeId].swapChain != null)
{
for (int stage = 0; stage < stageCount; ++stage)
DestroyImmediate(layerTextures[eyeId].swapChain[stage]);
}
}
layerTextures = null;
}
private void DestroyLayer()
{
if (layerIndex != -1)
{
// Turn off the overlay if it was on.
OVRPlugin.EnqueueSubmitLayer(true, false, IntPtr.Zero, IntPtr.Zero, -1, 0, OVRPose.identity.ToPosef(), Vector3.one.ToVector3f(), layerIndex, (OVRPlugin.OverlayShape)prevOverlayShape);
instances[layerIndex] = null;
layerIndex = -1;
}
if (layerIdPtr != IntPtr.Zero)
{
OVRPlugin.EnqueueDestroyLayer(layerIdPtr);
layerIdPtr = IntPtr.Zero;
layerIdHandle.Free();
layerId = 0;
}
layerDesc = new OVRPlugin.LayerDesc();
frameIndex = 0;
prevFrameIndex = -1;
}
private bool LatchLayerTextures()
{
for (int i = 0; i < texturesPerStage; ++i)
{
if (textures[i] != layerTextures[i].appTexture || layerTextures[i].appTexturePtr == IntPtr.Zero)
{
if (textures[i] != null)
{
#if UNITY_EDITOR
var assetPath = UnityEditor.AssetDatabase.GetAssetPath(textures[i]);
var importer = (UnityEditor.TextureImporter)UnityEditor.TextureImporter.GetAtPath(assetPath);
if (importer && importer.textureType != UnityEditor.TextureImporterType.Default)
{
Debug.LogError("Need Default Texture Type for overlay");
return false;
}
#endif
var rt = textures[i] as RenderTexture;
if (rt && !rt.IsCreated())
rt.Create();
layerTextures[i].appTexturePtr = (texturePtrs[i] != IntPtr.Zero) ? texturePtrs[i] : textures[i].GetNativeTexturePtr();
if (layerTextures[i].appTexturePtr != IntPtr.Zero)
layerTextures[i].appTexture = textures[i];
}
}
if (currentOverlayShape == OverlayShape.Cubemap)
{
if (textures[i] as Cubemap == null)
{
Debug.LogError("Need Cubemap texture for cube map overlay");
return false;
}
}
}
#if !UNITY_ANDROID || UNITY_EDITOR
if (currentOverlayShape == OverlayShape.OffcenterCubemap)
{
Debug.LogWarning("Overlay shape " + currentOverlayShape + " is not supported on current platform");
return false;
}
#endif
if (layerTextures[0].appTexture == null || layerTextures[0].appTexturePtr == IntPtr.Zero)
return false;
return true;
}
private OVRPlugin.LayerDesc GetCurrentLayerDesc()
{
OVRPlugin.LayerDesc newDesc = new OVRPlugin.LayerDesc() {
Format = OVRPlugin.EyeTextureFormat.R8G8B8A8_sRGB,
LayerFlags = (int)OVRPlugin.LayerFlags.TextureOriginAtBottomLeft,
Layout = layout,
MipLevels = 1,
SampleCount = 1,
Shape = (OVRPlugin.OverlayShape)currentOverlayShape,
TextureSize = new OVRPlugin.Sizei() { w = textures[0].width, h = textures[0].height }
};
var tex2D = textures[0] as Texture2D;
if (tex2D != null)
{
if (tex2D.format == TextureFormat.RGBAHalf || tex2D.format == TextureFormat.RGBAFloat)
newDesc.Format = OVRPlugin.EyeTextureFormat.R16G16B16A16_FP;
newDesc.MipLevels = tex2D.mipmapCount;
}
var texCube = textures[0] as Cubemap;
if (texCube != null)
{
if (texCube.format == TextureFormat.RGBAHalf || texCube.format == TextureFormat.RGBAFloat)
newDesc.Format = OVRPlugin.EyeTextureFormat.R16G16B16A16_FP;
newDesc.MipLevels = texCube.mipmapCount;
}
var rt = textures[0] as RenderTexture;
if (rt != null)
{
newDesc.SampleCount = rt.antiAliasing;
if (rt.format == RenderTextureFormat.ARGBHalf || rt.format == RenderTextureFormat.ARGBFloat || rt.format == RenderTextureFormat.RGB111110Float)
newDesc.Format = OVRPlugin.EyeTextureFormat.R16G16B16A16_FP;
}
if (isProtectedContent)
{
newDesc.LayerFlags |= (int)OVRPlugin.LayerFlags.ProtectedContent;
}
return newDesc;
}
private bool PopulateLayer(int mipLevels, bool isHdr, OVRPlugin.Sizei size, int sampleCount, int stage)
{
bool ret = false;
RenderTextureFormat rtFormat = (isHdr) ? RenderTextureFormat.ARGBHalf : RenderTextureFormat.ARGB32;
for (int eyeId = 0; eyeId < texturesPerStage; ++eyeId)
{
Texture et = layerTextures[eyeId].swapChain[stage];
if (et == null)
continue;
for (int mip = 0; mip < mipLevels; ++mip)
{
int width = size.w >> mip;
if (width < 1) width = 1;
int height = size.h >> mip;
if (height < 1) height = 1;
#if UNITY_2017_1_1 || UNITY_2017_2_OR_NEWER
RenderTextureDescriptor descriptor = new RenderTextureDescriptor(width, height, rtFormat, 0);
descriptor.msaaSamples = sampleCount;
descriptor.useMipMap = true;
descriptor.autoGenerateMips = false;
descriptor.sRGB = false;
var tempRTDst = RenderTexture.GetTemporary(descriptor);
#else
var tempRTDst = RenderTexture.GetTemporary(width, height, 0, rtFormat, RenderTextureReadWrite.Linear, sampleCount);
#endif
if (!tempRTDst.IsCreated())
tempRTDst.Create();
tempRTDst.DiscardContents();
bool dataIsLinear = isHdr || (QualitySettings.activeColorSpace == ColorSpace.Linear);
#if !UNITY_2017_1_OR_NEWER
var rt = textures[eyeId] as RenderTexture;
dataIsLinear |= rt != null && rt.sRGB; //HACK: Unity 5.6 and earlier convert to linear on read from sRGB RenderTexture.
#endif
#if UNITY_ANDROID && !UNITY_EDITOR
dataIsLinear = true; //HACK: Graphics.CopyTexture causes linear->srgb conversion on target write with D3D but not GLES.
#endif
if (currentOverlayShape != OverlayShape.Cubemap && currentOverlayShape != OverlayShape.OffcenterCubemap)
{
tex2DMaterial.SetInt("_linearToSrgb", (!isHdr && dataIsLinear) ? 1 : 0);
//Resolve, decompress, swizzle, etc not handled by simple CopyTexture.
#if !UNITY_ANDROID || UNITY_EDITOR
// The PC compositor uses premultiplied alpha, so multiply it here.
tex2DMaterial.SetInt("_premultiply", 1);
#endif
Graphics.Blit(textures[eyeId], tempRTDst, tex2DMaterial);
Graphics.CopyTexture(tempRTDst, 0, 0, et, 0, mip);
}
#if UNITY_2017_1_OR_NEWER
else // Cubemap
{
for (int face = 0; face < 6; ++face)
{
cubeMaterial.SetInt("_linearToSrgb", (!isHdr && dataIsLinear) ? 1 : 0);
#if !UNITY_ANDROID || UNITY_EDITOR
// The PC compositor uses premultiplied alpha, so multiply it here.
cubeMaterial.SetInt("_premultiply", 1);
#endif
cubeMaterial.SetInt("_face", face);
//Resolve, decompress, swizzle, etc not handled by simple CopyTexture.
Graphics.Blit(textures[eyeId], tempRTDst, cubeMaterial);
Graphics.CopyTexture(tempRTDst, 0, 0, et, face, mip);
}
}
#endif
RenderTexture.ReleaseTemporary(tempRTDst);
ret = true;
}
}
return ret;
}
private bool SubmitLayer(bool overlay, bool headLocked, OVRPose pose, Vector3 scale, int frameIndex)
{
int rightEyeIndex = (texturesPerStage >= 2) ? 1 : 0;
bool isOverlayVisible = OVRPlugin.EnqueueSubmitLayer(overlay, headLocked, layerTextures[0].appTexturePtr, layerTextures[rightEyeIndex].appTexturePtr, layerId, frameIndex, pose.flipZ().ToPosef(), scale.ToVector3f(), layerIndex, (OVRPlugin.OverlayShape)currentOverlayShape);
prevOverlayShape = currentOverlayShape;
return isOverlayVisible;
}
#region Unity Messages
void Awake()
{
Debug.Log("Overlay Awake");
if (tex2DMaterial == null)
tex2DMaterial = new Material(Shader.Find("Oculus/Texture2D Blit"));
if (cubeMaterial == null)
cubeMaterial = new Material(Shader.Find("Oculus/Cubemap Blit"));
rend = GetComponent<Renderer>();
if (textures.Length == 0)
textures = new Texture[] { null };
// Backward compatibility
if (rend != null && textures[0] == null)
textures[0] = rend.material.mainTexture;
}
void OnEnable()
{
if (!OVRManager.isHmdPresent)
{
enabled = false;
return;
}
}
void OnDisable()
{
if ((gameObject.hideFlags & HideFlags.DontSaveInBuild) != 0)
return;
DestroyLayerTextures();
DestroyLayer();
}
void OnDestroy()
{
DestroyLayerTextures();
DestroyLayer();
}
bool ComputeSubmit(ref OVRPose pose, ref Vector3 scale, ref bool overlay, ref bool headLocked)
{
Camera headCamera = Camera.main;
overlay = (currentOverlayType == OverlayType.Overlay);
headLocked = false;
for (var t = transform; t != null && !headLocked; t = t.parent)
headLocked |= (t == headCamera.transform);
pose = (headLocked) ? transform.ToHeadSpacePose(headCamera) : transform.ToTrackingSpacePose(headCamera);
scale = transform.lossyScale;
for (int i = 0; i < 3; ++i)
scale[i] /= headCamera.transform.lossyScale[i];
if (currentOverlayShape == OverlayShape.Cubemap)
{
#if UNITY_ANDROID && !UNITY_EDITOR
//HACK: VRAPI cubemaps are assumed to be yawed 180 degrees relative to LibOVR.
pose.orientation = pose.orientation * Quaternion.AngleAxis(180, Vector3.up);
#endif
pose.position = headCamera.transform.position;
}
// Pack the offsetCenter directly into pose.position for offcenterCubemap
if (currentOverlayShape == OverlayShape.OffcenterCubemap)
{
pose.position = transform.position;
if (pose.position.magnitude > 1.0f)
{
Debug.LogWarning("Your cube map center offset's magnitude is greater than 1, which will cause some cube map pixel always invisible .");
return false;
}
}
// Cylinder overlay sanity checking
if (currentOverlayShape == OverlayShape.Cylinder)
{
float arcAngle = scale.x / scale.z / (float)Math.PI * 180.0f;
if (arcAngle > 180.0f)
{
Debug.LogWarning("Cylinder overlay's arc angle has to be below 180 degree, current arc angle is " + arcAngle + " degree." );
return false;
}
}
return true;
}
void LateUpdate()
{
// The overlay must be specified every eye frame, because it is positioned relative to the
// current head location. If frames are dropped, it will be time warped appropriately,
// just like the eye buffers.
if (currentOverlayType == OverlayType.None || textures.Length < texturesPerStage || textures[0] == null)
return;
OVRPose pose = OVRPose.identity;
Vector3 scale = Vector3.one;
bool overlay = false;
bool headLocked = false;
if (!ComputeSubmit(ref pose, ref scale, ref overlay, ref headLocked))
return;
OVRPlugin.LayerDesc newDesc = GetCurrentLayerDesc();
bool isHdr = (newDesc.Format == OVRPlugin.EyeTextureFormat.R16G16B16A16_FP);
bool createdLayer = CreateLayer(newDesc.MipLevels, newDesc.SampleCount, newDesc.Format, newDesc.LayerFlags, newDesc.TextureSize, newDesc.Shape);
if (layerIndex == -1 || layerId <= 0)
return;
bool useMipmaps = (newDesc.MipLevels > 1);
createdLayer |= CreateLayerTextures(useMipmaps, newDesc.TextureSize, isHdr);
if (layerTextures[0].appTexture as RenderTexture != null)
isDynamic = true;
if (!LatchLayerTextures())
return;
// Don't populate the same frame image twice.
if (frameIndex > prevFrameIndex)
{
int stage = frameIndex % stageCount;
if (!PopulateLayer (newDesc.MipLevels, isHdr, newDesc.TextureSize, newDesc.SampleCount, stage))
return;
}
bool isOverlayVisible = SubmitLayer(overlay, headLocked, pose, scale, frameIndex);
prevFrameIndex = frameIndex;
if (isDynamic)
++frameIndex;
// Backward compatibility: show regular renderer if overlay isn't visible.
if (rend)
rend.enabled = !isOverlayVisible;
}
#endregion
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 4444ce35d262aa648ad0c425a559b931
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,129 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using VR = UnityEngine.VR;
using System.Collections;
using System.Collections.Generic;
/// <summary>
/// Shows the Oculus platform UI.
/// </summary>
public class OVRPlatformMenu : MonoBehaviour
{
/// <summary>
/// The button that triggers the menu.
/// </summary>
private OVRInput.RawButton inputCode = OVRInput.RawButton.Back;
public enum eHandler
{
ShowConfirmQuit,
RetreatOneLevel,
};
public eHandler shortPressHandler = eHandler.ShowConfirmQuit;
/// <summary>
/// Callback to handle short press. Returns true if ConfirmQuit menu should be shown.
/// </summary>
public System.Func<bool> OnShortPress;
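// Illustrative sketch (hypothetical handler, not part of the original file): installing a custom
// short-press callback. Returning true shows the confirm-quit menu; returning false suppresses it.
//
//   var menu = GetComponent<OVRPlatformMenu>();
//   menu.OnShortPress = () =>
//   {
//       return !TryCloseTopmostDialog(); // hypothetical helper; only quit when nothing is open
//   };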
private static Stack<string> sceneStack = new Stack<string>();
enum eBackButtonAction
{
NONE,
SHORT_PRESS
};
eBackButtonAction HandleBackButtonState()
{
eBackButtonAction action = eBackButtonAction.NONE;
if (OVRInput.GetDown(inputCode))
{
action = eBackButtonAction.SHORT_PRESS;
}
return action;
}
/// <summary>
/// Set up the short-press handler and record the launch scene
/// </summary>
void Awake()
{
if (shortPressHandler == eHandler.RetreatOneLevel && OnShortPress == null)
OnShortPress = RetreatOneLevel;
if (!OVRManager.isHmdPresent)
{
enabled = false;
return;
}
sceneStack.Push(UnityEngine.SceneManagement.SceneManager.GetActiveScene().name);
}
/// <summary>
/// Show the confirm quit menu
/// </summary>
void ShowConfirmQuitMenu()
{
#if UNITY_ANDROID && !UNITY_EDITOR
Debug.Log("[PlatformUI-ConfirmQuit] Showing @ " + Time.time);
OVRManager.PlatformUIConfirmQuit();
#endif
}
/// <summary>
/// Sample handler for short press which retreats to the previous scene that used OVRPlatformMenu.
/// </summary>
private static bool RetreatOneLevel()
{
if (sceneStack.Count > 1)
{
string parentScene = sceneStack.Pop();
UnityEngine.SceneManagement.SceneManager.LoadSceneAsync (parentScene);
return false;
}
return true;
}
/// <summary>
/// Tests for a back-button short-press and, when detected, invokes the short-press handler
/// (which shows the confirm-quit menu by default). Per the Unity integration doc, the back
/// button responds to "mouse 1" button down/up events.
/// </summary>
void Update()
{
#if UNITY_ANDROID
eBackButtonAction action = HandleBackButtonState();
if (action == eBackButtonAction.SHORT_PRESS)
{
if (OnShortPress == null || OnShortPress())
{
ShowConfirmQuitMenu();
}
}
#endif
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 2afcf575f4a68de4db434c7b7233c451
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

File diff suppressed because it is too large

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 61abd23f3aff5394ba8027ee380760b8
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,55 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using System.Threading;
using VR = UnityEngine.VR;
/// <summary>
/// (Deprecated) Contains information about the user's preferences and body dimensions.
/// </summary>
public class OVRProfile : Object
{
[System.Obsolete]
public enum State
{
NOT_TRIGGERED,
LOADING,
READY,
ERROR
};
[System.Obsolete]
public string id { get { return "000abc123def"; } }
[System.Obsolete]
public string userName { get { return "Oculus User"; } }
[System.Obsolete]
public string locale { get { return "en_US"; } }
public float ipd { get { return Vector3.Distance (OVRPlugin.GetNodePose (OVRPlugin.Node.EyeLeft, OVRPlugin.Step.Render).ToOVRPose ().position, OVRPlugin.GetNodePose (OVRPlugin.Node.EyeRight, OVRPlugin.Step.Render).ToOVRPose ().position); } }
public float eyeHeight { get { return OVRPlugin.eyeHeight; } }
public float eyeDepth { get { return OVRPlugin.eyeDepth; } }
public float neckHeight { get { return eyeHeight - 0.075f; } }
[System.Obsolete]
public State state { get { return State.READY; } }
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 08539141953f28e439731aaf7cd5362f
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,199 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using System.Runtime.InteropServices;
using UnityEngine;
using VR = UnityEngine.VR;
/// <summary>
/// An infrared camera that tracks the position of a head-mounted display.
/// </summary>
public class OVRTracker
{
/// <summary>
/// The (symmetric) visible area in front of the sensor.
/// </summary>
public struct Frustum
{
/// <summary>
/// The sensor's minimum supported distance to the HMD.
/// </summary>
public float nearZ;
/// <summary>
/// The sensor's maximum supported distance to the HMD.
/// </summary>
public float farZ;
/// <summary>
/// The sensor's horizontal and vertical fields of view in degrees.
/// </summary>
public Vector2 fov;
}
/// <summary>
/// If true, a sensor is attached to the system.
/// </summary>
public bool isPresent
{
get {
if (!OVRManager.isHmdPresent)
return false;
return OVRPlugin.positionSupported;
}
}
/// <summary>
/// If true, the sensor is actively tracking the HMD's position. Otherwise the HMD may be temporarily occluded, the system may not support position tracking, etc.
/// </summary>
public bool isPositionTracked
{
get {
return OVRPlugin.positionTracked;
}
}
/// <summary>
/// If this is true and a sensor is available, the system will use position tracking when isPositionTracked is also true.
/// </summary>
public bool isEnabled
{
get {
if (!OVRManager.isHmdPresent)
return false;
return OVRPlugin.position;
}
set {
if (!OVRManager.isHmdPresent)
return;
OVRPlugin.position = value;
}
}
/// <summary>
/// Returns the number of sensors currently connected to the system.
/// </summary>
public int count
{
get {
int count = 0;
for (int i = 0; i < (int)OVRPlugin.Tracker.Count; ++i)
{
if (GetPresent(i))
count++;
}
return count;
}
}
/// <summary>
/// Gets the sensor's viewing frustum.
/// </summary>
public Frustum GetFrustum(int tracker = 0)
{
if (!OVRManager.isHmdPresent)
return new Frustum();
return OVRPlugin.GetTrackerFrustum((OVRPlugin.Tracker)tracker).ToFrustum();
}
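// Illustrative sketch (hypothetical check, not part of the original file): using the returned
// Frustum to test whether a tracker-relative distance lies within the supported tracking range.
//
//   OVRTracker.Frustum f = OVRManager.tracker.GetFrustum(0);
//   float distance = headPositionInTrackerSpace.magnitude;    // hypothetical tracker-relative position
//   bool inRange = distance >= f.nearZ && distance <= f.farZ; // f.fov (degrees) bounds the lateral extent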
/// <summary>
/// Gets the sensor's pose, relative to the head's pose at the time of the last pose recentering.
/// </summary>
public OVRPose GetPose(int tracker = 0)
{
if (!OVRManager.isHmdPresent)
return OVRPose.identity;
OVRPose p;
switch (tracker)
{
case 0:
p = OVRPlugin.GetNodePose(OVRPlugin.Node.TrackerZero, OVRPlugin.Step.Render).ToOVRPose();
break;
case 1:
p = OVRPlugin.GetNodePose(OVRPlugin.Node.TrackerOne, OVRPlugin.Step.Render).ToOVRPose();
break;
case 2:
p = OVRPlugin.GetNodePose(OVRPlugin.Node.TrackerTwo, OVRPlugin.Step.Render).ToOVRPose();
break;
case 3:
p = OVRPlugin.GetNodePose(OVRPlugin.Node.TrackerThree, OVRPlugin.Step.Render).ToOVRPose();
break;
default:
return OVRPose.identity;
}
return new OVRPose()
{
position = p.position,
orientation = p.orientation * Quaternion.Euler(0, 180, 0)
};
}
/// <summary>
/// If true, the pose of the sensor is valid and is ready to be queried.
/// </summary>
public bool GetPoseValid(int tracker = 0)
{
if (!OVRManager.isHmdPresent)
return false;
switch (tracker)
{
case 0:
return OVRPlugin.GetNodePositionTracked(OVRPlugin.Node.TrackerZero);
case 1:
return OVRPlugin.GetNodePositionTracked(OVRPlugin.Node.TrackerOne);
case 2:
return OVRPlugin.GetNodePositionTracked(OVRPlugin.Node.TrackerTwo);
case 3:
return OVRPlugin.GetNodePositionTracked(OVRPlugin.Node.TrackerThree);
default:
return false;
}
}
public bool GetPresent(int tracker = 0)
{
if (!OVRManager.isHmdPresent)
return false;
switch (tracker)
{
case 0:
return OVRPlugin.GetNodePresent(OVRPlugin.Node.TrackerZero);
case 1:
return OVRPlugin.GetNodePresent(OVRPlugin.Node.TrackerOne);
case 2:
return OVRPlugin.GetNodePresent(OVRPlugin.Node.TrackerTwo);
case 3:
return OVRPlugin.GetNodePresent(OVRPlugin.Node.TrackerThree);
default:
return false;
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7cb3c9d4cb0970e448c655096649e814
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,5 @@
fileFormatVersion: 2
guid: c0c7a593695f68e4bbe0cabb0f4f93f2
folderAsset: yes
DefaultImporter:
userData:

View File

@@ -0,0 +1,56 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Allows you to toggle chromatic aberration correction with a gamepad button press.
/// </summary>
public class OVRChromaticAberration : MonoBehaviour
{
/// <summary>
/// The button that will toggle chromatic aberration correction.
/// </summary>
public OVRInput.RawButton toggleButton = OVRInput.RawButton.X;
private bool chromatic = false;
void Start ()
{
// Enable/Disable Chromatic Aberration Correction.
// NOTE: Enabling Chromatic Aberration for mobile has a large performance cost.
OVRManager.instance.chromatic = chromatic;
}
void Update()
{
// NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
if (OVRInput.GetDown(toggleButton))
{
//*************************
// toggle chromatic aberration correction
//*************************
chromatic = !chromatic;
OVRManager.instance.chromatic = chromatic;
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 3b56515a831f2fb44bc7ae02679aeebc
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,268 @@
using UnityEngine;
using System.Collections;
using System.IO;
/// <summary>
/// Helper script for capture cubemap and save it into PNG or JPG file
/// </summary>
/// <description>
/// How it works:
/// 1) This script can be attached to a existing game object, you can also use prefab Assets\OVR\Prefabs\OVRCubemapCaptureProbe
/// There are 2 ways to trigger a capture if you attached this script to a game object.
/// * Automatic capturing: if [autoTriggerAfterLaunch] is true, a automatic capturing will be triggered after [autoTriggerDelay] seconds.
/// * Keyboard trigger: press key [triggeredByKey], a capturing will be triggered.
/// 2) If you like to trigger the screen capture in your code logic, just call static function [OVRCubemapCapture.TriggerCubemapCapture] with proper input arguments.
/// </description>
public class OVRCubemapCapture : MonoBehaviour
{
/// <summary>
/// Enable the automatic screenshot trigger, which will capture a cubemap after autoTriggerDelay (seconds)
/// </summary>
public bool autoTriggerAfterLaunch = true;
public float autoTriggerDelay = 1.0f;
private float autoTriggerElapse = 0.0f;
/// <summary>
/// Trigger cubemap screenshot if user pressed key triggeredByKey
/// </summary>
public KeyCode triggeredByKey = KeyCode.F8;
/// <summary>
/// The complete file path for saving the cubemap screenshot, including the filename and extension
/// if pathName is blank, screenshots will be saved into %USERPROFILE%\Documents\OVR_ScreenShot360
/// </summary>
public string pathName;
/// <summary>
/// The cube face resolution
/// </summary>
public int cubemapSize = 2048;
// Update is called once per frame
void Update()
{
// Trigger after autoTriggerDelay
if (autoTriggerAfterLaunch)
{
autoTriggerElapse += Time.deltaTime;
if (autoTriggerElapse >= autoTriggerDelay)
{
autoTriggerAfterLaunch = false;
TriggerCubemapCapture(transform.position, cubemapSize, pathName);
}
}
// Trigger by press triggeredByKey
if ( Input.GetKeyDown( triggeredByKey ) )
{
TriggerCubemapCapture(transform.position, cubemapSize, pathName);
}
}
/// <summary>
/// Generate unity cubemap at specific location and save into JPG/PNG
/// </summary>
/// <description>
/// Default save folder: your app's persistentDataPath
/// Default file name: using current time OVR_hh_mm_ss.png
/// Note1: this will take a few seconds to finish
/// Note2: if you only want to specify path not filename, please end [pathName] with "/"
/// </description>
public static void TriggerCubemapCapture(Vector3 capturePos, int cubemapSize = 2048, string pathName = null)
{
GameObject ownerObj = new GameObject("CubemapCamera", typeof(Camera));
ownerObj.hideFlags = HideFlags.HideAndDontSave;
ownerObj.transform.position = capturePos;
ownerObj.transform.rotation = Quaternion.identity;
Camera camComponent = ownerObj.GetComponent<Camera>();
camComponent.farClipPlane = 10000.0f;
camComponent.enabled = false;
Cubemap cubemap = new Cubemap(cubemapSize, TextureFormat.RGB24, false);
RenderIntoCubemap(camComponent, cubemap);
SaveCubemapCapture(cubemap, pathName);
DestroyImmediate(cubemap);
DestroyImmediate(ownerObj);
}
public static void RenderIntoCubemap(Camera ownerCamera, Cubemap outCubemap)
{
int width = (int)outCubemap.width;
int height = (int)outCubemap.height;
CubemapFace[] faces = new CubemapFace[] { CubemapFace.PositiveX, CubemapFace.NegativeX, CubemapFace.PositiveY, CubemapFace.NegativeY, CubemapFace.PositiveZ, CubemapFace.NegativeZ };
Vector3[] faceAngles = new Vector3[] { new Vector3(0.0f, 90.0f, 0.0f), new Vector3(0.0f, -90.0f, 0.0f), new Vector3(-90.0f, 0.0f, 0.0f), new Vector3(90.0f, 0.0f, 0.0f), new Vector3(0.0f, 0.0f, 0.0f), new Vector3(0.0f, 180.0f, 0.0f) };
// Backup states
RenderTexture backupRenderTex = RenderTexture.active;
float backupFieldOfView = ownerCamera.fieldOfView;
float backupAspect = ownerCamera.aspect;
Quaternion backupRot = ownerCamera.transform.rotation;
//RenderTexture backupRT = ownerCamera.targetTexture;
// Enable 8X MSAA
RenderTexture faceTexture = new RenderTexture(width, height, 24);
faceTexture.antiAliasing = 8;
faceTexture.dimension = UnityEngine.Rendering.TextureDimension.Tex2D;
faceTexture.hideFlags = HideFlags.HideAndDontSave;
// For intermediate saving
Texture2D swapTex = new Texture2D(width, height, TextureFormat.RGB24, false);
swapTex.hideFlags = HideFlags.HideAndDontSave;
// Capture 6 Directions
ownerCamera.targetTexture = faceTexture;
ownerCamera.fieldOfView = 90;
ownerCamera.aspect = 1.0f;
Color[] mirroredPixels = new Color[swapTex.height * swapTex.width];
for (int i = 0; i < faces.Length; i++)
{
ownerCamera.transform.eulerAngles = faceAngles[i];
ownerCamera.Render();
RenderTexture.active = faceTexture;
swapTex.ReadPixels(new Rect(0, 0, width, height), 0, 0);
// Mirror vertically to meet the standard of unity cubemap
Color[] OrignalPixels = swapTex.GetPixels();
for (int y1 = 0; y1 < height; y1++)
{
for (int x1 = 0; x1 < width; x1++)
{
mirroredPixels[y1 * width + x1] = OrignalPixels[((height - 1 - y1) * width) + x1];
}
};
outCubemap.SetPixels(mirroredPixels, faces[i]);
}
outCubemap.SmoothEdges();
// Restore states
RenderTexture.active = backupRenderTex;
ownerCamera.fieldOfView = backupFieldOfView;
ownerCamera.aspect = backupAspect;
ownerCamera.transform.rotation = backupRot;
ownerCamera.targetTexture = backupRenderTex;
DestroyImmediate(swapTex);
DestroyImmediate(faceTexture);
}
/// <summary>
/// Save unity cubemap into NPOT 6x1 cubemap/texture atlas in the following format PX NX PY NY PZ NZ
/// </summary>
/// <description>
/// Supported format: PNG/JPG
/// Default file name: using current time OVR_hh_mm_ss.png
/// </description>
public static bool SaveCubemapCapture(Cubemap cubemap, string pathName = null)
{
string fileName;
string dirName;
int width = cubemap.width;
int height = cubemap.height;
int x = 0;
int y = 0;
bool saveToPNG = true;
if (string.IsNullOrEmpty(pathName))
{
dirName = Application.persistentDataPath + "/OVR_ScreenShot360/";
fileName = null;
}
else
{
dirName = Path.GetDirectoryName(pathName);
fileName = Path.GetFileName(pathName);
if (dirName[dirName.Length - 1] != '/' || dirName[dirName.Length - 1] != '\\')
dirName += "/";
}
if (string.IsNullOrEmpty(fileName))
fileName = "OVR_" + System.DateTime.Now.ToString("hh_mm_ss") + ".png";
string extName = Path.GetExtension(fileName);
if (extName == ".png")
{
saveToPNG = true;
}
else if (extName == ".jpg")
{
saveToPNG = false;
}
else
{
Debug.LogError("Unsupported file format" + extName);
return false;
}
// Validate path
try
{
System.IO.Directory.CreateDirectory(dirName);
}
catch (System.Exception e)
{
Debug.LogError("Failed to create path " + dirName + " since " + e.ToString());
return false;
}
// Create the new texture
Texture2D tex = new Texture2D(width * 6, height, TextureFormat.RGB24, false);
if (tex == null)
{
Debug.LogError("[OVRScreenshotWizard] Failed creating the texture!");
return false;
}
// Merge all the cubemap faces into the texture
// Reference cubemap format: http://docs.unity3d.com/Manual/class-Cubemap.html
CubemapFace[] faces = new CubemapFace[] { CubemapFace.PositiveX, CubemapFace.NegativeX, CubemapFace.PositiveY, CubemapFace.NegativeY, CubemapFace.PositiveZ, CubemapFace.NegativeZ };
for (int i = 0; i < faces.Length; i++)
{
// get the pixels from the cubemap
Color[] srcPixels = null;
Color[] pixels = cubemap.GetPixels(faces[i]);
// if desired, flip them as they are ordered left to right, bottom to top
srcPixels = new Color[pixels.Length];
for (int y1 = 0; y1 < height; y1++)
{
for (int x1 = 0; x1 < width; x1++)
{
srcPixels[y1 * width + x1] = pixels[((height - 1 - y1) * width) + x1];
}
}
// Copy them to the dest texture
tex.SetPixels(x, y, width, height, srcPixels);
x += width;
}
try
{
// Encode the texture and save it to disk
byte[] bytes = saveToPNG ? tex.EncodeToPNG() : tex.EncodeToJPG();
System.IO.File.WriteAllBytes(dirName + fileName, bytes);
Debug.Log("Cubemap file created " + dirName + fileName);
}
catch (System.Exception e)
{
Debug.LogError("Failed to save cubemap file since " + e.ToString());
return false;
}
DestroyImmediate(tex);
return true;
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7a489178b0acf0147846b3873447beaf
timeCreated: 1464728890
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,438 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using UnityEngine.UI;
//-------------------------------------------------------------------------------------
/// <summary>
/// Shows debug information on a heads-up display.
/// </summary>
public class OVRDebugInfo : MonoBehaviour
{
#region GameObjects for Debug Information UIs
GameObject debugUIManager;
GameObject debugUIObject;
GameObject riftPresent;
GameObject fps;
GameObject ipd;
GameObject fov;
GameObject height;
GameObject depth;
GameObject resolutionEyeTexture;
GameObject latencies;
GameObject texts;
#endregion
#region Debug strings
string strRiftPresent = null; // "VR DISABLED"
string strFPS = null; // "FPS: 0";
string strIPD = null; // "IPD: 0.000";
string strFOV = null; // "FOV: 0.0f";
string strHeight = null; // "Height: 0.0f";
string strDepth = null; // "Depth: 0.0f";
string strResolutionEyeTexture = null; // "Resolution : {0} x {1}"
string strLatencies = null; // "R: {0:F3} TW: {1:F3} PP: {2:F3} RE: {3:F3} TWE: {4:F3}"
#endregion
/// <summary>
/// Variables for FPS
/// </summary>
float updateInterval = 0.5f;
float accum = 0.0f;
int frames = 0;
float timeLeft = 0.0f;
/// <summary>
/// Managing for UI initialization
/// </summary>
bool initUIComponent = false;
bool isInited = false;
/// <summary>
/// UIs Y offset
/// </summary>
float offsetY = 55.0f;
/// <summary>
/// Managing for rift detection UI
/// </summary>
float riftPresentTimeout = 0.0f;
/// <summary>
/// Turn on / off VR variables
/// </summary>
bool showVRVars = false;
#region MonoBehaviour handler
/// <summary>
/// Initialization
/// </summary>
void Awake()
{
// Create canvas for using new GUI
debugUIManager = new GameObject();
debugUIManager.name = "DebugUIManager";
debugUIManager.transform.parent = GameObject.Find("LeftEyeAnchor").transform;
RectTransform rectTransform = debugUIManager.AddComponent<RectTransform>();
rectTransform.sizeDelta = new Vector2(100f, 100f);
rectTransform.localScale = new Vector3(0.001f, 0.001f, 0.001f);
rectTransform.localPosition = new Vector3(0.01f, 0.17f, 0.53f);
rectTransform.localEulerAngles = Vector3.zero;
Canvas canvas = debugUIManager.AddComponent<Canvas>();
canvas.renderMode = RenderMode.WorldSpace;
canvas.pixelPerfect = false;
}
/// <summary>
/// Updating VR variables and managing UI present
/// </summary>
void Update()
{
if (initUIComponent && !isInited)
{
InitUIComponents();
}
if (Input.GetKeyDown(KeyCode.Space) && riftPresentTimeout < 0.0f)
{
initUIComponent = true;
showVRVars ^= true;
}
UpdateDeviceDetection();
// Presenting VR variables
if (showVRVars)
{
debugUIManager.SetActive(true);
UpdateVariable();
UpdateStrings();
}
else
{
debugUIManager.SetActive(false);
}
}
/// <summary>
/// Initialize isInited value on OnDestroy
/// </summary>
void OnDestroy()
{
isInited = false;
}
#endregion
#region Private Functions
/// <summary>
/// Initialize UI GameObjects
/// </summary>
void InitUIComponents()
{
float posY = 0.0f;
int fontSize = 20;
debugUIObject = new GameObject();
debugUIObject.name = "DebugInfo";
debugUIObject.transform.parent = GameObject.Find("DebugUIManager").transform;
debugUIObject.transform.localPosition = new Vector3(0.0f, 100.0f, 0.0f);
debugUIObject.transform.localEulerAngles = Vector3.zero;
debugUIObject.transform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
// Print out for FPS
if (!string.IsNullOrEmpty(strFPS))
{
fps = VariableObjectManager(fps, "FPS", posY -= offsetY, strFPS, fontSize);
}
// Print out for IPD
if (!string.IsNullOrEmpty(strIPD))
{
ipd = VariableObjectManager(ipd, "IPD", posY -= offsetY, strIPD, fontSize);
}
// Print out for FOV
if (!string.IsNullOrEmpty(strFOV))
{
fov = VariableObjectManager(fov, "FOV", posY -= offsetY, strFOV, fontSize);
}
// Print out for Height
if (!string.IsNullOrEmpty(strHeight))
{
height = VariableObjectManager(height, "Height", posY -= offsetY, strHeight, fontSize);
}
// Print out for Depth
if (!string.IsNullOrEmpty(strDepth))
{
depth = VariableObjectManager(depth, "Depth", posY -= offsetY, strDepth, fontSize);
}
// Print out for Resoulution of Eye Texture
if (!string.IsNullOrEmpty(strResolutionEyeTexture))
{
resolutionEyeTexture = VariableObjectManager(resolutionEyeTexture, "Resolution", posY -= offsetY, strResolutionEyeTexture, fontSize);
}
// Print out for Latency
if (!string.IsNullOrEmpty(strLatencies))
{
latencies = VariableObjectManager(latencies, "Latency", posY -= offsetY, strLatencies, 17);
posY = 0.0f;
}
initUIComponent = false;
isInited = true;
}
/// <summary>
/// Update VR Variables
/// </summary>
void UpdateVariable()
{
UpdateIPD();
UpdateEyeHeightOffset();
UpdateEyeDepthOffset();
UpdateFOV();
UpdateResolutionEyeTexture();
UpdateLatencyValues();
UpdateFPS();
}
/// <summary>
/// Update Strings
/// </summary>
void UpdateStrings()
{
if (debugUIObject == null)
return;
if (!string.IsNullOrEmpty(strFPS))
fps.GetComponentInChildren<Text>().text = strFPS;
if (!string.IsNullOrEmpty(strIPD))
ipd.GetComponentInChildren<Text>().text = strIPD;
if (!string.IsNullOrEmpty(strFOV))
fov.GetComponentInChildren<Text>().text = strFOV;
if (!string.IsNullOrEmpty(strResolutionEyeTexture))
resolutionEyeTexture.GetComponentInChildren<Text>().text = strResolutionEyeTexture;
if (!string.IsNullOrEmpty(strLatencies))
{
latencies.GetComponentInChildren<Text>().text = strLatencies;
latencies.GetComponentInChildren<Text>().fontSize = 14;
}
if (!string.IsNullOrEmpty(strHeight))
height.GetComponentInChildren<Text>().text = strHeight;
if (!string.IsNullOrEmpty(strDepth))
depth.GetComponentInChildren<Text>().text = strDepth;
}
/// <summary>
/// It's for rift present GUI
/// </summary>
void RiftPresentGUI(GameObject guiMainOBj)
{
riftPresent = ComponentComposition(riftPresent);
riftPresent.transform.SetParent(guiMainOBj.transform);
riftPresent.name = "RiftPresent";
RectTransform rectTransform = riftPresent.GetComponent<RectTransform>();
rectTransform.localPosition = new Vector3(0.0f, 0.0f, 0.0f);
rectTransform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
rectTransform.localEulerAngles = Vector3.zero;
Text text = riftPresent.GetComponentInChildren<Text>();
text.text = strRiftPresent;
text.fontSize = 20;
}
/// <summary>
/// Updates the device detection.
/// </summary>
void UpdateDeviceDetection()
{
if (riftPresentTimeout >= 0.0f)
{
riftPresentTimeout -= Time.deltaTime;
}
}
/// <summary>
/// Object Manager for Variables
/// </summary>
/// <returns> gameobject for each Variable </returns>
GameObject VariableObjectManager(GameObject gameObject, string name, float posY, string str, int fontSize)
{
gameObject = ComponentComposition(gameObject);
gameObject.name = name;
gameObject.transform.SetParent(debugUIObject.transform);
RectTransform rectTransform = gameObject.GetComponent<RectTransform>();
rectTransform.localPosition = new Vector3(0.0f, posY -= offsetY, 0.0f);
Text text = gameObject.GetComponentInChildren<Text>();
text.text = str;
text.fontSize = fontSize;
gameObject.transform.localEulerAngles = Vector3.zero;
rectTransform.localScale = new Vector3(1.0f, 1.0f, 1.0f);
return gameObject;
}
/// <summary>
/// Component composition
/// </summary>
/// <returns> Composed gameobject. </returns>
GameObject ComponentComposition(GameObject GO)
{
GO = new GameObject();
GO.AddComponent<RectTransform>();
GO.AddComponent<CanvasRenderer>();
GO.AddComponent<Image>();
GO.GetComponent<RectTransform>().sizeDelta = new Vector2(350f, 50f);
GO.GetComponent<Image>().color = new Color(7f / 255f, 45f / 255f, 71f / 255f, 200f / 255f);
texts = new GameObject();
texts.AddComponent<RectTransform>();
texts.AddComponent<CanvasRenderer>();
texts.AddComponent<Text>();
texts.GetComponent<RectTransform>().sizeDelta = new Vector2(350f, 50f);
texts.GetComponent<Text>().font = Resources.GetBuiltinResource(typeof(Font), "Arial.ttf") as Font;
texts.GetComponent<Text>().alignment = TextAnchor.MiddleCenter;
texts.transform.SetParent(GO.transform);
texts.name = "TextBox";
return GO;
}
#endregion
#region Debugging variables handler
/// <summary>
/// Updates the IPD.
/// </summary>
void UpdateIPD()
{
strIPD = System.String.Format("IPD (mm): {0:F4}", OVRManager.profile.ipd * 1000.0f);
}
/// <summary>
/// Updates the eye height offset.
/// </summary>
void UpdateEyeHeightOffset()
{
float eyeHeight = OVRManager.profile.eyeHeight;
strHeight = System.String.Format("Eye Height (m): {0:F3}", eyeHeight);
}
/// <summary>
/// Updates the eye depth offset.
/// </summary>
void UpdateEyeDepthOffset()
{
float eyeDepth = OVRManager.profile.eyeDepth;
strDepth = System.String.Format("Eye Depth (m): {0:F3}", eyeDepth);
}
/// <summary>
/// Updates the FOV.
/// </summary>
void UpdateFOV()
{
#if UNITY_2017_2_OR_NEWER
OVRDisplay.EyeRenderDesc eyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.LeftEye);
#else
OVRDisplay.EyeRenderDesc eyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.VR.VRNode.LeftEye);
#endif
strFOV = System.String.Format("FOV (deg): {0:F3}", eyeDesc.fov.y);
}
/// <summary>
/// Updates resolution of eye texture
/// </summary>
void UpdateResolutionEyeTexture()
{
#if UNITY_2017_2_OR_NEWER
OVRDisplay.EyeRenderDesc leftEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.LeftEye);
OVRDisplay.EyeRenderDesc rightEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.XR.XRNode.RightEye);
float scale = UnityEngine.XR.XRSettings.renderViewportScale;
#else
OVRDisplay.EyeRenderDesc leftEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.VR.VRNode.LeftEye);
OVRDisplay.EyeRenderDesc rightEyeDesc = OVRManager.display.GetEyeRenderDesc(UnityEngine.VR.VRNode.RightEye);
float scale = UnityEngine.VR.VRSettings.renderViewportScale;
#endif
float w = (int)(scale * (float)(leftEyeDesc.resolution.x + rightEyeDesc.resolution.x));
float h = (int)(scale * (float)Mathf.Max(leftEyeDesc.resolution.y, rightEyeDesc.resolution.y));
strResolutionEyeTexture = System.String.Format("Resolution : {0} x {1}", w, h);
}
/// <summary>
/// Updates latency values
/// </summary>
void UpdateLatencyValues()
{
#if !UNITY_ANDROID || UNITY_EDITOR
OVRDisplay.LatencyData latency = OVRManager.display.latency;
if (latency.render < 0.000001f && latency.timeWarp < 0.000001f && latency.postPresent < 0.000001f)
strLatencies = System.String.Format("Latency values are not available.");
else
strLatencies = System.String.Format("Render: {0:F3} TimeWarp: {1:F3} Post-Present: {2:F3}\nRender Error: {3:F3} TimeWarp Error: {4:F3}",
latency.render,
latency.timeWarp,
latency.postPresent,
latency.renderError,
latency.timeWarpError);
#endif
}
/// <summary>
/// Updates the FPS.
/// </summary>
void UpdateFPS()
{
timeLeft -= Time.unscaledDeltaTime;
accum += Time.unscaledDeltaTime;
++frames;
// Interval ended - update GUI text and start new interval
if (timeLeft <= 0.0)
{
// display two fractional digits (f2 format)
float fps = frames / accum;
strFPS = System.String.Format("FPS: {0:F2}", fps);
timeLeft += updateInterval;
accum = 0.0f;
frames = 0;
}
}
#endregion
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: b71d1996d67004241a3b69960856ffcb
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,289 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
using UnityEngine.EventSystems;
using UnityEngine.UI;
/// <summary>
/// UI pointer driven by gaze input.
/// </summary>
public class OVRGazePointer : MonoBehaviour {
private Transform gazeIcon; //the transform that rotates according to our movement
[Tooltip("Should the pointer be hidden when not over interactive objects.")]
public bool hideByDefault = true;
[Tooltip("Time after leaving interactive object before pointer fades.")]
public float showTimeoutPeriod = 1;
[Tooltip("Time after mouse pointer becoming inactive before pointer unfades.")]
public float hideTimeoutPeriod = 0.1f;
[Tooltip("Keep a faint version of the pointer visible while using a mouse")]
public bool dimOnHideRequest = true;
[Tooltip("Angular scale of pointer")]
public float depthScaleMultiplier = 0.03f;
/// <summary>
/// The gaze ray.
/// </summary>
public Transform rayTransform;
/// <summary>
/// Is gaze pointer current visible
/// </summary>
public bool hidden { get; private set; }
/// <summary>
/// Current scale applied to pointer
/// </summary>
public float currentScale { get; private set; }
/// <summary>
/// Current depth of pointer from camera
/// </summary>
private float depth;
private float hideUntilTime;
/// <summary>
/// How many times position has been set this frame. Used to detect when there are no position sets in a frame.
/// </summary>
private int positionSetsThisFrame = 0;
/// <summary>
/// Last time code requested the pointer be shown. Usually when pointer passes over interactive elements.
/// </summary>
private float lastShowRequestTime;
/// <summary>
/// Last time pointer was requested to be hidden. Usually mouse pointer activity.
/// </summary>
private float lastHideRequestTime;
[Tooltip("Radius of the cursor. Used for preventing geometry intersections.")]
public float cursorRadius = 1f;
// Optionally present GUI element displaying progress when using gaze-to-select mechanics
private OVRProgressIndicator progressIndicator;
private static OVRGazePointer _instance;
public static OVRGazePointer instance
{
// If there's no GazePointer already in the scene, instanciate one now.
get
{
if (_instance == null)
{
Debug.Log(string.Format("Instanciating GazePointer", 0));
_instance = (OVRGazePointer)GameObject.Instantiate((OVRGazePointer)Resources.Load("Prefabs/GazePointerRing", typeof(OVRGazePointer)));
}
return _instance;
}
}
/// <summary>
/// Used to determine alpha level of gaze cursor. Could also be used to determine cursor size, for example, as the cursor fades out.
/// </summary>
public float visibilityStrength
{
get
{
// It's possible there are reasons to show the cursor - such as it hovering over some UI - and reasons to hide
// the cursor - such as another input method (e.g. mouse) being used. We take both of these in to account.
float strengthFromShowRequest;
if (hideByDefault)
{
// fade the cursor out with time
strengthFromShowRequest = Mathf.Clamp01(1 - (Time.time - lastShowRequestTime) / showTimeoutPeriod);
}
else
{
// keep it fully visible
strengthFromShowRequest = 1;
}
// Now consider factors requesting pointer to be hidden
float strengthFromHideRequest;
strengthFromHideRequest = (lastHideRequestTime + hideTimeoutPeriod > Time.time) ? (dimOnHideRequest ? 0.1f : 0) : 1;
// Hide requests take priority
return Mathf.Min(strengthFromShowRequest, strengthFromHideRequest);
}
}
public float SelectionProgress
{
get
{
return progressIndicator ? progressIndicator.currentProgress : 0;
}
set
{
if (progressIndicator)
progressIndicator.currentProgress = value;
}
}
public void Awake()
{
currentScale = 1;
// Only allow one instance at runtime.
if (_instance != null && _instance != this)
{
enabled = false;
DestroyImmediate(this);
return;
}
_instance = this;
gazeIcon = transform.Find("GazeIcon");
progressIndicator = transform.GetComponent<OVRProgressIndicator>();
}
void Update ()
{
if (rayTransform == null && Camera.main != null)
rayTransform = Camera.main.transform;
// Move the gaze cursor to keep it in the middle of the view
transform.position = rayTransform.position + rayTransform.forward * depth;
// Should we show or hide the gaze cursor?
if (visibilityStrength == 0 && !hidden)
{
Hide();
}
else if (visibilityStrength > 0 && hidden)
{
Show();
}
}
/// <summary>
/// Set position and orientation of pointer
/// </summary>
/// <param name="pos"></param>
/// <param name="normal"></param>
public void SetPosition(Vector3 pos, Vector3 normal)
{
transform.position = pos;
// Set the rotation to match the normal of the surface it's on.
Quaternion newRot = transform.rotation;
newRot.SetLookRotation(normal, rayTransform.up);
transform.rotation = newRot;
// record depth so that distance doesn't pop when pointer leaves an object
depth = (rayTransform.position - pos).magnitude;
//set scale based on depth
currentScale = depth * depthScaleMultiplier;
transform.localScale = new Vector3(currentScale, currentScale, currentScale);
positionSetsThisFrame++;
}
/// <summary>
/// SetPosition overload without normal. Just makes cursor face user
/// </summary>
/// <param name="pos"></param>
public void SetPosition(Vector3 pos)
{
SetPosition(pos, rayTransform.forward);
}
public float GetCurrentRadius()
{
return cursorRadius * currentScale;
}
void LateUpdate()
{
// This happens after all Updates so we know that if positionSetsThisFrame is zero then nothing set the position this frame
if (positionSetsThisFrame == 0)
{
// No geometry intersections, so gazing into space. Make the cursor face directly at the camera
Quaternion newRot = transform.rotation;
newRot.SetLookRotation(rayTransform.forward, rayTransform.up);
transform.rotation = newRot;
}
Quaternion iconRotation = gazeIcon.rotation;
iconRotation.SetLookRotation(transform.rotation * new Vector3(0, 0, 1));
gazeIcon.rotation = iconRotation;
positionSetsThisFrame = 0;
}
/// <summary>
/// Request the pointer be hidden
/// </summary>
public void RequestHide()
{
if (!dimOnHideRequest)
{
Hide();
}
lastHideRequestTime = Time.time;
}
/// <summary>
/// Request the pointer be shown. Hide requests take priority
/// </summary>
public void RequestShow()
{
Show();
lastShowRequestTime = Time.time;
}
// Disable/Enable child elements when we show/hide the cursor. For performance reasons.
void Hide()
{
foreach (Transform child in transform)
{
child.gameObject.SetActive(false);
}
if (GetComponent<Renderer>())
GetComponent<Renderer>().enabled = false;
hidden = true;
}
void Show()
{
foreach (Transform child in transform)
{
child.gameObject.SetActive(true);
}
if (GetComponent<Renderer>())
GetComponent<Renderer>().enabled = true;
hidden = false;
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 30530ad0e40d0a64ea26d753ee4996ea
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,217 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
using System.Collections.Generic;
using System.Text;
public class OVRGearVrControllerTest : MonoBehaviour
{
public class BoolMonitor
{
public delegate bool BoolGenerator();
private string m_name = "";
private BoolGenerator m_generator;
private bool m_prevValue = false;
private bool m_currentValue = false;
private bool m_currentValueRecentlyChanged = false;
private float m_displayTimeout = 0.0f;
private float m_displayTimer = 0.0f;
public BoolMonitor(string name, BoolGenerator generator, float displayTimeout = 0.5f)
{
m_name = name;
m_generator = generator;
m_displayTimeout = displayTimeout;
}
public void Update()
{
m_prevValue = m_currentValue;
m_currentValue = m_generator();
if (m_currentValue != m_prevValue)
{
m_currentValueRecentlyChanged = true;
m_displayTimer = m_displayTimeout;
}
if (m_displayTimer > 0.0f)
{
m_displayTimer -= Time.deltaTime;
if (m_displayTimer <= 0.0f)
{
m_currentValueRecentlyChanged = false;
m_displayTimer = 0.0f;
}
}
}
public void AppendToStringBuilder(ref StringBuilder sb)
{
sb.Append(m_name);
if (m_currentValue && m_currentValueRecentlyChanged)
sb.Append(": *True*\n");
else if (m_currentValue)
sb.Append(": True \n");
else if (!m_currentValue && m_currentValueRecentlyChanged)
sb.Append(": *False*\n");
else if (!m_currentValue)
sb.Append(": False \n");
}
}
public Text uiText;
private List<BoolMonitor> monitors;
private StringBuilder data;
void Start()
{
if (uiText != null)
{
uiText.supportRichText = false;
}
data = new StringBuilder(2048);
monitors = new List<BoolMonitor>()
{
// virtual
new BoolMonitor("WasRecentered", () => OVRInput.GetControllerWasRecentered()),
new BoolMonitor("One", () => OVRInput.Get(OVRInput.Button.One)),
new BoolMonitor("OneDown", () => OVRInput.GetDown(OVRInput.Button.One)),
new BoolMonitor("OneUp", () => OVRInput.GetUp(OVRInput.Button.One)),
new BoolMonitor("One (Touch)", () => OVRInput.Get(OVRInput.Touch.One)),
new BoolMonitor("OneDown (Touch)", () => OVRInput.GetDown(OVRInput.Touch.One)),
new BoolMonitor("OneUp (Touch)", () => OVRInput.GetUp(OVRInput.Touch.One)),
new BoolMonitor("Two", () => OVRInput.Get(OVRInput.Button.Two)),
new BoolMonitor("TwoDown", () => OVRInput.GetDown(OVRInput.Button.Two)),
new BoolMonitor("TwoUp", () => OVRInput.GetUp(OVRInput.Button.Two)),
new BoolMonitor("PrimaryIndexTrigger", () => OVRInput.Get(OVRInput.Button.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTriggerDown", () => OVRInput.GetDown(OVRInput.Button.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTriggerUp", () => OVRInput.GetUp(OVRInput.Button.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTrigger (Touch)", () => OVRInput.Get(OVRInput.Touch.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTriggerDown (Touch)", () => OVRInput.GetDown(OVRInput.Touch.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryIndexTriggerUp (Touch)", () => OVRInput.GetUp(OVRInput.Touch.PrimaryIndexTrigger)),
new BoolMonitor("PrimaryHandTrigger", () => OVRInput.Get(OVRInput.Button.PrimaryHandTrigger)),
new BoolMonitor("PrimaryHandTriggerDown", () => OVRInput.GetDown(OVRInput.Button.PrimaryHandTrigger)),
new BoolMonitor("PrimaryHandTriggerUp", () => OVRInput.GetUp(OVRInput.Button.PrimaryHandTrigger)),
new BoolMonitor("Up", () => OVRInput.Get(OVRInput.Button.Up)),
new BoolMonitor("Down", () => OVRInput.Get(OVRInput.Button.Down)),
new BoolMonitor("Left", () => OVRInput.Get(OVRInput.Button.Left)),
new BoolMonitor("Right", () => OVRInput.Get(OVRInput.Button.Right)),
new BoolMonitor("Touchpad (Click)", () => OVRInput.Get(OVRInput.Button.PrimaryTouchpad)),
new BoolMonitor("TouchpadDown (Click)", () => OVRInput.GetDown(OVRInput.Button.PrimaryTouchpad)),
new BoolMonitor("TouchpadUp (Click)", () => OVRInput.GetUp(OVRInput.Button.PrimaryTouchpad)),
new BoolMonitor("Touchpad (Touch)", () => OVRInput.Get(OVRInput.Touch.PrimaryTouchpad)),
new BoolMonitor("TouchpadDown (Touch)", () => OVRInput.GetDown(OVRInput.Touch.PrimaryTouchpad)),
new BoolMonitor("TouchpadUp (Touch)", () => OVRInput.GetUp(OVRInput.Touch.PrimaryTouchpad)),
// raw
new BoolMonitor("Start", () => OVRInput.Get(OVRInput.RawButton.Start)),
new BoolMonitor("StartDown", () => OVRInput.GetDown(OVRInput.RawButton.Start)),
new BoolMonitor("StartUp", () => OVRInput.GetUp(OVRInput.RawButton.Start)),
new BoolMonitor("Back", () => OVRInput.Get(OVRInput.RawButton.Back)),
new BoolMonitor("BackDown", () => OVRInput.GetDown(OVRInput.RawButton.Back)),
new BoolMonitor("BackUp", () => OVRInput.GetUp(OVRInput.RawButton.Back)),
new BoolMonitor("A", () => OVRInput.Get(OVRInput.RawButton.A)),
new BoolMonitor("ADown", () => OVRInput.GetDown(OVRInput.RawButton.A)),
new BoolMonitor("AUp", () => OVRInput.GetUp(OVRInput.RawButton.A)),
};
}
static string prevConnected = "";
static BoolMonitor controllers = new BoolMonitor("Controllers Changed", () => { return OVRInput.GetConnectedControllers().ToString() != prevConnected; });
void Update()
{
OVRInput.Controller activeController = OVRInput.GetActiveController();
data.Length = 0;
byte recenterCount = OVRInput.GetControllerRecenterCount();
data.AppendFormat("RecenterCount: {0}\n", recenterCount);
byte battery = OVRInput.GetControllerBatteryPercentRemaining();
data.AppendFormat("Battery: {0}\n", battery);
float framerate = OVRPlugin.GetAppFramerate();
data.AppendFormat("Framerate: {0:F2}\n", framerate);
string activeControllerName = activeController.ToString();
data.AppendFormat("Active: {0}\n", activeControllerName);
string connectedControllerNames = OVRInput.GetConnectedControllers().ToString();
data.AppendFormat("Connected: {0}\n", connectedControllerNames);
data.AppendFormat("PrevConnected: {0}\n", prevConnected);
controllers.Update();
controllers.AppendToStringBuilder(ref data);
prevConnected = connectedControllerNames;
Quaternion rot = OVRInput.GetLocalControllerRotation(activeController);
data.AppendFormat("Orientation: ({0:F2}, {1:F2}, {2:F2}, {3:F2})\n", rot.x, rot.y, rot.z, rot.w);
Vector3 angVel = OVRInput.GetLocalControllerAngularVelocity(activeController);
data.AppendFormat("AngVel: ({0:F2}, {1:F2}, {2:F2})\n", angVel.x, angVel.y, angVel.z);
Vector3 angAcc = OVRInput.GetLocalControllerAngularAcceleration(activeController);
data.AppendFormat("AngAcc: ({0:F2}, {1:F2}, {2:F2})\n", angAcc.x, angAcc.y, angAcc.z);
Vector3 pos = OVRInput.GetLocalControllerPosition(activeController);
data.AppendFormat("Position: ({0:F2}, {1:F2}, {2:F2})\n", pos.x, pos.y, pos.z);
Vector3 vel = OVRInput.GetLocalControllerVelocity(activeController);
data.AppendFormat("Vel: ({0:F2}, {1:F2}, {2:F2})\n", vel.x, vel.y, vel.z);
Vector3 acc = OVRInput.GetLocalControllerAcceleration(activeController);
data.AppendFormat("Acc: ({0:F2}, {1:F2}, {2:F2})\n", acc.x, acc.y, acc.z);
Vector2 primaryTouchpad = OVRInput.Get(OVRInput.Axis2D.PrimaryTouchpad);
data.AppendFormat("PrimaryTouchpad: ({0:F2}, {1:F2})\n", primaryTouchpad.x, primaryTouchpad.y);
Vector2 secondaryTouchpad = OVRInput.Get(OVRInput.Axis2D.SecondaryTouchpad);
data.AppendFormat("SecondaryTouchpad: ({0:F2}, {1:F2})\n", secondaryTouchpad.x, secondaryTouchpad.y);
float indexTrigger = OVRInput.Get(OVRInput.Axis1D.PrimaryIndexTrigger);
data.AppendFormat("PrimaryIndexTriggerAxis1D: ({0:F2})\n", indexTrigger);
float handTrigger = OVRInput.Get(OVRInput.Axis1D.PrimaryHandTrigger);
data.AppendFormat("PrimaryHandTriggerAxis1D: ({0:F2})\n", handTrigger);
for (int i = 0; i < monitors.Count; i++)
{
monitors[i].Update();
monitors[i].AppendToStringBuilder(ref data);
}
if (uiText != null)
{
uiText.text = data.ToString();
}
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7acc4619d4cb5e64e9ed05e5a7a8099f
timeCreated: 1486173066
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,169 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using UnityEngine;
/// <summary>
/// An object that can be grabbed and thrown by OVRGrabber.
/// </summary>
public class OVRGrabbable : MonoBehaviour
{
[SerializeField]
protected bool m_allowOffhandGrab = true;
[SerializeField]
protected bool m_snapPosition = false;
[SerializeField]
protected bool m_snapOrientation = false;
[SerializeField]
protected Transform m_snapOffset;
[SerializeField]
protected Collider[] m_grabPoints = null;
protected bool m_grabbedKinematic = false;
protected Collider m_grabbedCollider = null;
protected OVRGrabber m_grabbedBy = null;
/// <summary>
/// If true, the object can currently be grabbed.
/// </summary>
public bool allowOffhandGrab
{
get { return m_allowOffhandGrab; }
}
/// <summary>
/// If true, the object is currently grabbed.
/// </summary>
public bool isGrabbed
{
get { return m_grabbedBy != null; }
}
/// <summary>
/// If true, the object's position will snap to match snapOffset when grabbed.
/// </summary>
public bool snapPosition
{
get { return m_snapPosition; }
}
/// <summary>
/// If true, the object's orientation will snap to match snapOffset when grabbed.
/// </summary>
public bool snapOrientation
{
get { return m_snapOrientation; }
}
/// <summary>
/// An offset relative to the OVRGrabber where this object can snap when grabbed.
/// </summary>
public Transform snapOffset
{
get { return m_snapOffset; }
}
/// <summary>
/// Returns the OVRGrabber currently grabbing this object.
/// </summary>
public OVRGrabber grabbedBy
{
get { return m_grabbedBy; }
}
/// <summary>
/// The transform at which this object was grabbed.
/// </summary>
public Transform grabbedTransform
{
get { return m_grabbedCollider.transform; }
}
/// <summary>
/// The Rigidbody of the collider that was used to grab this object.
/// </summary>
public Rigidbody grabbedRigidbody
{
get { return m_grabbedCollider.attachedRigidbody; }
}
/// <summary>
/// The contact point(s) where the object was grabbed.
/// </summary>
public Collider[] grabPoints
{
get { return m_grabPoints; }
}
/// <summary>
/// Notifies the object that it has been grabbed.
/// </summary>
virtual public void GrabBegin(OVRGrabber hand, Collider grabPoint)
{
m_grabbedBy = hand;
m_grabbedCollider = grabPoint;
gameObject.GetComponent<Rigidbody>().isKinematic = true;
}
/// <summary>
/// Notifies the object that it has been released.
/// </summary>
virtual public void GrabEnd(Vector3 linearVelocity, Vector3 angularVelocity)
{
Rigidbody rb = gameObject.GetComponent<Rigidbody>();
rb.isKinematic = m_grabbedKinematic;
rb.velocity = linearVelocity;
rb.angularVelocity = angularVelocity;
m_grabbedBy = null;
m_grabbedCollider = null;
}
void Awake()
{
if (m_grabPoints.Length == 0)
{
// Get the collider from the grabbable
Collider collider = this.GetComponent<Collider>();
if (collider == null)
{
throw new ArgumentException("Grabbables cannot have zero grab points and no collider -- please add a grab point or collider.");
}
// Create a default grab point
m_grabPoints = new Collider[1] { collider };
}
}
protected virtual void Start()
{
m_grabbedKinematic = GetComponent<Rigidbody>().isKinematic;
}
void OnDestroy()
{
if (m_grabbedBy != null)
{
// Notify the hand to release destroyed grabbables
m_grabbedBy.ForceRelease(this);
}
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 02d61468f8b77ae4b92c344bc9a600fb
timeCreated: 1481833527
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,379 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Allows grabbing and throwing of objects with the OVRGrabbable component on them.
/// </summary>
[RequireComponent(typeof(Rigidbody))]
public class OVRGrabber : MonoBehaviour
{
// Grip trigger thresholds for picking up objects, with some hysteresis.
public float grabBegin = 0.55f;
public float grabEnd = 0.35f;
// Demonstrates parenting the held object to the hand's transform when grabbed.
// When false, the grabbed object is moved every FixedUpdate using MovePosition.
// Note that MovePosition is required for proper physics simulation. If you set this to true, you can
// easily observe broken physics simulation by, for example, moving the bottom cube of a stacked
// tower and noting a complete loss of friction.
[SerializeField]
protected bool m_parentHeldObject = false;
// Child/attached transforms of the grabber, indicating where to snap held objects to (if you snap them).
// Also used for ranking grab targets in case of multiple candidates.
[SerializeField]
protected Transform m_gripTransform = null;
// Child/attached Colliders to detect candidate grabbable objects.
[SerializeField]
protected Collider[] m_grabVolumes = null;
// Should be OVRInput.Controller.LTouch or OVRInput.Controller.RTouch.
[SerializeField]
protected OVRInput.Controller m_controller;
[SerializeField]
protected Transform m_parentTransform;
protected bool m_grabVolumeEnabled = true;
protected Vector3 m_lastPos;
protected Quaternion m_lastRot;
protected Quaternion m_anchorOffsetRotation;
protected Vector3 m_anchorOffsetPosition;
protected float m_prevFlex;
protected OVRGrabbable m_grabbedObj = null;
protected Vector3 m_grabbedObjectPosOff;
protected Quaternion m_grabbedObjectRotOff;
protected Dictionary<OVRGrabbable, int> m_grabCandidates = new Dictionary<OVRGrabbable, int>();
protected bool operatingWithoutOVRCameraRig = true;
/// <summary>
/// The currently grabbed object.
/// </summary>
public OVRGrabbable grabbedObject
{
get { return m_grabbedObj; }
}
public void ForceRelease(OVRGrabbable grabbable)
{
bool canRelease = (
(m_grabbedObj != null) &&
(m_grabbedObj == grabbable)
);
if (canRelease)
{
GrabEnd();
}
}
protected virtual void Awake()
{
m_anchorOffsetPosition = transform.localPosition;
m_anchorOffsetRotation = transform.localRotation;
// If we are being used with an OVRCameraRig, let it drive input updates, which may come from Update or FixedUpdate.
OVRCameraRig rig = null;
if (transform.parent != null && transform.parent.parent != null)
rig = transform.parent.parent.GetComponent<OVRCameraRig>();
if (rig != null)
{
rig.UpdatedAnchors += (r) => {OnUpdatedAnchors();};
operatingWithoutOVRCameraRig = false;
}
}
protected virtual void Start()
{
m_lastPos = transform.position;
m_lastRot = transform.rotation;
if(m_parentTransform == null)
{
if(gameObject.transform.parent != null)
{
m_parentTransform = gameObject.transform.parent.transform;
}
else
{
m_parentTransform = new GameObject().transform;
m_parentTransform.position = Vector3.zero;
m_parentTransform.rotation = Quaternion.identity;
}
}
}
void FixedUpdate()
{
if (operatingWithoutOVRCameraRig)
OnUpdatedAnchors();
}
// Hands follow the touch anchors by calling MovePosition each frame to reach the anchor.
// This is done instead of parenting to achieve workable physics. If you don't require physics on
// your hands or held objects, you may wish to switch to parenting.
void OnUpdatedAnchors()
{
Vector3 handPos = OVRInput.GetLocalControllerPosition(m_controller);
Quaternion handRot = OVRInput.GetLocalControllerRotation(m_controller);
Vector3 destPos = m_parentTransform.TransformPoint(m_anchorOffsetPosition + handPos);
Quaternion destRot = m_parentTransform.rotation * handRot * m_anchorOffsetRotation;
GetComponent<Rigidbody>().MovePosition(destPos);
GetComponent<Rigidbody>().MoveRotation(destRot);
if (!m_parentHeldObject)
{
MoveGrabbedObject(destPos, destRot);
}
m_lastPos = transform.position;
m_lastRot = transform.rotation;
float prevFlex = m_prevFlex;
// Update values from inputs
m_prevFlex = OVRInput.Get(OVRInput.Axis1D.PrimaryHandTrigger, m_controller);
CheckForGrabOrRelease(prevFlex);
}
void OnDestroy()
{
if (m_grabbedObj != null)
{
GrabEnd();
}
}
void OnTriggerEnter(Collider otherCollider)
{
// Get the grab trigger
OVRGrabbable grabbable = otherCollider.GetComponent<OVRGrabbable>() ?? otherCollider.GetComponentInParent<OVRGrabbable>();
if (grabbable == null) return;
// Add the grabbable
int refCount = 0;
m_grabCandidates.TryGetValue(grabbable, out refCount);
m_grabCandidates[grabbable] = refCount + 1;
}
void OnTriggerExit(Collider otherCollider)
{
OVRGrabbable grabbable = otherCollider.GetComponent<OVRGrabbable>() ?? otherCollider.GetComponentInParent<OVRGrabbable>();
if (grabbable == null) return;
// Remove the grabbable
int refCount = 0;
bool found = m_grabCandidates.TryGetValue(grabbable, out refCount);
if (!found)
{
return;
}
if (refCount > 1)
{
m_grabCandidates[grabbable] = refCount - 1;
}
else
{
m_grabCandidates.Remove(grabbable);
}
}
protected void CheckForGrabOrRelease(float prevFlex)
{
if ((m_prevFlex >= grabBegin) && (prevFlex < grabBegin))
{
GrabBegin();
}
else if ((m_prevFlex <= grabEnd) && (prevFlex > grabEnd))
{
GrabEnd();
}
}
protected virtual void GrabBegin()
{
float closestMagSq = float.MaxValue;
OVRGrabbable closestGrabbable = null;
Collider closestGrabbableCollider = null;
// Iterate grab candidates and find the closest grabbable candidate
foreach (OVRGrabbable grabbable in m_grabCandidates.Keys)
{
bool canGrab = !(grabbable.isGrabbed && !grabbable.allowOffhandGrab);
if (!canGrab)
{
continue;
}
for (int j = 0; j < grabbable.grabPoints.Length; ++j)
{
Collider grabbableCollider = grabbable.grabPoints[j];
// Store the closest grabbable
Vector3 closestPointOnBounds = grabbableCollider.ClosestPointOnBounds(m_gripTransform.position);
float grabbableMagSq = (m_gripTransform.position - closestPointOnBounds).sqrMagnitude;
if (grabbableMagSq < closestMagSq)
{
closestMagSq = grabbableMagSq;
closestGrabbable = grabbable;
closestGrabbableCollider = grabbableCollider;
}
}
}
// Disable grab volumes to prevent overlaps
GrabVolumeEnable(false);
if (closestGrabbable != null)
{
if (closestGrabbable.isGrabbed)
{
closestGrabbable.grabbedBy.OffhandGrabbed(closestGrabbable);
}
m_grabbedObj = closestGrabbable;
m_grabbedObj.GrabBegin(this, closestGrabbableCollider);
m_lastPos = transform.position;
m_lastRot = transform.rotation;
// Set up offsets for grabbed object desired position relative to hand.
if(m_grabbedObj.snapPosition)
{
m_grabbedObjectPosOff = m_gripTransform.localPosition;
if(m_grabbedObj.snapOffset)
{
Vector3 snapOffset = m_grabbedObj.snapOffset.position;
if (m_controller == OVRInput.Controller.LTouch) snapOffset.x = -snapOffset.x;
m_grabbedObjectPosOff += snapOffset;
}
}
else
{
Vector3 relPos = m_grabbedObj.transform.position - transform.position;
relPos = Quaternion.Inverse(transform.rotation) * relPos;
m_grabbedObjectPosOff = relPos;
}
if (m_grabbedObj.snapOrientation)
{
m_grabbedObjectRotOff = m_gripTransform.localRotation;
if(m_grabbedObj.snapOffset)
{
m_grabbedObjectRotOff = m_grabbedObj.snapOffset.rotation * m_grabbedObjectRotOff;
}
}
else
{
Quaternion relOri = Quaternion.Inverse(transform.rotation) * m_grabbedObj.transform.rotation;
m_grabbedObjectRotOff = relOri;
}
// Note: force teleport on grab, to avoid high-speed travel to dest which hits a lot of other objects at high
// speed and sends them flying. The grabbed object may still teleport inside of other objects, but fixing that
// is beyond the scope of this demo.
MoveGrabbedObject(m_lastPos, m_lastRot, true);
if(m_parentHeldObject)
{
m_grabbedObj.transform.parent = transform;
}
}
}
protected virtual void MoveGrabbedObject(Vector3 pos, Quaternion rot, bool forceTeleport = false)
{
if (m_grabbedObj == null)
{
return;
}
Rigidbody grabbedRigidbody = m_grabbedObj.grabbedRigidbody;
Vector3 grabbablePosition = pos + rot * m_grabbedObjectPosOff;
Quaternion grabbableRotation = rot * m_grabbedObjectRotOff;
if (forceTeleport)
{
grabbedRigidbody.transform.position = grabbablePosition;
grabbedRigidbody.transform.rotation = grabbableRotation;
}
else
{
grabbedRigidbody.MovePosition(grabbablePosition);
grabbedRigidbody.MoveRotation(grabbableRotation);
}
}
protected void GrabEnd()
{
if (m_grabbedObj != null)
{
OVRPose localPose = new OVRPose { position = OVRInput.GetLocalControllerPosition(m_controller), orientation = OVRInput.GetLocalControllerRotation(m_controller) };
OVRPose offsetPose = new OVRPose { position = m_anchorOffsetPosition, orientation = m_anchorOffsetRotation };
localPose = localPose * offsetPose;
OVRPose trackingSpace = transform.ToOVRPose() * localPose.Inverse();
Vector3 linearVelocity = trackingSpace.orientation * OVRInput.GetLocalControllerVelocity(m_controller);
Vector3 angularVelocity = trackingSpace.orientation * OVRInput.GetLocalControllerAngularVelocity(m_controller);
GrabbableRelease(linearVelocity, angularVelocity);
}
// Re-enable grab volumes to allow overlap events
GrabVolumeEnable(true);
}
protected void GrabbableRelease(Vector3 linearVelocity, Vector3 angularVelocity)
{
m_grabbedObj.GrabEnd(linearVelocity, angularVelocity);
if(m_parentHeldObject) m_grabbedObj.transform.parent = null;
m_grabbedObj = null;
}
protected virtual void GrabVolumeEnable(bool enabled)
{
if (m_grabVolumeEnabled == enabled)
{
return;
}
m_grabVolumeEnabled = enabled;
for (int i = 0; i < m_grabVolumes.Length; ++i)
{
Collider grabVolume = m_grabVolumes[i];
grabVolume.enabled = m_grabVolumeEnabled;
}
if (!m_grabVolumeEnabled)
{
m_grabCandidates.Clear();
}
}
protected virtual void OffhandGrabbed(OVRGrabbable grabbable)
{
if (m_grabbedObj == grabbable)
{
GrabbableRelease(Vector3.zero, Vector3.zero);
}
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: fd425c2d06f39bf4899d07c05d0f10eb
timeCreated: 1481832436
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,195 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
/// <summary>
/// Diagnostic display with a regular grid of cubes for visual testing of
/// tracking and distortion.
/// </summary>
public class OVRGridCube : MonoBehaviour
{
/// <summary>
/// The key that toggles the grid of cubes.
/// </summary>
public KeyCode GridKey = KeyCode.G;
private GameObject CubeGrid = null;
private bool CubeGridOn = false;
private bool CubeSwitchColorOld = false;
private bool CubeSwitchColor = false;
private int gridSizeX = 6;
private int gridSizeY = 4;
private int gridSizeZ = 6;
private float gridScale = 0.3f;
private float cubeScale = 0.03f;
// Handle to OVRCameraRig
private OVRCameraRig CameraController = null;
/// <summary>
/// Update this instance.
/// </summary>
void Update ()
{
UpdateCubeGrid();
}
/// <summary>
/// Sets the OVR camera controller.
/// </summary>
/// <param name="cameraController">Camera controller.</param>
public void SetOVRCameraController(ref OVRCameraRig cameraController)
{
CameraController = cameraController;
}
void UpdateCubeGrid()
{
// Toggle the grid cube display on 'G'
if(Input.GetKeyDown(GridKey))
{
if(CubeGridOn == false)
{
CubeGridOn = true;
Debug.LogWarning("CubeGrid ON");
if(CubeGrid != null)
CubeGrid.SetActive(true);
else
CreateCubeGrid();
}
else
{
CubeGridOn = false;
Debug.LogWarning("CubeGrid OFF");
if(CubeGrid != null)
CubeGrid.SetActive(false);
}
}
if(CubeGrid != null)
{
// Set cube colors to let user know if camera is tracking
CubeSwitchColor = !OVRManager.tracker.isPositionTracked;
if(CubeSwitchColor != CubeSwitchColorOld)
CubeGridSwitchColor(CubeSwitchColor);
CubeSwitchColorOld = CubeSwitchColor;
}
}
void CreateCubeGrid()
{
Debug.LogWarning("Create CubeGrid");
// Create the visual cube grid
CubeGrid = new GameObject("CubeGrid");
// Set a layer to target a specific camera
CubeGrid.layer = CameraController.gameObject.layer;
for (int x = -gridSizeX; x <= gridSizeX; x++)
for (int y = -gridSizeY; y <= gridSizeY; y++)
for (int z = -gridSizeZ; z <= gridSizeZ; z++)
{
// Set the cube type:
// 0 = non-axis cube
// 1 = axis cube
// 2 = center cube
int CubeType = 0;
if ((x == 0 && y == 0) || (x == 0 && z == 0) || (y == 0 && z == 0))
{
if((x == 0) && (y == 0) && (z == 0))
CubeType = 2;
else
CubeType = 1;
}
GameObject cube = GameObject.CreatePrimitive(PrimitiveType.Cube);
BoxCollider bc = cube.GetComponent<BoxCollider>();
bc.enabled = false;
cube.layer = CameraController.gameObject.layer;
// No shadows
Renderer r = cube.GetComponent<Renderer>();
#if UNITY_4_0 || UNITY_4_1 || UNITY_4_2 || UNITY_4_3 || UNITY_4_5 || UNITY_4_6
// Renderer.castShadows was deprecated starting in Unity 5.0
r.castShadows = false;
#else
r.shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
#endif
r.receiveShadows = false;
// Cube line is white down the middle
if (CubeType == 0)
r.material.color = Color.red;
else if (CubeType == 1)
r.material.color = Color.white;
else
r.material.color = Color.yellow;
cube.transform.position =
new Vector3(((float)x * gridScale),
((float)y * gridScale),
((float)z * gridScale));
float s = 0.7f;
// Axis cubes are bigger
if(CubeType == 1)
s = 1.0f;
// Center cube is the largest
if(CubeType == 2)
s = 2.0f;
cube.transform.localScale =
new Vector3(cubeScale * s, cubeScale * s, cubeScale * s);
cube.transform.parent = CubeGrid.transform;
}
}
/// <summary>
/// Switch the Cube grid color.
/// </summary>
/// <param name="CubeSwitchColor">If set to <c>true</c> cube switch color.</param>
void CubeGridSwitchColor(bool CubeSwitchColor)
{
Color c = Color.red;
if(CubeSwitchColor == true)
c = Color.blue;
foreach(Transform child in CubeGrid.transform)
{
Material m = child.GetComponent<Renderer>().material;
// Cube line is white down the middle
if(m.color == Color.red || m.color == Color.blue)
m.color = c;
}
}
}
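A minimal usage sketch for the grid above (illustrative, not part of this commit): add OVRGridCube at runtime and hand it the scene's OVRCameraRig via SetOVRCameraController, then press GridKey ('G' by default) to toggle the grid.

using UnityEngine;

// Hypothetical bootstrap: attach to any GameObject in a scene that already
// contains an OVRCameraRig. Assumes the OVRGridCube component above exists
// in the project.
public class GridCubeBootstrap : MonoBehaviour
{
    void Start()
    {
        OVRCameraRig rig = FindObjectOfType<OVRCameraRig>();
        if (rig == null)
        {
            Debug.LogWarning("GridCubeBootstrap: no OVRCameraRig found.");
            return;
        }
        // Create the diagnostic grid and point it at the camera rig.
        OVRGridCube gridCube = gameObject.AddComponent<OVRGridCube>();
        gridCube.SetOVRCameraController(ref rig);
    }
}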

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 4988596c8a187f94f8e6a345ebb4254b
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,913 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using System.Collections.Generic;
namespace UnityEngine.EventSystems
{
/// <summary>
/// VR extension of PointerInputModule which supports gaze and controller pointing.
/// </summary>
public class OVRInputModule : PointerInputModule
{
[Tooltip("Object which points with Z axis. E.g. CentreEyeAnchor from OVRCameraRig")]
public Transform rayTransform;
[Tooltip("Gamepad button to act as gaze click")]
public OVRInput.Button joyPadClickButton = OVRInput.Button.One;
[Tooltip("Keyboard button to act as gaze click")]
public KeyCode gazeClickKey = KeyCode.Space;
[Header("Physics")]
[Tooltip("Perform an sphere cast to determine correct depth for gaze pointer")]
public bool performSphereCastForGazepointer;
[Tooltip("Match the gaze pointer normal to geometry normal for physics colliders")]
public bool matchNormalOnPhysicsColliders;
[Header("Gamepad Stick Scroll")]
[Tooltip("Enable scrolling with the right stick on a gamepad")]
public bool useRightStickScroll = true;
[Tooltip("Deadzone for right stick to prevent accidental scrolling")]
public float rightStickDeadZone = 0.15f;
[Header("Touchpad Swipe Scroll")]
[Tooltip("Enable scrolling by swiping the GearVR touchpad")]
public bool useSwipeScroll = true;
[Tooltip("Minimum trackpad movement in pixels to start swiping")]
public float swipeDragThreshold = 2;
[Tooltip("Distance scrolled when swipe scroll occurs")]
public float swipeDragScale = 1f;
/* It's debatable which way left and right are on the Gear VR touchpad, since it faces away from you;
 * the following bool allows the swipe X axis to be swapped. */
[Tooltip("Invert X axis on touchpad")]
public bool InvertSwipeXAxis = false;
// The raycaster that gets to do pointer interaction (e.g. with a mouse), gaze interaction always works
[NonSerialized]
public OVRRaycaster activeGraphicRaycaster;
[Header("Dragging")]
[Tooltip("Minimum pointer movement in degrees to start dragging")]
public float angleDragThreshold = 1;
// The following region contains code exactly the same as the implementation
// of StandaloneInputModule. It is copied here rather than inherited from StandaloneInputModule
// because most of StandaloneInputModule is private, so it isn't possible to derive from it easily.
// Future changes from Unity to StandaloneInputModule will make it possible for this class to
// derive from StandaloneInputModule instead of PointerInput module.
//
// The following functions are not present in the following region since they have modified
// versions in the next region:
// Process
// ProcessMouseEvent
// UseMouse
#region StandaloneInputModule code
private float m_NextAction;
private Vector2 m_LastMousePosition;
private Vector2 m_MousePosition;
protected OVRInputModule()
{}
#if UNITY_EDITOR
protected override void Reset()
{
allowActivationOnMobileDevice = true;
}
#endif
[Obsolete("Mode is no longer needed on input module as it handles both mouse and keyboard simultaneously.", false)]
public enum InputMode
{
Mouse,
Buttons
}
[Obsolete("Mode is no longer needed on input module as it handles both mouse and keyboard simultaneously.", false)]
public InputMode inputMode
{
get { return InputMode.Mouse; }
}
[Header("Standalone Input Module")]
[SerializeField]
private string m_HorizontalAxis = "Horizontal";
/// <summary>
/// Name of the vertical axis for movement (if axis events are used).
/// </summary>
[SerializeField]
private string m_VerticalAxis = "Vertical";
/// <summary>
/// Name of the submit button.
/// </summary>
[SerializeField]
private string m_SubmitButton = "Submit";
/// <summary>
/// Name of the cancel button.
/// </summary>
[SerializeField]
private string m_CancelButton = "Cancel";
[SerializeField]
private float m_InputActionsPerSecond = 10;
[SerializeField]
private bool m_AllowActivationOnMobileDevice;
public bool allowActivationOnMobileDevice
{
get { return m_AllowActivationOnMobileDevice; }
set { m_AllowActivationOnMobileDevice = value; }
}
public float inputActionsPerSecond
{
get { return m_InputActionsPerSecond; }
set { m_InputActionsPerSecond = value; }
}
/// <summary>
/// Name of the horizontal axis for movement (if axis events are used).
/// </summary>
public string horizontalAxis
{
get { return m_HorizontalAxis; }
set { m_HorizontalAxis = value; }
}
/// <summary>
/// Name of the vertical axis for movement (if axis events are used).
/// </summary>
public string verticalAxis
{
get { return m_VerticalAxis; }
set { m_VerticalAxis = value; }
}
public string submitButton
{
get { return m_SubmitButton; }
set { m_SubmitButton = value; }
}
public string cancelButton
{
get { return m_CancelButton; }
set { m_CancelButton = value; }
}
public override void UpdateModule()
{
m_LastMousePosition = m_MousePosition;
m_MousePosition = Input.mousePosition;
}
public override bool IsModuleSupported()
{
// Check for mouse presence instead of whether touch is supported,
// as you can connect mouse to a tablet and in that case we'd want
// to use StandaloneInputModule for non-touch input events.
return m_AllowActivationOnMobileDevice || Input.mousePresent;
}
public override bool ShouldActivateModule()
{
if (!base.ShouldActivateModule())
return false;
var shouldActivate = Input.GetButtonDown(m_SubmitButton);
shouldActivate |= Input.GetButtonDown(m_CancelButton);
shouldActivate |= !Mathf.Approximately(Input.GetAxisRaw(m_HorizontalAxis), 0.0f);
shouldActivate |= !Mathf.Approximately(Input.GetAxisRaw(m_VerticalAxis), 0.0f);
shouldActivate |= (m_MousePosition - m_LastMousePosition).sqrMagnitude > 0.0f;
shouldActivate |= Input.GetMouseButtonDown(0);
return shouldActivate;
}
public override void ActivateModule()
{
base.ActivateModule();
m_MousePosition = Input.mousePosition;
m_LastMousePosition = Input.mousePosition;
var toSelect = eventSystem.currentSelectedGameObject;
if (toSelect == null)
toSelect = eventSystem.firstSelectedGameObject;
eventSystem.SetSelectedGameObject(toSelect, GetBaseEventData());
}
public override void DeactivateModule()
{
base.DeactivateModule();
ClearSelection();
}
/// <summary>
/// Process submit keys.
/// </summary>
private bool SendSubmitEventToSelectedObject()
{
if (eventSystem.currentSelectedGameObject == null)
return false;
var data = GetBaseEventData();
if (Input.GetButtonDown(m_SubmitButton))
ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, data, ExecuteEvents.submitHandler);
if (Input.GetButtonDown(m_CancelButton))
ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, data, ExecuteEvents.cancelHandler);
return data.used;
}
private bool AllowMoveEventProcessing(float time)
{
bool allow = Input.GetButtonDown(m_HorizontalAxis);
allow |= Input.GetButtonDown(m_VerticalAxis);
allow |= (time > m_NextAction);
return allow;
}
private Vector2 GetRawMoveVector()
{
Vector2 move = Vector2.zero;
move.x = Input.GetAxisRaw(m_HorizontalAxis);
move.y = Input.GetAxisRaw(m_VerticalAxis);
if (Input.GetButtonDown(m_HorizontalAxis))
{
if (move.x < 0)
move.x = -1f;
if (move.x > 0)
move.x = 1f;
}
if (Input.GetButtonDown(m_VerticalAxis))
{
if (move.y < 0)
move.y = -1f;
if (move.y > 0)
move.y = 1f;
}
return move;
}
/// <summary>
/// Process keyboard events.
/// </summary>
private bool SendMoveEventToSelectedObject()
{
float time = Time.unscaledTime;
if (!AllowMoveEventProcessing(time))
return false;
Vector2 movement = GetRawMoveVector();
// Debug.Log(m_ProcessingEvent.rawType + " axis:" + m_AllowAxisEvents + " value:" + "(" + x + "," + y + ")");
var axisEventData = GetAxisEventData(movement.x, movement.y, 0.6f);
if (!Mathf.Approximately(axisEventData.moveVector.x, 0f)
|| !Mathf.Approximately(axisEventData.moveVector.y, 0f))
{
ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, axisEventData, ExecuteEvents.moveHandler);
}
m_NextAction = time + 1f / m_InputActionsPerSecond;
return axisEventData.used;
}
private bool SendUpdateEventToSelectedObject()
{
if (eventSystem.currentSelectedGameObject == null)
return false;
var data = GetBaseEventData();
ExecuteEvents.Execute(eventSystem.currentSelectedGameObject, data, ExecuteEvents.updateSelectedHandler);
return data.used;
}
/// <summary>
/// Process the current mouse press.
/// </summary>
private void ProcessMousePress(MouseButtonEventData data)
{
var pointerEvent = data.buttonData;
var currentOverGo = pointerEvent.pointerCurrentRaycast.gameObject;
// PointerDown notification
if (data.PressedThisFrame())
{
pointerEvent.eligibleForClick = true;
pointerEvent.delta = Vector2.zero;
pointerEvent.dragging = false;
pointerEvent.useDragThreshold = true;
pointerEvent.pressPosition = pointerEvent.position;
if (pointerEvent.IsVRPointer())
{
pointerEvent.SetSwipeStart(Input.mousePosition);
}
pointerEvent.pointerPressRaycast = pointerEvent.pointerCurrentRaycast;
DeselectIfSelectionChanged(currentOverGo, pointerEvent);
// search for the control that will receive the press
// if we can't find a press handler set the press
// handler to be what would receive a click.
var newPressed = ExecuteEvents.ExecuteHierarchy(currentOverGo, pointerEvent, ExecuteEvents.pointerDownHandler);
// didn't find a press handler... search for a click handler
if (newPressed == null)
newPressed = ExecuteEvents.GetEventHandler<IPointerClickHandler>(currentOverGo);
// Debug.Log("Pressed: " + newPressed);
float time = Time.unscaledTime;
if (newPressed == pointerEvent.lastPress)
{
var diffTime = time - pointerEvent.clickTime;
if (diffTime < 0.3f)
++pointerEvent.clickCount;
else
pointerEvent.clickCount = 1;
pointerEvent.clickTime = time;
}
else
{
pointerEvent.clickCount = 1;
}
pointerEvent.pointerPress = newPressed;
pointerEvent.rawPointerPress = currentOverGo;
pointerEvent.clickTime = time;
// Save the drag handler as well
pointerEvent.pointerDrag = ExecuteEvents.GetEventHandler<IDragHandler>(currentOverGo);
if (pointerEvent.pointerDrag != null)
ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.initializePotentialDrag);
}
// PointerUp notification
if (data.ReleasedThisFrame())
{
// Debug.Log("Executing pressup on: " + pointer.pointerPress);
ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerUpHandler);
// Debug.Log("KeyCode: " + pointer.eventData.keyCode);
// see if we mouse up on the same element that we clicked on...
var pointerUpHandler = ExecuteEvents.GetEventHandler<IPointerClickHandler>(currentOverGo);
// PointerClick and Drop events
if (pointerEvent.pointerPress == pointerUpHandler && pointerEvent.eligibleForClick)
{
ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerClickHandler);
}
else if (pointerEvent.pointerDrag != null)
{
ExecuteEvents.ExecuteHierarchy(currentOverGo, pointerEvent, ExecuteEvents.dropHandler);
}
pointerEvent.eligibleForClick = false;
pointerEvent.pointerPress = null;
pointerEvent.rawPointerPress = null;
if (pointerEvent.pointerDrag != null && pointerEvent.dragging)
ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.endDragHandler);
pointerEvent.dragging = false;
pointerEvent.pointerDrag = null;
// redo pointer enter / exit to refresh state
// so that if we moused over somethign that ignored it before
// due to having pressed on something else
// it now gets it.
if (currentOverGo != pointerEvent.pointerEnter)
{
HandlePointerExitAndEnter(pointerEvent, null);
HandlePointerExitAndEnter(pointerEvent, currentOverGo);
}
}
}
#endregion
#region Modified StandaloneInputModule methods
/// <summary>
/// Process all mouse events. This is the same as the StandaloneInputModule version except that
/// it takes MouseState as a parameter, allowing it to be used for both Gaze and Mouse
/// pointers.
/// </summary>
private void ProcessMouseEvent(MouseState mouseData)
{
var pressed = mouseData.AnyPressesThisFrame();
var released = mouseData.AnyReleasesThisFrame();
var leftButtonData = mouseData.GetButtonState(PointerEventData.InputButton.Left).eventData;
if (!UseMouse(pressed, released, leftButtonData.buttonData))
return;
// Process the first mouse button fully
ProcessMousePress(leftButtonData);
ProcessMove(leftButtonData.buttonData);
ProcessDrag(leftButtonData.buttonData);
// Now process right / middle clicks
ProcessMousePress(mouseData.GetButtonState(PointerEventData.InputButton.Right).eventData);
ProcessDrag(mouseData.GetButtonState(PointerEventData.InputButton.Right).eventData.buttonData);
ProcessMousePress(mouseData.GetButtonState(PointerEventData.InputButton.Middle).eventData);
ProcessDrag(mouseData.GetButtonState(PointerEventData.InputButton.Middle).eventData.buttonData);
if (!Mathf.Approximately(leftButtonData.buttonData.scrollDelta.sqrMagnitude, 0.0f))
{
var scrollHandler = ExecuteEvents.GetEventHandler<IScrollHandler>(leftButtonData.buttonData.pointerCurrentRaycast.gameObject);
ExecuteEvents.ExecuteHierarchy(scrollHandler, leftButtonData.buttonData, ExecuteEvents.scrollHandler);
}
}
/// <summary>
/// Process this InputModule. Same as the StandaloneInputModule version, except that it calls
/// ProcessMouseEvent twice, once for gaze pointers, and once for mouse pointers.
/// </summary>
public override void Process()
{
bool usedEvent = SendUpdateEventToSelectedObject();
if (eventSystem.sendNavigationEvents)
{
if (!usedEvent)
usedEvent |= SendMoveEventToSelectedObject();
if (!usedEvent)
SendSubmitEventToSelectedObject();
}
ProcessMouseEvent(GetGazePointerData());
#if !UNITY_ANDROID
ProcessMouseEvent(GetCanvasPointerData());
#endif
}
/// <summary>
/// Decide if mouse events need to be processed this frame. Same as StandaloneInputModule except
/// that the IsPointerMoving method from this class is used, instead of the method on PointerEventData
/// </summary>
private static bool UseMouse(bool pressed, bool released, PointerEventData pointerData)
{
if (pressed || released || IsPointerMoving(pointerData) || pointerData.IsScrolling())
return true;
return false;
}
#endregion
/// <summary>
/// Convenience function for cloning PointerEventData
/// </summary>
/// <param name="from">Copy this value</param>
/// <param name="to">to this object</param>
protected void CopyFromTo(OVRPointerEventData @from, OVRPointerEventData @to)
{
@to.position = @from.position;
@to.delta = @from.delta;
@to.scrollDelta = @from.scrollDelta;
@to.pointerCurrentRaycast = @from.pointerCurrentRaycast;
@to.pointerEnter = @from.pointerEnter;
@to.worldSpaceRay = @from.worldSpaceRay;
}
/// <summary>
/// Convenience function for cloning PointerEventData
/// </summary>
/// <param name="from">Copy this value</param>
/// <param name="to">to this object</param>
protected new void CopyFromTo(PointerEventData @from, PointerEventData @to)
{
@to.position = @from.position;
@to.delta = @from.delta;
@to.scrollDelta = @from.scrollDelta;
@to.pointerCurrentRaycast = @from.pointerCurrentRaycast;
@to.pointerEnter = @from.pointerEnter;
}
// In the following region we extend the PointerEventData system implemented in PointerInputModule
// We define an additional dictionary for ray (e.g. gaze) based pointers. Mouse pointers still use the dictionary
// in PointerInputModule
#region PointerEventData pool
// Pool for OVRRayPointerEventData for ray based pointers
protected Dictionary<int, OVRPointerEventData> m_VRRayPointerData = new Dictionary<int, OVRPointerEventData>();
protected bool GetPointerData(int id, out OVRPointerEventData data, bool create)
{
if (!m_VRRayPointerData.TryGetValue(id, out data) && create)
{
data = new OVRPointerEventData(eventSystem)
{
pointerId = id,
};
m_VRRayPointerData.Add(id, data);
return true;
}
return false;
}
/// <summary>
/// Clear pointer state for both types of pointer
/// </summary>
protected new void ClearSelection()
{
var baseEventData = GetBaseEventData();
foreach (var pointer in m_PointerData.Values)
{
// clear all selection
HandlePointerExitAndEnter(pointer, null);
}
foreach (var pointer in m_VRRayPointerData.Values)
{
// clear all selection
HandlePointerExitAndEnter(pointer, null);
}
m_PointerData.Clear();
eventSystem.SetSelectedGameObject(null, baseEventData);
}
#endregion
/// <summary>
/// For a RectTransform, calculate its normal in world space
/// </summary>
static Vector3 GetRectTransformNormal(RectTransform rectTransform)
{
Vector3[] corners = new Vector3[4];
rectTransform.GetWorldCorners(corners);
Vector3 BottomEdge = corners[3] - corners[0];
Vector3 LeftEdge = corners[1] - corners[0];
return Vector3.Cross(BottomEdge, LeftEdge).normalized;
}
private readonly MouseState m_MouseState = new MouseState();
// The following 2 functions are equivalent to PointerInputModule.GetMousePointerEventData but are customized to
// get data for ray pointers and canvas mouse pointers.
/// <summary>
/// State for a pointer controlled by a world space ray. E.g. gaze pointer
/// </summary>
/// <returns></returns>
virtual protected MouseState GetGazePointerData()
{
// Get the OVRRayPointerEventData reference
OVRPointerEventData leftData;
GetPointerData(kMouseLeftId, out leftData, true );
leftData.Reset();
//Now set the world space ray. This ray is what the user uses to point at UI elements
leftData.worldSpaceRay = new Ray(rayTransform.position, rayTransform.forward);
leftData.scrollDelta = GetExtraScrollDelta();
//Populate some default values
leftData.button = PointerEventData.InputButton.Left;
leftData.useDragThreshold = true;
// Perform raycast to find intersections with world
eventSystem.RaycastAll(leftData, m_RaycastResultCache);
var raycast = FindFirstRaycast(m_RaycastResultCache);
leftData.pointerCurrentRaycast = raycast;
m_RaycastResultCache.Clear();
OVRRaycaster ovrRaycaster = raycast.module as OVRRaycaster;
// We're only interested in intersections from OVRRaycasters
if (ovrRaycaster)
{
// The Unity UI system expects event data to have a screen position
// so even though this raycast came from a world space ray we must get a screen
// space position for the camera attached to this raycaster for compatibility
leftData.position = ovrRaycaster.GetScreenPosition(raycast);
// Find the world position and normal the Graphic the ray intersected
RectTransform graphicRect = raycast.gameObject.GetComponent<RectTransform>();
if (graphicRect != null)
{
// Set our gaze indicator to this world position and normal
Vector3 worldPos = raycast.worldPosition;
Vector3 normal = GetRectTransformNormal(graphicRect);
OVRGazePointer.instance.SetPosition(worldPos, normal);
// Make sure it's being shown
OVRGazePointer.instance.RequestShow();
}
}
// Now process physical raycast intersections
OVRPhysicsRaycaster physicsRaycaster = raycast.module as OVRPhysicsRaycaster;
if (physicsRaycaster)
{
Vector3 position = raycast.worldPosition;
if (performSphereCastForGazepointer)
{
// Here we cast a sphere into the scene rather than a ray. This gives a more accurate depth
// for positioning a circular gaze pointer
List<RaycastResult> results = new List<RaycastResult>();
physicsRaycaster.Spherecast(leftData, results, OVRGazePointer.instance.GetCurrentRadius());
if (results.Count > 0 && results[0].distance < raycast.distance)
{
position = results[0].worldPosition;
}
}
leftData.position = physicsRaycaster.GetScreenPos(raycast.worldPosition);
// Show the cursor while pointing at an interactable object
OVRGazePointer.instance.RequestShow();
if (matchNormalOnPhysicsColliders)
{
OVRGazePointer.instance.SetPosition(position, raycast.worldNormal);
}
else
{
OVRGazePointer.instance.SetPosition(position);
}
}
// Stick default data values in right and middle slots for compatibility
// copy the appropriate data into right and middle slots
OVRPointerEventData rightData;
GetPointerData(kMouseRightId, out rightData, true );
CopyFromTo(leftData, rightData);
rightData.button = PointerEventData.InputButton.Right;
OVRPointerEventData middleData;
GetPointerData(kMouseMiddleId, out middleData, true );
CopyFromTo(leftData, middleData);
middleData.button = PointerEventData.InputButton.Middle;
m_MouseState.SetButtonState(PointerEventData.InputButton.Left, GetGazeButtonState(), leftData);
m_MouseState.SetButtonState(PointerEventData.InputButton.Right, PointerEventData.FramePressState.NotChanged, rightData);
m_MouseState.SetButtonState(PointerEventData.InputButton.Middle, PointerEventData.FramePressState.NotChanged, middleData);
return m_MouseState;
}
/// <summary>
/// Get state for a pointer that moves in world space across the surface of a world space canvas.
/// </summary>
/// <returns></returns>
protected MouseState GetCanvasPointerData()
{
// Get the OVRRayPointerEventData reference
PointerEventData leftData;
GetPointerData(kMouseLeftId, out leftData, true );
leftData.Reset();
// Set up default values here. Set position to zero because we don't actually know the pointer
// positions. Each canvas knows the position of its canvas pointer.
leftData.position = Vector2.zero;
leftData.scrollDelta = Input.mouseScrollDelta;
leftData.button = PointerEventData.InputButton.Left;
if (activeGraphicRaycaster)
{
// Let the active raycaster find intersections on its canvas
activeGraphicRaycaster.RaycastPointer(leftData, m_RaycastResultCache);
var raycast = FindFirstRaycast(m_RaycastResultCache);
leftData.pointerCurrentRaycast = raycast;
m_RaycastResultCache.Clear();
OVRRaycaster ovrRaycaster = raycast.module as OVRRaycaster;
if (ovrRaycaster) // raycast may not actually contain a result
{
// The Unity UI system expects event data to have a screen position
// so even though this raycast came from a world space ray we must get a screen
// space position for the camera attached to this raycaster for compatibility
Vector2 position = ovrRaycaster.GetScreenPosition(raycast);
leftData.delta = position - leftData.position;
leftData.position = position;
}
}
// copy the appropriate data into right and middle slots
PointerEventData rightData;
GetPointerData(kMouseRightId, out rightData, true );
CopyFromTo(leftData, rightData);
rightData.button = PointerEventData.InputButton.Right;
PointerEventData middleData;
GetPointerData(kMouseMiddleId, out middleData, true );
CopyFromTo(leftData, middleData);
middleData.button = PointerEventData.InputButton.Middle;
m_MouseState.SetButtonState(PointerEventData.InputButton.Left, StateForMouseButton(0), leftData);
m_MouseState.SetButtonState(PointerEventData.InputButton.Right, StateForMouseButton(1), rightData);
m_MouseState.SetButtonState(PointerEventData.InputButton.Middle, StateForMouseButton(2), middleData);
return m_MouseState;
}
/// <summary>
/// New version of ShouldStartDrag implemented first in PointerInputModule. This version differs in that
/// for ray based pointers it makes a decision about whether a drag should start based on the angular change
/// the pointer has made so far, as seen from the camera. This also works when the world space ray is
/// translated rather than rotated, since the beginning and end of the movement are considered as angle from
/// the same point.
/// </summary>
private bool ShouldStartDrag(PointerEventData pointerEvent)
{
if (!pointerEvent.useDragThreshold)
return true;
if (!pointerEvent.IsVRPointer())
{
// Same as original behaviour for canvas based pointers
return (pointerEvent.pressPosition - pointerEvent.position).sqrMagnitude >= eventSystem.pixelDragThreshold * eventSystem.pixelDragThreshold;
}
else
{
#if UNITY_ANDROID && !UNITY_EDITOR // On android allow swiping to start drag
if (useSwipeScroll && ((Vector3)pointerEvent.GetSwipeStart() - Input.mousePosition).magnitude > swipeDragThreshold)
{
return true;
}
#endif
// When it's not a screen space pointer we have to look at the angle it moved rather than the pixel distance
// For gaze based pointing screen-space distance moved will always be near 0
Vector3 cameraPos = pointerEvent.pressEventCamera.transform.position;
Vector3 pressDir = (pointerEvent.pointerPressRaycast.worldPosition - cameraPos).normalized;
Vector3 currentDir = (pointerEvent.pointerCurrentRaycast.worldPosition - cameraPos).normalized;
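// For unit vectors, Dot(pressDir, currentDir) equals the cosine of the angle
// between them, so the angle exceeds angleDragThreshold exactly when the dot
// product drops below Cos(angleDragThreshold).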
return Vector3.Dot(pressDir, currentDir) < Mathf.Cos(Mathf.Deg2Rad * (angleDragThreshold));
}
}
/// <summary>
/// The purpose of this function is to use the standard IsPointerMoving
/// method for mouse-driven pointers, but to always return true for ray-based pointers.
/// All real-world ray-based input devices are always moving so for simplicity we just return true
/// for them.
///
/// If PointerEventData.IsPointerMoving was virtual we could just override that in
/// OVRRayPointerEventData.
/// </summary>
/// <param name="pointerEvent"></param>
/// <returns></returns>
static bool IsPointerMoving(PointerEventData pointerEvent)
{
if (pointerEvent.IsVRPointer())
return true;
else
return pointerEvent.IsPointerMoving();
}
protected Vector2 SwipeAdjustedPosition(Vector2 originalPosition, PointerEventData pointerEvent)
{
#if UNITY_ANDROID && !UNITY_EDITOR
// On android we use the touchpad position (accessed through Input.mousePosition) to modify
// the effective cursor position for events related to dragging. This allows the user to
// use the touchpad to drag draggable UI elements
if (useSwipeScroll)
{
Vector2 delta = (Vector2)Input.mousePosition - pointerEvent.GetSwipeStart();
if (InvertSwipeXAxis)
delta.x *= -1;
return originalPosition + delta * swipeDragScale;
}
#endif
// If we're not on Gear VR, or swipe scroll isn't enabled, just return the original position
return originalPosition;
}
/// <summary>
/// Exactly the same as the code from PointerInputModule, except that we call our own
/// IsPointerMoving.
///
/// This would also not be necessary if PointerEventData.IsPointerMoving was virtual
/// </summary>
/// <param name="pointerEvent"></param>
protected override void ProcessDrag(PointerEventData pointerEvent)
{
Vector2 originalPosition = pointerEvent.position;
bool moving = IsPointerMoving(pointerEvent);
if (moving && pointerEvent.pointerDrag != null
&& !pointerEvent.dragging
&& ShouldStartDrag(pointerEvent))
{
if (pointerEvent.IsVRPointer())
{
// Adjust the position used based on the swiping action, allowing the user to
// drag items by swiping on the GearVR touchpad
pointerEvent.position = SwipeAdjustedPosition (originalPosition, pointerEvent);
}
ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.beginDragHandler);
pointerEvent.dragging = true;
}
// Drag notification
if (pointerEvent.dragging && moving && pointerEvent.pointerDrag != null)
{
if (pointerEvent.IsVRPointer())
{
pointerEvent.position = SwipeAdjustedPosition(originalPosition, pointerEvent);
}
// Before doing drag we should cancel any pointer down state
// And clear selection!
if (pointerEvent.pointerPress != pointerEvent.pointerDrag)
{
ExecuteEvents.Execute(pointerEvent.pointerPress, pointerEvent, ExecuteEvents.pointerUpHandler);
pointerEvent.eligibleForClick = false;
pointerEvent.pointerPress = null;
pointerEvent.rawPointerPress = null;
}
ExecuteEvents.Execute(pointerEvent.pointerDrag, pointerEvent, ExecuteEvents.dragHandler);
}
}
/// <summary>
/// Get state of button corresponding to gaze pointer
/// </summary>
/// <returns></returns>
virtual protected PointerEventData.FramePressState GetGazeButtonState()
{
var pressed = Input.GetKeyDown(gazeClickKey) || OVRInput.GetDown(joyPadClickButton);
var released = Input.GetKeyUp(gazeClickKey) || OVRInput.GetUp(joyPadClickButton);
#if UNITY_ANDROID && !UNITY_EDITOR
// On Gear VR the mouse button events correspond to touch pad events. We only use these as gaze pointer clicks
// on Gear VR because on PC the mouse clicks are used for actual mouse pointer interactions.
pressed |= Input.GetMouseButtonDown(0);
released |= Input.GetMouseButtonUp(0);
#endif
if (pressed && released)
return PointerEventData.FramePressState.PressedAndReleased;
if (pressed)
return PointerEventData.FramePressState.Pressed;
if (released)
return PointerEventData.FramePressState.Released;
return PointerEventData.FramePressState.NotChanged;
}
/// <summary>
/// Get extra scroll delta from gamepad
/// </summary>
protected Vector2 GetExtraScrollDelta()
{
Vector2 scrollDelta = new Vector2();
if (useRightStickScroll)
{
Vector2 s = OVRInput.Get(OVRInput.Axis2D.SecondaryThumbstick);
if (Mathf.Abs(s.x) < rightStickDeadZone) s.x = 0;
if (Mathf.Abs(s.y) < rightStickDeadZone) s.y = 0;
scrollDelta = s;
}
return scrollDelta;
}
};
}
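A hedged sketch of wiring this module into a scene, assuming an OVRCameraRig and a Unity EventSystem already exist; the helper component and the choice of PrimaryIndexTrigger as the click button are illustrative, not part of this commit.

using UnityEngine;
using UnityEngine.EventSystems;

// Hypothetical setup helper: configures the scene's EventSystem to use
// OVRInputModule for gaze pointing.
public class GazeInputSetup : MonoBehaviour
{
    void Start()
    {
        OVRCameraRig rig = FindObjectOfType<OVRCameraRig>();
        EventSystem eventSystem = FindObjectOfType<EventSystem>();
        if (rig == null || eventSystem == null)
            return;
        // Add the module if missing and point the ray transform at the
        // center eye so gaze drives the pointer.
        OVRInputModule inputModule = eventSystem.GetComponent<OVRInputModule>();
        if (inputModule == null)
            inputModule = eventSystem.gameObject.AddComponent<OVRInputModule>();
        inputModule.rayTransform = rig.centerEyeAnchor;
        inputModule.joyPadClickButton = OVRInput.Button.PrimaryIndexTrigger;
    }
}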

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 8f1a9a1d119a5944aacfb87d1ec283a2
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,122 @@
using UnityEngine;
using System;
using System.IO;
#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
public class OVRMixedRealityCaptureSettings : ScriptableObject
{
public bool enableMixedReality = false;
public LayerMask extraHiddenLayers;
public OVRManager.CompositionMethod compositionMethod = OVRManager.CompositionMethod.External;
public OVRManager.CameraDevice capturingCameraDevice = OVRManager.CameraDevice.WebCamera0;
public bool flipCameraFrameHorizontally = false;
public bool flipCameraFrameVertically = false;
public float handPoseStateLatency = 0.0f;
public float sandwichCompositionRenderLatency = 0.0f;
public int sandwichCompositionBufferedFrames = 8;
public Color chromaKeyColor = Color.green;
public float chromaKeySimilarity = 0.6f;
public float chromaKeySmoothRange = 0.03f;
public float chromaKeySpillRange = 0.04f;
public bool useDynamicLighting = false;
public OVRManager.DepthQuality depthQuality = OVRManager.DepthQuality.Medium;
public float dynamicLightingSmoothFactor = 8.0f;
public float dynamicLightingDepthVariationClampingValue = 0.001f;
public OVRManager.VirtualGreenScreenType virtualGreenScreenType = OVRManager.VirtualGreenScreenType.Off;
public float virtualGreenScreenTopY;
public float virtualGreenScreenBottomY;
public bool virtualGreenScreenApplyDepthCulling = false;
public float virtualGreenScreenDepthTolerance = 0.2f;
public void ReadFrom(OVRManager manager)
{
enableMixedReality = manager.enableMixedReality;
compositionMethod = manager.compositionMethod;
extraHiddenLayers = manager.extraHiddenLayers;
capturingCameraDevice = manager.capturingCameraDevice;
flipCameraFrameHorizontally = manager.flipCameraFrameHorizontally;
flipCameraFrameVertically = manager.flipCameraFrameVertically;
handPoseStateLatency = manager.handPoseStateLatency;
sandwichCompositionRenderLatency = manager.sandwichCompositionRenderLatency;
sandwichCompositionBufferedFrames = manager.sandwichCompositionBufferedFrames;
chromaKeyColor = manager.chromaKeyColor;
chromaKeySimilarity = manager.chromaKeySimilarity;
chromaKeySmoothRange = manager.chromaKeySmoothRange;
chromaKeySpillRange = manager.chromaKeySpillRange;
useDynamicLighting = manager.useDynamicLighting;
depthQuality = manager.depthQuality;
dynamicLightingSmoothFactor = manager.dynamicLightingSmoothFactor;
dynamicLightingDepthVariationClampingValue = manager.dynamicLightingDepthVariationClampingValue;
virtualGreenScreenType = manager.virtualGreenScreenType;
virtualGreenScreenTopY = manager.virtualGreenScreenTopY;
virtualGreenScreenBottomY = manager.virtualGreenScreenBottomY;
virtualGreenScreenApplyDepthCulling = manager.virtualGreenScreenApplyDepthCulling;
virtualGreenScreenDepthTolerance = manager.virtualGreenScreenDepthTolerance;
}
public void ApplyTo(OVRManager manager)
{
manager.enableMixedReality = enableMixedReality;
manager.compositionMethod = compositionMethod;
manager.extraHiddenLayers = extraHiddenLayers;
manager.capturingCameraDevice = capturingCameraDevice;
manager.flipCameraFrameHorizontally = flipCameraFrameHorizontally;
manager.flipCameraFrameVertically = flipCameraFrameVertically;
manager.handPoseStateLatency = handPoseStateLatency;
manager.sandwichCompositionRenderLatency = sandwichCompositionRenderLatency;
manager.sandwichCompositionBufferedFrames = sandwichCompositionBufferedFrames;
manager.chromaKeyColor = chromaKeyColor;
manager.chromaKeySimilarity = chromaKeySimilarity;
manager.chromaKeySmoothRange = chromaKeySmoothRange;
manager.chromaKeySpillRange = chromaKeySpillRange;
manager.useDynamicLighting = useDynamicLighting;
manager.depthQuality = depthQuality;
manager.dynamicLightingSmoothFactor = dynamicLightingSmoothFactor;
manager.dynamicLightingDepthVariationClampingValue = dynamicLightingDepthVariationClampingValue;
manager.virtualGreenScreenType = virtualGreenScreenType;
manager.virtualGreenScreenTopY = virtualGreenScreenTopY;
manager.virtualGreenScreenBottomY = virtualGreenScreenBottomY;
manager.virtualGreenScreenApplyDepthCulling = virtualGreenScreenApplyDepthCulling;
manager.virtualGreenScreenDepthTolerance = virtualGreenScreenDepthTolerance;
}
const string configFileName = "mrc.config";
public void WriteToConfigurationFile()
{
string text = JsonUtility.ToJson(this, true);
try
{
string configPath = Path.Combine(Application.dataPath, configFileName);
Debug.Log("Write OVRMixedRealityCaptureSettings to " + configPath);
File.WriteAllText(configPath, text);
}
catch(Exception e)
{
Debug.LogWarning("Exception caught " + e.Message);
}
}
public void CombineWithConfigurationFile()
{
try
{
string configPath = Path.Combine(Application.dataPath, configFileName);
if (File.Exists(configPath))
{
Debug.Log("MixedRealityCapture configuration file found at " + configPath);
string text = File.ReadAllText(configPath);
Debug.Log("Apply MixedRealityCapture configuration");
JsonUtility.FromJsonOverwrite(text, this);
}
else
{
Debug.Log("MixedRealityCapture configuration file doesn't exist at " + configPath);
}
}
catch(Exception e)
{
Debug.LogWarning("Exception caught " + e.Message);
}
}
}
#endif
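A round-trip sketch for the settings class above, assuming an active OVRManager in the scene; the static helper is illustrative.

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN
using UnityEngine;

// Hypothetical round-trip of OVRMixedRealityCaptureSettings.
public static class MrcSettingsExample
{
    public static void SyncWithConfigFile()
    {
        OVRMixedRealityCaptureSettings settings =
            ScriptableObject.CreateInstance<OVRMixedRealityCaptureSettings>();
        // Snapshot the live OVRManager state into the settings object,
        settings.ReadFrom(OVRManager.instance);
        // write it out as mrc.config next to Application.dataPath,
        settings.WriteToConfigurationFile();
        // then overlay any values edited on disk and push them back.
        settings.CombineWithConfigurationFile();
        settings.ApplyTo(OVRManager.instance);
    }
}
#endif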

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 99bbd170d56da4248941de890e6d7af5
timeCreated: 1501004238
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,86 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Logs when the application enters power save mode and allows you to switch to a low-power CPU/GPU level with a button press.
/// </summary>
public class OVRModeParms : MonoBehaviour
{
#region Member Variables
/// <summary>
/// The gamepad button that will switch the application to CPU level 0 and GPU level 1.
/// </summary>
public OVRInput.RawButton resetButton = OVRInput.RawButton.X;
#endregion
/// <summary>
/// Invoke power state mode test.
/// </summary>
void Start()
{
if (!OVRManager.isHmdPresent)
{
enabled = false;
return;
}
// Call TestPowerStateMode after 10 seconds,
// then repeat every 10 seconds.
InvokeRepeating("TestPowerStateMode", 10.0f, 10.0f);
}
/// <summary>
/// Change default vr mode parms dynamically.
/// </summary>
void Update()
{
// NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
if ( OVRInput.GetDown(resetButton))
{
//*************************
// Dynamically change VrModeParms cpu and gpu level.
// NOTE: Reset will cause 1 frame of flicker as it leaves
// and re-enters Vr mode.
//*************************
OVRPlugin.cpuLevel = 0;
OVRPlugin.gpuLevel = 1;
}
}
/// <summary>
/// Check current power state mode.
/// </summary>
void TestPowerStateMode()
{
//*************************
// Check power-level state mode
//*************************
if (OVRPlugin.powerSaving)
{
// The device has been throttled
Debug.Log("POWER SAVE MODE ACTIVATED");
}
}
}
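For completeness, a hedged sketch of restoring higher clock levels after a low-power switch like the one above; the level values are illustrative, not prescribed by this commit.

// Hypothetical helper: undo the low-power switch made by OVRModeParms.
// The specific level values are illustrative; consult the Oculus docs for
// the supported range on the target device.
public static class ClockLevelHelper
{
    public static void RestoreDefaultLevels()
    {
        OVRPlugin.cpuLevel = 2;
        OVRPlugin.gpuLevel = 2;
    }
}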

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 6a6ae8e8def81df429a8fdfc00f63e5c
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,53 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Allows you to toggle monoscopic rendering with a gamepad button press.
/// </summary>
public class OVRMonoscopic : MonoBehaviour
{
/// <summary>
/// The gamepad button that will toggle monoscopic rendering.
/// </summary>
public OVRInput.RawButton toggleButton = OVRInput.RawButton.B;
private bool monoscopic = false;
/// <summary>
/// Check input and toggle monoscopic rendering mode if necessary
/// See the input mapping setup in the Unity Integration guide
/// </summary>
void Update()
{
// NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
if (OVRInput.GetDown(toggleButton))
{
//*************************
// toggle monoscopic rendering mode
//*************************
monoscopic = !monoscopic;
OVRManager.instance.monoscopic = monoscopic;
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 06ef2a389c534554c848533f88dbb32c
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,185 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System.Collections.Generic;
namespace UnityEngine.EventSystems
{
/// <summary>
/// Simple event system using physics raycasts. Very closely based on UnityEngine.EventSystems.PhysicsRaycaster
/// </summary>
[RequireComponent(typeof(OVRCameraRig))]
public class OVRPhysicsRaycaster : BaseRaycaster
{
/// <summary>
/// Const to use for clarity when no event mask is set
/// </summary>
protected const int kNoEventMaskSet = -1;
/// <summary>
/// Layer mask used to filter events. Always combined with the camera's culling mask if a camera is used.
/// </summary>
[SerializeField]
protected LayerMask m_EventMask = kNoEventMaskSet;
protected OVRPhysicsRaycaster()
{ }
public override Camera eventCamera
{
get
{
return GetComponent<OVRCameraRig>().leftEyeCamera;
}
}
/// <summary>
/// Depth used to determine the order of event processing.
/// </summary>
public virtual int depth
{
get { return (eventCamera != null) ? (int)eventCamera.depth : 0xFFFFFF; }
}
public int sortOrder = 20;
public override int sortOrderPriority
{
get
{
return sortOrder;
}
}
/// <summary>
/// Event mask used to determine which objects will receive events.
/// </summary>
public int finalEventMask
{
get { return (eventCamera != null) ? eventCamera.cullingMask & m_EventMask : kNoEventMaskSet; }
}
/// <summary>
/// Layer mask used to filter events. Always combined with the camera's culling mask if a camera is used.
/// </summary>
public LayerMask eventMask
{
get { return m_EventMask; }
set { m_EventMask = value; }
}
/// <summary>
/// Perform a raycast using the worldSpaceRay in eventData.
/// </summary>
/// <param name="eventData"></param>
/// <param name="resultAppendList"></param>
public override void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
{
// This function is closely based on PhysicsRaycaster.Raycast
if (eventCamera == null)
return;
if (!eventData.IsVRPointer())
return;
var ray = eventData.GetRay();
float dist = eventCamera.farClipPlane - eventCamera.nearClipPlane;
var hits = Physics.RaycastAll(ray, dist, finalEventMask);
if (hits.Length > 1)
System.Array.Sort(hits, (r1, r2) => r1.distance.CompareTo(r2.distance));
if (hits.Length != 0)
{
for (int b = 0, bmax = hits.Length; b < bmax; ++b)
{
var result = new RaycastResult
{
gameObject = hits[b].collider.gameObject,
module = this,
distance = hits[b].distance,
index = resultAppendList.Count,
// Use the per-hit point and normal (hits[0] here would pin every result to the first hit).
worldPosition = hits[b].point,
worldNormal = hits[b].normal,
};
resultAppendList.Add(result);
}
}
}
/// <summary>
/// Perform a Spherecast using the worldSpaceRay in eventData.
/// </summary>
/// <param name="eventData"></param>
/// <param name="resultAppendList"></param>
/// <param name="radius">Radius of the sphere</param>
public void Spherecast(PointerEventData eventData, List<RaycastResult> resultAppendList, float radius)
{
if (eventCamera == null)
return;
if (!eventData.IsVRPointer())
return;
var ray = eventData.GetRay();
float dist = eventCamera.farClipPlane - eventCamera.nearClipPlane;
var hits = Physics.SphereCastAll(ray, radius, dist, finalEventMask);
if (hits.Length > 1)
System.Array.Sort(hits, (r1, r2) => r1.distance.CompareTo(r2.distance));
if (hits.Length != 0)
{
for (int b = 0, bmax = hits.Length; b < bmax; ++b)
{
var result = new RaycastResult
{
gameObject = hits[b].collider.gameObject,
module = this,
distance = hits[b].distance,
index = resultAppendList.Count,
// Use the per-hit point and normal, matching the Raycast path above.
worldPosition = hits[b].point,
worldNormal = hits[b].normal,
};
resultAppendList.Add(result);
}
}
}
/// <summary>
/// Get screen position of this world position as seen by the event camera of this OVRPhysicsRaycaster
/// </summary>
/// <param name="worldPosition"></param>
/// <returns></returns>
public Vector2 GetScreenPos(Vector3 worldPosition)
{
// In future versions of Unity, RaycastResult will contain screenPosition, so this will not be necessary
return eventCamera.WorldToScreenPoint(worldPosition);
}
}
}
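A minimal setup sketch for the raycaster above, assuming the scene contains an OVRCameraRig and an "Interactable" physics layer; the component and layer names are illustrative.

using UnityEngine;
using UnityEngine.EventSystems;

// Hypothetical wiring: add OVRPhysicsRaycaster to the OVRCameraRig so the
// gaze pointer from OVRInputModule can hit physics colliders.
public class PhysicsGazeSetup : MonoBehaviour
{
    void Start()
    {
        OVRCameraRig rig = FindObjectOfType<OVRCameraRig>();
        if (rig == null)
            return;
        OVRPhysicsRaycaster raycaster = rig.GetComponent<OVRPhysicsRaycaster>();
        if (raycaster == null)
            raycaster = rig.gameObject.AddComponent<OVRPhysicsRaycaster>();
        // Restrict hits to an "Interactable" layer (illustrative); the default
        // mask of -1 would accept all layers.
        raycaster.eventMask = LayerMask.GetMask("Interactable");
    }
}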

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: f8e7ff1cdf4c4e74db00c3684108bc9a
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,593 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using UnityEngine;
/// <summary>
/// Controls the player's movement in virtual reality.
/// </summary>
[RequireComponent(typeof(CharacterController))]
public class OVRPlayerController : MonoBehaviour
{
/// <summary>
/// The rate acceleration during movement.
/// </summary>
public float Acceleration = 0.1f;
/// <summary>
/// The rate of damping on movement.
/// </summary>
public float Damping = 0.3f;
/// <summary>
/// The rate of additional damping when moving sideways or backwards.
/// </summary>
public float BackAndSideDampen = 0.5f;
/// <summary>
/// The force applied to the character when jumping.
/// </summary>
public float JumpForce = 0.3f;
/// <summary>
/// The rate of rotation when using a gamepad.
/// </summary>
public float RotationAmount = 1.5f;
/// <summary>
/// The rate of rotation when using the keyboard.
/// </summary>
public float RotationRatchet = 45.0f;
/// <summary>
/// The player will rotate in fixed steps if Snap Rotation is enabled.
/// </summary>
[Tooltip("The player will rotate in fixed steps if Snap Rotation is enabled.")]
public bool SnapRotation = true;
/// <summary>
/// The number of fixed speed steps to use for linear movement; 0 = continuous (analog) control.
/// </summary>
[Tooltip("The number of fixed speed steps to use for linear movement; 0 = continuous (analog) control")]
public int FixedSpeedSteps;
/// <summary>
/// If true, reset the initial yaw of the player controller when the Hmd pose is recentered.
/// </summary>
public bool HmdResetsY = true;
/// <summary>
/// If true, tracking data from a child OVRCameraRig will update the direction of movement.
/// </summary>
public bool HmdRotatesY = true;
/// <summary>
/// Modifies the strength of gravity.
/// </summary>
public float GravityModifier = 0.379f;
/// <summary>
/// If true, each OVRPlayerController will use the player's physical height.
/// </summary>
public bool useProfileData = true;
/// <summary>
/// The CameraHeight is the actual height of the HMD and can be used to adjust the height of the character controller, which will affect the
/// ability of the character to move into areas with a low ceiling.
/// </summary>
[NonSerialized]
public float CameraHeight;
/// <summary>
/// This event is raised after the character controller is moved. This is used by the OVRAvatarLocomotion script to keep the avatar transform synchronized
/// with the OVRPlayerController.
/// </summary>
public event Action<Transform> TransformUpdated;
/// <summary>
/// This bool is set to true whenever the player controller has been teleported. It is reset after every frame. Some systems, such as
/// CharacterCameraConstraint, test this boolean in order to disable logic that moves the character controller immediately
/// following the teleport.
/// </summary>
[NonSerialized] // This doesn't need to be visible in the inspector.
public bool Teleported;
/// <summary>
/// This event is raised immediately after the camera transform has been updated, but before movement is updated.
/// </summary>
public event Action CameraUpdated;
/// <summary>
/// This event is raised right before the character controller is actually moved in order to provide other systems the opportunity to
/// move the character controller in response to things other than user input, such as movement of the HMD. See CharacterCameraConstraint.cs
/// for an example of this.
/// </summary>
public event Action PreCharacterMove;
/// <summary>
/// When true, user input will be applied to linear movement. Set this to false whenever the player controller needs to ignore input for
/// linear movement.
/// </summary>
public bool EnableLinearMovement = true;
/// <summary>
/// When true, user input will be applied to rotation. Set this to false whenever the player controller needs to ignore input for rotation.
/// </summary>
public bool EnableRotation = true;
protected CharacterController Controller = null;
protected OVRCameraRig CameraRig = null;
private float MoveScale = 1.0f;
private Vector3 MoveThrottle = Vector3.zero;
private float FallSpeed = 0.0f;
private OVRPose? InitialPose;
public float InitialYRotation { get; private set; }
private float MoveScaleMultiplier = 1.0f;
private float RotationScaleMultiplier = 1.0f;
private bool SkipMouseRotation = true; // It is rare to want to use mouse movement in VR, so ignore the mouse by default.
private bool HaltUpdateMovement = false;
private bool prevHatLeft = false;
private bool prevHatRight = false;
private float SimulationRate = 60f;
private float buttonRotation = 0f;
private bool ReadyToSnapTurn; // Set to true when a snap turn has occurred; one frame of centered thumbstick is required before another snap turn.
void Start()
{
// Add eye-depth as a camera offset from the player controller
var p = CameraRig.transform.localPosition;
p.z = OVRManager.profile.eyeDepth;
CameraRig.transform.localPosition = p;
}
void Awake()
{
Controller = gameObject.GetComponent<CharacterController>();
if(Controller == null)
Debug.LogWarning("OVRPlayerController: No CharacterController attached.");
// We use OVRCameraRig to set rotations to cameras,
// and to be influenced by rotation
OVRCameraRig[] CameraRigs = gameObject.GetComponentsInChildren<OVRCameraRig>();
if(CameraRigs.Length == 0)
Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
else if (CameraRigs.Length > 1)
Debug.LogWarning("OVRPlayerController: More then 1 OVRCameraRig attached.");
else
CameraRig = CameraRigs[0];
InitialYRotation = transform.rotation.eulerAngles.y;
}
void OnEnable()
{
OVRManager.display.RecenteredPose += ResetOrientation;
if (CameraRig != null)
{
CameraRig.UpdatedAnchors += UpdateTransform;
}
}
void OnDisable()
{
OVRManager.display.RecenteredPose -= ResetOrientation;
if (CameraRig != null)
{
CameraRig.UpdatedAnchors -= UpdateTransform;
}
}
void Update()
{
//Use keys to ratchet rotation
if (Input.GetKeyDown(KeyCode.Q))
buttonRotation -= RotationRatchet;
if (Input.GetKeyDown(KeyCode.E))
buttonRotation += RotationRatchet;
}
protected virtual void UpdateController()
{
if (useProfileData)
{
if (InitialPose == null)
{
// Save the initial pose so it can be recovered if useProfileData
// is turned off later.
InitialPose = new OVRPose()
{
position = CameraRig.transform.localPosition,
orientation = CameraRig.transform.localRotation
};
}
var p = CameraRig.transform.localPosition;
if (OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel)
{
p.y = OVRManager.profile.eyeHeight - (0.5f * Controller.height) + Controller.center.y;
}
else if (OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.FloorLevel)
{
p.y = - (0.5f * Controller.height) + Controller.center.y;
}
CameraRig.transform.localPosition = p;
}
else if (InitialPose != null)
{
// Return to the initial pose if useProfileData was turned off at runtime
CameraRig.transform.localPosition = InitialPose.Value.position;
CameraRig.transform.localRotation = InitialPose.Value.orientation;
InitialPose = null;
}
CameraHeight = CameraRig.centerEyeAnchor.localPosition.y;
if (CameraUpdated != null)
{
CameraUpdated();
}
UpdateMovement();
Vector3 moveDirection = Vector3.zero;
float motorDamp = (1.0f + (Damping * SimulationRate * Time.deltaTime));
MoveThrottle.x /= motorDamp;
MoveThrottle.y = (MoveThrottle.y > 0.0f) ? (MoveThrottle.y / motorDamp) : MoveThrottle.y;
MoveThrottle.z /= motorDamp;
moveDirection += MoveThrottle * SimulationRate * Time.deltaTime;
// Gravity
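// While grounded, FallSpeed is pinned to a small constant downward value to keep
// the controller in contact with the ground; while airborne, gravity (scaled by
// GravityModifier and the SDK's 0.002f tuning constant) is integrated each frame.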
if (Controller.isGrounded && FallSpeed <= 0)
FallSpeed = ((Physics.gravity.y * (GravityModifier * 0.002f)));
else
FallSpeed += ((Physics.gravity.y * (GravityModifier * 0.002f)) * SimulationRate * Time.deltaTime);
moveDirection.y += FallSpeed * SimulationRate * Time.deltaTime;
if (Controller.isGrounded && MoveThrottle.y <= transform.lossyScale.y * 0.001f)
{
// Offset correction for uneven ground
float bumpUpOffset = Mathf.Max(Controller.stepOffset, new Vector3(moveDirection.x, 0, moveDirection.z).magnitude);
moveDirection -= bumpUpOffset * Vector3.up;
}
if (PreCharacterMove != null)
{
PreCharacterMove();
Teleported = false;
}
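// Predict where the controller should end up on the horizontal plane, move it,
// then compare against where it actually ended up; any blocked displacement is
// fed back into MoveThrottle so the player doesn't keep accelerating into obstacles.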
Vector3 predictedXZ = Vector3.Scale((Controller.transform.localPosition + moveDirection), new Vector3(1, 0, 1));
// Move the character controller
Controller.Move(moveDirection);
Vector3 actualXZ = Vector3.Scale(Controller.transform.localPosition, new Vector3(1, 0, 1));
if (predictedXZ != actualXZ)
MoveThrottle += (actualXZ - predictedXZ) / (SimulationRate * Time.deltaTime);
}
public virtual void UpdateMovement()
{
if (HaltUpdateMovement)
return;
if (EnableLinearMovement)
{
bool moveForward = Input.GetKey(KeyCode.W) || Input.GetKey(KeyCode.UpArrow);
bool moveLeft = Input.GetKey(KeyCode.A) || Input.GetKey(KeyCode.LeftArrow);
bool moveRight = Input.GetKey(KeyCode.D) || Input.GetKey(KeyCode.RightArrow);
bool moveBack = Input.GetKey(KeyCode.S) || Input.GetKey(KeyCode.DownArrow);
bool dpad_move = false;
if (OVRInput.Get(OVRInput.Button.DpadUp))
{
moveForward = true;
dpad_move = true;
}
if (OVRInput.Get(OVRInput.Button.DpadDown))
{
moveBack = true;
dpad_move = true;
}
MoveScale = 1.0f;
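// Diagonal input engages two axes at once; scale by 1/sqrt(2) (~0.70710678) so
// diagonal movement speed matches straight-line movement speed.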
if ((moveForward && moveLeft) || (moveForward && moveRight) ||
(moveBack && moveLeft) || (moveBack && moveRight))
MoveScale = 0.70710678f;
// No positional movement if we are in the air
if (!Controller.isGrounded)
MoveScale = 0.0f;
MoveScale *= SimulationRate * Time.deltaTime;
// Compute this for key movement
float moveInfluence = Acceleration * 0.1f * MoveScale * MoveScaleMultiplier;
// Run!
if (dpad_move || Input.GetKey(KeyCode.LeftShift) || Input.GetKey(KeyCode.RightShift))
moveInfluence *= 2.0f;
Quaternion ort = transform.rotation;
Vector3 ortEuler = ort.eulerAngles;
ortEuler.z = ortEuler.x = 0f;
ort = Quaternion.Euler(ortEuler);
if (moveForward)
MoveThrottle += ort * (transform.lossyScale.z * moveInfluence * Vector3.forward);
if (moveBack)
MoveThrottle += ort * (transform.lossyScale.z * moveInfluence * BackAndSideDampen * Vector3.back);
if (moveLeft)
MoveThrottle += ort * (transform.lossyScale.x * moveInfluence * BackAndSideDampen * Vector3.left);
if (moveRight)
MoveThrottle += ort * (transform.lossyScale.x * moveInfluence * BackAndSideDampen * Vector3.right);
moveInfluence = Acceleration * 0.1f * MoveScale * MoveScaleMultiplier;
#if !UNITY_ANDROID // LeftTrigger is not available on the Android gamepad
moveInfluence *= 1.0f + OVRInput.Get(OVRInput.Axis1D.PrimaryIndexTrigger);
#endif
Vector2 primaryAxis = OVRInput.Get(OVRInput.Axis2D.PrimaryThumbstick);
// If speed quantization is enabled, adjust the input to the number of fixed speed steps.
if (FixedSpeedSteps > 0)
{
primaryAxis.y = Mathf.Round(primaryAxis.y * FixedSpeedSteps) / FixedSpeedSteps;
primaryAxis.x = Mathf.Round(primaryAxis.x * FixedSpeedSteps) / FixedSpeedSteps;
}
if (primaryAxis.y > 0.0f)
MoveThrottle += ort * (primaryAxis.y * transform.lossyScale.z * moveInfluence * Vector3.forward);
if (primaryAxis.y < 0.0f)
MoveThrottle += ort * (Mathf.Abs(primaryAxis.y) * transform.lossyScale.z * moveInfluence *
BackAndSideDampen * Vector3.back);
if (primaryAxis.x < 0.0f)
MoveThrottle += ort * (Mathf.Abs(primaryAxis.x) * transform.lossyScale.x * moveInfluence *
BackAndSideDampen * Vector3.left);
if (primaryAxis.x > 0.0f)
MoveThrottle += ort * (primaryAxis.x * transform.lossyScale.x * moveInfluence * BackAndSideDampen *
Vector3.right);
}
if (EnableRotation)
{
Vector3 euler = transform.rotation.eulerAngles;
float rotateInfluence = SimulationRate * Time.deltaTime * RotationAmount * RotationScaleMultiplier;
bool curHatLeft = OVRInput.Get(OVRInput.Button.PrimaryShoulder);
if (curHatLeft && !prevHatLeft)
euler.y -= RotationRatchet;
prevHatLeft = curHatLeft;
bool curHatRight = OVRInput.Get(OVRInput.Button.SecondaryShoulder);
if (curHatRight && !prevHatRight)
euler.y += RotationRatchet;
prevHatRight = curHatRight;
euler.y += buttonRotation;
buttonRotation = 0f;
#if !UNITY_ANDROID || UNITY_EDITOR
if (!SkipMouseRotation)
euler.y += Input.GetAxis("Mouse X") * rotateInfluence * 3.25f;
#endif
if (SnapRotation)
{
if (OVRInput.Get(OVRInput.Button.SecondaryThumbstickLeft))
{
if (ReadyToSnapTurn)
{
euler.y -= RotationRatchet;
ReadyToSnapTurn = false;
}
}
else if (OVRInput.Get(OVRInput.Button.SecondaryThumbstickRight))
{
if (ReadyToSnapTurn)
{
euler.y += RotationRatchet;
ReadyToSnapTurn = false;
}
}
else
{
ReadyToSnapTurn = true;
}
}
else
{
Vector2 secondaryAxis = OVRInput.Get(OVRInput.Axis2D.SecondaryThumbstick);
euler.y += secondaryAxis.x * rotateInfluence;
}
transform.rotation = Quaternion.Euler(euler);
}
}
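// Illustrative sketch (hypothetical helper, not in the original file): the
// FixedSpeedSteps logic above quantizes a thumbstick axis into evenly spaced
// speeds; with 2 steps, an input of 0.6 rounds to 0.5 and 0.9 rounds to 1.0.
private static float QuantizeAxis(float axis, int fixedSpeedSteps)
{
if (fixedSpeedSteps <= 0)
return axis; // quantization disabled
return Mathf.Round(axis * fixedSpeedSteps) / fixedSpeedSteps;
}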
/// <summary>
/// Invoked by OVRCameraRig's UpdatedAnchors callback. Allows the HMD rotation to update the facing direction of the player.
/// </summary>
public void UpdateTransform(OVRCameraRig rig)
{
Transform root = CameraRig.trackingSpace;
Transform centerEye = CameraRig.centerEyeAnchor;
if (HmdRotatesY && !Teleported)
{
Vector3 prevPos = root.position;
Quaternion prevRot = root.rotation;
transform.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y, 0.0f);
root.position = prevPos;
root.rotation = prevRot;
}
UpdateController();
if (TransformUpdated != null)
{
TransformUpdated(root);
}
}
/// <summary>
/// Jump! Must be invoked manually (e.g. from an input handler); only succeeds while the controller is grounded.
/// </summary>
public bool Jump()
{
if (!Controller.isGrounded)
return false;
MoveThrottle += new Vector3(0, transform.lossyScale.y * JumpForce, 0);
return true;
}
/// <summary>
/// Stop this instance.
/// </summary>
public void Stop()
{
Controller.Move(Vector3.zero);
MoveThrottle = Vector3.zero;
FallSpeed = 0.0f;
}
/// <summary>
/// Gets the move scale multiplier.
/// </summary>
/// <param name="moveScaleMultiplier">Move scale multiplier.</param>
public void GetMoveScaleMultiplier(ref float moveScaleMultiplier)
{
moveScaleMultiplier = MoveScaleMultiplier;
}
/// <summary>
/// Sets the move scale multiplier.
/// </summary>
/// <param name="moveScaleMultiplier">Move scale multiplier.</param>
public void SetMoveScaleMultiplier(float moveScaleMultiplier)
{
MoveScaleMultiplier = moveScaleMultiplier;
}
/// <summary>
/// Gets the rotation scale multiplier.
/// </summary>
/// <param name="rotationScaleMultiplier">Rotation scale multiplier.</param>
public void GetRotationScaleMultiplier(ref float rotationScaleMultiplier)
{
rotationScaleMultiplier = RotationScaleMultiplier;
}
/// <summary>
/// Sets the rotation scale multiplier.
/// </summary>
/// <param name="rotationScaleMultiplier">Rotation scale multiplier.</param>
public void SetRotationScaleMultiplier(float rotationScaleMultiplier)
{
RotationScaleMultiplier = rotationScaleMultiplier;
}
/// <summary>
/// Gets whether mouse rotation is skipped.
/// </summary>
/// <param name="skipMouseRotation">Receives the current skip-mouse-rotation setting.</param>
public void GetSkipMouseRotation(ref bool skipMouseRotation)
{
skipMouseRotation = SkipMouseRotation;
}
/// <summary>
/// Sets whether mouse rotation is skipped.
/// </summary>
/// <param name="skipMouseRotation">If set to <c>true</c>, mouse rotation is skipped.</param>
public void SetSkipMouseRotation(bool skipMouseRotation)
{
SkipMouseRotation = skipMouseRotation;
}
/// <summary>
/// Gets whether movement updates are halted.
/// </summary>
/// <param name="haltUpdateMovement">Receives the current halt-update-movement setting.</param>
public void GetHaltUpdateMovement(ref bool haltUpdateMovement)
{
haltUpdateMovement = HaltUpdateMovement;
}
/// <summary>
/// Sets whether movement updates are halted.
/// </summary>
/// <param name="haltUpdateMovement">If set to <c>true</c>, movement updates are halted.</param>
public void SetHaltUpdateMovement(bool haltUpdateMovement)
{
HaltUpdateMovement = haltUpdateMovement;
}
/// <summary>
/// Resets the player look rotation when the device orientation is reset.
/// </summary>
public void ResetOrientation()
{
if (HmdResetsY && !HmdRotatesY)
{
Vector3 euler = transform.rotation.eulerAngles;
euler.y = InitialYRotation;
transform.rotation = Quaternion.Euler(euler);
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 0950df82e7936c84983497630bde5b54
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,99 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using System.Text;
using UnityEngine;
using UnityEngine.Assertions;
namespace UnityEngine.EventSystems
{
/// <summary>
/// Extension of Unity's PointerEventData to support ray-based pointing and touchpad swiping.
/// </summary>
public class OVRPointerEventData : PointerEventData
{
public OVRPointerEventData(EventSystem eventSystem)
: base(eventSystem)
{
}
public Ray worldSpaceRay;
public Vector2 swipeStart;
public override string ToString()
{
var sb = new StringBuilder();
sb.AppendLine("<b>Position</b>: " + position);
sb.AppendLine("<b>delta</b>: " + delta);
sb.AppendLine("<b>eligibleForClick</b>: " + eligibleForClick);
sb.AppendLine("<b>pointerEnter</b>: " + pointerEnter);
sb.AppendLine("<b>pointerPress</b>: " + pointerPress);
sb.AppendLine("<b>lastPointerPress</b>: " + lastPress);
sb.AppendLine("<b>pointerDrag</b>: " + pointerDrag);
sb.AppendLine("<b>worldSpaceRay</b>: " + worldSpaceRay);
sb.AppendLine("<b>swipeStart</b>: " + swipeStart);
sb.AppendLine("<b>Use Drag Threshold</b>: " + useDragThreshold);
return sb.ToString();
}
}
/// <summary>
/// Static helpers for OVRPointerEventData.
/// </summary>
public static class PointerEventDataExtension
{
public static bool IsVRPointer(this PointerEventData pointerEventData)
{
return (pointerEventData is OVRPointerEventData);
}
public static Ray GetRay(this PointerEventData pointerEventData)
{
OVRPointerEventData vrPointerEventData = pointerEventData as OVRPointerEventData;
Assert.IsNotNull(vrPointerEventData);
return vrPointerEventData.worldSpaceRay;
}
public static Vector2 GetSwipeStart(this PointerEventData pointerEventData)
{
OVRPointerEventData vrPointerEventData = pointerEventData as OVRPointerEventData;
Assert.IsNotNull(vrPointerEventData);
return vrPointerEventData.swipeStart;
}
public static void SetSwipeStart(this PointerEventData pointerEventData, Vector2 start)
{
OVRPointerEventData vrPointerEventData = pointerEventData as OVRPointerEventData;
Assert.IsNotNull(vrPointerEventData);
vrPointerEventData.swipeStart = start;
}
}
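/// <summary>
/// Hedged usage sketch (hypothetical class, not in the original file): shows how
/// shared input code can branch on VR pointers via the extensions above.
/// </summary>
internal static class PointerEventDataExtensionExample
{
public static void DrawPointerRay(PointerEventData eventData)
{
if (eventData.IsVRPointer())
{
// Only OVRPointerEventData instances carry a world-space ray.
Ray ray = eventData.GetRay();
Debug.DrawRay(ray.origin, ray.direction, Color.green);
}
}
}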
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 646c937ce12610744adc2b5e487f77ac
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,163 @@
#if UNITY_EDITOR
using UnityEngine;
using UnityEditor;
using System.Collections.Generic;
using Assets.OVR.Scripts;
public class OVRProfiler : EditorWindow
{
enum TargetPlatform
{
OculusGo,
GearVR,
SantaCruz,
OculusRift
};
private static List<RangedRecord> mRecords = new List<RangedRecord>();
private Vector2 mScrollPosition;
static private TargetPlatform mTargetPlatform;
[MenuItem("Tools/Oculus/OVR Profiler")]
static void Init()
{
// Get existing open window or if none, make a new one:
EditorWindow.GetWindow(typeof(OVRProfiler));
#if UNITY_ANDROID
mTargetPlatform = TargetPlatform.OculusGo;
#else
mTargetPlatform = TargetPlatform.OculusRift;
#endif
}
void OnGUI()
{
GUILayout.Label("OVR Profiler", EditorStyles.boldLabel);
string[] options = new string[]
{
"Oculus Go", "Gear VR", "Santa Cruz", "Oculus Rift",
};
mTargetPlatform = (TargetPlatform)EditorGUILayout.Popup("Target Oculus Platform", (int)mTargetPlatform, options);
if (EditorApplication.isPlaying)
{
UpdateRecords();
DrawResults();
}
else
{
ShowCenterAlignedMessageLabel("Click Run in Unity to view stats.");
}
}
void OnInspectorUpdate()
{
Repaint();
}
void DrawResults()
{
string lastCategory = "";
mScrollPosition = EditorGUILayout.BeginScrollView(mScrollPosition);
foreach (RangedRecord record in mRecords)
{
// Add separator and label for new category
if (!record.category.Equals(lastCategory))
{
lastCategory = record.category;
EditorGUILayout.Separator();
EditorGUILayout.BeginHorizontal();
GUILayout.Label(lastCategory, EditorStyles.label, GUILayout.Width(200));
EditorGUILayout.EndHorizontal();
}
// Draw records
EditorGUILayout.BeginHorizontal();
Rect r = EditorGUILayout.BeginVertical();
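// Scale by twice the recommended max so the limit sits at the middle of the bar.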
EditorGUI.ProgressBar(r, record.value / (record.max * 2), record.category + " " + record.value.ToString());
GUILayout.Space(16);
EditorGUILayout.EndVertical();
EditorGUILayout.EndHorizontal();
EditorGUILayout.BeginHorizontal();
GUILayout.Label(record.message);
EditorGUILayout.EndHorizontal();
GUI.enabled = true;
}
EditorGUILayout.EndScrollView();
}
private void UpdateRecords()
{
mRecords.Clear();
if (mTargetPlatform == TargetPlatform.OculusRift)
{
AddRecord("Client Frame CPU Time (ms)", "", UnityStats.frameTime * 1000, 0, 11);
AddRecord("Render Frame CPU Time (ms)", "", UnityStats.renderTime * 1000, 0, 11);
}
else
{
// Graphics memory
long memSizeByte = UnityStats.usedTextureMemorySize + UnityStats.vboTotalBytes;
AddRecord("Graphics Memory (MB)", "Please use less than 1024 MB of vertex and texture memory.", ConvertBytes(memSizeByte, "MB"), 0, 1024);
}
float triVertRec = mTargetPlatform == TargetPlatform.OculusRift ? 1000000 : 100000;
// Triangle count
AddRecord("Triangles", "Please use less than " + triVertRec + " triangles.", UnityStats.triangles, 0, triVertRec);
// Vertex count
AddRecord("Vertices", "Please use less than " + triVertRec + " vertices.", UnityStats.vertices, 0, triVertRec);
float dcRec = mTargetPlatform == TargetPlatform.OculusRift ? 1000 : 100;
// Draw call count
AddRecord("Draw Call", "Please use less than " + dcRec + " draw calls.", UnityStats.drawCalls, 0, dcRec);
}
private string FormatBytes(long bytes, string target)
{
return System.String.Format("{0:0.##} {1}", ConvertBytes(bytes, target), target);
}
private float ConvertBytes(long bytes, string target)
{
string[] Suffix = { "B", "KB", "MB", "GB", "TB" };
int i;
double dblSByte = bytes;
for (i = 0; i < Suffix.Length; i++, bytes /= 1024)
{
if (Suffix[i] == target)
return (float)dblSByte;
dblSByte = bytes / 1024.0;
}
return 0;
}
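// Worked example (illustrative only): ConvertBytes(3145728, "MB") divides by
// 1024 while walking the suffix table and returns 3.0f after two steps; an
// unrecognized suffix falls through the loop and returns 0.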
private void ShowCenterAlignedMessageLabel(string message)
{
GUILayout.BeginVertical();
GUILayout.FlexibleSpace();
GUILayout.BeginHorizontal();
GUILayout.FlexibleSpace();
GUILayout.Label(message, EditorStyles.boldLabel);
GUILayout.FlexibleSpace();
GUILayout.EndHorizontal();
GUILayout.FlexibleSpace();
GUILayout.EndVertical();
}
private void AddRecord(string category, string message, float value, float min, float max)
{
RangedRecord record = new RangedRecord(category, message, value, min, max);
mRecords.Add(record);
}
}
#endif

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 3303d4232ee59ac40a9fdc223870fbbc
timeCreated: 1520636357
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,49 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using UnityEngine.UI;
using System.Collections;
/// <summary>
/// Visualizes progress for operations such as loading.
/// </summary>
public class OVRProgressIndicator : MonoBehaviour
{
public MeshRenderer progressImage;
[Range(0, 1)]
public float currentProgress = 0.7f;
void Awake()
{
progressImage.sortingOrder = 150;
}
// Drive the shader's alpha cutoff each frame so the image reveals as progress increases.
// Note that sharedMaterial modifies the underlying material asset for all of its users.
void Update()
{
progressImage.sharedMaterial.SetFloat("_AlphaCutoff", 1 - currentProgress);
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: f14ece5575e2b1e4d80619901d65b428
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,320 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.EventSystems;
using UnityEngine.Serialization;
/// <summary>
/// Extension of GraphicRaycaster to support ray casting with world space rays instead of just screen-space
/// pointer positions
/// </summary>
[RequireComponent(typeof(Canvas))]
public class OVRRaycaster : GraphicRaycaster, IPointerEnterHandler
{
[Tooltip("A world space pointer for this canvas")]
public GameObject pointer;
public int sortOrder = 0;
protected OVRRaycaster()
{ }
[NonSerialized]
private Canvas m_Canvas;
private Canvas canvas
{
get
{
if (m_Canvas != null)
return m_Canvas;
m_Canvas = GetComponent<Canvas>();
return m_Canvas;
}
}
public override Camera eventCamera
{
get
{
return canvas.worldCamera;
}
}
public override int sortOrderPriority
{
get
{
return sortOrder;
}
}
/// <summary>
/// For the given ray, find graphics on this canvas which it intersects and are not blocked by other
/// world objects
/// </summary>
[NonSerialized]
private List<RaycastHit> m_RaycastResults = new List<RaycastHit>();
private void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList, Ray ray, bool checkForBlocking)
{
//This function is closely based on
//void GraphicRaycaster.Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
if (canvas == null)
return;
float hitDistance = float.MaxValue;
if (checkForBlocking && blockingObjects != BlockingObjects.None)
{
float dist = eventCamera.farClipPlane;
if (blockingObjects == BlockingObjects.ThreeD || blockingObjects == BlockingObjects.All)
{
var hits = Physics.RaycastAll(ray, dist, m_BlockingMask);
// Physics.RaycastAll does not sort its results, so find the nearest hit explicitly.
for (int i = 0; i < hits.Length; i++)
{
if (hits[i].distance < hitDistance)
{
hitDistance = hits[i].distance;
}
}
}
if (blockingObjects == BlockingObjects.TwoD || blockingObjects == BlockingObjects.All)
{
var hits = Physics2D.GetRayIntersectionAll(ray, dist, m_BlockingMask);
// Scan for the nearest 2D hit as well, since result ordering is not guaranteed.
for (int i = 0; i < hits.Length; i++)
{
if (hits[i].fraction * dist < hitDistance)
{
hitDistance = hits[i].fraction * dist;
}
}
}
}
m_RaycastResults.Clear();
GraphicRaycast(canvas, ray, m_RaycastResults);
for (var index = 0; index < m_RaycastResults.Count; index++)
{
var go = m_RaycastResults[index].graphic.gameObject;
bool appendGraphic = true;
if (ignoreReversedGraphics)
{
// If we have a camera, compare the ray direction against the camera's forward.
var cameraForward = ray.direction;
var dir = go.transform.rotation * Vector3.forward;
appendGraphic = Vector3.Dot(cameraForward, dir) > 0;
}
// Ignore points behind us (can happen with a canvas pointer)
if (eventCamera.transform.InverseTransformPoint(m_RaycastResults[index].worldPos).z <= 0)
{
appendGraphic = false;
}
if (appendGraphic)
{
float distance = Vector3.Distance(ray.origin, m_RaycastResults[index].worldPos);
if (distance >= hitDistance)
{
continue;
}
var castResult = new RaycastResult
{
gameObject = go,
module = this,
distance = distance,
index = resultAppendList.Count,
depth = m_RaycastResults[index].graphic.depth,
worldPosition = m_RaycastResults[index].worldPos
};
resultAppendList.Add(castResult);
}
}
}
/// <summary>
/// Performs a raycast using eventData.worldSpaceRay
/// </summary>
/// <param name="eventData"></param>
/// <param name="resultAppendList"></param>
public override void Raycast(PointerEventData eventData, List<RaycastResult> resultAppendList)
{
if (eventData.IsVRPointer())
{
Raycast(eventData, resultAppendList, eventData.GetRay(), true);
}
}
/// <summary>
/// Performs a raycast using the pointer object attached to this OVRRaycaster
/// </summary>
/// <param name="eventData"></param>
/// <param name="resultAppendList"></param>
public void RaycastPointer(PointerEventData eventData, List<RaycastResult> resultAppendList)
{
if (pointer != null && pointer.activeInHierarchy)
{
Raycast(eventData, resultAppendList, new Ray(eventCamera.transform.position, (pointer.transform.position - eventCamera.transform.position).normalized), false);
}
}
/// <summary>
/// Perform a raycast into the screen and collect all graphics underneath it.
/// </summary>
[NonSerialized]
static readonly List<RaycastHit> s_SortedGraphics = new List<RaycastHit>();
private void GraphicRaycast(Canvas canvas, Ray ray, List<RaycastHit> results)
{
//This function is based closely on :
// void GraphicRaycaster.Raycast(Canvas canvas, Camera eventCamera, Vector2 pointerPosition, List<Graphic> results)
// But modified to take a Ray instead of a canvas pointer, and also to explicitly ignore
// the graphic associated with the pointer
// Necessary for the event system
var foundGraphics = GraphicRegistry.GetGraphicsForCanvas(canvas);
s_SortedGraphics.Clear();
for (int i = 0; i < foundGraphics.Count; ++i)
{
Graphic graphic = foundGraphics[i];
// -1 means it hasn't been processed by the canvas, which means it isn't actually drawn
if (graphic.depth == -1 || (pointer == graphic.gameObject))
continue;
Vector3 worldPos;
if (RayIntersectsRectTransform(graphic.rectTransform, ray, out worldPos))
{
//Work out where this is on the screen for compatibility with existing Unity UI code
Vector2 screenPos = eventCamera.WorldToScreenPoint(worldPos);
// mask/image intersection - See Unity docs on eventAlphaThreshold for when this does anything
if (graphic.Raycast(screenPos, eventCamera))
{
RaycastHit hit;
hit.graphic = graphic;
hit.worldPos = worldPos;
hit.fromMouse = false;
s_SortedGraphics.Add(hit);
}
}
}
s_SortedGraphics.Sort((g1, g2) => g2.graphic.depth.CompareTo(g1.graphic.depth));
for (int i = 0; i < s_SortedGraphics.Count; ++i)
{
results.Add(s_SortedGraphics[i]);
}
}
/// <summary>
/// Get screen position of worldPosition contained in this RaycastResult
/// </summary>
/// <param name="worldPosition"></param>
/// <returns></returns>
public Vector2 GetScreenPosition(RaycastResult raycastResult)
{
// In future versions of Unity, RaycastResult will contain screenPosition, so this will not be necessary
return eventCamera.WorldToScreenPoint(raycastResult.worldPosition);
}
/// <summary>
/// Detects whether a ray intersects a RectTransform and if it does also
/// returns the world position of the intersection.
/// </summary>
/// <param name="rectTransform"></param>
/// <param name="ray"></param>
/// <param name="worldPos"></param>
/// <returns></returns>
static bool RayIntersectsRectTransform(RectTransform rectTransform, Ray ray, out Vector3 worldPos)
{
Vector3[] corners = new Vector3[4];
rectTransform.GetWorldCorners(corners);
Plane plane = new Plane(corners[0], corners[1], corners[2]);
float enter;
if (!plane.Raycast(ray, out enter))
{
worldPos = Vector3.zero;
return false;
}
Vector3 intersection = ray.GetPoint(enter);
Vector3 BottomEdge = corners[3] - corners[0];
Vector3 LeftEdge = corners[1] - corners[0];
float BottomDot = Vector3.Dot(intersection - corners[0], BottomEdge);
float LeftDot = Vector3.Dot(intersection - corners[0], LeftEdge);
if (BottomDot < BottomEdge.sqrMagnitude && // Can use sqrMag because BottomEdge is not normalized
LeftDot < LeftEdge.sqrMagnitude &&
BottomDot >= 0 &&
LeftDot >= 0)
{
worldPos = corners[0] + LeftDot * LeftEdge / LeftEdge.sqrMagnitude + BottomDot * BottomEdge / BottomEdge.sqrMagnitude;
return true;
}
else
{
worldPos = Vector3.zero;
return false;
}
}
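// Illustrative note (not in the original file): the dot-product test above
// projects the intersection onto the rect's bottom and left edges; the point
// lies inside the rect exactly when both projections fall within [0, |edge|^2],
// which avoids normalizing either edge vector.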
struct RaycastHit
{
public Graphic graphic;
public Vector3 worldPos;
public bool fromMouse;
};
/// <summary>
/// Is this the currently focused Raycaster according to the InputModule?
/// </summary>
/// <returns></returns>
public bool IsFocussed()
{
OVRInputModule inputModule = EventSystem.current.currentInputModule as OVRInputModule;
return inputModule && inputModule.activeGraphicRaycaster == this;
}
public void OnPointerEnter(PointerEventData e)
{
if (e.IsVRPointer())
{
// Gaze has entered this canvas. Make it the active raycaster so the canvas-mouse pointer can be used.
OVRInputModule inputModule = EventSystem.current.currentInputModule as OVRInputModule;
if (inputModule != null)
inputModule.activeGraphicRaycaster = this;
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 7aaf960227867044282d921171d2d7ac
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,51 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Assets.OVR.Scripts
{
public class Record
{
public string category;
public string message;
public Record(string cat, string msg)
{
category = cat;
message = msg;
}
}
public class RangedRecord : Record
{
public float value;
public float min;
public float max;
public RangedRecord(string cat, string msg, float val, float minVal, float maxVal)
: base(cat, msg)
{
value = val;
min = minVal;
max = maxVal;
}
}
public delegate void FixMethodDelegate(UnityEngine.Object obj, bool isLastInSet, int selectedIndex);
public class FixRecord : Record
{
public FixMethodDelegate fixMethod;
public UnityEngine.Object targetObject;
public string[] buttonNames;
public bool complete;
public FixRecord(string cat, string msg, FixMethodDelegate fix, UnityEngine.Object target, string[] buttons)
: base(cat, msg)
{
buttonNames = buttons;
fixMethod = fix;
targetObject = target;
complete = false;
}
}
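// Hedged usage sketch (hypothetical class, not in the original file): how a
// profiling tool might create a record whose value is judged against a range.
internal static class RecordExample
{
public static RangedRecord MakeDrawCallRecord(float drawCalls)
{
// value, min, and max drive a progress-bar style readout in the editor UI.
return new RangedRecord("Draw Call", "Please use fewer draw calls.", drawCalls, 0, 100);
}
}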
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 63f0fe0d60ddeb54f9f43d701286af2d
timeCreated: 1520636357
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,49 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Allows you to reset VR input tracking with a gamepad button press.
/// </summary>
public class OVRResetOrientation : MonoBehaviour
{
/// <summary>
/// The gamepad button that will reset VR input tracking.
/// </summary>
public OVRInput.RawButton resetButton = OVRInput.RawButton.Y;
/// <summary>
/// Check input and reset orientation if necessary
/// See the input mapping setup in the Unity Integration guide
/// </summary>
void Update()
{
// NOTE: some of the buttons defined in OVRInput.RawButton are not available on the Android game pad controller
if (OVRInput.GetDown(resetButton))
{
//*************************
// reset orientation
//*************************
OVRManager.display.RecenterPose();
}
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 09bb0a17b6a704298b65be4fb08ef480
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,218 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
/// <summary>
/// Sample that allows you to play with various VR settings.
/// </summary>
public class OVRSceneSampleController : MonoBehaviour
{
/// <summary>
/// The key that quits the application.
/// </summary>
public KeyCode quitKey = KeyCode.Escape;
/// <summary>
/// An optional texture that appears before the menu fades in.
/// </summary>
public Texture fadeInTexture = null;
/// <summary>
/// Controls how quickly the player's speed and rotation change based on input.
/// </summary>
public float speedRotationIncrement = 0.05f;
private OVRPlayerController playerController = null;
// Handle to OVRCameraRig
private OVRCameraRig cameraController = null;
/// <summary>
/// We can set the layer to be anything we want to, this allows
/// a specific camera to render it.
/// </summary>
public string layerName = "Default";
// Vision mode on/off
private bool visionMode = true;
// We want to hold onto GridCube, for potential sharing
// of the menu RenderTarget
OVRGridCube gridCube = null;
#if SHOW_DK2_VARIABLES
private string strVisionMode = "Vision Enabled: ON";
#endif
#region MonoBehaviour Message Handlers
/// <summary>
/// Awake this instance.
/// </summary>
void Awake()
{
// Find camera controller
OVRCameraRig[] cameraControllers;
cameraControllers = gameObject.GetComponentsInChildren<OVRCameraRig>();
if (cameraControllers.Length == 0)
{
Debug.LogWarning("OVRMainMenu: No OVRCameraRig attached.");
}
else if (cameraControllers.Length > 1)
{
Debug.LogWarning("OVRMainMenu: More then 1 OVRCameraRig attached.");
}
else
{
cameraController = cameraControllers[0];
}
// Find player controller
OVRPlayerController[] playerControllers;
playerControllers = gameObject.GetComponentsInChildren<OVRPlayerController>();
if (playerControllers.Length == 0)
{
Debug.LogWarning("OVRMainMenu: No OVRPlayerController attached.");
}
else if (playerControllers.Length > 1)
{
Debug.LogWarning("OVRMainMenu: More then 1 OVRPlayerController attached.");
}
else
{
playerController = playerControllers[0];
}
}
/// <summary>
/// Start this instance.
/// </summary>
void Start()
{
// Make sure to hide cursor
if (Application.isEditor == false)
{
Cursor.visible = false;
Cursor.lockState = CursorLockMode.Locked;
}
// CameraController updates
if (cameraController != null)
{
// Add a GridCube component to this object
gridCube = gameObject.AddComponent<OVRGridCube>();
gridCube.SetOVRCameraController(ref cameraController);
}
}
/// <summary>
/// Update this instance.
/// </summary>
void Update()
{
// Recenter pose
UpdateRecenterPose();
// Turn On/Off Vision Mode
UpdateVisionMode();
// Update Speed and Rotation Scale
if (playerController != null)
UpdateSpeedAndRotationScaleMultiplier();
// Toggle Fullscreen
if (Input.GetKeyDown(KeyCode.F11))
Screen.fullScreen = !Screen.fullScreen;
if (Input.GetKeyDown(KeyCode.M))
#if UNITY_2017_2_OR_NEWER
UnityEngine.XR.XRSettings.showDeviceView = !UnityEngine.XR.XRSettings.showDeviceView;
#else
UnityEngine.VR.VRSettings.showDeviceView = !UnityEngine.VR.VRSettings.showDeviceView;
#endif
#if !UNITY_ANDROID || UNITY_EDITOR
// Escape Application
if (Input.GetKeyDown(quitKey))
Application.Quit();
#endif
}
#endregion
/// <summary>
/// Updates the vision mode.
/// </summary>
void UpdateVisionMode()
{
if (Input.GetKeyDown(KeyCode.F2))
{
visionMode = !visionMode;
OVRManager.tracker.isEnabled = visionMode;
}
}
/// <summary>
/// Updates the speed and rotation scale multiplier.
/// </summary>
void UpdateSpeedAndRotationScaleMultiplier()
{
float moveScaleMultiplier = 0.0f;
playerController.GetMoveScaleMultiplier(ref moveScaleMultiplier);
if (Input.GetKeyDown(KeyCode.Alpha7))
{
moveScaleMultiplier -= speedRotationIncrement;
}
else if (Input.GetKeyDown(KeyCode.Alpha8))
{
moveScaleMultiplier += speedRotationIncrement;
}
playerController.SetMoveScaleMultiplier(moveScaleMultiplier);
float rotationScaleMultiplier = 0.0f;
playerController.GetRotationScaleMultiplier(ref rotationScaleMultiplier);
if (Input.GetKeyDown(KeyCode.Alpha9))
{
rotationScaleMultiplier -= speedRotationIncrement;
}
else if (Input.GetKeyDown(KeyCode.Alpha0))
{
rotationScaleMultiplier += speedRotationIncrement;
}
playerController.SetRotationScaleMultiplier(rotationScaleMultiplier);
}
/// <summary>
/// Recenter pose
/// </summary>
void UpdateRecenterPose()
{
if (Input.GetKeyDown(KeyCode.R))
OVRManager.display.RecenterPose();
}
}

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 4f07515ada089df47868559a20dd6783
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,209 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections; // required for Coroutines
/// <summary>
/// Fades the screen from black after a new scene is loaded. Fade can also be controlled mid-scene using SetUIFade and SetFadeLevel
/// </summary>
public class OVRScreenFade : MonoBehaviour
{
[Tooltip("Fade duration")]
public float fadeTime = 2.0f;
[Tooltip("Screen color at maximum fade")]
public Color fadeColor = new Color(0.01f, 0.01f, 0.01f, 1.0f);
public bool fadeOnStart = true;
/// <summary>
/// The render queue used by the fade mesh. Reduce this if you need to render on top of it.
/// </summary>
public int renderQueue = 5000;
private float uiFadeAlpha = 0;
private MeshRenderer fadeRenderer;
private MeshFilter fadeMesh;
private Material fadeMaterial = null;
private bool isFading = false;
public float currentAlpha { get; private set; }
void Awake()
{
// create the fade material
fadeMaterial = new Material(Shader.Find("Oculus/Unlit Transparent Color"));
fadeMesh = gameObject.AddComponent<MeshFilter>();
fadeRenderer = gameObject.AddComponent<MeshRenderer>();
var mesh = new Mesh();
fadeMesh.mesh = mesh;
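// Build an oversized quad one unit in front of the camera so it always covers the view.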
Vector3[] vertices = new Vector3[4];
float width = 2f;
float height = 2f;
float depth = 1f;
vertices[0] = new Vector3(-width, -height, depth);
vertices[1] = new Vector3(width, -height, depth);
vertices[2] = new Vector3(-width, height, depth);
vertices[3] = new Vector3(width, height, depth);
mesh.vertices = vertices;
int[] tri = new int[6];
tri[0] = 0;
tri[1] = 2;
tri[2] = 1;
tri[3] = 2;
tri[4] = 3;
tri[5] = 1;
mesh.triangles = tri;
Vector3[] normals = new Vector3[4];
normals[0] = -Vector3.forward;
normals[1] = -Vector3.forward;
normals[2] = -Vector3.forward;
normals[3] = -Vector3.forward;
mesh.normals = normals;
Vector2[] uv = new Vector2[4];
uv[0] = new Vector2(0, 0);
uv[1] = new Vector2(1, 0);
uv[2] = new Vector2(0, 1);
uv[3] = new Vector2(1, 1);
mesh.uv = uv;
SetFadeLevel(0);
}
/// <summary>
/// Start a fade out
/// </summary>
public void FadeOut()
{
StartCoroutine(Fade(0,1));
}
/// <summary>
/// Starts a fade in when a new level is loaded
/// </summary>
void OnLevelFinishedLoading(int level)
{
StartCoroutine(Fade(1,0));
}
/// <summary>
/// Automatically starts a fade in
/// </summary>
void Start()
{
if (fadeOnStart)
{
StartCoroutine(Fade(1,0));
}
}
void OnEnable()
{
if (!fadeOnStart)
{
SetFadeLevel(0);
}
}
/// <summary>
/// Cleans up the fade material
/// </summary>
void OnDestroy()
{
if (fadeRenderer != null)
Destroy(fadeRenderer);
if (fadeMaterial != null)
Destroy(fadeMaterial);
if (fadeMesh != null)
Destroy(fadeMesh);
}
/// <summary>
/// Set the UI fade level - fade due to UI in foreground
/// </summary>
public void SetUIFade(float level)
{
uiFadeAlpha = Mathf.Clamp01(level);
SetMaterialAlpha();
}
/// <summary>
/// Override current fade level
/// </summary>
/// <param name="level"></param>
public void SetFadeLevel(float level)
{
currentAlpha = level;
SetMaterialAlpha();
}
/// <summary>
/// Fades alpha from startAlpha to endAlpha over fadeTime seconds.
/// </summary>
IEnumerator Fade(float startAlpha, float endAlpha)
{
float elapsedTime = 0.0f;
while (elapsedTime < fadeTime)
{
elapsedTime += Time.deltaTime;
currentAlpha = Mathf.Lerp(startAlpha, endAlpha, Mathf.Clamp01(elapsedTime / fadeTime));
SetMaterialAlpha();
yield return new WaitForEndOfFrame();
}
}
/// <summary>
/// Update material alpha. UI fade and the current fade due to fade in/out animations (or explicit control)
/// both affect the fade. (The max is taken)
/// </summary>
private void SetMaterialAlpha()
{
Color color = fadeColor;
color.a = Mathf.Max(currentAlpha, uiFadeAlpha);
isFading = color.a > 0;
if (fadeMaterial != null)
{
fadeMaterial.color = color;
fadeMaterial.renderQueue = renderQueue;
fadeRenderer.material = fadeMaterial;
fadeRenderer.enabled = isFading;
}
}
}
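/// <summary>
/// Hedged usage sketch (hypothetical class, not in the original file): a
/// teleporter that fades out, moves the player, then restores visibility.
/// </summary>
public class OVRScreenFadeUsageExample : MonoBehaviour
{
public OVRScreenFade screenFade;
public IEnumerator TeleportWithFade(Transform player, Vector3 destination)
{
screenFade.FadeOut(); // animated fade toward fadeColor
yield return new WaitForSeconds(screenFade.fadeTime);
player.position = destination;
screenFade.SetFadeLevel(0); // snap back to fully visible
}
}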

View File

@@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: df8e1d778abf442e4bec449c360e9e1c
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: -100
icon: {instanceID: 0}
userData:

View File

@@ -0,0 +1,73 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
using System.Collections;
/// <summary>
/// Simple helper script that conditionally enables rendering of a controller if it is connected.
/// </summary>
public class OVRTrackedRemote : MonoBehaviour
{
/// <summary>
/// The root GameObject that represents the GearVr Controller model.
/// </summary>
public GameObject m_modelGearVrController;
/// <summary>
/// The root GameObject that represents the Oculus Go Controller model.
/// </summary>
public GameObject m_modelOculusGoController;
/// <summary>
/// The controller that determines whether or not to enable rendering of the controller model.
/// </summary>
public OVRInput.Controller m_controller;
private bool m_isOculusGo;
private bool m_prevControllerConnected = false;
private bool m_prevControllerConnectedCached = false;
void Start()
{
m_isOculusGo = (OVRPlugin.productName == "Oculus Go");
}
void Update()
{
bool controllerConnected = OVRInput.IsControllerConnected(m_controller);
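// Refresh model visibility when the connection state changes, and once on the first update.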
if ((controllerConnected != m_prevControllerConnected) || !m_prevControllerConnectedCached)
{
m_modelOculusGoController.SetActive(controllerConnected && m_isOculusGo);
m_modelGearVrController.SetActive(controllerConnected && !m_isOculusGo);
m_prevControllerConnected = controllerConnected;
m_prevControllerConnectedCached = true;
}
}
}

View File

@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: aed62bf3ae2456c408f247f96808ce96
timeCreated: 1486166271
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -0,0 +1,38 @@
/************************************************************************************
Copyright : Copyright 2017 Oculus VR, LLC. All Rights reserved.
Licensed under the Oculus VR Rift SDK License Version 3.4.1 (the "License");
you may not use the Oculus VR Rift SDK except in compliance with the License,
which is provided at the time of installation or download, or which
otherwise accompanies this software in either electronic or hard copy form.
You may obtain a copy of the License at
https://developer.oculus.com/licenses/sdk-3.4.1
Unless required by applicable law or agreed to in writing, the Oculus VR SDK
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
************************************************************************************/
using UnityEngine;
/// <summary>
/// Rotates this GameObject at a given speed.
/// </summary>
public class OVRWaitCursor : MonoBehaviour
{
public Vector3 rotateSpeeds = new Vector3(0.0f, 0.0f, -60.0f);
/// <summary>
/// Auto rotates the attached cursor.
/// </summary>
void Update()
{
transform.Rotate(rotateSpeeds * Time.smoothDeltaTime);
}
}

Some files were not shown because too many files have changed in this diff.