Added VR libraries
294  Assets/Oculus/VR/Scripts/Composition/OVRCameraComposition.cs  Normal file
@@ -0,0 +1,294 @@
using UnityEngine;
using System.Collections;

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN

public abstract class OVRCameraComposition : OVRComposition {
    protected GameObject cameraFramePlaneObject;
    protected float cameraFramePlaneDistance;

    protected readonly bool hasCameraDeviceOpened = false;
    protected readonly bool useDynamicLighting = false;

    internal readonly OVRPlugin.CameraDevice cameraDevice = OVRPlugin.CameraDevice.WebCamera0;

    private OVRCameraRig cameraRig;

    private Mesh boundaryMesh = null;
    private float boundaryMeshTopY = 0.0f;
    private float boundaryMeshBottomY = 0.0f;
    private OVRManager.VirtualGreenScreenType boundaryMeshType = OVRManager.VirtualGreenScreenType.Off;

    protected OVRCameraComposition(OVRManager.CameraDevice inCameraDevice, bool inUseDynamicLighting, OVRManager.DepthQuality depthQuality)
    {
        cameraDevice = OVRCompositionUtil.ConvertCameraDevice(inCameraDevice);

        Debug.Assert(!hasCameraDeviceOpened);
        Debug.Assert(!OVRPlugin.IsCameraDeviceAvailable(cameraDevice) || !OVRPlugin.HasCameraDeviceOpened(cameraDevice));
        hasCameraDeviceOpened = false;
        useDynamicLighting = inUseDynamicLighting;

        bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
        if (useDynamicLighting && !cameraSupportsDepth)
        {
            Debug.LogWarning("The camera device doesn't support depth. The result of dynamic lighting might not be correct");
        }

        if (OVRPlugin.IsCameraDeviceAvailable(cameraDevice))
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;
            if (OVRPlugin.GetExternalCameraCount() > 0 && OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPlugin.SetCameraDevicePreferredColorFrameSize(cameraDevice, intrinsics.ImageSensorPixelResolution.w, intrinsics.ImageSensorPixelResolution.h);
            }

            if (useDynamicLighting)
            {
                OVRPlugin.SetCameraDeviceDepthSensingMode(cameraDevice, OVRPlugin.CameraDeviceDepthSensingMode.Fill);
                OVRPlugin.CameraDeviceDepthQuality quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
                if (depthQuality == OVRManager.DepthQuality.Low)
                {
                    quality = OVRPlugin.CameraDeviceDepthQuality.Low;
                }
                else if (depthQuality == OVRManager.DepthQuality.Medium)
                {
                    quality = OVRPlugin.CameraDeviceDepthQuality.Medium;
                }
                else if (depthQuality == OVRManager.DepthQuality.High)
                {
                    quality = OVRPlugin.CameraDeviceDepthQuality.High;
                }
                else
                {
                    Debug.LogWarning("Unknown depth quality");
                }
                OVRPlugin.SetCameraDevicePreferredDepthQuality(cameraDevice, quality);
            }

            OVRPlugin.OpenCameraDevice(cameraDevice);
            if (OVRPlugin.HasCameraDeviceOpened(cameraDevice))
            {
                hasCameraDeviceOpened = true;
            }
        }
    }

    public override void Cleanup()
    {
        OVRCompositionUtil.SafeDestroy(ref cameraFramePlaneObject);
        if (hasCameraDeviceOpened)
        {
            OVRPlugin.CloseCameraDevice(cameraDevice);
        }
    }

    public override void RecenterPose()
    {
        boundaryMesh = null;
    }

    protected void CreateCameraFramePlaneObject(GameObject parentObject, Camera mixedRealityCamera, bool useDynamicLighting)
    {
        Debug.Assert(cameraFramePlaneObject == null);
        cameraFramePlaneObject = GameObject.CreatePrimitive(PrimitiveType.Quad);
        cameraFramePlaneObject.name = "MRCameraFrame";
        cameraFramePlaneObject.transform.parent = parentObject.transform;
        cameraFramePlaneObject.GetComponent<Collider>().enabled = false;
        cameraFramePlaneObject.GetComponent<MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
        Material cameraFrameMaterial = new Material(Shader.Find(useDynamicLighting ? "Oculus/OVRMRCameraFrameLit" : "Oculus/OVRMRCameraFrame"));
        cameraFramePlaneObject.GetComponent<MeshRenderer>().material = cameraFrameMaterial;
        cameraFrameMaterial.SetColor("_Color", Color.white);
        cameraFrameMaterial.SetFloat("_Visible", 0.0f);
        cameraFramePlaneObject.transform.localScale = new Vector3(4, 4, 4);
        cameraFramePlaneObject.SetActive(true);
        OVRCameraFrameCompositionManager cameraFrameCompositionManager = mixedRealityCamera.gameObject.AddComponent<OVRCameraFrameCompositionManager>();
        cameraFrameCompositionManager.cameraFrameGameObj = cameraFramePlaneObject;
        cameraFrameCompositionManager.composition = this;
    }

    private bool nullcameraRigWarningDisplayed = false;
    protected void UpdateCameraFramePlaneObject(Camera mainCamera, Camera mixedRealityCamera, RenderTexture boundaryMeshMaskTexture)
    {
        bool hasError = false;
        Material cameraFrameMaterial = cameraFramePlaneObject.GetComponent<MeshRenderer>().material;
        Texture2D colorTexture = Texture2D.blackTexture;
        Texture2D depthTexture = Texture2D.whiteTexture;
        if (OVRPlugin.IsCameraDeviceColorFrameAvailable(cameraDevice))
        {
            colorTexture = OVRPlugin.GetCameraDeviceColorFrameTexture(cameraDevice);
        }
        else
        {
            Debug.LogWarning("Camera: color frame not ready");
            hasError = true;
        }
        bool cameraSupportsDepth = OVRPlugin.DoesCameraDeviceSupportDepth(cameraDevice);
        if (useDynamicLighting && cameraSupportsDepth)
        {
            if (OVRPlugin.IsCameraDeviceDepthFrameAvailable(cameraDevice))
            {
                depthTexture = OVRPlugin.GetCameraDeviceDepthFrameTexture(cameraDevice);
            }
            else
            {
                Debug.LogWarning("Camera: depth frame not ready");
                hasError = true;
            }
        }
        if (!hasError)
        {
            Vector3 offset = mainCamera.transform.position - mixedRealityCamera.transform.position;
            float distance = Vector3.Dot(mixedRealityCamera.transform.forward, offset);
            cameraFramePlaneDistance = distance;

            cameraFramePlaneObject.transform.position = mixedRealityCamera.transform.position + mixedRealityCamera.transform.forward * distance;
            cameraFramePlaneObject.transform.rotation = mixedRealityCamera.transform.rotation;

            float tanFov = Mathf.Tan(mixedRealityCamera.fieldOfView * Mathf.Deg2Rad * 0.5f);
            cameraFramePlaneObject.transform.localScale = new Vector3(distance * mixedRealityCamera.aspect * tanFov * 2.0f, distance * tanFov * 2.0f, 1.0f);

            float worldHeight = distance * tanFov * 2.0f;
            float worldWidth = worldHeight * mixedRealityCamera.aspect;

            float cullingDistance = float.MaxValue;

            cameraRig = null;
            if (OVRManager.instance.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off)
            {
                cameraRig = mainCamera.GetComponentInParent<OVRCameraRig>();
                if (cameraRig != null)
                {
                    if (cameraRig.centerEyeAnchor == null)
                    {
                        cameraRig = null;
                    }
                }
                RefreshBoundaryMesh(mixedRealityCamera, out cullingDistance);
            }

            cameraFrameMaterial.mainTexture = colorTexture;
            cameraFrameMaterial.SetTexture("_DepthTex", depthTexture);
            cameraFrameMaterial.SetVector("_FlipParams", new Vector4((OVRManager.instance.flipCameraFrameHorizontally ? 1.0f : 0.0f), (OVRManager.instance.flipCameraFrameVertically ? 1.0f : 0.0f), 0.0f, 0.0f));
            cameraFrameMaterial.SetColor("_ChromaKeyColor", OVRManager.instance.chromaKeyColor);
            cameraFrameMaterial.SetFloat("_ChromaKeySimilarity", OVRManager.instance.chromaKeySimilarity);
            cameraFrameMaterial.SetFloat("_ChromaKeySmoothRange", OVRManager.instance.chromaKeySmoothRange);
            cameraFrameMaterial.SetFloat("_ChromaKeySpillRange", OVRManager.instance.chromaKeySpillRange);
            cameraFrameMaterial.SetVector("_TextureDimension", new Vector4(colorTexture.width, colorTexture.height, 1.0f / colorTexture.width, 1.0f / colorTexture.height));
            cameraFrameMaterial.SetVector("_TextureWorldSize", new Vector4(worldWidth, worldHeight, 0, 0));
            cameraFrameMaterial.SetFloat("_SmoothFactor", OVRManager.instance.dynamicLightingSmoothFactor);
            cameraFrameMaterial.SetFloat("_DepthVariationClamp", OVRManager.instance.dynamicLightingDepthVariationClampingValue);
            cameraFrameMaterial.SetFloat("_CullingDistance", cullingDistance);
            if (OVRManager.instance.virtualGreenScreenType == OVRManager.VirtualGreenScreenType.Off || boundaryMesh == null || boundaryMeshMaskTexture == null)
            {
                cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
            }
            else
            {
                if (cameraRig == null)
                {
                    if (!nullcameraRigWarningDisplayed)
                    {
                        Debug.LogWarning("Could not find the OVRCameraRig/CenterEyeAnchor object. Please check if the OVRCameraRig has been setup properly. The virtual green screen has been temporarily disabled");
                        nullcameraRigWarningDisplayed = true;
                    }

                    cameraFrameMaterial.SetTexture("_MaskTex", Texture2D.whiteTexture);
                }
                else
                {
                    if (nullcameraRigWarningDisplayed)
                    {
                        Debug.Log("OVRCameraRig/CenterEyeAnchor object found. Virtual green screen is activated");
                        nullcameraRigWarningDisplayed = false;
                    }

                    cameraFrameMaterial.SetTexture("_MaskTex", boundaryMeshMaskTexture);
                }
            }
        }
    }

    protected void RefreshBoundaryMesh(Camera camera, out float cullingDistance)
    {
        float depthTolerance = OVRManager.instance.virtualGreenScreenApplyDepthCulling ? OVRManager.instance.virtualGreenScreenDepthTolerance : float.PositiveInfinity;
        cullingDistance = OVRCompositionUtil.GetMaximumBoundaryDistance(camera, OVRCompositionUtil.ToBoundaryType(OVRManager.instance.virtualGreenScreenType)) + depthTolerance;
        if (boundaryMesh == null || boundaryMeshType != OVRManager.instance.virtualGreenScreenType || boundaryMeshTopY != OVRManager.instance.virtualGreenScreenTopY || boundaryMeshBottomY != OVRManager.instance.virtualGreenScreenBottomY)
        {
            boundaryMeshTopY = OVRManager.instance.virtualGreenScreenTopY;
            boundaryMeshBottomY = OVRManager.instance.virtualGreenScreenBottomY;
            boundaryMesh = OVRCompositionUtil.BuildBoundaryMesh(OVRCompositionUtil.ToBoundaryType(OVRManager.instance.virtualGreenScreenType), boundaryMeshTopY, boundaryMeshBottomY);
            boundaryMeshType = OVRManager.instance.virtualGreenScreenType;

            // Creating GameObject for testing purposes only
            //GameObject boundaryMeshObject = new GameObject("BoundaryMeshObject");
            //boundaryMeshObject.AddComponent<MeshFilter>().mesh = boundaryMesh;
            //boundaryMeshObject.AddComponent<MeshRenderer>();
        }
    }

    public class OVRCameraFrameCompositionManager : MonoBehaviour
    {
        public GameObject cameraFrameGameObj;
        public OVRCameraComposition composition;
        public RenderTexture boundaryMeshMaskTexture;
        private Material cameraFrameMaterial;
        private Material whiteMaterial;

        void Start()
        {
            Shader shader = Shader.Find("Oculus/Unlit");
            if (!shader)
            {
                Debug.LogError("Oculus/Unlit shader does not exist");
                return;
            }
            whiteMaterial = new Material(shader);
            whiteMaterial.color = Color.white;
        }

        void OnPreRender()
        {
            if (OVRManager.instance.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off && boundaryMeshMaskTexture != null && composition.boundaryMesh != null)
            {
                RenderTexture oldRT = RenderTexture.active;
                RenderTexture.active = boundaryMeshMaskTexture;

                // The camera matrices haven't been set up when OnPreRender() is executed. Load the projection manually
                GL.PushMatrix();
                GL.LoadProjectionMatrix(GetComponent<Camera>().projectionMatrix);

                GL.Clear(false, true, Color.black);

                for (int i = 0; i < whiteMaterial.passCount; ++i)
                {
                    if (whiteMaterial.SetPass(i))
                    {
                        Graphics.DrawMeshNow(composition.boundaryMesh, composition.cameraRig.ComputeTrackReferenceMatrix());
                    }
                }

                GL.PopMatrix();
                RenderTexture.active = oldRT;
            }

            if (cameraFrameGameObj)
            {
                if (cameraFrameMaterial == null)
                    cameraFrameMaterial = cameraFrameGameObj.GetComponent<MeshRenderer>().material;
                cameraFrameMaterial.SetFloat("_Visible", 1.0f);
            }
        }
        void OnPostRender()
        {
            if (cameraFrameGameObj)
            {
                Debug.Assert(cameraFrameMaterial);
                cameraFrameMaterial.SetFloat("_Visible", 0.0f);
            }
        }
    }

}

#endif
12  Assets/Oculus/VR/Scripts/Composition/OVRCameraComposition.cs.meta  Normal file
@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 70818bad1fe6859439b190a61dfb6eb8
timeCreated: 1503089686
licenseType: Pro
MonoImporter:
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:
56  Assets/Oculus/VR/Scripts/Composition/OVRComposition.cs  Normal file
@@ -0,0 +1,56 @@
using UnityEngine;
using System.Collections;

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN

public abstract class OVRComposition {

    public abstract OVRManager.CompositionMethod CompositionMethod();

    public abstract void Update(Camera mainCamera);
    public abstract void Cleanup();

    public virtual void RecenterPose() { }

    protected bool usingLastAttachedNodePose = false;
    protected OVRPose lastAttachedNodePose = new OVRPose();    // Sometimes the attached node pose is not readable (lost tracking, low battery, etc.). Use the last pose instead when that happens

    internal OVRPose ComputeCameraWorldSpacePose(OVRPlugin.CameraExtrinsics extrinsics)
    {
        OVRPose worldSpacePose = new OVRPose();
        OVRPose trackingSpacePose = new OVRPose();

        OVRPose cameraTrackingSpacePose = extrinsics.RelativePose.ToOVRPose();
        trackingSpacePose = cameraTrackingSpacePose;

        if (extrinsics.AttachedToNode != OVRPlugin.Node.None && OVRPlugin.GetNodePresent(extrinsics.AttachedToNode))
        {
            if (usingLastAttachedNodePose)
            {
                Debug.Log("The camera attached node get tracked");
                usingLastAttachedNodePose = false;
            }
            OVRPose attachedNodePose = OVRPlugin.GetNodePose(extrinsics.AttachedToNode, OVRPlugin.Step.Render).ToOVRPose();
            lastAttachedNodePose = attachedNodePose;
            trackingSpacePose = attachedNodePose * trackingSpacePose;
        }
        else
        {
            if (extrinsics.AttachedToNode != OVRPlugin.Node.None)
            {
                if (!usingLastAttachedNodePose)
                {
                    Debug.LogWarning("The camera attached node could not be tracked, using the last pose");
                    usingLastAttachedNodePose = true;
                }
                trackingSpacePose = lastAttachedNodePose * trackingSpacePose;
            }
        }

        worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);
        return worldSpacePose;
    }

}

#endif
12  Assets/Oculus/VR/Scripts/Composition/OVRComposition.cs.meta  Normal file
@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 829a382f3380d4b46ad9670463232a0b
timeCreated: 1502990005
licenseType: Pro
MonoImporter:
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:
149  Assets/Oculus/VR/Scripts/Composition/OVRCompositionUtil.cs  Normal file
@@ -0,0 +1,149 @@
using UnityEngine;
using System.Collections.Generic;

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN

internal class OVRCompositionUtil {

    public static void SafeDestroy(GameObject obj)
    {
        if (Application.isPlaying)
        {
            GameObject.Destroy(obj);
        }
        else
        {
            GameObject.DestroyImmediate(obj);
        }
    }

    public static void SafeDestroy(ref GameObject obj)
    {
        SafeDestroy(obj);
        obj = null;
    }

    public static OVRPlugin.CameraDevice ConvertCameraDevice(OVRManager.CameraDevice cameraDevice)
    {
        if (cameraDevice == OVRManager.CameraDevice.WebCamera0)
        {
            return OVRPlugin.CameraDevice.WebCamera0;
        }
        else if (cameraDevice == OVRManager.CameraDevice.WebCamera1)
        {
            return OVRPlugin.CameraDevice.WebCamera1;
        }
        else if (cameraDevice == OVRManager.CameraDevice.ZEDCamera)
        {
            return OVRPlugin.CameraDevice.ZEDCamera;
        }
        else
        {
            return OVRPlugin.CameraDevice.None;
        }
    }

    public static OVRBoundary.BoundaryType ToBoundaryType(OVRManager.VirtualGreenScreenType type)
    {
        if (type == OVRManager.VirtualGreenScreenType.OuterBoundary)
        {
            return OVRBoundary.BoundaryType.OuterBoundary;
        }
        else if (type == OVRManager.VirtualGreenScreenType.PlayArea)
        {
            return OVRBoundary.BoundaryType.PlayArea;
        }
        else
        {
            Debug.LogWarning("Unmatched VirtualGreenScreenType");
            return OVRBoundary.BoundaryType.OuterBoundary;
        }
    }

    public static Vector3 GetWorldPosition(Vector3 trackingSpacePosition)
    {
        OVRPose tsPose;
        tsPose.position = trackingSpacePosition;
        tsPose.orientation = Quaternion.identity;
        OVRPose wsPose = OVRExtensions.ToWorldSpacePose(tsPose);
        Vector3 pos = wsPose.position;
        return pos;
    }

    public static float GetMaximumBoundaryDistance(Camera camera, OVRBoundary.BoundaryType boundaryType)
    {
        if (!OVRManager.boundary.GetConfigured())
        {
            return float.MaxValue;
        }

        Vector3[] geometry = OVRManager.boundary.GetGeometry(boundaryType);
        if (geometry.Length == 0)
        {
            return float.MaxValue;
        }

        float maxDistance = -float.MaxValue;
        foreach (Vector3 v in geometry)
        {
            Vector3 pos = GetWorldPosition(v);
            float distance = Vector3.Dot(camera.transform.forward, pos);
            if (maxDistance < distance)
            {
                maxDistance = distance;
            }
        }
        return maxDistance;
    }

    public static Mesh BuildBoundaryMesh(OVRBoundary.BoundaryType boundaryType, float topY, float bottomY)
    {
        if (!OVRManager.boundary.GetConfigured())
        {
            return null;
        }

        List<Vector3> geometry = new List<Vector3>(OVRManager.boundary.GetGeometry(boundaryType));
        if (geometry.Count == 0)
        {
            return null;
        }

        geometry.Add(geometry[0]);
        int numPoints = geometry.Count;

        Vector3[] vertices = new Vector3[numPoints * 2];
        Vector2[] uvs = new Vector2[numPoints * 2];
        for (int i = 0; i < numPoints; ++i)
        {
            Vector3 v = geometry[i];
            vertices[i] = new Vector3(v.x, bottomY, v.z);
            vertices[i + numPoints] = new Vector3(v.x, topY, v.z);
            uvs[i] = new Vector2((float)i / (numPoints - 1), 0.0f);
            uvs[i + numPoints] = new Vector2(uvs[i].x, 1.0f);
        }

        int[] triangles = new int[(numPoints - 1) * 2 * 3];
        for (int i = 0; i < numPoints - 1; ++i)
        {
            // The geometry is built clockwise; only the back faces should be rendered in the camera frame mask

            triangles[i * 6 + 0] = i;
            triangles[i * 6 + 1] = i + numPoints;
            triangles[i * 6 + 2] = i + 1 + numPoints;

            triangles[i * 6 + 3] = i;
            triangles[i * 6 + 4] = i + 1 + numPoints;
            triangles[i * 6 + 5] = i + 1;
        }

        Mesh mesh = new Mesh();
        mesh.vertices = vertices;
        mesh.uv = uvs;
        mesh.triangles = triangles;
        return mesh;
    }

}

#endif
12  Assets/Oculus/VR/Scripts/Composition/OVRCompositionUtil.cs.meta  Normal file
@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 43bf91d46b2eb874a842be95aee2cc9a
timeCreated: 1502992822
licenseType: Pro
MonoImporter:
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:
118  Assets/Oculus/VR/Scripts/Composition/OVRDirectComposition.cs  Normal file
@@ -0,0 +1,118 @@
using UnityEngine;
using System.Collections;

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN

public class OVRDirectComposition : OVRCameraComposition
{
    public GameObject directCompositionCameraGameObject;
    public Camera directCompositionCamera;
    public RenderTexture boundaryMeshMaskTexture = null;

    public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.Direct; }

    public OVRDirectComposition(GameObject parentObject, Camera mainCamera, OVRManager.CameraDevice cameraDevice, bool useDynamicLighting, OVRManager.DepthQuality depthQuality)
        : base(cameraDevice, useDynamicLighting, depthQuality)
    {
        Debug.Assert(directCompositionCameraGameObject == null);
        directCompositionCameraGameObject = new GameObject();
        directCompositionCameraGameObject.name = "MRDirectCompositionCamera";
        directCompositionCameraGameObject.transform.parent = parentObject.transform;
        directCompositionCamera = directCompositionCameraGameObject.AddComponent<Camera>();
        directCompositionCamera.stereoTargetEye = StereoTargetEyeMask.None;
        directCompositionCamera.depth = float.MaxValue;
        directCompositionCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
        directCompositionCamera.clearFlags = mainCamera.clearFlags;
        directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
        directCompositionCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        directCompositionCamera.nearClipPlane = mainCamera.nearClipPlane;
        directCompositionCamera.farClipPlane = mainCamera.farClipPlane;

        if (!hasCameraDeviceOpened)
        {
            Debug.LogError("Unable to open camera device " + cameraDevice);
        }
        else
        {
            Debug.Log("DirectComposition activated : useDynamicLighting " + (useDynamicLighting ? "ON" : "OFF"));
            CreateCameraFramePlaneObject(parentObject, directCompositionCamera, useDynamicLighting);
        }
    }

    public override void Update(Camera mainCamera)
    {
        if (!hasCameraDeviceOpened)
        {
            return;
        }

        if (!OVRPlugin.SetHandNodePoseStateLatency(OVRManager.instance.handPoseStateLatency))
        {
            Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
        }

        directCompositionCamera.clearFlags = mainCamera.clearFlags;
        directCompositionCamera.backgroundColor = mainCamera.backgroundColor;
        directCompositionCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        directCompositionCamera.nearClipPlane = mainCamera.nearClipPlane;
        directCompositionCamera.farClipPlane = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose worldSpacePose = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = OVRMixedReality.fakeCameraPositon;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            directCompositionCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            directCompositionCamera.aspect = OVRMixedReality.fakeCameraAspect;
            directCompositionCamera.transform.FromOVRPose(worldSpacePose);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);

                float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                directCompositionCamera.fieldOfView = fovY;
                directCompositionCamera.aspect = aspect;
                directCompositionCamera.transform.FromOVRPose(worldSpacePose);
            }
            else
            {
                Debug.LogWarning("Failed to get external camera information");
            }
        }

        if (hasCameraDeviceOpened)
        {
            if (boundaryMeshMaskTexture == null || boundaryMeshMaskTexture.width != Screen.width || boundaryMeshMaskTexture.height != Screen.height)
            {
                boundaryMeshMaskTexture = new RenderTexture(Screen.width, Screen.height, 0, RenderTextureFormat.R8);
                boundaryMeshMaskTexture.Create();
            }
            UpdateCameraFramePlaneObject(mainCamera, directCompositionCamera, boundaryMeshMaskTexture);
            directCompositionCamera.GetComponent<OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = boundaryMeshMaskTexture;
        }
    }

    public override void Cleanup()
    {
        base.Cleanup();

        OVRCompositionUtil.SafeDestroy(ref directCompositionCameraGameObject);
        directCompositionCamera = null;

        Debug.Log("DirectComposition deactivated");
    }
}

#endif
12  Assets/Oculus/VR/Scripts/Composition/OVRDirectComposition.cs.meta  Normal file
@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 8e9d1c62d6c68c7429ce265558cfd2b2
timeCreated: 1502990248
licenseType: Pro
MonoImporter:
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:
165  Assets/Oculus/VR/Scripts/Composition/OVRExternalComposition.cs  Normal file
@@ -0,0 +1,165 @@
using UnityEngine;
using System.Collections;

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN

public class OVRExternalComposition : OVRComposition
{
    private GameObject foregroundCameraGameObject;
    private Camera foregroundCamera;
    private GameObject backgroundCameraGameObject;
    private Camera backgroundCamera;
    private GameObject cameraProxyPlane;

    public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.External; }

    public OVRExternalComposition(GameObject parentObject, Camera mainCamera)
    {
        Debug.Assert(backgroundCameraGameObject == null);
        backgroundCameraGameObject = new GameObject();
        backgroundCameraGameObject.name = "MRBackgroundCamera";
        backgroundCameraGameObject.transform.parent = parentObject.transform;
        backgroundCamera = backgroundCameraGameObject.AddComponent<Camera>();
        backgroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
        backgroundCamera.depth = float.MaxValue;
        backgroundCamera.rect = new Rect(0.0f, 0.0f, 0.5f, 1.0f);
        backgroundCamera.clearFlags = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        backgroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        backgroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane = mainCamera.farClipPlane;

        Debug.Assert(foregroundCameraGameObject == null);
        foregroundCameraGameObject = new GameObject();
        foregroundCameraGameObject.name = "MRForgroundCamera";
        foregroundCameraGameObject.transform.parent = parentObject.transform;
        foregroundCamera = foregroundCameraGameObject.AddComponent<Camera>();
        foregroundCamera.stereoTargetEye = StereoTargetEyeMask.None;
        foregroundCamera.depth = float.MaxValue;
        foregroundCamera.rect = new Rect(0.5f, 0.0f, 0.5f, 1.0f);
        foregroundCamera.clearFlags = CameraClearFlags.Color;
        foregroundCamera.backgroundColor = OVRMixedReality.chromaKeyColor;
        foregroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane = mainCamera.farClipPlane;

        // Create cameraProxyPlane for clipping
        Debug.Assert(cameraProxyPlane == null);
        cameraProxyPlane = GameObject.CreatePrimitive(PrimitiveType.Quad);
        cameraProxyPlane.name = "MRProxyClipPlane";
        cameraProxyPlane.transform.parent = parentObject.transform;
        cameraProxyPlane.GetComponent<Collider>().enabled = false;
        cameraProxyPlane.GetComponent<MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
        Material clipMaterial = new Material(Shader.Find("Oculus/OVRMRClipPlane"));
        cameraProxyPlane.GetComponent<MeshRenderer>().material = clipMaterial;
        clipMaterial.SetColor("_Color", OVRMixedReality.chromaKeyColor);
        clipMaterial.SetFloat("_Visible", 0.0f);
        cameraProxyPlane.transform.localScale = new Vector3(1000, 1000, 1000);
        cameraProxyPlane.SetActive(true);
        OVRMRForegroundCameraManager foregroundCameraManager = foregroundCameraGameObject.AddComponent<OVRMRForegroundCameraManager>();
        foregroundCameraManager.clipPlaneGameObj = cameraProxyPlane;
    }

    public override void Update(Camera mainCamera)
    {
        OVRPlugin.SetHandNodePoseStateLatency(0.0);    // the HandNodePoseStateLatency doesn't apply to the external composition. Always enforce it to 0.0

        backgroundCamera.clearFlags = mainCamera.clearFlags;
        backgroundCamera.backgroundColor = mainCamera.backgroundColor;
        backgroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        backgroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        backgroundCamera.farClipPlane = mainCamera.farClipPlane;

        foregroundCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        foregroundCamera.nearClipPlane = mainCamera.nearClipPlane;
        foregroundCamera.farClipPlane = mainCamera.farClipPlane;

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose worldSpacePose = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = OVRMixedReality.fakeCameraPositon;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            backgroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            backgroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
            backgroundCamera.transform.FromOVRPose(worldSpacePose);

            foregroundCamera.fieldOfView = OVRMixedReality.fakeCameraFov;
            foregroundCamera.aspect = OVRMixedReality.fakeCameraAspect;
            foregroundCamera.transform.FromOVRPose(worldSpacePose);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);

                float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                backgroundCamera.fieldOfView = fovY;
                backgroundCamera.aspect = aspect;
                backgroundCamera.transform.FromOVRPose(worldSpacePose);
                foregroundCamera.fieldOfView = fovY;
                foregroundCamera.aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;
                foregroundCamera.transform.FromOVRPose(worldSpacePose);
            }
            else
            {
                Debug.LogError("Failed to get external camera information");
                return;
            }
        }

        // Assume the player is always standing upright
        Vector3 externalCameraToHeadXZ = mainCamera.transform.position - foregroundCamera.transform.position;
        externalCameraToHeadXZ.y = 0;
        cameraProxyPlane.transform.position = mainCamera.transform.position;
        cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + externalCameraToHeadXZ);
    }

    public override void Cleanup()
    {
        OVRCompositionUtil.SafeDestroy(ref backgroundCameraGameObject);
        backgroundCamera = null;
        OVRCompositionUtil.SafeDestroy(ref foregroundCameraGameObject);
        foregroundCamera = null;
        OVRCompositionUtil.SafeDestroy(ref cameraProxyPlane);
        Debug.Log("ExternalComposition deactivated");
    }

}

/// <summary>
/// Helper internal class for foregroundCamera, don't call it outside
/// </summary>
internal class OVRMRForegroundCameraManager : MonoBehaviour
{
    public GameObject clipPlaneGameObj;
    private Material clipPlaneMaterial;
    void OnPreRender()
    {
        // the clipPlaneGameObj should be only visible to foreground camera
        if (clipPlaneGameObj)
        {
            if (clipPlaneMaterial == null)
                clipPlaneMaterial = clipPlaneGameObj.GetComponent<MeshRenderer>().material;
            clipPlaneGameObj.GetComponent<MeshRenderer>().material.SetFloat("_Visible", 1.0f);
        }
    }
    void OnPostRender()
    {
        if (clipPlaneGameObj)
        {
            Debug.Assert(clipPlaneMaterial);
            clipPlaneGameObj.GetComponent<MeshRenderer>().material.SetFloat("_Visible", 0.0f);
        }
    }
}

#endif
12  Assets/Oculus/VR/Scripts/Composition/OVRExternalComposition.cs.meta  Normal file
@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 2c109ff55176f71418ec2c06d1b5d28e
timeCreated: 1502990231
licenseType: Pro
MonoImporter:
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant:
373  Assets/Oculus/VR/Scripts/Composition/OVRSandwichComposition.cs  Normal file
@@ -0,0 +1,373 @@
using UnityEngine;
using UnityEngine.Rendering;

#if UNITY_EDITOR_WIN || UNITY_STANDALONE_WIN

public class OVRSandwichComposition : OVRCameraComposition
{
    public float frameRealtime;

    public Camera fgCamera;
    public Camera bgCamera;

    public class HistoryRecord
    {
        public float timestamp = float.MinValue;
        public RenderTexture fgRenderTexture;
        public RenderTexture bgRenderTexture;
        public RenderTexture boundaryMeshMaskTexture;
    }

    public readonly int historyRecordCount = 8;    // enough to compensate for 88 ms latency @ 90 fps
    public readonly HistoryRecord[] historyRecordArray;
    public int historyRecordCursorIndex = 0;

    public GameObject cameraProxyPlane;

    public Camera compositionCamera;
    public OVRSandwichCompositionManager compositionManager;

    private int _cameraFramePlaneLayer = -1;

    // find an unnamed layer between 24..29
    public int cameraFramePlaneLayer
    {
        get
        {
            if (_cameraFramePlaneLayer < 0)
            {
                for (int i = 24; i <= 29; ++i)
                {
                    string layerName = LayerMask.LayerToName(i);
                    if (layerName == null || layerName.Length == 0)
                    {
                        _cameraFramePlaneLayer = i;
                        break;
                    }
                }
                if (_cameraFramePlaneLayer == -1)
                {
                    Debug.LogWarning("Unable to find an unnamed layer between 24 and 29.");
                    _cameraFramePlaneLayer = 25;
                }
                Debug.LogFormat("Set the CameraFramePlaneLayer in SandwichComposition to {0}. Please do NOT put any other gameobject in this layer.", _cameraFramePlaneLayer);
            }
            return _cameraFramePlaneLayer;
        }
    }

    public override OVRManager.CompositionMethod CompositionMethod() { return OVRManager.CompositionMethod.Sandwich; }

    public OVRSandwichComposition(GameObject parentObject, Camera mainCamera, OVRManager.CameraDevice cameraDevice, bool useDynamicLighting, OVRManager.DepthQuality depthQuality)
        : base(cameraDevice, useDynamicLighting, depthQuality)
    {
        frameRealtime = Time.realtimeSinceStartup;

        historyRecordCount = OVRManager.instance.sandwichCompositionBufferedFrames;
        if (historyRecordCount < 1)
        {
            Debug.LogWarning("Invalid sandwichCompositionBufferedFrames in OVRManager. It should be at least 1");
            historyRecordCount = 1;
        }
        if (historyRecordCount > 16)
        {
            Debug.LogWarning("The value of sandwichCompositionBufferedFrames in OVRManager is too big. It would consume a lot of memory. It has been override to 16");
            historyRecordCount = 16;
        }
        historyRecordArray = new HistoryRecord[historyRecordCount];
        for (int i = 0; i < historyRecordCount; ++i)
        {
            historyRecordArray[i] = new HistoryRecord();
        }
        historyRecordCursorIndex = 0;

        GameObject fgObject = new GameObject("MRSandwichForegroundCamera");
        fgObject.transform.parent = parentObject.transform;
        fgCamera = fgObject.AddComponent<Camera>();
        fgCamera.depth = 200;
        fgCamera.clearFlags = CameraClearFlags.SolidColor;
        fgCamera.backgroundColor = Color.clear;
        fgCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        fgCamera.nearClipPlane = mainCamera.nearClipPlane;
        fgCamera.farClipPlane = mainCamera.farClipPlane;

        GameObject bgObject = new GameObject("MRSandwichBackgroundCamera");
        bgObject.transform.parent = parentObject.transform;
        bgCamera = bgObject.AddComponent<Camera>();
        bgCamera.depth = 100;
        bgCamera.clearFlags = mainCamera.clearFlags;
        bgCamera.backgroundColor = mainCamera.backgroundColor;
        bgCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);
        bgCamera.nearClipPlane = mainCamera.nearClipPlane;
        bgCamera.farClipPlane = mainCamera.farClipPlane;

        // Create cameraProxyPlane for clipping
        Debug.Assert(cameraProxyPlane == null);
        cameraProxyPlane = GameObject.CreatePrimitive(PrimitiveType.Quad);
        cameraProxyPlane.name = "MRProxyClipPlane";
        cameraProxyPlane.transform.parent = parentObject.transform;
        cameraProxyPlane.GetComponent<Collider>().enabled = false;
        cameraProxyPlane.GetComponent<MeshRenderer>().shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
        Material clipMaterial = new Material(Shader.Find("Oculus/OVRMRClipPlane"));
        cameraProxyPlane.GetComponent<MeshRenderer>().material = clipMaterial;
        clipMaterial.SetColor("_Color", Color.clear);
        clipMaterial.SetFloat("_Visible", 0.0f);
        cameraProxyPlane.transform.localScale = new Vector3(1000, 1000, 1000);
        cameraProxyPlane.SetActive(true);
        OVRMRForegroundCameraManager foregroundCameraManager = fgCamera.gameObject.AddComponent<OVRMRForegroundCameraManager>();
        foregroundCameraManager.clipPlaneGameObj = cameraProxyPlane;

        GameObject compositionCameraObject = new GameObject("MRSandwichCaptureCamera");
        compositionCameraObject.transform.parent = parentObject.transform;
        compositionCamera = compositionCameraObject.AddComponent<Camera>();
        compositionCamera.stereoTargetEye = StereoTargetEyeMask.None;
        compositionCamera.depth = float.MaxValue;
        compositionCamera.rect = new Rect(0.0f, 0.0f, 1.0f, 1.0f);
        compositionCamera.clearFlags = CameraClearFlags.Depth;
        compositionCamera.backgroundColor = mainCamera.backgroundColor;
        compositionCamera.cullingMask = 1 << cameraFramePlaneLayer;
        compositionCamera.nearClipPlane = mainCamera.nearClipPlane;
        compositionCamera.farClipPlane = mainCamera.farClipPlane;

        if (!hasCameraDeviceOpened)
        {
            Debug.LogError("Unable to open camera device " + cameraDevice);
        }
        else
        {
            Debug.Log("SandwichComposition activated : useDynamicLighting " + (useDynamicLighting ? "ON" : "OFF"));
            CreateCameraFramePlaneObject(parentObject, compositionCamera, useDynamicLighting);
            cameraFramePlaneObject.layer = cameraFramePlaneLayer;
            RefreshRenderTextures(mainCamera);
            compositionManager = compositionCamera.gameObject.AddComponent<OVRSandwichCompositionManager>();
            compositionManager.fgTexture = historyRecordArray[historyRecordCursorIndex].fgRenderTexture;
            compositionManager.bgTexture = historyRecordArray[historyRecordCursorIndex].bgRenderTexture;
        }
    }

    public override void Update(Camera mainCamera)
    {
        if (!hasCameraDeviceOpened)
        {
            return;
        }

        frameRealtime = Time.realtimeSinceStartup;

        ++historyRecordCursorIndex;
        if (historyRecordCursorIndex >= historyRecordCount)
        {
            historyRecordCursorIndex = 0;
        }

        if (!OVRPlugin.SetHandNodePoseStateLatency(OVRManager.instance.handPoseStateLatency))
        {
            Debug.LogWarning("HandPoseStateLatency is invalid. Expect a value between 0.0 to 0.5, get " + OVRManager.instance.handPoseStateLatency);
        }

        RefreshRenderTextures(mainCamera);

        bgCamera.clearFlags = mainCamera.clearFlags;
        bgCamera.backgroundColor = mainCamera.backgroundColor;
        bgCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);

        fgCamera.cullingMask = mainCamera.cullingMask & (~OVRManager.instance.extraHiddenLayers);

        if (OVRMixedReality.useFakeExternalCamera || OVRPlugin.GetExternalCameraCount() == 0)
        {
            OVRPose worldSpacePose = new OVRPose();
            OVRPose trackingSpacePose = new OVRPose();
            trackingSpacePose.position = OVRMixedReality.fakeCameraPositon;
            trackingSpacePose.orientation = OVRMixedReality.fakeCameraRotation;
            worldSpacePose = OVRExtensions.ToWorldSpacePose(trackingSpacePose);

            RefreshCameraPoses(OVRMixedReality.fakeCameraFov, OVRMixedReality.fakeCameraAspect, worldSpacePose);
        }
        else
        {
            OVRPlugin.CameraExtrinsics extrinsics;
            OVRPlugin.CameraIntrinsics intrinsics;

            // So far, only support 1 camera for MR and always use camera index 0
            if (OVRPlugin.GetMixedRealityCameraInfo(0, out extrinsics, out intrinsics))
            {
                OVRPose worldSpacePose = ComputeCameraWorldSpacePose(extrinsics);

                float fovY = Mathf.Atan(intrinsics.FOVPort.UpTan) * Mathf.Rad2Deg * 2;
                float aspect = intrinsics.FOVPort.LeftTan / intrinsics.FOVPort.UpTan;

                RefreshCameraPoses(fovY, aspect, worldSpacePose);
            }
            else
            {
                Debug.LogWarning("Failed to get external camera information");
            }
        }

        compositionCamera.GetComponent<OVRCameraFrameCompositionManager>().boundaryMeshMaskTexture = historyRecordArray[historyRecordCursorIndex].boundaryMeshMaskTexture;
        HistoryRecord record = GetHistoryRecordForComposition();
        UpdateCameraFramePlaneObject(mainCamera, compositionCamera, record.boundaryMeshMaskTexture);
        OVRSandwichCompositionManager compositionManager = compositionCamera.gameObject.GetComponent<OVRSandwichCompositionManager>();
        compositionManager.fgTexture = record.fgRenderTexture;
        compositionManager.bgTexture = record.bgRenderTexture;

        cameraProxyPlane.transform.position = fgCamera.transform.position + fgCamera.transform.forward * cameraFramePlaneDistance;
        cameraProxyPlane.transform.LookAt(cameraProxyPlane.transform.position + fgCamera.transform.forward);
    }

    public override void Cleanup()
    {
        base.Cleanup();

        Camera[] cameras = { fgCamera, bgCamera, compositionCamera };

        foreach (Camera c in cameras)
        {
            OVRCompositionUtil.SafeDestroy(c.gameObject);
        }

        fgCamera = null;
        bgCamera = null;
        compositionCamera = null;

        Debug.Log("SandwichComposition deactivated");
    }

    private RenderTextureFormat DesiredRenderTextureFormat(RenderTextureFormat originalFormat)
    {
        if (originalFormat == RenderTextureFormat.RGB565)
        {
            return RenderTextureFormat.ARGB1555;
        }
        else if (originalFormat == RenderTextureFormat.RGB111110Float)
        {
            return RenderTextureFormat.ARGBHalf;
        }
        else
        {
            return originalFormat;
        }
    }

    protected void RefreshRenderTextures(Camera mainCamera)
    {
        int width = Screen.width;
        int height = Screen.height;
        RenderTextureFormat format = mainCamera.targetTexture ? DesiredRenderTextureFormat(mainCamera.targetTexture.format) : RenderTextureFormat.ARGB32;
        int depth = mainCamera.targetTexture ? mainCamera.targetTexture.depth : 24;

        Debug.Assert(fgCamera != null && bgCamera != null);

        HistoryRecord record = historyRecordArray[historyRecordCursorIndex];

        record.timestamp = frameRealtime;

        if (record.fgRenderTexture == null || record.fgRenderTexture.width != width || record.fgRenderTexture.height != height || record.fgRenderTexture.format != format || record.fgRenderTexture.depth != depth)
        {
            record.fgRenderTexture = new RenderTexture(width, height, depth, format);
            record.fgRenderTexture.name = "Sandwich FG " + historyRecordCursorIndex.ToString();
        }
        fgCamera.targetTexture = record.fgRenderTexture;

        if (record.bgRenderTexture == null || record.bgRenderTexture.width != width || record.bgRenderTexture.height != height || record.bgRenderTexture.format != format || record.bgRenderTexture.depth != depth)
        {
            record.bgRenderTexture = new RenderTexture(width, height, depth, format);
            record.bgRenderTexture.name = "Sandwich BG " + historyRecordCursorIndex.ToString();
        }
        bgCamera.targetTexture = record.bgRenderTexture;

        if (OVRManager.instance.virtualGreenScreenType != OVRManager.VirtualGreenScreenType.Off)
        {
            if (record.boundaryMeshMaskTexture == null || record.boundaryMeshMaskTexture.width != width || record.boundaryMeshMaskTexture.height != height)
            {
                record.boundaryMeshMaskTexture = new RenderTexture(width, height, 0, RenderTextureFormat.R8);
                record.boundaryMeshMaskTexture.name = "Boundary Mask " + historyRecordCursorIndex.ToString();
                record.boundaryMeshMaskTexture.Create();
            }
        }
        else
        {
            record.boundaryMeshMaskTexture = null;
        }

        Debug.Assert(fgCamera.targetTexture != null && bgCamera.targetTexture != null && (OVRManager.instance.virtualGreenScreenType == OVRManager.VirtualGreenScreenType.Off || record.boundaryMeshMaskTexture != null));
    }

    protected HistoryRecord GetHistoryRecordForComposition()
    {
        float expectedTime = frameRealtime - OVRManager.instance.sandwichCompositionRenderLatency;
        int currIndex = historyRecordCursorIndex;
        int prevIndex = currIndex - 1;
        if (prevIndex < 0)
        {
            prevIndex = historyRecordCount - 1;
        }
        while (prevIndex != historyRecordCursorIndex)
        {
            if (historyRecordArray[prevIndex].timestamp <= expectedTime)
            {
                float timeToCurrIndex = historyRecordArray[currIndex].timestamp - expectedTime;
                float timeToPrevIndex = expectedTime - historyRecordArray[prevIndex].timestamp;
                return timeToCurrIndex <= timeToPrevIndex ? historyRecordArray[currIndex] : historyRecordArray[prevIndex];
            }
            currIndex = prevIndex;
            prevIndex = currIndex - 1;
            if (prevIndex < 0) prevIndex = historyRecordCount - 1;
        }
        // return the earliest frame
        return historyRecordArray[currIndex];
    }

    protected void RefreshCameraPoses(float fovY, float aspect, OVRPose pose)
    {
        Camera[] cameras = { fgCamera, bgCamera, compositionCamera };
        foreach (Camera c in cameras)
        {
            c.fieldOfView = fovY;
            c.aspect = aspect;
            c.transform.FromOVRPose(pose);
        }
    }

    public class OVRSandwichCompositionManager : MonoBehaviour
    {
        public RenderTexture fgTexture;
        public RenderTexture bgTexture;
        public Material alphaBlendMaterial;

        void Start()
        {
            Shader alphaBlendShader = Shader.Find("Oculus/UnlitTransparent");
            if (alphaBlendShader == null)
            {
                Debug.LogError("Unable to create transparent shader");
                return;
            }
            alphaBlendMaterial = new Material(alphaBlendShader);
        }

        private void OnPreRender()
        {
            if (fgTexture == null || bgTexture == null || alphaBlendMaterial == null)
            {
                Debug.LogError("OVRSandwichCompositionManager has not setup properly");
                return;
            }

            Graphics.Blit(bgTexture, RenderTexture.active);
        }

        void OnPostRender()
        {
            if (fgTexture == null || bgTexture == null || alphaBlendMaterial == null)
            {
                Debug.LogError("OVRSandwichCompositionManager has not setup properly");
                return;
            }

            Graphics.Blit(fgTexture, RenderTexture.active, alphaBlendMaterial);
        }
    }
}

#endif
12  Assets/Oculus/VR/Scripts/Composition/OVRSandwichComposition.cs.meta  Normal file
@@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 3c02efcdd3fb2aa4e9c641b0c2a54b9a
timeCreated: 1502990248
licenseType: Pro
MonoImporter:
  serializedVersion: 2
  defaultReferences: []
  executionOrder: 0
  icon: {instanceID: 0}
  userData:
  assetBundleName:
  assetBundleVariant: