Visualizing the VR tracking space is useful when designing an experience, so you can make sure the player stays in a reliable tracking location.

Unity 5 VR doesn’t yet have a way to access the tracking space. I assume that is coming soon. For now, though, you can access it using the new Oculus Utilities for Unity.

I decided to write my own so I could keep it very simple (no external meshes needed) and later change it to remove the Oculus Utilities dependency.

My code uses a LineRenderer to draw the pyramid-shaped view cone. The pyramid is determined by the tracker’s pose and frustum.
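
The geometry is straightforward: the apex of the pyramid sits at the tracker’s position, and the far end’s half-extents grow with distance and the tangent of half the FOV angle. As a rough sketch (using only the fov, nearZ and farZ fields that OVRTracker.Frustum exposes):

// frustum here stands for an OVRTracker.Frustum (e.g. OVRManager.tracker.frustum)
float depth = frustum.farZ - frustum.nearZ;                               // how deep to draw the cone
float halfWidth  = depth * Mathf.Tan(Mathf.Deg2Rad * frustum.fov.x / 2);  // horizontal half-extent at the far end
float halfHeight = depth * Mathf.Tan(Mathf.Deg2Rad * frustum.fov.y / 2);  // vertical half-extent at the far end

Rotating those extents by the tracker’s orientation gives the four far corners of the pyramid.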

The main challenge is that the Oculus API exposes tracker.GetPose and tracker.frustum calls which give you a view cone relative to the HMD position, but there is no way to know exactly when these change. You have to monitor them constantly, and when they do change, hope you caught it at the same moment you recorded the position of the HMD. I’ve seen some code that only updates every few seconds, and this is wrong, because it means your view cone could be way out.
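
In practice that means polling every frame and only rebuilding the cone when the reported pose actually changes. Boiled down, the check in the script’s Update() below is just:

if (OVRManager.tracker.isPresent && OVRManager.tracker.isEnabled)
{
    OVRPose trackerPose = OVRManager.tracker.GetPose(0f);
    if (trackerPose.position != TrackerResetPosition)   // the pose has changed since we last looked
    {
        TrackerResetPosition = trackerPose.position;
        // re-read OVRManager.tracker.frustum and redraw the cone here
    }
}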

To use it with OVR Camera Rig:

Create a game object under your “Tracking Space” and attach this script. Drag the center eye camera onto the VR Camera param.
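
The hierarchy then looks roughly like this (anchor names from the standard OVRCameraRig prefab; “View Cone” is just whatever you name the new object, other anchors omitted):

OVRCameraRig
  TrackingSpace
    LeftEyeAnchor
    CenterEyeAnchor      <- drag this onto the VR Camera param
    RightEyeAnchor
    View Cone            <- new game object with this script attached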

To use it with Unity 5 VR Camera:

Let’s say you are using the FirstPersonController: create a new game object named “Tracking Space” under the top-level FPSController and move the FirstPersonCharacter to be its child. Then, as above, make a game object under the Tracking Space and attach this script. Drag the FirstPersonCharacter camera onto the VR Camera param. The “Tracking Space” object makes it easy to keep the children at the same X, Y, Z position.
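
For reference, the hierarchy ends up looking something like this (object names from the standard FPSController prefab; “View Cone” is just whatever you name the new object):

FPSController
  Tracking Space             <- new empty game object
    FirstPersonCharacter     <- drag its camera onto the VR Camera param
    View Cone                <- new game object with this script attached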

Note: In both cases, the game object holding the view cone should be given the same transform values as the VR Camera, so the two match at start.
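
If you’d rather not copy those values by hand in the Inspector, a small Start() method like this (a hypothetical addition, not part of the script below) does the same job, assuming the view cone object and the VR camera sit under the same parent:

void Start()
{
    // Copy the VR camera's local transform so the cone lines up with it at start
    if (VRCamera != null)
    {
        transform.localPosition = VRCamera.localPosition;
        transform.localRotation = VRCamera.localRotation;
        transform.localScale = VRCamera.localScale;
    }
}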

Here is the code:

using UnityEngine;

/*
 * Draw the IR Camera view Cone with a line renderer
 * By Peter Koch <peterept@gmail.com>
 * Use:  'I' key: Hide/Show view cone
 * Note: Requires OVRUtilities to access the tracker pose and frustum:
 *       Go to https://developer.oculus.com/downloads and import "Oculus Utilities for Unity" 
 */
public class VRViewCone : MonoBehaviour 
{
    public bool Show = false;
    public Transform VRCamera;
    private Vector3 TrackerResetPosition;

    void Update () 
    {
        HandleInput();

        if (OVRManager.tracker.isPresent && OVRManager.tracker.isEnabled)
        {
            OVRPose trackerPose = OVRManager.tracker.GetPose(0f);

            // Rebuild the view cone only when the IR camera reports a new pose
            if (trackerPose.position != TrackerResetPosition)
            {
                TrackerResetPosition = trackerPose.position;
                OVRTracker.Frustum frustum = OVRManager.tracker.frustum;
                UpdateViewCone(TrackerResetPosition, trackerPose.orientation, frustum.fov, frustum.nearZ, frustum.farZ);
            }

            ShowViewCone(Show);
        }
    }

    void HandleInput()
    {
        if (Input.GetKeyDown(KeyCode.I))
        {
            Show = !Show;
        }
    }

    // Show or Hide our view cone 
    void ShowViewCone(bool show)
    {
        LineRenderer lr = gameObject.GetComponent<LineRenderer>();
        if (lr != null)
        {
            lr.enabled = show;
        }
    }

    // Update (create if necessary) our view cone 
    void UpdateViewCone(Vector3 trackerPosition, Quaternion trackerOrientation, Vector2 fov, float nearZ, float farZ)
    {
        LineRenderer lr = gameObject.GetComponent<LineRenderer>();
        if (lr == null)
        {
            lr = gameObject.AddComponent<LineRenderer>();
            lr.useWorldSpace = false;
            lr.SetWidth(0.001f, 0.001f);
            lr.shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
            lr.receiveShadows = false;
            lr.material = new Material(Shader.Find("UI/Default"));
            lr.material.color = Color.red;
        }
        
        float z = farZ - nearZ;   // depth of the drawn cone (near plane to far plane)
        // Far-plane half-extents: distance * tan(half the FOV angle)
        Vector3 x = trackerOrientation * Vector3.left * (z * Mathf.Tan(Mathf.Deg2Rad * fov.x/2));
        Vector3 y = trackerOrientation * Vector3.up * (z * Mathf.Tan(Mathf.Deg2Rad * fov.y/2));
        Vector3 farCenter = trackerOrientation * Vector3.forward * z;

        // Normalized View Cone co-ordinates
        Vector3[] points = new Vector3[] { 
            new Vector3(0,0,0),     // Apex
            new Vector3(-1,1,-1),     // Top-Left to
            new Vector3(1,1,-1),     // Top-Right
            new Vector3(0,0,0),     // Apex
            new Vector3(-1,-1,-1),     // Bottom-Left to
            new Vector3(1,-1,-1),    // Bottom-Right
            new Vector3(0,0,0),     // Apex
            new Vector3(-1,1,-1),     // Top-Left to
            new Vector3(-1,-1,-1),     // Bottom-Left
            new Vector3(1,-1,-1),     // Bottom-Right to
            new Vector3(1,1,-1)        // Top-Right
        };
        
        // Scale and position the view cone from the tracker's pose (local space, since useWorldSpace is false)
        lr.SetVertexCount(points.Length);
        int n = 0;
        foreach (Vector3 point in points)
        {
            lr.SetPosition(n++, trackerPosition + farCenter * -point.z + x * point.x + y * point.y);
        }
    }
}