using UnityEngine;
using System;
using System.Threading;
using UnityEngine.VR;
using System.Collections;
using System.Collections.Generic;
#if UNITY_EDITOR
using UnityEditor;
#endif

/// <summary>
/// The central script of the ZED Unity plugin, and the primary way a developer can interact with the camera.
/// It sets up and closes connection to the ZED, adjusts parameters based on user settings, enables/disables/handles
/// features like tracking, and holds numerous useful properties, methods, and callbacks.
/// </summary>
/// <remarks>
/// ZEDManager is attached to the root objects in the ZED_Rig_Mono and ZED_Rig_Stereo prefabs.
/// If using ZED_Rig_Stereo, it will set isStereoRig to true, which triggers several behaviors unique to stereo pass-through AR.
/// </remarks>
public class ZEDManager : MonoBehaviour
{
    /// <summary>
    /// Shared lock object.
    /// NOTE(review): never assigned in this chunk — confirm it is initialized elsewhere before any lock() on it.
    /// </summary>
    public static object grabLock;

    /// <summary>
    /// One slot per possible camera ID. Each ZEDManager registers itself here (see Awake()).
    /// </summary>
    static ZEDManager[] ZEDManagerInstance = null;

    /// <summary>
    /// Static function to get the instance of the ZEDManager with a given camera ID.
    /// See sl.ZED_CAMERA_ID for the available choices.
    /// </summary>
    /// <param name="_id">ID of the camera whose manager is requested.</param>
    /// <returns>The registered ZEDManager for that ID, or null if none has registered yet.</returns>
    public static ZEDManager GetInstance(sl.ZED_CAMERA_ID _id)
    {
        if (ZEDManagerInstance == null)
            return null;
        else
            return ZEDManagerInstance[(int)_id];
    }

    /// <summary>
    /// Static function to get all ZEDManagers that have been properly instantiated.
    /// Cameras may not necessarily be connected, if they haven't finished connecting, have disconnected,
    /// or if no camera is available.
    /// </summary>
    /// <returns>List of every non-null registered ZEDManager instance.</returns>
    public static List<ZEDManager> GetInstances()
    {
        List<ZEDManager> instances = new List<ZEDManager>();
        for (int i = 0; i < (int)sl.Constant.MAX_CAMERA_PLUGIN; i++)
        {
            ZEDManager instance = GetInstance((sl.ZED_CAMERA_ID)i);
            if (instance != null)
                instances.Add(instance);
        }
        return instances;
    }

    /// <summary>
    /// For advanced debugging. Default false. Set true for the Unity wrapper to log all SDK calls to a new file
    /// at C:/ProgramData/stereolabs/SL_Unity_wrapper.txt. This helps find issues that may occur within
    /// the protected .dll, but can decrease performance.
    /// </summary>
/// private bool wrapperVerbose = true; /// /// Current instance of the ZED Camera, which handles calls to the Unity wrapper .dll. /// public sl.ZEDCamera zedCamera = null; ///////////////////////////////////////////////////////////////////////// ///////////////////////// Camera Settings /////////////////////////////// ///////////////////////////////////////////////////////////////////////// /// /// Resolution setting for all images retrieved from the camera. Higher resolution means lower framerate. /// HD720 is strongly recommended for pass-through AR. /// /// /// Camera ID /// [HideInInspector] public sl.ZED_CAMERA_ID cameraID = sl.ZED_CAMERA_ID.CAMERA_ID_01; /// /// The accuracy of depth calculations. Higher settings mean more accurate occlusion and lighting but costs performance. /// Note there's a significant jump in performance cost between QUALITY and ULTRA modes. /// /*[Tooltip("The accuracy of depth calculations. Higher settings mean more accurate occlusion and lighting but costs performance.")]*/ [HideInInspector] public sl.DEPTH_MODE depthMode = sl.DEPTH_MODE.PERFORMANCE; /// /// Input Type in SDK (USB, SVO or Stream) /// [HideInInspector] public sl.INPUT_TYPE inputType = sl.INPUT_TYPE.INPUT_TYPE_USB; /// /// Camera Resolution /// [HideInInspector] public sl.RESOLUTION resolution = sl.RESOLUTION.HD720; /// /// Targeted FPS, based on the resolution. VGA = 100, HD720 = 60, HD1080 = 30, HD2K = 15. /// [HideInInspector] public int FPS = 60; /// /// SVO Input FileName /// [HideInInspector] public string svoInputFileName = ""; /// /// SVO loop back option /// [HideInInspector] public bool svoLoopBack = true; /// /// SVO loop back option /// [HideInInspector] public bool svoRealTimeMode = false; /// /// Current frame being read from the SVO. Doesn't apply when recording. /// [HideInInspector] [SerializeField] private int currentFrame = 0; /// /// Current frame being read from the SVO. Doesn't apply when recording. 
/// public int CurrentFrame { get { return currentFrame; } set { currentFrame = value; } } /// /// Total number of frames in a loaded SVO. /// [HideInInspector] [SerializeField] private int numberFrameMax = 0; /// /// Total number of frames in a loaded SVO. /// public int NumberFrameMax { set { numberFrameMax = value; } get { return numberFrameMax; } } [HideInInspector] [SerializeField] public bool pauseSVOReading = false; [HideInInspector] public bool pauseLiveReading = false; /// /// Ask a new frame is in pause (SVO only) /// [HideInInspector] public bool NeedNewFrameGrab = false; /// /// Streaming Input IP (v2.8) /// [HideInInspector] public string streamInputIP = "127.0.0.1"; /// /// Streaming Input Port (v2.8) /// [HideInInspector] public int streamInputPort = 30000; ///////////////////////////////////////////////////////////////////////// ///////////////////////// Motion Tracking /////////////////////////////// ///////////////////////////////////////////////////////////////////////// /// /// If enabled, the ZED will move/rotate itself using its own inside-out tracking. /// If false, the camera tracking will move with the VR HMD if connected and available. /// Normally, ZEDManager's GameObject will move according to the tracking. But if in AR pass-through mode, /// then the Camera_eyes object in ZED_Rig_Stereo will move while this object stays still. /// [HideInInspector] public bool enableTracking = true; /// /// Enables the spatial memory. Will detect and correct tracking drift by remembering features and anchors in the environment, /// but may cause visible jumps when it happens. /// [HideInInspector] public bool enableSpatialMemory = true; /// /// If using Spatial Memory, you can specify a path to an existing .area file to start with some memory already loaded. /// .area files are created by scanning a scene with ZEDSpatialMappingManager and saving the scan. 
/// [HideInInspector] public string pathSpatialMemory; /// /// Estimate initial position by detecting the floor. /// [HideInInspector] public bool estimateInitialPosition = true; ///////////////////////////////////////////////////////////////////////// ///////////////////////// Spatial Mapping /////////////////////////////// ///////////////////////////////////////////////////////////////////////// /// /// Resolution setting for the scan. A higher resolution creates more submeshes and uses more memory, but is more accurate. /// [HideInInspector] public ZEDSpatialMapping.RESOLUTION mappingResolutionPreset = ZEDSpatialMapping.RESOLUTION.MEDIUM; /// /// Maximum distance geometry can be from the camera to be scanned. Geometry scanned from farther away will be less accurate. /// [HideInInspector] public ZEDSpatialMapping.RANGE mappingRangePreset = ZEDSpatialMapping.RANGE.MEDIUM; /// /// Whether mesh filtering is needed. /// [HideInInspector] public bool isMappingFilteringEnable = false; /// /// Whether surface textures will be scanned and applied. Note that texturing will add further delay to the post-scan finalizing period. /// [HideInInspector] public bool isMappingTextured = false; /// /// Whether to save the mesh .obj and .area files once the scan is finished. /// [HideInInspector] public bool saveMeshWhenOver = false; /// /// Path to save the .obj and .area files. /// [HideInInspector] public string meshPath = "Assets/ZEDMesh.obj"; /// /// Filtering setting. More filtering results in fewer faces in the mesh, reducing both file size and accuracy. /// [HideInInspector] public sl.FILTER meshFilterParameters; /// /// Instance of the ZEDSpatialMapping class that handles the actual spatial mapping implementation within Unity. /// [HideInInspector] private ZEDSpatialMapping spatialMapping = null; public ZEDSpatialMapping GetSpatialMapping { get { return spatialMapping; } } /// /// Whether the spatial mapping is currently scanning. 
/// public bool IsMappingRunning { get { return spatialMapping != null ? spatialMapping.IsRunning() : false; } } /// /// List of the processed submeshes. This list isn't filled until StopSpatialMapping() is called. /// public List MappingChunkList { get { return spatialMapping != null ? spatialMapping.ChunkList : null; } } /// /// Whether the mesh update thread is running. /// public bool IsMappingUpdateThreadRunning { get { return spatialMapping != null ? spatialMapping.IsUpdateThreadRunning : false; } } /// /// Whether the spatial mapping was running but has been paused (not stopped) by the user. /// public bool IsMappingPaused { get { return spatialMapping != null ? spatialMapping.IsPaused : false; } } /// /// Whether the mesh is in the texturing stage of finalization. /// public bool IsMappingTexturingRunning { get { return spatialMapping != null ? spatialMapping.IsTexturingRunning : false; } } /// /// Gets a value indicating whether the spatial mapping display is enabled. /// public bool IsSpatialMappingDisplay { get { return spatialMapping != null ? spatialMapping.display : false; } } ///////////////////////////////////////////////////////////////////////// ///////////////////////////// Rendering /////////////////////////////////// ///////////////////////////////////////////////////////////////////////// /// /// Rendering paths available to the ZED with the corresponding Unity rendering path. /// public enum ZEDRenderingMode { FORWARD = RenderingPath.Forward, DEFERRED = RenderingPath.DeferredShading }; /// /// When enabled, the real world can occlude (cover up) virtual objects that are behind it. /// Otherwise, virtual objects will appear in front. /// [HideInInspector] public bool depthOcclusion = true; /// /// Enables post-processing effects on virtual objects that blends them in with the real world. /// [HideInInspector] public bool postProcessing = true; /// /// Field version of CameraBrightness property. 
/// [SerializeField] [HideInInspector] private int m_cameraBrightness = 100; /// Brightness of the final real-world image. Default is 100. Lower to darken the environment in a realistic-looking way. /// This is a rendering setting that doesn't affect the raw input from the camera. /// public int CameraBrightness { get { return m_cameraBrightness; } set { if (m_cameraBrightness == value) return; m_cameraBrightness = value; if (OnCamBrightnessChange != null) OnCamBrightnessChange(m_cameraBrightness); } } /// /// Field version of MaxDepthRange property. /// [SerializeField] [HideInInspector] private float m_maxDepthRange = 20f; /// /// Maximum depth at which the camera will display the real world, in meters. Pixels further than this value will be invisible. /// public float MaxDepthRange { get { return m_maxDepthRange; } set { if (m_maxDepthRange == value) return; m_maxDepthRange = value; if (OnMaxDepthChange != null) OnMaxDepthChange(m_maxDepthRange); } } ///////////////////////////////////////////////////////////////////////// ///////////////////////// Recording Module ////////////////////////////// ///////////////////////////////////////////////////////////////////////// /// /// SVO Output file name /// [HideInInspector] public string svoOutputFileName = "Assets/Recording.svo"; /// /// SVO Compression mode used for recording /// [HideInInspector] public sl.SVO_COMPRESSION_MODE svoOutputCompressionMode = sl.SVO_COMPRESSION_MODE.AVCHD_BASED; /// /// Indicates if frame must be recorded /// [HideInInspector] public bool needRecordFrame = false; ///////////////////////////////////////////////////////////////////////// ///////////////////////// Streaming Module ////////////////////////////// ///////////////////////////////////////////////////////////////////////// /// /// Enable/Disable Streaming module /// [HideInInspector] public bool enableStreaming = false; /// /// Status of streaming request /// private bool isStreamingEnable = false; /// /// Codec used for 
Streaming /// [HideInInspector] public sl.STREAMING_CODEC streamingCodec = sl.STREAMING_CODEC.AVCHD_BASED; /// /// port used for Streaming /// [HideInInspector] public int streamingPort = 30000; /// /// bitrate used for Streaming /// [HideInInspector] public int bitrate = 8000; /// /// gop size used for Streaming /// [HideInInspector] public int gopSize = -1; /// /// Enable/Disable adaptative bitrate /// [HideInInspector] public bool adaptativeBitrate = false; ///////////////////////////////////////////////////////////////////////// ///////////////////////// Advanced control ///////////////////////////// ///////////////////////////////////////////////////////////////////////// /// /// /// True to make the ZED image fade from black when the application starts. /// [HideInInspector] public bool fadeInOnStart = true; /// /// True to apply DontDestroyOnLoad() on the ZED rig in Awake(), preserving it between scenes. /// [HideInInspector] public bool dontDestroyOnLoad = false; /// /// Grey Out Skybox on Start", "True to set the background to a neutral gray when the scene starts. /// Recommended for AR so that lighting on virtual objects better matches the real world. /// [HideInInspector] public bool greySkybox = true; /// /// Field version of confidenceThreshold property. /// [SerializeField] [HideInInspector] private int m_confidenceThreshold = 100; /// /// How tolerant the ZED SDK is to low confidence values. Lower values filter more pixels. /// public int confidenceThreshold { get { return m_confidenceThreshold; } set { if (value == m_confidenceThreshold) return; m_confidenceThreshold = Mathf.RoundToInt(Mathf.Clamp(value, 0, 100)); if (Application.isPlaying && zedReady) { zedCamera.SetConfidenceThreshold(m_confidenceThreshold); } } } /// /// Delegate for OnCamBrightnessChange, which is used to update shader properties when the brightness setting changes. 
/// public delegate void onCamBrightnessChangeDelegate(int newVal); /// /// Event fired when the camera brightness setting is changed. Used to update shader properties. /// public event onCamBrightnessChangeDelegate OnCamBrightnessChange; /// /// Delegate for OnCamBrightnessChange, which is used to update shader properties when the max depth setting changes. /// public delegate void onMaxDepthChangeDelegate(float newVal); /// /// Event fired when the max depth setting is changed. Used to update shader properties. /// public event onMaxDepthChangeDelegate OnMaxDepthChange; /// /// Whether to show the hidden camera rig used in stereo AR mode to prepare images for HMD output. /// [SerializeField] [HideInInspector] private bool showarrig = false; /// /// Whether to show the hidden camera rig used in stereo AR mode to prepare images for HMD output. /// This is rarely needed, but can be useful for understanding how the ZED output works. /// public bool showARRig { get { return showarrig; } set { if (Application.isPlaying && showarrig != value && zedRigDisplayer != null) { zedRigDisplayer.hideFlags = value ? HideFlags.None : HideFlags.HideInHierarchy; } showarrig = value; } } private float maxdepthrange = 20f; public float maxDepthRange { get { return maxdepthrange; } set { maxdepthrange = Mathf.Clamp(value, 0, 20); if (Application.isPlaying) { setRenderingSettings(); } } } ///////////////////////////////////////////////////////////////////////// ///////////////////////// Status Report ///////////////////////////////// ///////////////////////////////////////////////////////////////////////// //Strings used for the Status display in the Inspector. [Header("Status")] /// /// The camera model (ZED or ZED-M). /// [ReadOnly("Camera S/N")] [HideInInspector] public string cameraModel = "-"; /// /// The camera serial number. 
/// [ReadOnly("Camera S/N")] [HideInInspector] public string cameraSerialNumber = "-"; /// /// The camera firmware version /// [ReadOnly("Camera Firmware")] [HideInInspector] public string cameraFirmware = "-"; /// /// Version of the installed ZED SDK, for display in the Inspector. /// [ReadOnly("Version")] [HideInInspector] public string versionZED = "-"; /// /// How many frames per second the engine is rendering, for display in the Inspector. /// [ReadOnly("Engine FPS")] [HideInInspector] public string engineFPS = "-"; /// /// How many images per second are received from the ZED, for display in the Inspector. /// [ReadOnly("Camera FPS")] [HideInInspector] public string cameraFPS = "-"; /// /// The connected VR headset, if any, for display in the Inspector. /// [ReadOnly("HMD Device")] [HideInInspector] public string HMDDevice = "-"; /// /// Whether the ZED's tracking is on, off, or searching (lost position, trying to recover) for display in the Inspector. /// [ReadOnly("Tracking State")] [HideInInspector] public string trackingState = "-"; //////////////////////////// //////// Private /////////// //////////////////////////// /// /// Initialization parameters used to start the ZED. Holds settings that can't be changed at runtime /// (resolution, depth mode, .SVO path, etc.). /// private sl.InitParameters initParameters; /// /// Runtime parameters used to grab a new image. Settings can change each frame, but are lower level /// (sensing mode, point cloud, if depth is enabled, etc.). /// private sl.RuntimeParameters runtimeParameters; /// /// Enables the ZED SDK's depth stabilizer, which improves depth accuracy and stability. There's rarely a reason to disable this. /// private bool depthStabilizer = true; /// /// Disable the IMU of the ZED-M /// private bool cameraDisableIMU = false; /// /// Set the camera in Flip mode /// private bool cameraFlipMode = false; /// /// Whether the camera is currently being tracked using the ZED's inside-out tracking. 
/// private bool isZEDTracked = false; /// /// Whether the ZED's inside-out tracking has been activated. /// private bool isTrackingEnable = false; /// /// Whether the camera is tracked in any way (ZED's tracking or a VR headset's tracking). /// private bool isCameraTracked = false; /// /// Public accessor for whether the camera is tracked in any way (ZED's tracking or a VR headset's tracking). /// public bool IsCameraTracked { get { return isCameraTracked; } } /// /// Whether the camera has a new frame available. /// private bool isNewFrameGrabbed = false; /// /// Public accessor for whether the camera has a new frame available. /// public bool IsNewFrameGrabbed { get { return isNewFrameGrabbed; } } /// /// Orientation last returned by the ZED's tracking. /// private Quaternion zedOrientation = Quaternion.identity; /// /// Position last returned by the ZED's tracking. /// private Vector3 zedPosition = Vector3.zero; /// /// Position of the camera (zedRigRoot) when the scene starts. Not used in Stereo AR. /// private Vector3 initialPosition = new Vector3(); /// /// Orientation of the camera (zedRigRoot) when the scene starts. Not used in Stereo AR. /// private Quaternion initialRotation = Quaternion.identity; /// /// Sensing mode: STANDARD or FILL. FILL corrects for missing depth values. /// Almost always better to use FILL, since we need depth without holes for proper occlusion. /// [SerializeField] [HideInInspector] public sl.SENSING_MODE sensingMode = sl.SENSING_MODE.FILL; /// /// Rotation offset used to retrieve the tracking with a rotational offset. /// private Quaternion rotationOffset; /// /// Position offset used to retrieve the tracking with a positional offset. /// private Vector3 positionOffset; /// /// Enables pose smoothing during drift correction. Leave it to true. /// private bool enablePoseSmoothing = true; /// /// The engine FPS, updated every frame. 
/// private float fps_engine = 90.0f; /// /// Recording state /// private bool isRecording = false; /////////////////////////////////////// /////////// Static States ///////////// /////////////////////////////////////// /// /// Whether AR mode is activated. /// private bool isStereoRig = false; /// /// Whether AR mode is activated. Assigned by ZEDManager.CheckStereoMode() in Awake(). /// Will be true if the ZED_Rig_Stereo prefab (or a similarly-structured prefab) is used. /// public bool IsStereoRig { get { return isStereoRig; } } /// /// Checks if the ZED has finished initializing. /// private bool zedReady = false; /// /// Checks if the ZED has finished initializing. /// public bool IsZEDReady { get { return zedReady; } } /// /// Flag set to true if the camera was connected and the wasn't anymore. /// Causes ZEDDisconnected() to be called each frame, which attemps to restart it. /// private bool isDisconnected = false; /// /// Current state of tracking: On, Off, or Searching (lost tracking, trying to recover). Used by anti-drift. /// private sl.TRACKING_STATE zedtrackingState = sl.TRACKING_STATE.TRACKING_OFF; /// /// Current state of tracking: On, Off, or Searching (lost tracking, trying to recover). Used by anti-drift. /// public sl.TRACKING_STATE ZEDTrackingState { get { return zedtrackingState; } } /// /// First position registered after the tracking has started (whether via ZED or a VR HMD). /// public Vector3 OriginPosition { get; private set; } /// /// First rotation/orientation registered after the tracking has started (whether via ZED or a VR HMD). /// public Quaternion OriginRotation { get; private set; } /// /// In AR pass-through mode, whether to compare the ZED's IMU data against the reported position of /// the VR headset. This helps compensate for drift and should usually be left on. /// However, in some setups, like when using a custom mount, this can cause tracking errors. 
/// /// Read more about the potential errors here: https://support.stereolabs.com/hc/en-us/articles/360026482413 /// public bool setIMUPriorInAR = true; /////////////////////////////////////////////////// [HideInInspector] public Quaternion gravityRotation = Quaternion.identity; [HideInInspector] public Vector3 ZEDSyncPosition; [HideInInspector] public Vector3 HMDSyncPosition; [HideInInspector] public Quaternion ZEDSyncRotation; [HideInInspector] public Quaternion HMDSyncRotation; /// /// Image acquisition thread. /// private Thread threadGrab = null; /// /// State of the image acquisition thread. /// private bool running = false; /// /// Initialization thread. /// private Thread threadOpening = null; /// /// Result of the latest attempt to initialize the ZED. /// private sl.ERROR_CODE lastInitStatus = sl.ERROR_CODE.ERROR_CODE_LAST; public sl.ERROR_CODE LastInitStatus { get { return lastInitStatus; } } /// /// State of the ZED initialization thread. /// private bool openingLaunched; /// /// Wait Handle used to safely tell the init thread to shut down. /// EventWaitHandle initQuittingHandle; /// /// When true, the init thread will close early instead of completing all its connection attempts. /// Set to true when the application is closed before a camera finishes its initialization. /// private bool forceCloseInit = false; /// /// Tracking initialization thread. Used as the tracking takes some time to start. /// private Thread trackerThread = null; /////////////////////////////////////////// ////// Camera and Player Transforms ////// /////////////////////////////////////////// /// /// Transform of the left camera in the ZED rig. /// private Transform camLeftTransform = null; /// /// Transform of the right camera in the ZED rig. Only exists in a stereo rig (like ZED_Rig_Stereo). /// private Transform camRightTransform = null; /// /// Contains the position of the player's head, which is different from the ZED's position in AR mode. 
/// But its position relative to the ZED does not change during use (it's a rigid transform). /// In ZED_Rig_Mono, this will be the root ZED_Rig_Mono object. In ZED_Rig_Stereo, this is Camera_eyes. /// private Transform zedRigRoot = null; /// /// Left camera in the ZED rig. Also the "main" camera if in ZED_Rig_Mono. /// private Camera cameraLeft; /// /// Right camera of the ZED rig. Only exists in a stereo rig (like ZED_Rig_Stereo). /// private Camera cameraRight; /// /// Gets the center transform, which is the transform moved by the tracker in AR mode. /// This is the root object in ZED_Rig_Mono, and Camera_eyes in ZED_Rig_Stereo. /// public Transform GetZedRootTansform() { return zedRigRoot; } /// /// Returns the left ZED camera transform. If there is no left camera but there is a right camera, /// returns the right camera transform instead. /// /// public Transform GetMainCameraTransform() { if (camLeftTransform) return camLeftTransform; else if (camRightTransform) return camRightTransform; else return null; } /// /// Gets the left camera transform in the ZED rig. It's best to use this one as it's available in all configurations. /// public Transform GetLeftCameraTransform() { return camLeftTransform; } /// /// Get the right camera transform in the ZED rig. Only available in the stereo rig (ZED_Rig_Stereo). /// public Transform GetRightCameraTransform() { return camRightTransform; } /// /// Returns the left ZED camera. If there is no left camera but there is a right camera, /// returns the right camera instead. /// /// public Camera GetMainCamera() { if (cameraLeft) return cameraLeft; else if (cameraRight) return cameraRight; else return null; } /// /// Gets the left camera in the ZED rig. Both ZED_Rig_Mono and ZED_Rig_Stereo have a left camera by default. /// public Camera GetLeftCamera() { if (cameraLeft == null && camLeftTransform != null) cameraLeft = camLeftTransform.GetComponent(); return cameraLeft; } /// /// Get the right camera in the ZED rig. 
Only available in the stereo rig (ZED_Rig_Stereo) unless configured otherwise. /// public Camera GetRightCamera() { if (cameraRight == null && camRightTransform != null) cameraRight = camRightTransform.GetComponent(); return cameraRight; } /// /// Save the foldout options as it was used last time /// [SerializeField] [HideInInspector] private bool advancedPanelOpen = false; [SerializeField] [HideInInspector] private bool spatialMappingFoldoutOpen = false; [SerializeField] [HideInInspector] private bool recordingFoldoutOpen = false; [SerializeField] [HideInInspector] private bool streamingOutFoldoutOpen = false; [SerializeField] [HideInInspector] private bool camControlFoldoutOpen = false; ///////////////////////////////////// ////// Timestamps ////// ///////////////////////////////////// /// /// Timestamp of the last ZED image grabbed. Textures from this grab may not have updated yet. /// private ulong cameraTimeStamp = 0; /// /// Timestamp of the last ZED image grabbed. Textures from this grab may not have updated yet. /// public ulong CameraTimeStamp { get { return cameraTimeStamp; } } /// /// Timestamp of the images used to create the current textures. /// private ulong imageTimeStamp = 0; /// /// Timestamp of the images used to create the current textures. /// public ulong ImageTimeStamp { get { return imageTimeStamp; } } /// /// Whether the grabbing thread should grab a new frame from the ZED SDK. /// True unless the last grabbed frame hasn't been applied yet, or the ZED isn't initialized. /// private bool requestNewFrame = false; /// /// Whether a new frame has been grabbed from the ZED SDK that needs to be updated. /// private bool newFrameAvailable = false; ///////////////////////////////////// ////// Layers for ZED ////// ///////////////////////////////////// /// /// Layer assigned to the cameras and objects of a (normally hidden) AR camera rig created to handle /// pass-through AR. 
This allows the cameras to see nothing but two canvas objects with the final MR images. /// [HideInInspector] public int arLayer { get { return arlayer; } } [SerializeField] [HideInInspector] private int arlayer = 30; ///////////////////////////////////// ////// ZED specific events ////// ///////////////////////////////////// /// /// Delegate for OnZEDReady. /// public delegate void OnZEDManagerReady(); /// /// Called when the ZED has finished initializing successfully. /// Used by many scripts to run startup logic that requires that the ZED is active. /// public event OnZEDManagerReady OnZEDReady; /// /// Delegate for OnZEDDisconnected. /// public delegate void OnZEDManagerDisconnected(); /// /// Event called when ZED was running but became disconnected. /// public event OnZEDManagerDisconnected OnZEDDisconnected; /// /// Delegate for new Frame grabbed for external module update /// public delegate void OnGrabAction(); /// /// Event called when ZED has grabbed a new frame. /// public event OnGrabAction OnGrab; #region CHECK_AR /// /// Checks if this GameObject is a stereo rig. Requires a child object called 'Camera_eyes' and /// two cameras as children of that object, one with stereoTargetEye set to Left, the other two Right. /// Regardless, sets references to leftCamera and (if relevant) rightCamera. /// private void CheckStereoMode() { zedRigRoot = gameObject.transform; //The object moved by tracking. By default it's this Transform. May get changed. bool devicePresent = UnityEngine.XR.XRDevice.isPresent; //May not need. 
//Set first left eye Component[] cams = gameObject.GetComponentsInChildren(); //Camera firstmonocam = null; List monocams = new List(); foreach (Camera cam in cams) { switch (cam.stereoTargetEye) { case StereoTargetEyeMask.Left: if (!cameraLeft) { cameraLeft = cam; camLeftTransform = cam.transform; } break; case StereoTargetEyeMask.Right: if (!cameraRight) { cameraRight = cam; camRightTransform = cam.transform; } break; case StereoTargetEyeMask.None: monocams.Add(cam); break; case StereoTargetEyeMask.Both: default: break; } } //If the left camera or right camera haven't been assigned via stereo target eyes, search the monocams //based on their ZEDRenderingPlane assignments. //This won't affect whether the rig is in stereo mode, but allows the cameras to be accessed via GetLeftCamera() and GetRightCamera(). if (cameraLeft == null || cameraRight == null) { foreach (Camera cam in monocams) { ZEDRenderingPlane rendplane = cam.gameObject.GetComponent(); if (!rendplane) continue; if (!cameraLeft && (rendplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.LEFT || rendplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.LEFT_FORCE)) { cameraLeft = cam; camLeftTransform = cam.transform; } else if (!cameraRight && (rendplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.RIGHT || rendplane.viewSide == ZEDRenderingPlane.ZED_CAMERA_SIDE.RIGHT_FORCE)) { cameraRight = cam; camRightTransform = cam.transform; } } } if (camLeftTransform && camRightTransform && cameraLeft.stereoTargetEye == StereoTargetEyeMask.Left) //We found both a left- and right-eye camera. { isStereoRig = UnityEngine.XR.XRDevice.isPresent; if (camLeftTransform.transform.parent != null) { zedRigRoot = camLeftTransform.parent; //Make the camera's parent object (Camera_eyes in the ZED_Rig_Stereo prefab) the new zedRigRoot to be tracked. 
Debug.Log("set camLeftTransform.position"); } if (UnityEngine.XR.XRDevice.isPresent) { isStereoRig = true; } else { isStereoRig = false; //If there's no VR headset, then cameras set to Left and Right won't display in Unity. Set them both to None. if (cameraLeft) cameraLeft.stereoTargetEye = StereoTargetEyeMask.None; if (cameraRight) cameraRight.stereoTargetEye = StereoTargetEyeMask.None; } } else //Not all conditions for a stereo rig were met. { isStereoRig = false; if (camLeftTransform) { Camera caml = camLeftTransform.gameObject.GetComponent(); cameraLeft = caml; if (camLeftTransform.transform.parent != null) { zedRigRoot = camLeftTransform.parent; } } else { zedRigRoot = transform; } } } #endregion /// /// Sets the target GameObject and all its children to the specified layer. /// /// Target GameObject. /// Layer that the GameObject and all children will be set to. public static void SetLayerRecursively(GameObject go, int layerNumber) { if (go == null) return; foreach (Transform trans in go.GetComponentsInChildren(true)) { trans.gameObject.layer = layerNumber; } } /// /// Stops the initialization and grabbing threads. /// public void Destroy() { running = false; //In case the opening thread is still running. if (threadOpening != null) { initQuittingHandle.Reset(); forceCloseInit = true; initQuittingHandle.Set(); threadOpening.Join(); threadOpening = null; } //Shut down the image grabbing thread. if (threadGrab != null) { threadGrab.Join(); threadGrab = null; } if (IsMappingRunning) StopSpatialMapping(); Thread.Sleep(10); } /// /// Called by Unity when the application is closed. /// Also called by Reset() to properly start from a 'clean slate.' /// void OnApplicationQuit() { CloseManager(); //sl.ZEDCamera.UnloadPlugin(); //If this was the last camera to close, make sure all instances are closed. 
bool notlast = false; foreach (ZEDManager manager in ZEDManagerInstance) { if (manager != null && manager.IsZEDReady == true) { notlast = true; break; } } if (notlast == false) { sl.ZEDCamera.UnloadPlugin(); } } private void CloseManager() { if (spatialMapping != null) spatialMapping.Dispose(); ClearRendering(); zedReady = false; OnCamBrightnessChange -= SetCameraBrightness; OnMaxDepthChange -= SetMaxDepthRange; Destroy(); //Close the grab and initialization threads. if (zedCamera != null) { if (isRecording) { zedCamera.DisableRecording(); } zedCamera.Destroy(); zedCamera = null; } #if UNITY_EDITOR //Prevents building the app otherwise. //Restore the AR layers that were hidden, if necessary. if (!showarrig) { LayerMask layerNumberBinary = (1 << arLayer); //Convert layer index into binary number. UnityEditor.Tools.visibleLayers |= (layerNumberBinary); } #endif sl.ZEDCamera.UnloadInstance((int)cameraID); } private void ClearRendering() { if (camLeftTransform != null) { ZEDRenderingPlane leftRenderingPlane = camLeftTransform.GetComponent(); if (leftRenderingPlane) { leftRenderingPlane.Clear(); } } if (IsStereoRig) { ZEDRenderingPlane rightRenderingPlane = GetRightCameraTransform().GetComponent(); rightRenderingPlane.Clear(); } } /// /// Sets up starting properties and starts the ZED initialization co-routine. /// void Awake() { // If never initialized, init the array of instances linked to each ZEDManager that could be created. 
    if (ZEDManagerInstance == null)
    {
        ZEDManagerInstance = new ZEDManager[(int)sl.Constant.MAX_CAMERA_PLUGIN];
        for (int i = 0; i < (int)sl.Constant.MAX_CAMERA_PLUGIN; i++)
            ZEDManagerInstance[i] = null;
    }

    initialPosition = transform.localPosition;
    initialRotation = transform.localRotation;

    zedReady = false;
    ZEDManagerInstance[(int)cameraID] = this; //Register this manager in the slot for its camera ID.
    zedCamera = new sl.ZEDCamera();
    LayerHandler.GetInstance().setUsed(cameraID, true);

    if (dontDestroyOnLoad)
        DontDestroyOnLoad(transform.root); //If you want the ZED rig not to be destroyed when loading a scene.

    //Set first few parameters for initialization. This will get passed to the ZED SDK when initialized.
    initParameters = new sl.InitParameters();
    initParameters.resolution = resolution;
    initParameters.cameraFPS = FPS;
    initParameters.cameraID = (int)cameraID;
    initParameters.depthMode = depthMode;
    initParameters.depthStabilization = depthStabilizer;
    initParameters.cameraDisableIMU = cameraDisableIMU;
    initParameters.cameraImageFlip = cameraFlipMode;

    //Check if this rig is a stereo rig. Will set isStereoRig accordingly.
    CheckStereoMode();

    //Set initialization parameters that may change depending on what was done in CheckStereoMode().
    isZEDTracked = enableTracking;
    zedPosition = initialPosition;
    zedOrientation = initialRotation;

    lastInitStatus = sl.ERROR_CODE.ERROR_CODE_LAST;

    bool res = zedCamera.CreateCamera((int)cameraID, wrapperVerbose);
    if (!res)
    {
        Debug.LogError("ZEDManager on " + gameObject.name + " couldn't connect to camera: " + cameraID +
            ". Check if another ZEDManager is already connected.");
        this.gameObject.SetActive(false); //Disable this rig so Update()/coroutines never run without a camera.
        return;
    }
    initParameters.inputType = inputType;
    if (inputType == sl.INPUT_TYPE.INPUT_TYPE_USB)
    {
        //USB input needs no extra parameters.
    }
    else if (inputType == sl.INPUT_TYPE.INPUT_TYPE_SVO)
    {
        initParameters.pathSVO = svoInputFileName;
        initParameters.svoRealTimeMode = svoRealTimeMode;
    }
    else if (inputType == sl.INPUT_TYPE.INPUT_TYPE_STREAM)
    {
        initParameters.ipStream = streamInputIP;
        initParameters.portStream = (ushort)streamInputPort;
    }

    versionZED = "[SDK]: " + sl.ZEDCamera.GetSDKVersion().ToString() + " [Plugin]: " + sl.ZEDCamera.PluginVersion.ToString();

    //Behavior specific to AR pass-through mode.
    if (isStereoRig)
    {
        //Creates a hidden camera rig that handles final output to the headset.
        GameObject o = CreateZEDRigDisplayer();
        if (!showarrig)
            o.hideFlags = HideFlags.HideInHierarchy;
        o.transform.parent = transform;

        //Force some initParameters that are required for a good AR experience.
        initParameters.enableRightSideMeasure = isStereoRig; //Creates a depth map for both eyes, not just one.
        initParameters.depthMinimumDistance = 0.1f; //Allow depth calculation to very close objects.

        //For the Game/output window, mirror the headset view using a custom script that avoids stretching.
        CreateMirror();
    }

    //Starts a coroutine that initializes the ZED without freezing the game.
    lastInitStatus = sl.ERROR_CODE.ERROR_CODE_LAST;
    openingLaunched = false;
    StartCoroutine(InitZED());

    OnCamBrightnessChange += SetCameraBrightness; //Subscribe event for adjusting brightness setting.
    OnMaxDepthChange += SetMaxDepthRange;

    //Create Module Object
    //Create the spatial mapping module object (even if not used necessarly)
    spatialMapping = new ZEDSpatialMapping(transform, this);
}

void Start()
{
    //adjust layers for multiple camera
    //setLayersForMultiCamera ();
}

#region INITIALIZATION
//const int MAX_OPENING_TRIES = 10;
private uint numberTriesOpening = 0; // Counter of tries to open the ZED

/// <summary>
/// ZED opening function.
/// Should be called in the initialization thread (threadOpening).
/// </summary>
private void OpenZEDInBackground()
{
    openingLaunched = true;
    int timeout = 0;

    do
    {
        initQuittingHandle.WaitOne(0); //Makes sure we haven't been turned off early, which only happens in Destroy() from OnApplicationQuit().
        if (forceCloseInit)
            break;

        lastInitStatus = zedCamera.Init(ref initParameters);
        timeout++;
        numberTriesOpening++;
    } while (lastInitStatus != sl.ERROR_CODE.SUCCESS);
    //NOTE(review): 'timeout' is incremented but never compared against a limit, so this loop
    //retries indefinitely until SUCCESS or forceCloseInit — confirm this is intended.
}

/// <summary>
/// Initialization coroutine. Spawns the opening thread, waits for the camera to open,
/// then starts tracking, streaming, and the grab thread.
/// </summary>
private System.Collections.IEnumerator InitZED()
{
    zedReady = false;
    if (!openingLaunched)
    {
        initQuittingHandle = new EventWaitHandle(true, EventResetMode.ManualReset);
        threadOpening = new Thread(new ThreadStart(OpenZEDInBackground)); //Assign thread.
        threadOpening.Start();
    }

    //Poll until the opening thread reports success.
    while (lastInitStatus != sl.ERROR_CODE.SUCCESS)
    {
        yield return new WaitForSeconds(0.3f);
    }

    //ZED has initialized successfully.
    if (lastInitStatus == sl.ERROR_CODE.SUCCESS)
    {
        threadOpening.Join();
        //Initialize the tracking thread, AR initial transforms and SVO read/write as needed.
        ZEDReady();

        //If using tracking, wait until the tracking thread has been initialized.
        while (enableTracking && !isTrackingEnable)
        {
            yield return new WaitForSeconds(0.5f);
        }

        //Tells all the listeners that the ZED is ready! :)
        if (OnZEDReady != null)
        {
            OnZEDReady();
        }

        //Make sure the screen is at 16:9 aspect ratio or close. Warn the user otherwise.
        float ratio = (float)Screen.width / (float)Screen.height;
        float target = 16.0f / 9.0f;
        if (Mathf.Abs(ratio - target) > 0.01)
        {
            Debug.LogWarning(ZEDLogMessage.Error2Str(ZEDLogMessage.ERROR.SCREEN_RESOLUTION));
        }

        //get informations from camera (S/N, firmware, model...)
        cameraModel = zedCamera.GetCameraModel().ToString();
        cameraFirmware = zedCamera.GetZEDFirmwareVersion().ToString();
        cameraSerialNumber = zedCamera.GetZEDSerialNumber().ToString();

        if (inputType == sl.INPUT_TYPE.INPUT_TYPE_SVO)
        {
            numberFrameMax = zedCamera.GetSVONumberOfFrames();
        }

        // If streaming has been switched on before play
        if (enableStreaming && !isStreamingEnable)
        {
            lock (zedCamera.grabLock) //Don't enable streaming while the grab thread holds the camera.
            {
                sl.ERROR_CODE err = zedCamera.EnableStreaming(streamingCodec, (uint)bitrate, (ushort)streamingPort, gopSize, adaptativeBitrate);
                if (err == sl.ERROR_CODE.SUCCESS)
                {
                    isStreamingEnable = true;
                }
                else
                {
                    enableStreaming = false;
                    isStreamingEnable = false;
                }
            }
        }

        //If not already launched, launch the image grabbing thread.
        if (!running)
        {
            running = true;
            requestNewFrame = true;

            threadGrab = new Thread(new ThreadStart(ThreadedZEDGrab));
            threadGrab.Start();
        }

        zedReady = true;
        isDisconnected = false; //In case we just regained connection.

        setRenderingSettings(); //Find the ZEDRenderingPlanes in the rig and configure them.
        AdjustZEDRigCameraPosition(); //If in AR mode, move cameras to proper offset relative to zedRigRoot.
    }
}

/// <summary>
/// Adjust camera(s) relative to zedRigRoot transform, which is what is moved each frame. Called at start of tracking.
/// In AR mode, offset is each camera's position relative to center of the user's head. Otherwise, cameras are just spaced
/// by the camera's baseline/IPD, or no offset is applied if there's just one camera.
/// </summary>
void AdjustZEDRigCameraPosition()
{
    Vector3 rightCameraOffset = new Vector3(zedCamera.Baseline, 0.0f, 0.0f);
    if (isStereoRig && UnityEngine.XR.XRDevice.isPresent) //Using AR pass-through mode.
    {
        //zedRigRoot transform (origin of the global camera) is placed on the HMD headset. Therefore, we move the
        //camera in front of it by offsetHmdZEDPosition to compensate for the ZED's position on the headset.
        //If values are wrong, tweak calibration file created in ZEDMixedRealityPlugin.
        camLeftTransform.localPosition = arRig.HmdToZEDCalibration.translation;
        camLeftTransform.localRotation = arRig.HmdToZEDCalibration.rotation;
        if (camRightTransform) camRightTransform.localPosition = camLeftTransform.localPosition + rightCameraOffset; //Space the eyes apart.
        if (camRightTransform) camRightTransform.localRotation = camLeftTransform.localRotation;
    }
    else if (isStereoRig && !UnityEngine.XR.XRDevice.isPresent) //Using stereo rig, but no VR headset.
    {
        //When no VR HMD is available, simply put the origin at the left camera.
        if (camLeftTransform) camLeftTransform.localPosition = Vector3.zero;
        if (camLeftTransform) camLeftTransform.localRotation = Quaternion.identity;
        if (camRightTransform) camRightTransform.localPosition = rightCameraOffset; //Space the eyes apart.
        if (camRightTransform) camRightTransform.localRotation = Quaternion.identity;
    }
    else //Using mono rig (ZED_Rig_Mono). No offset needed.
    {
        if (GetMainCameraTransform())
        {
            GetMainCameraTransform().localPosition = Vector3.zero;
            GetMainCameraTransform().localRotation = Quaternion.identity;
        }
    }
}

/// <summary>
/// Find the ZEDRenderingPlane components in the ZED rig and set their rendering settings
/// (rendering path, shader values, etc.) for left and right cameras. Also activate/deactivate depth occlusions.
/// </summary>
void setRenderingSettings()
{
    //Generic type arguments below were restored (stripped in this copy): results are assigned to ZEDRenderingPlane locals.
    ZEDRenderingPlane leftRenderingPlane = null;
    if (GetLeftCameraTransform() != null)
    {
        leftRenderingPlane = GetLeftCameraTransform().GetComponent<ZEDRenderingPlane>();
        leftRenderingPlane.SetPostProcess(postProcessing);
        cameraLeft.renderingPath = RenderingPath.UsePlayerSettings;
    }

    ZEDRenderingPlane rightRenderingPlane = null;
    if (GetRightCameraTransform() != null)
    {
        rightRenderingPlane = GetRightCameraTransform().GetComponent<ZEDRenderingPlane>();
        rightRenderingPlane.SetPostProcess(postProcessing);
        cameraRight.renderingPath = RenderingPath.UsePlayerSettings;
    }

    SetCameraBrightness(m_cameraBrightness);
    SetMaxDepthRange(m_maxDepthRange);

    Camera maincam = GetMainCamera();
    if (maincam != null)
    {
        ZEDRenderingMode renderingPath = (ZEDRenderingMode)maincam.actualRenderingPath;

        //Make sure we're in either forward or deferred rendering. Default to forward otherwise.
        if (renderingPath != ZEDRenderingMode.FORWARD && renderingPath != ZEDRenderingMode.DEFERRED)
        {
            Debug.LogError("[ZED Plugin] Only Forward and Deferred Shading rendering path are supported");
            if (cameraLeft) cameraLeft.renderingPath = RenderingPath.Forward;
            if (cameraRight) cameraRight.renderingPath = RenderingPath.Forward;
        }

        //Set depth occlusion.
        if (renderingPath == ZEDRenderingMode.FORWARD)
        {
            if (leftRenderingPlane) leftRenderingPlane.ManageKeywordPipe(!depthOcclusion, "NO_DEPTH_OCC");
            if (rightRenderingPlane) rightRenderingPlane.ManageKeywordPipe(!depthOcclusion, "NO_DEPTH_OCC");
        }
        else if (renderingPath == ZEDRenderingMode.DEFERRED)
        {
            if (leftRenderingPlane) leftRenderingPlane.ManageKeywordDeferredMat(!depthOcclusion, "NO_DEPTH_OCC");
            if (rightRenderingPlane) rightRenderingPlane.ManageKeywordDeferredMat(!depthOcclusion, "NO_DEPTH_OCC");
        }
    }
}
#endregion

#region IMAGE_ACQUIZ
/// <summary>
/// Continuously grabs images from the ZED. Runs on its own thread.
/// </summary>
private void ThreadedZEDGrab()
{
    runtimeParameters = new sl.RuntimeParameters();
    runtimeParameters.sensingMode = sensingMode;
    runtimeParameters.enableDepth = true;
    //Don't change this reference frame. If we need normals in the world frame, better to do the conversion ourselves.
    runtimeParameters.measure3DReferenceFrame = sl.REFERENCE_FRAME.CAMERA;

    while (running)
    {
        if (zedCamera == null)
            return;

        //Pick up runtime changes to the sensing mode made from the main thread.
        if (runtimeParameters.sensingMode != sensingMode) runtimeParameters.sensingMode = sensingMode;

        AcquireImages();
    }
}

/// <summary>
/// Grabs images from the ZED SDK and updates tracking, FPS and timestamp values.
/// Called from ThreadedZEDGrab() in a separate thread.
/// </summary>
private void AcquireImages()
{
    if (requestNewFrame && zedReady)
    {
        sl.ERROR_CODE e = sl.ERROR_CODE.FAILURE;
        if (inputType == sl.INPUT_TYPE.INPUT_TYPE_SVO)
        {
            //handle pause: while paused, grab exactly one frame when explicitly requested.
            if (NeedNewFrameGrab && pauseSVOReading)
            {
                e = zedCamera.Grab(ref runtimeParameters);
                NeedNewFrameGrab = false;
            }
            else if (!pauseSVOReading)
                e = zedCamera.Grab(ref runtimeParameters);

            currentFrame = zedCamera.GetSVOPosition();
        }
        else if (!pauseLiveReading)
        {
            e = zedCamera.Grab(ref runtimeParameters);
        }

        lock (zedCamera.grabLock)
        {
            if (e == sl.ERROR_CODE.CAMERA_NOT_DETECTED)
            {
                Debug.Log("Camera not detected or disconnected.");
                isDisconnected = true; //Picked up by Update() on the main thread, which invokes the disconnect flow.
                Thread.Sleep(10);
                requestNewFrame = false;
            }
            else if (e == sl.ERROR_CODE.SUCCESS)
            {
#if UNITY_EDITOR
                float camera_fps = zedCamera.GetCameraFPS();
                cameraFPS = camera_fps.ToString() + " FPS";
#endif

                //Get position of camera
                if (isTrackingEnable)
                {
                    zedtrackingState = zedCamera.GetPosition(ref zedOrientation, ref zedPosition, sl.TRACKING_FRAME.LEFT_EYE);
                }
                else
                {
                    zedtrackingState = sl.TRACKING_STATE.TRACKING_OFF;
                }

                if (needRecordFrame)
                    zedCamera.Record();

                // Indicate that a new frame is available and pause the thread until a new request is called
                newFrameAvailable = true;
                requestNewFrame = false;
            }
            else
                Thread.Sleep(1); //Grab failed for a transient reason; back off briefly.
        }
    }
    else
    {
        //To avoid "overheating."
        Thread.Sleep(1);
    }
}
#endregion

/// <summary>
/// Initialize the SVO, and launch the thread to initialize tracking. Called once the ZED
/// is initialized successfully.
/// </summary>
private void ZEDReady()
{
    FPS = (int)zedCamera.GetRequestedCameraFPS();
    if (enableTracking)
    {
        trackerThread = new Thread(EnableTrackingThreaded);
        trackerThread.Start();
    }
    else if (estimateInitialPosition)
    {
        sl.ERROR_CODE err = zedCamera.EstimateInitialPosition(ref initialRotation, ref initialPosition);
        if (zedCamera.GetCameraModel() == sl.MODEL.ZED_M)
            zedCamera.GetInternalIMUOrientation(ref initialRotation, sl.TIME_REFERENCE.IMAGE);

        if (err != sl.ERROR_CODE.SUCCESS)
            Debug.LogWarning("Failed to estimate initial camera position");
    }

    if (enableTracking)
        trackerThread.Join(); //Wait so isTrackingEnable is final before reading poses below.

    if (isStereoRig && UnityEngine.XR.XRDevice.isPresent)
    {
        ZEDMixedRealityPlugin.Pose pose = arRig.InitTrackingAR();
        OriginPosition = pose.translation;
        OriginRotation = pose.rotation;

        if (!zedCamera.IsHmdCompatible && zedCamera.IsCameraReady)
            Debug.LogWarning("WARNING: AR Passtrough with a ZED is not recommended. Consider using ZED Mini, designed for this purpose.");
    }
    else
    {
        OriginPosition = initialPosition;
        OriginRotation = initialRotation;
    }

    //Set the original transform for the Rig
    zedRigRoot.localPosition = OriginPosition;
    zedRigRoot.localRotation = OriginRotation;

    //Set confidence threshold if needed.
    if (m_confidenceThreshold != 100)
        zedCamera.SetConfidenceThreshold(m_confidenceThreshold);

#if UNITY_EDITOR
    //NOTE(review): plain '=' replaces any other playmodeStateChanged subscribers;
    //the stock plugin uses '+=' here — confirm the overwrite is intended.
    UnityEditor.EditorApplication.playmodeStateChanged = HandleOnPlayModeChanged;
#endif
}

/// <summary>
/// Initializes the ZED's inside-out tracking. Started as a separate thread in OnZEDReady.
/// </summary>
void EnableTrackingThreaded()
{
    lock (zedCamera.grabLock) //Tracking must not be enabled mid-grab.
    {
        //If using spatial memory and given a path to a .area file, make sure that path is valid.
        if (enableSpatialMemory && pathSpatialMemory != "" && !System.IO.File.Exists(pathSpatialMemory))
        {
            Debug.Log("Specified path to .area file '" + pathSpatialMemory + "' does not exist. Ignoring.");
            pathSpatialMemory = "";
        }

        //Now enable the tracking with the proper parameters.
        if (!(enableTracking = (zedCamera.EnableTracking(ref zedOrientation, ref zedPosition, enableSpatialMemory,
            enablePoseSmoothing, estimateInitialPosition, pathSpatialMemory) == sl.ERROR_CODE.SUCCESS)))
        {
            throw new Exception(ZEDLogMessage.Error2Str(ZEDLogMessage.ERROR.TRACKING_NOT_INITIALIZED));
        }
        else
        {
            isTrackingEnable = true;
        }
    }
}

#if UNITY_EDITOR
/// <summary>
/// Handler for playmodeStateChanged.
/// </summary>
void HandleOnPlayModeChanged()
{
    if (zedCamera == null) return;

#if UNITY_EDITOR
    UnityEditor.EditorApplication.playmodeStateChanged = HandleOnPlayModeChanged;
#endif
}
#endif

////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////// ENGINE UPDATE REGION ////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////
#region ENGINE_UPDATE

/// <summary>
/// If a new frame is available, this function retrieves the images and updates the Unity textures. Called in Update().
/// </summary>
public void UpdateImages()
{
    if (zedCamera == null)
        return;

    if (newFrameAvailable) //ThreadedZEDGrab()/AcquireImages() grabbed images we haven't updated yet.
    {
        lock (zedCamera.grabLock)
        {
            zedCamera.RetrieveTextures(); //Tell the wrapper to compute the textures.
            zedCamera.UpdateTextures(); //Tell the wrapper to update the textures.
            imageTimeStamp = zedCamera.GetImagesTimeStamp();
        }

        //For external module ... Trigger the capture done event.
        if (OnGrab != null)
            OnGrab();

        //SVO and loop back ?
        //--> reset position if needed
        if (zedCamera.GetInputType() == sl.INPUT_TYPE.INPUT_TYPE_SVO && svoLoopBack)
        {
            int maxSVOFrame = zedCamera.GetSVONumberOfFrames();
            if (zedCamera.GetSVOPosition() >= maxSVOFrame - (svoRealTimeMode ? 2 : 1))
            {
                zedCamera.SetSVOPosition(0); //Rewind the SVO to its first frame.
                if (enableTracking)
                {
                    if (!(enableTracking = (zedCamera.ResetTracking(initialRotation, initialPosition) == sl.ERROR_CODE.SUCCESS)))
                    {
                        Debug.LogError("ZED Tracking disabled: Not available during SVO playback when Loop is enabled.");
                    }
                }
                zedRigRoot.localPosition = initialPosition;
                zedRigRoot.localRotation = initialRotation;
            }
        }

        requestNewFrame = true; //Lets ThreadedZEDGrab/AcquireImages() start grabbing again.
        newFrameAvailable = false;
    }
}

private int blackoutToggleSpeed = 0; //Fade direction for the space-bar blackout toggle: +1 brighten, -1 darken, 0 idle.

/// <summary>
/// Gets the tracking position from the ZED and updates zedRigRoot's position. Also updates the AR tracking if enabled.
/// Only called in Live (not SVO playback) mode. Called in Update().
/// </summary>
private void UpdateTracking()
{
    bool enableFixed = false; //simple video goggle mode
    bool zLockZED = true; //rotation of zed camera does not rotate image plane in hmd
    bool ZLockCam = false; //do not rotate image plane if hmd rotates
    if (enableFixed)
    {
        zLockZED = false;
        ZLockCam = false;
    }

    if (!zedReady)
        return;

    if (isZEDTracked) //ZED inside-out tracking is enabled and initialized.
    {
        Quaternion r;
        Vector3 v;

        isCameraTracked = true;

        if (UnityEngine.XR.XRDevice.isPresent && isStereoRig) //AR pass-through mode.
        {
            if (calibrationHasChanged) //If the HMD offset calibration file changed during runtime.
            {
                AdjustZEDRigCameraPosition(); //Re-apply the ZED's offset from the VR headset.
                calibrationHasChanged = false;
            }

            //(Mod) The stock latency-compensated AR tracking is disabled in this fork:
            /*
            arRig.ExtractLatencyPose(imageTimeStamp); //Find what HMD's pose was at ZED image's timestamp for latency compensation.
            arRig.AdjustTrackingAR(zedPosition, zedOrientation, out r, out v, setIMUPriorInAR);
            zedRigRoot.localRotation = r;
            zedRigRoot.localPosition = v;
            ZEDSyncPosition = v;
            ZEDSyncRotation = r;
            HMDSyncPosition = arRig.LatencyPose().translation;
            HMDSyncRotation = arRig.LatencyPose().rotation;
            */

            //Rotate Image plane with zed rotation
            r = zedOrientation;
            if (enableFixed)
            {
                zedRigDisplayer.transform.localRotation = camLeft.transform.rotation;
            }
            else
            {
                zedRigDisplayer.transform.localRotation = r; //(Mod) zedRigDisplayer should move/rotate the image plane
                //zedRigDisplayer.transform.localEulerAngles = new Vector3(r.x,r.y,0);
            }

            //Fixed
            //camLeft.transform.SetParent(zedRigDisplayer.transform); //(Mod)
            //camLeftTransform gets overwritten by hmd transform

            //move cams(within camRig) along with zedRig (test, imitates behaviour with cams parent set to zedRigDisplayer)
            //camRigDisplayer.transform.rotation = zedRigDisplayer.transform.rotation;
            //camRigDisplayer.transform.position = zedRigDisplayer.transform.position;

            //hmd: camRigDisplayer.transform.eulerAngles.z = pitch
            //hmd: camRigDisplayer.transform.eulerAngles.y = yaw
            //hmd: camRigDisplayer.transform.eulerAngles.x = roll
            //zed roll = z

            //camLeft.transform <- rotation of the HMD
            //zedRigDisplayer.transform <- rotation of the ZED (via the line above; used for quaternion-to-euler conversion)
            //camRigDisplayer.transform <- rotation of the HMD (parent; does not rotate along with the HMD)

            //camRigDisplayer.transform.eulerAngles = new Vector3(0,0,0);
            //Transform rotAxis = camRigDisplayer.transform; //axis to rotate around
            //rotAxis.Rotate(camLeftTransform.eulerAngles); //match camera direction

            /*GameObject emptyGO = new GameObject();
            Transform u = emptyGO.transform;
            u.position = new Vector3(0,0,1); //rotation axis
            u.Rotate(zedRigDisplayer.transform.eulerAngles); //rotate so that vector above matches rotation direction of zedRigDisplayer's coordinate system
            */
            //camRigDisplayer.transform = camRigDisplayer.transform.rotateAroundVector(u, zedOrientation.z)

            //TODO
            //Transform u = rotationTransformhelper.transform;
            //rotationTransformhelper.transform.position = camLeft.transform.position; //copy data
            //rotationTransformhelper.transform.rotation = camLeft.transform.rotation; //copy data
            ///rotationTransformhelper.transform.position = camLeft.transform.localPosition; //copy data
            ///rotationTransformhelper.transform.rotation = camLeft.transform.localRotation; //copy data
            //Debug.Log(new Vector3(0, 0, zedRigDisplayer.transform.eulerAngles.z));
            ///rotationTransformhelper.transform.Rotate(new Vector3(0, 0, zedRigDisplayer.transform.localEulerAngles.z));
            ///rotationTransformhelper.transform.Rotate(-camLeft.transform.localEulerAngles); //rotate back
            //var newrot = zedRigDisplayer.transform.eulerAngles - rotationTransformhelper.transform.eulerAngles;
            //camRigDisplayer.transform.eulerAngles = newrot;
            //camRigDisplayer.transform.eulerAngles = rotationTransformhelper.transform.eulerAngles;

            //TODO: this still doesn't work either; the rotation is still a bit off.
            //Possibly because the rotation pivot of camLeft/Right is not at the origin?
            ///camRigDisplayer.transform.eulerAngles = rotationTransformhelper.transform.eulerAngles; //get cam rotation
            ///camRigDisplayer.transform.position = zedRigDisplayer.transform.position;

            // # Attempt: transfer the ZED's Z rotation onto the camRig's rotation #
            //rotationTransformhelper.transform.rotation = zedRigDisplayer.transform.localRotation;
            //rotationTransformhelper.transform.localEulerAngles = new Vector3(-zedRigDisplayer.transform.eulerAngles.x, -zedRigDisplayer.transform.eulerAngles.y, 0);
            //camRigDisplayer.transform.localRotation = Quaternion.identity;

            /*if (Input.GetKeyDown("space"))
            {
                //Restoring position does currently not work
                Debug.Log("fix");
                //camRigDisplayer.transform.localRotation = zedRigDisplayer.transform.rotation; //moves camera image to origin (not view point)
                camRigDisplayer.transform.localRotation = zedRigDisplayer.transform.rotation;
                camRigDisplayer.transform.Rotate(-camLeft.transform.rotation.eulerAngles);
            }*/

            if (Input.GetKeyDown("space")) //Toggle blackout fade
            {
                blackoutToggleSpeed = CameraBrightness < 50 ? blackoutToggleSpeed = 1 : blackoutToggleSpeed = -1; //set speed direction
            }
            if (blackoutToggleSpeed != 0) //fade running
            {
                CameraBrightness += blackoutToggleSpeed; //Step brightness by one unit per frame.
                if (CameraBrightness >= 100)
                {
                    CameraBrightness = 100;
                    blackoutToggleSpeed = 0; //stop fade
                }
                if (CameraBrightness <= 0)
                {
                    CameraBrightness = 0;
                    blackoutToggleSpeed = 0; //stop fade
                }
            }

            if (zLockZED)
            {
                Vector3 zedDirection = new Vector3(0, 0, 1); //Create vector towards Z+
                zedDirection = zedRigDisplayer.transform.rotation * zedDirection; //rotate vector by zedRigRotation (vector should point towards +Z of zedRigDisplayer)
                camRigDisplayer.transform.localRotation = Quaternion.AngleAxis(zedRigDisplayer.transform.localEulerAngles.z, zedDirection); //rotate only around zedDirection.z with respect to original coordinate system
            }
            if (ZLockCam)
            {
                //same for camera Z rotation compensation
                Vector3 hmdDirection = new Vector3(0, 0, 1); //Create vector towards Z+
                hmdDirection = camLeft.transform.rotation * hmdDirection; //rotate vector by cam (HMD) (vector should point towards +Z of HMD)
                camRigDisplayer.transform.rotation *= Quaternion.AngleAxis(-camLeft.transform.localEulerAngles.z, hmdDirection); //compensate only around hmd.z with respect to original coordinate system
            }

            //zedRigRoot.localRotation = r;
            //zedRigRoot.localPosition = v;
        }
        else //Not AR pass-through mode.
        {
            zedRigRoot.localRotation = zedOrientation;
            if (!ZEDSupportFunctions.IsVector3NaN(zedPosition))
                zedRigRoot.localPosition = zedPosition;
        }
    }
    else if (UnityEngine.XR.XRDevice.isPresent && isStereoRig) //ZED tracking is off but HMD tracking is on. Fall back to that.
    {
        isCameraTracked = true;
        arRig.ExtractLatencyPose(imageTimeStamp); //Find what HMD's pose was at ZED image's timestamp for latency compensation.
        zedRigRoot.localRotation = arRig.LatencyPose().rotation;
        zedRigRoot.localPosition = arRig.LatencyPose().translation;
    }
    else //The ZED is not tracked by itself or an HMD.
        isCameraTracked = false;
}

/// <summary>
/// Stores the HMD's current pose.
/// Used in AR mode for latency compensation.
/// Pose will be applied to final canvases when a new image's timestamp matches
/// the time when this is called.
/// </summary>
void UpdateHmdPose()
{
    if (IsStereoRig && UnityEngine.XR.XRDevice.isPresent)
        arRig.CollectPose(); //Save headset pose with current timestamp.
}

//Rotates 'point' around the given axis by 'angle' degrees.
Vector3 RotatePointAroundAxis(Vector3 point, float angle, Vector3 axis)
{
    Quaternion q = Quaternion.AngleAxis(angle, axis);
    return q * point; //Note: q must be first (point * q wouldn't compile)
}

/// <summary>
/// Updates images, collects HMD poses for latency correction, and applies tracking.
/// Called by Unity each frame.
/// </summary>
void Update()
{
    //Check if ZED is disconnected; invoke event and call function if so.
    if (isDisconnected)
    {
        if (OnZEDDisconnected != null)
            OnZEDDisconnected(); //Invoke event. Used for GUI message and pausing ZEDRenderingPlanes.

        ZEDDisconnected(); //Tries to reset the camera.
        return;
    }

    // Then update all modules
    UpdateImages(); //Image is updated first so we have its timestamp for latency compensation.
    UpdateHmdPose(); //Store the HMD's pose at the current timestamp.
    UpdateTracking(); //Apply position/rotation changes to zedRigRoot.
    UpdateMapping(); //Update mapping if activated

    // If in Unity Editor, update the ZEDManager status list
#if UNITY_EDITOR
    //Update strings used for displaying stats in the Inspector.
    if (zedCamera != null)
    {
        float frame_drop_count = zedCamera.GetFrameDroppedPercent();
        float CurrentTickFPS = 1.0f / Time.deltaTime;
        fps_engine = (fps_engine + CurrentTickFPS) / 2.0f; //Running two-sample average smooths the displayed FPS.
        engineFPS = fps_engine.ToString("F0") + " FPS";
        if (frame_drop_count > 30 && fps_engine < 45)
            engineFPS += "WARNING: Low engine framerate detected";

        if (isZEDTracked)
            trackingState = ZEDTrackingState.ToString();
        else if (UnityEngine.XR.XRDevice.isPresent && isStereoRig)
            trackingState = "HMD Tracking";
        else
            trackingState = "Camera Not Tracked";
    }
#endif
}

public void LateUpdate()
{
    if (IsStereoRig)
    {
        arRig.LateUpdateHmdRendering(); //Update textures on final AR rig for output to the headset.
    }
}
#endregion

/// <summary>
/// Event called when camera is disconnected
/// </summary>
void ZEDDisconnected()
{
    cameraFPS = "Disconnected";
    isDisconnected = true;

    if (zedReady)
    {
        Reset(); //Cache tracking, turn it off and turn it back on again.
    }
}

private void OnDestroy()
{
    //OnApplicationQuit();
    CloseManager();
}

////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////// SPATIAL MAPPING REGION ////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////
#region MAPPING_MODULE
/// <summary>
/// Tells ZEDSpatialMapping to begin a new scan. This clears the previous scan from the scene if there is one.
/// </summary>
public void StartSpatialMapping()
{
    //Mapping starts from the world origin, so reset the rig's transform first.
    transform.position = Vector3.zero;
    transform.rotation = Quaternion.identity;
    spatialMapping.StartStatialMapping(mappingResolutionPreset, mappingRangePreset, isMappingTextured);
}

/// <summary>
/// Ends the current spatial mapping. Once called, the current mesh will be filtered, textured (if enabled) and saved (if enabled),
/// and a mesh collider will be added.
/// </summary>
public void StopSpatialMapping()
{
    if (spatialMapping != null)
    {
        if (saveMeshWhenOver)
            SaveMesh(meshPath);
        spatialMapping.StopStatialMapping();
    }
}

/// <summary>
/// Updates the filtering parameters and call the ZEDSpatialMapping instance's Update() function.
/// </summary>
private void UpdateMapping()
{
    if (spatialMapping != null)
    {
        //if (IsMappingUpdateThreadRunning)
        if (spatialMapping.IsRunning())
        {
            spatialMapping.filterParameters = meshFilterParameters;
            spatialMapping.Update();
        }
    }
}

/// <summary>
/// Toggles whether to display the mesh or not.
/// </summary>
/// <param name="state">True to make the mesh visible, false to make it invisible.</param>
public void SwitchDisplayMeshState(bool state)
{
    if (spatialMapping != null)
        spatialMapping.SwitchDisplayMeshState(state);
}

//Removes every chunk of the scanned mesh from the scene.
public void ClearAllMeshes()
{
    if (spatialMapping != null)
        spatialMapping.ClearAllMeshes();
}

/// <summary>
/// Pauses the current scan.
/// </summary>
/// <param name="state">True to pause the scanning, false to unpause it.</param>
public void SwitchPauseState(bool state)
{
    if (spatialMapping != null)
        spatialMapping.SwitchPauseState(state);
}

/// <summary>
/// Saves the mesh into a 3D model (.obj, .ply or .bin) file. Also saves an .area file for spatial memory for better tracking.
/// Calling this will end the spatial mapping if it's running. Note it can take a significant amount of time to finish.
/// </summary>
/// <param name="meshPath">Path where the mesh and .area files will be saved.</param>
public void SaveMesh(string meshPath = "ZEDMeshObj.obj")
{
    spatialMapping.RequestSaveMesh(meshPath);
}

/// <summary>
/// Loads a mesh and spatial memory data from a file.
/// If scanning is running, it will be stopped. Existing scans in the scene will be cleared.
/// </summary>
/// <param name="meshPath">Path to the 3D mesh file (.obj, .ply or .bin) to load.</param>
/// <returns>True if successfully loaded, false otherwise.</returns>
public bool LoadMesh(string meshPath = "ZEDMeshObj.obj")
{
    //Cache the save setting and set to false, to avoid overwriting the mesh file during the load.
bool oldSaveWhenOver = saveMeshWhenOver; saveMeshWhenOver = false; gravityRotation = Quaternion.identity; spatialMapping.SetMeshRenderer(); bool loadresult = spatialMapping.LoadMesh(meshPath); saveMeshWhenOver = oldSaveWhenOver; //Restoring old setting. return loadresult; } #endregion /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////// AR REGION ////////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// #region AR_CAMERAS /// /// Stereo rig that adjusts images from ZED_Rig_Stereo to look correct in the HMD. /// Hidden by default as it rarely needs to be changed. /// [HideInInspector] public GameObject zedRigDisplayer; public GameObject camRigDisplayer; //(Mod) public GameObject rotationTransformhelper;//(Mod): public GameObject camLeft;//(Mod); private ZEDMixedRealityPlugin arRig; /// /// Create a GameObject to display the ZED in an headset (ZED-M Only). /// /// private GameObject CreateZEDRigDisplayer() { //Make sure we don't already have one, such as if the camera disconnected and reconnected. 
if (zedRigDisplayer != null) return zedRigDisplayer; zedRigDisplayer = new GameObject("ZEDRigDisplayer"); arRig = zedRigDisplayer.AddComponent(); camRigDisplayer = new GameObject("camRigDisplayer"); //(Mod) rotationTransformhelper = new GameObject("rotationTransformhelper"); //(Mod) //rotationTransformhelper.transform.SetParent(zedRigDisplayer.transform); /*Screens left and right */ GameObject leftScreen = GameObject.CreatePrimitive(PrimitiveType.Quad); leftScreen.name = "Quad - Left"; MeshRenderer meshLeftScreen = leftScreen.GetComponent(); meshLeftScreen.lightProbeUsage = UnityEngine.Rendering.LightProbeUsage.Off; meshLeftScreen.reflectionProbeUsage = UnityEngine.Rendering.ReflectionProbeUsage.Off; meshLeftScreen.receiveShadows = false; meshLeftScreen.motionVectorGenerationMode = MotionVectorGenerationMode.ForceNoMotion; meshLeftScreen.shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off; meshLeftScreen.sharedMaterial = Resources.Load("Materials/Unlit/Mat_ZED_Unlit") as Material; leftScreen.layer = arLayer; GameObject.Destroy(leftScreen.GetComponent()); GameObject rightScreen = GameObject.CreatePrimitive(PrimitiveType.Quad); rightScreen.name = "Quad - Right"; MeshRenderer meshRightScreen = rightScreen.GetComponent(); meshRightScreen.lightProbeUsage = UnityEngine.Rendering.LightProbeUsage.Off; meshRightScreen.reflectionProbeUsage = UnityEngine.Rendering.ReflectionProbeUsage.Off; meshRightScreen.receiveShadows = false; meshRightScreen.motionVectorGenerationMode = MotionVectorGenerationMode.ForceNoMotion; meshRightScreen.shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off; GameObject.Destroy(rightScreen.GetComponent()); meshRightScreen.sharedMaterial = Resources.Load("Materials/Unlit/Mat_ZED_Unlit") as Material; rightScreen.layer = arLayer; /*Camera left and right*/ //GameObject camLeft = new GameObject("cameraLeft"); //(Mod) camLeft = new GameObject("cameraLeft"); //camLeft.transform.SetParent(zedRigDisplayer.transform); //(Mod) 
camLeft.transform.SetParent(camRigDisplayer.transform); //(Mod) Camera camL = camLeft.AddComponent(); camL.stereoTargetEye = StereoTargetEyeMask.Both; //Temporary setting to fix loading screen issue. camL.renderingPath = RenderingPath.Forward;//Minimal overhead camL.clearFlags = CameraClearFlags.Color; camL.backgroundColor = Color.black; camL.cullingMask = 1 << arLayer; camL.allowHDR = false; camL.allowMSAA = false; camL.depth = camLeftTransform.GetComponent().depth; GameObject camRight = new GameObject("cameraRight"); //camRight.transform.SetParent(zedRigDisplayer.transform); //(Mod) camRight.transform.SetParent(camRigDisplayer.transform); //(Mod) Camera camR = camRight.AddComponent(); camR.renderingPath = RenderingPath.Forward;//Minimal overhead camR.clearFlags = CameraClearFlags.Color; camR.backgroundColor = Color.black; camR.stereoTargetEye = StereoTargetEyeMask.Both; //Temporary setting to fix loading screen issue. camR.cullingMask = 1 << arLayer; camR.allowHDR = false; camR.allowMSAA = false; camR.depth = camRightTransform.GetComponent().depth; HideFromWrongCameras.RegisterZEDCam(camL); HideFromWrongCameras lhider = leftScreen.AddComponent(); lhider.SetRenderCamera(camL); lhider.showInNonZEDCameras = false; HideFromWrongCameras.RegisterZEDCam(camR); HideFromWrongCameras rhider = rightScreen.AddComponent(); rhider.SetRenderCamera(camR); rhider.showInNonZEDCameras = false; SetLayerRecursively(camRight, arLayer); SetLayerRecursively(camLeft, arLayer); //Hide camera in editor. #if UNITY_EDITOR if (!showarrig) { LayerMask layerNumberBinary = (1 << arLayer); //Convert layer index into binary number. 
LayerMask flippedVisibleLayers = ~UnityEditor.Tools.visibleLayers; UnityEditor.Tools.visibleLayers = ~(flippedVisibleLayers | layerNumberBinary); } #endif leftScreen.transform.SetParent(zedRigDisplayer.transform); rightScreen.transform.SetParent(zedRigDisplayer.transform); arRig.finalCameraLeft = camLeft; arRig.finalCameraRight = camRight; arRig.ZEDEyeLeft = camLeftTransform.gameObject; arRig.ZEDEyeRight = camRightTransform.gameObject; arRig.quadLeft = leftScreen.transform; arRig.quadRight = rightScreen.transform; ZEDMixedRealityPlugin.OnHmdCalibChanged += CalibrationHasChanged; if (UnityEngine.XR.XRDevice.isPresent) HMDDevice = UnityEngine.XR.XRDevice.model; return zedRigDisplayer; } #endregion #region MIRROR private ZEDMirror mirror = null; private GameObject mirrorContainer = null; void CreateMirror() { GameObject camLeft; Camera camL; if (mirrorContainer == null) { mirrorContainer = new GameObject("Mirror"); mirrorContainer.hideFlags = HideFlags.HideInHierarchy; camLeft = new GameObject("MirrorCamera"); camLeft.hideFlags = HideFlags.HideInHierarchy; mirror = camLeft.AddComponent(); mirror.manager = this; camL = camLeft.AddComponent(); } else { camLeft = mirror.gameObject; camL = camLeft.GetComponent(); } camLeft.transform.parent = mirrorContainer.transform; camL.stereoTargetEye = StereoTargetEyeMask.None; camL.renderingPath = RenderingPath.Forward;//Minimal overhead camL.clearFlags = CameraClearFlags.Color; camL.backgroundColor = Color.black; camL.cullingMask = 0; //It should see nothing. It gets its final image entirely from a Graphics.Blit call in ZEDMirror. camL.allowHDR = false; camL.allowMSAA = false; camL.useOcclusionCulling = false; camL.depth = cameraLeft.GetComponent().depth; //Make sure it renders after the left cam so we can copy texture from latest frame. } #endregion /// /// Closes out the current stream, then starts it up again while maintaining tracking data. 
/// <summary>
/// Closes out the current stream, then starts it up again while maintaining tracking data.
/// Used when the zed becomes unplugged, or you want to change a setting at runtime that
/// requires re-initializing the camera.
/// </summary>
public void Reset()
{
    //Save tracking
    if (enableTracking && isTrackingEnable)
    {
        zedCamera.GetPosition(ref zedOrientation, ref zedPosition);
    }

    CloseManager();

    // Reset the connection state machine so Awake() performs a full fresh open.
    openingLaunched = false;
    running = false;
    numberTriesOpening = 0;
    forceCloseInit = false;

    Awake();
}

#region EventHandler
/// <summary>
/// Changes the real-world brightness by setting the brightness value in the shaders.
/// </summary>
/// <param name="newVal">New brightness value to be applied. Should be between 0 and 100.</param>
public void SetCameraBrightness(int newVal)
{
    SetFloatValueOnPlaneMaterials("_ZEDFactorAffectReal", newVal / 100f);
}

/// <summary>
/// Sets the maximum depth range of real-world objects. Pixels further than this range are discarded.
/// </summary>
/// <param name="newVal">Furthest distance, in meters, that the camera will display pixels for. Should be between 0 and 20.</param>
public void SetMaxDepthRange(float newVal)
{
    if (newVal < 0 || newVal > 20)
    {
        Debug.LogWarning("Tried to set max depth range to " + newVal + "m. Must be within 0m and 20m.");
        newVal = Mathf.Clamp(newVal, 0, 20);
    }
    SetFloatValueOnPlaneMaterials("_MaxDepth", newVal);
}

/// <summary>
/// Sets a value of a float property on the material(s) rendering the ZED image.
/// Used to set things like brightness and maximum depth.
/// </summary>
/// <param name="propertyname">Name of value/property within Shader.</param>
/// <param name="newvalue">New value for the specified property.</param>
private void SetFloatValueOnPlaneMaterials(string propertyname, float newvalue)
{
    foreach (ZEDRenderingPlane renderPlane in GetComponentsInChildren<ZEDRenderingPlane>())
    {
        Material rendmat;
        if (renderPlane.ActualRenderingPath == RenderingPath.Forward)
            rendmat = renderPlane.canvas.GetComponent<Renderer>().material;
        else if (renderPlane.ActualRenderingPath == RenderingPath.DeferredShading)
            rendmat = renderPlane.deferredMat;
        else
        {
            Debug.LogError("Can't set " + propertyname + " value for Rendering Path " + renderPlane.ActualRenderingPath +
                ": only Forward and DeferredShading are supported.");
            return;
        }
        rendmat.SetFloat(propertyname, newvalue);
    }
}

/// <summary>
/// Flag set to true when the HMD-to-ZED calibration file has changed during runtime.
/// Causes values from the new file to be applied during Update().
/// </summary>
private bool calibrationHasChanged = false;

/// <summary>
/// Sets the calibrationHasChanged flag to true, which causes the next Update() to
/// re-apply the HMD-to-ZED offsets.
/// </summary>
private void CalibrationHasChanged()
{
    calibrationHasChanged = true;
}
#endregion

#if UNITY_EDITOR
/// <summary>
/// Handles changes to tracking or graphics settings changed from the Inspector.
/// </summary>
void OnValidate()
{
    if (zedCamera != null)
    {
        // If tracking has been switched on
        if (!isTrackingEnable && enableTracking)
        {
            //Enables tracking and initializes the first position of the camera.
            // NOTE(review): throwing from OnValidate is unusual — Unity logs it as an unhandled exception.
            if (!(enableTracking = (zedCamera.EnableTracking(ref zedOrientation, ref zedPosition, enableSpatialMemory,
                enablePoseSmoothing, estimateInitialPosition, pathSpatialMemory) == sl.ERROR_CODE.SUCCESS)))
            {
                isZEDTracked = false;
                throw new Exception(ZEDLogMessage.Error2Str(ZEDLogMessage.ERROR.TRACKING_NOT_INITIALIZED));
            }
            else
            {
                isZEDTracked = true;
                isTrackingEnable = true;
            }
        }

        // If tracking has been switched off
        if (isTrackingEnable && !enableTracking)
        {
            isZEDTracked = false;
            lock (zedCamera.grabLock) //Don't disable tracking mid-grab.
            {
                zedCamera.DisableTracking();
            }
            isTrackingEnable = false;
        }

        // If streaming has been switched on
        if (enableStreaming && !isStreamingEnable)
        {
            lock (zedCamera.grabLock)
            {
                sl.ERROR_CODE err = zedCamera.EnableStreaming(streamingCodec, (uint)bitrate, (ushort)streamingPort, gopSize, adaptativeBitrate);
                if (err == sl.ERROR_CODE.SUCCESS)
                {
                    isStreamingEnable = true;
                }
                else
                {
                    // Roll back the Inspector toggle so the UI reflects the failure.
                    enableStreaming = false;
                    isStreamingEnable = false;
                }
            }
        }

        // If streaming has been switched off
        if (!enableStreaming && isStreamingEnable)
        {
            lock (zedCamera.grabLock)
            {
                zedCamera.DisableStreaming();
                isStreamingEnable = false;
            }
        }

        //Reapplies graphics settings based on current values.
        setRenderingSettings();
    }
}
#endif
}