上传修改

This commit is contained in:
Sora丶kong
2026-03-03 10:57:00 +08:00
parent fb16c80a59
commit 82130f6146
2894 changed files with 229 additions and 856196 deletions

View File

@@ -1,181 +0,0 @@
/*******************************************************************************
Copyright © 2015-2022 PICO Technology Co., Ltd. All rights reserved.
NOTICE: All information contained herein is, and remains, the property of
PICO Technology Co., Ltd. The intellectual and technical concepts
contained herein are proprietary to PICO Technology Co., Ltd. and may be
covered by patents, patents in process, and are protected by trade secret or
copyright law. Dissemination of this information or reproduction of this
material is strictly forbidden unless prior written permission is obtained from
PICO Technology Co., Ltd.
*******************************************************************************/
using System;
using UnityEngine;
namespace Unity.XR.PXR
{
public class PXR_Boundary
{
    /// <summary>
    /// Sets the boundary as visible or invisible.
    /// Note: The setting defined in this function can be overridden by system settings
    /// (e.g., proximity trigger) or user settings (e.g., disabling the boundary system).
    /// </summary>
    /// <param name="value">Whether to set the boundary as visible or invisible:
    /// - `true`: visible
    /// - `false`: invisible</param>
    public static void SetVisible(bool value)
    {
        // "Visiable" is the native plugin's (misspelled) entry-point name; kept as-is.
        PXR_Plugin.Boundary.UPxr_SetBoundaryVisiable(value);
    }

    /// <summary>
    /// Gets whether the boundary is visible.
    /// </summary>
    /// <returns>
    /// - `true`: visible
    /// - `false`: invisible</returns>
    public static bool GetVisible()
    {
        return PXR_Plugin.Boundary.UPxr_GetBoundaryVisiable();
    }

    /// <summary>
    /// Checks whether the boundary is configured. Boundary-related functions are
    /// available for use only if the boundary is configured.
    /// </summary>
    /// <returns>
    /// - `true`: configured
    /// - `false`: not configured</returns>
    public static bool GetConfigured()
    {
        return PXR_Plugin.Boundary.UPxr_GetBoundaryConfigured();
    }

    /// <summary>
    /// Checks whether the boundary is enabled.
    /// </summary>
    /// <returns>
    /// - `true`: enabled
    /// - `false`: not enabled</returns>
    public static bool GetEnabled()
    {
        return PXR_Plugin.Boundary.UPxr_GetBoundaryEnabled();
    }

    /// <summary>
    /// Checks whether a tracked node (left hand, right hand, head) will trigger the boundary.
    /// </summary>
    /// <param name="node">The node to track: `HandLeft`-left controller; `HandRight`-right controller; `Head`-HMD.</param>
    /// <param name="boundaryType">The boundary type: `OuterBoundary`-boundary (custom boundary or in-site fast boundary); `PlayArea`-the maximum rectangle in the custom boundary (no such a rectangle in the in-site fast boundary).</param>
    /// <returns>
    /// A struct that contains the following details:
    /// - `IsTriggering`: bool, whether the boundary is triggered;
    /// - `ClosestDistance`: float, the minimum distance between the tracked node and the boundary;
    /// - `ClosestPoint`: vector3, the closest point between the tracked node and the boundary;
    /// - `ClosestPointNormal`: vector3, the normal line of the closest point;
    /// - `valid`: bool, whether the result returned is valid.
    /// </returns>
    public static PxrBoundaryTriggerInfo TestNode(BoundaryTrackingNode node, BoundaryType boundaryType)
    {
        return PXR_Plugin.Boundary.UPxr_TestNodeIsInBoundary(node, boundaryType);
    }

    /// <summary>
    /// Checks whether a tracked point will trigger the boundary.
    /// </summary>
    /// <param name="point">The coordinate of the point.</param>
    /// <param name="boundaryType">The boundary type: `OuterBoundary`-boundary (custom boundary or in-site fast boundary); `PlayArea`-customize the maximum rectangle in the custom boundary (no such rectangle for in-site fast boundary).</param>
    /// <returns>
    /// A struct that contains the following details:
    /// - `IsTriggering`: bool, whether the boundary is triggered;
    /// - `ClosestDistance`: float, the minimum distance between the tracked point and the boundary;
    /// - `ClosestPoint`: vector3, the closest point between the tracked point and the boundary;
    /// - `ClosestPointNormal`: vector3, the normal line of the closest point;
    /// - `valid`: bool, whether the result returned is valid.
    /// </returns>
    public static PxrBoundaryTriggerInfo TestPoint(PxrVector3f point, BoundaryType boundaryType)
    {
        return PXR_Plugin.Boundary.UPxr_TestPointIsInBoundary(point, boundaryType);
    }

    /// <summary>
    /// Gets the collection of boundary points.
    /// </summary>
    /// <param name="boundaryType">The boundary type:
    /// - `OuterBoundary`: custom boundary or in-site fast boundary.
    /// - `PlayArea`: customize the maximum rectangle in the custom boundary (no such rectangle for in-site fast boundary).</param>
    /// <returns>A collection of boundary points.
    /// - If you pass `OuterBoundary`, the actual calibrated vertex array of the boundary will be returned.
    /// - If you pass `PlayArea`, the boundary points array of the maximum rectangle within the calibrated play area will be returned. The boundary points array is calculated by the algorithm.
    /// For stationary boundary, passing `PlayArea` returns nothing.
    /// </returns>
    public static Vector3[] GetGeometry(BoundaryType boundaryType)
    {
        return PXR_Plugin.Boundary.UPxr_GetBoundaryGeometry(boundaryType);
    }

    /// <summary>
    /// Gets the size of the play area for the custom boundary.
    /// </summary>
    /// <param name="boundaryType">You can only pass `PlayArea` (customize the maximum rectangle in the custom boundary). **Note**: There is no such rectangle for stationary boundary.</param>
    /// <returns>The lengths of the X and Z axis of the maximum rectangle within the custom calibrated play area. The lengths are calculated by the algorithm. The length of the Y axis is always 1.
    /// If the current user calibrates the stationary boundary, (0,1,0) will be returned.
    /// </returns>
    public static Vector3 GetDimensions(BoundaryType boundaryType)
    {
        return PXR_Plugin.Boundary.UPxr_GetBoundaryDimensions(boundaryType);
    }

    /// <summary>
    /// Gets the camera image of the device and uses it as the environmental background.
    /// Before calling this function, make sure you have set the clear flags of the camera
    /// to solid color and have set the background color of the camera to 0 for the alpha channel.
    /// @note If the app is paused, this function will cease. Therefore, you need to call
    /// this function again after the app has been resumed.
    /// </summary>
    /// <param name="value">Whether to enable SeeThrough: `true`-enable; `false`-do not enable.</param>
    public static void EnableSeeThroughManual(bool value)
    {
        PXR_Plugin.Boundary.UPxr_SetSeeThroughBackground(value);
    }

    /// <summary>
    /// Gets the current status of seethrough tracking.
    /// </summary>
    /// <returns>Returns `PxrTrackingState`. Below are the enumerations:
    /// * `LostNoReason`: no reason
    /// * `LostCamera`: camera calibration data error
    /// * `LostHighLight`: environment lighting too bright
    /// * `LostLowLight`: environment lighting too dark
    /// * `LostLowFeatureCount`: few environmental features
    /// * `LostReLocation`: relocation in progress
    /// * `LostInitialization`: initialization in progress
    /// * `LostNoCamera`: camera data error
    /// * `LostNoIMU`: IMU data error
    /// * `LostIMUJitter`: IMU data jitter
    /// * `LostUnknown`: unknown error
    /// </returns>
    public static PxrTrackingState GetSeeThroughTrackingState()
    {
        return PXR_Plugin.Boundary.UPxr_GetSeeThroughTrackingState();
    }

    /// <summary>
    /// Disables or enables the guardian (boundary) system.
    /// </summary>
    /// <param name="value">Judging from the method name, `true` presumably disables the
    /// guardian system and `false` re-enables it — TODO confirm against the native plugin.</param>
    public static void SetGuardianSystemDisable(bool value)
    {
        PXR_Plugin.Boundary.UPxr_SetGuardianSystemDisable(value);
    }

    /// <summary>
    /// Uses the global pose.
    /// </summary>
    /// <param name="value">Specifies whether to use the global pose.
    /// * `true`: use
    /// * `false`: do not use
    /// </param>
    public static void UseGlobalPose(bool value)
    {
        // NOTE(review): this delegates to UPxr_SetSeeThroughState, not a pose-named
        // entry point — verify the mapping against the native plugin documentation.
        PXR_Plugin.Boundary.UPxr_SetSeeThroughState(value);
    }
}
}

View File

@@ -1,11 +0,0 @@
fileFormatVersion: 2
guid: 617aa04623edd024a9298a3b21656d4c
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,426 +0,0 @@
/*******************************************************************************
Copyright © 2015-2022 PICO Technology Co., Ltd. All rights reserved.
NOTICE: All information contained herein is, and remains, the property of
PICO Technology Co., Ltd. The intellectual and technical concepts
contained herein are proprietary to PICO Technology Co., Ltd. and may be
covered by patents, patents in process, and are protected by trade secret or
copyright law. Dissemination of this information or reproduction of this
material is strictly forbidden unless prior written permission is obtained from
PICO Technology Co., Ltd.
*******************************************************************************/
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR;
namespace Unity.XR.PXR
{
public class PXR_EyeTracking
{
    /// <summary>
    /// Gets the pose matrix of the head.
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="matrix">The head pose as a TRS matrix (identity on failure).</param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool GetHeadPosMatrix(out Matrix4x4 matrix)
    {
        matrix = Matrix4x4.identity;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        if (!GetEyeTrackingDevice(out InputDevice device))
            return false;
        Vector3 headPos = Vector3.zero;
        if (!device.TryGetFeatureValue(CommonUsages.devicePosition, out headPos))
        {
            Debug.LogError("PXRLog Failed at GetHeadPosMatrix Pos");
            return false;
        }
        Quaternion headRot = Quaternion.identity;
        if (!device.TryGetFeatureValue(CommonUsages.deviceRotation, out headRot))
        {
            Debug.LogError("PXRLog Failed at GetHeadPosMatrix Rot");
            return false;
        }
        matrix = Matrix4x4.TRS(headPos, headRot, Vector3.one);
        return true;
    }

    // Cached eye-tracking device so the device list is only queried once per session.
    static InputDevice curDevice;

    /// <summary>
    /// Gets the input device for eye tracking data.
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="device">The input device returned by the result.</param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    static bool GetEyeTrackingDevice(out InputDevice device)
    {
        // InputDevice is a struct, so the original `curDevice != null` lifted comparison
        // was always true; validity is fully captured by isValid.
        if (curDevice.isValid)
        {
            device = curDevice;
            return true;
        }
        device = default;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        List<InputDevice> devices = new List<InputDevice>();
        InputDevices.GetDevicesWithCharacteristics(InputDeviceCharacteristics.EyeTracking | InputDeviceCharacteristics.HeadMounted, devices);
        if (devices.Count == 0)
        {
            Debug.LogError("PXRLog Failed at GetEyeTrackingDevice devices.Count");
            return false;
        }
        device = devices[0];
        curDevice = device;
        if (!device.isValid)
        {
            Debug.LogError("PXRLog Failed at GetEyeTrackingDevice device.isValid");
        }
        return device.isValid;
    }

    /// <summary>
    /// Gets the position of the center of the eyes in the Unity camera coordinate system (unit: meter).
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="point">Returns a vector3 value which is divided by 1000. To get the original value, multiply the returned value by 1000. Unit: millimeter.</param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool GetCombineEyeGazePoint(out Vector3 point)
    {
        point = Vector3.zero;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        if (!GetEyeTrackingDevice(out InputDevice device))
            return false;
        if (!device.TryGetFeatureValue(PXR_Usages.combineEyePoint, out point))
        {
            // LogError for consistency with every other failure path in this class
            // (was Debug.Log in the original).
            Debug.LogError("PXRLog Failed at GetCombineEyeGazePoint point");
            return false;
        }
        return true;
    }

    /// <summary>
    /// Gets the direction of binocular combined gaze in the Unity camera coordinate system.
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="vector">Returns a vector3 value which is divided by 1000. To get the original value, multiply the returned value by 1000. Unit: millimeter.</param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool GetCombineEyeGazeVector(out Vector3 vector)
    {
        vector = Vector3.zero;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        if (!GetEyeTrackingDevice(out InputDevice device))
            return false;
        if (!device.TryGetFeatureValue(PXR_Usages.combineEyeVector, out vector))
        {
            Debug.LogError("PXRLog Failed at GetCombineEyeGazeVector vector");
            return false;
        }
        return true;
    }

    /// <summary>
    /// Gets the openness/closeness of the left eye.
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="openness">A float value returned by the result. The value ranges from `0.0` to `1.0`. `0.0` indicates completely closed, `1.0` indicates completely open.</param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool GetLeftEyeGazeOpenness(out float openness)
    {
        openness = 0;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        if (!GetEyeTrackingDevice(out InputDevice device))
            return false;
        if (!device.TryGetFeatureValue(PXR_Usages.leftEyeOpenness, out openness))
        {
            Debug.LogError("PXRLog Failed at GetLeftEyeGazeOpenness openness");
            return false;
        }
        return true;
    }

    /// <summary>
    /// Gets the openness/closeness of the right eye.
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="openness">A float value returned by the result. The value ranges from `0.0` to `1.0`. `0.0` indicates completely closed, `1.0` indicates completely open.</param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool GetRightEyeGazeOpenness(out float openness)
    {
        openness = 0;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        if (!GetEyeTrackingDevice(out InputDevice device))
            return false;
        if (!device.TryGetFeatureValue(PXR_Usages.rightEyeOpenness, out openness))
        {
            Debug.LogError("PXRLog Failed at GetRightEyeGazeOpenness openness");
            return false;
        }
        return true;
    }

    /// <summary>
    /// Gets whether the data of the current left eye is available.
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="status">A bitmask of `EyePoseStatus` flags:
    /// - GazePointValid = (1 &lt;&lt; 0)
    /// - GazeVectorValid = (1 &lt;&lt; 1)
    /// - EyeOpennessValid = (1 &lt;&lt; 2)
    /// - EyePupilDilationValid = (1 &lt;&lt; 3)
    /// - EyePositionGuideValid = (1 &lt;&lt; 4)
    /// - EyePupilPositionValid = (1 &lt;&lt; 5)
    /// - EyeConvergenceDistanceValid = (1 &lt;&lt; 6)
    /// - EyeGazePointValid = (1 &lt;&lt; 7)
    /// - EyeGazeVectorValid = (1 &lt;&lt; 8)
    /// - PupilDistanceValid = (1 &lt;&lt; 9)
    /// - ConvergenceDistanceValid = (1 &lt;&lt; 10)
    /// - PupilDiameterValid = (1 &lt;&lt; 11)
    /// </param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool GetLeftEyePoseStatus(out uint status)
    {
        status = 0;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        if (!GetEyeTrackingDevice(out InputDevice device))
            return false;
        if (!device.TryGetFeatureValue(PXR_Usages.leftEyePoseStatus, out status))
        {
            Debug.LogError("PXRLog Failed at GetLeftEyePoseStatus status");
            return false;
        }
        return true;
    }

    /// <summary>
    /// Gets whether the data of the current right eye is available.
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="status">A bitmask of `EyePoseStatus` flags; see <see cref="GetLeftEyePoseStatus"/> for the flag values.</param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool GetRightEyePoseStatus(out uint status)
    {
        status = 0;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        if (!GetEyeTrackingDevice(out InputDevice device))
            return false;
        if (!device.TryGetFeatureValue(PXR_Usages.rightEyePoseStatus, out status))
        {
            Debug.LogError("PXRLog Failed at GetRightEyePoseStatus status");
            return false;
        }
        return true;
    }

    /// <summary>
    /// Gets whether the data of the combined eye is available.
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="status">An int value returned by the result:
    /// `0`: not available
    /// `1`: available
    /// </param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool GetCombinedEyePoseStatus(out uint status)
    {
        status = 0;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        if (!GetEyeTrackingDevice(out InputDevice device))
            return false;
        if (!device.TryGetFeatureValue(PXR_Usages.combinedEyePoseStatus, out status))
        {
            Debug.LogError("PXRLog Failed at GetCombinedEyePoseStatus status");
            return false;
        }
        return true;
    }

    /// <summary>
    /// Gets the position of the left eye in a coordinate system. The upper-right point of the sensor is taken as the origin (0, 0) and the lower-left point is taken as (1, 1).
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="position">A vector3 value returned by the result.</param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool GetLeftEyePositionGuide(out Vector3 position)
    {
        position = Vector3.zero;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        if (!GetEyeTrackingDevice(out InputDevice device))
            return false;
        if (!device.TryGetFeatureValue(PXR_Usages.leftEyePositionGuide, out position))
        {
            Debug.LogError("PXRLog Failed at GetLeftEyePositionGuide pos");
            return false;
        }
        return true;
    }

    /// <summary>
    /// Gets the position of the right eye in a coordinate system. The upper-right point of the sensor is taken as the origin (0, 0) and the lower-left point is taken as (1, 1).
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="position">A vector3 value returned by the result.</param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool GetRightEyePositionGuide(out Vector3 position)
    {
        position = Vector3.zero;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        if (!GetEyeTrackingDevice(out InputDevice device))
            return false;
        if (!device.TryGetFeatureValue(PXR_Usages.rightEyePositionGuide, out position))
        {
            Debug.LogError("PXRLog Failed at GetRightEyePositionGuide pos");
            return false;
        }
        return true;
    }

    /// <summary>
    /// Gets the foveated gaze direction (i.e., the central point of fixed foveated rendering).
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="direction">A vector3 value returned by the result.</param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool GetFoveatedGazeDirection(out Vector3 direction)
    {
        direction = Vector3.zero;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        if (!GetEyeTrackingDevice(out InputDevice device))
            return false;
        if (!device.TryGetFeatureValue(PXR_Usages.foveatedGazeDirection, out direction))
        {
            Debug.LogError("PXRLog Failed at GetFoveatedGazeDirection direction");
            return false;
        }
        return true;
    }

    /// <summary>
    /// Gets whether the current foveated gaze tracking data is available.
    /// @note Only supported by PICO Neo3 Pro Eye, PICO 4 Pro, and PICO 4 Enterprise.
    /// </summary>
    /// <param name="state">An int value returned by the result:
    /// * `0`: not available
    /// * `1`: available
    /// </param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool GetFoveatedGazeTrackingState(out uint state)
    {
        state = 0;
        if (!PXR_Manager.Instance.eyeTracking)
            return false;
        if (!GetEyeTrackingDevice(out InputDevice device))
            return false;
        if (!device.TryGetFeatureValue(PXR_Usages.foveatedGazeTrackingState, out state))
        {
            Debug.LogError("PXRLog Failed at GetFoveatedGazeTrackingState state");
            return false;
        }
        return true;
    }
}
}

View File

@@ -1,11 +0,0 @@
fileFormatVersion: 2
guid: 815321e0da90723458db60e729bdebde
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,91 +0,0 @@
/*******************************************************************************
Copyright © 2015-2022 PICO Technology Co., Ltd. All rights reserved.
NOTICE: All information contained herein is, and remains, the property of
PICO Technology Co., Ltd. The intellectual and technical concepts
contained herein are proprietary to PICO Technology Co., Ltd. and may be
covered by patents, patents in process, and are protected by trade secret or
copyright law. Dissemination of this information or reproduction of this
material is strictly forbidden unless prior written permission is obtained from
PICO Technology Co., Ltd.
*******************************************************************************/
using UnityEngine;
namespace Unity.XR.PXR
{
public class PXR_FoveationRendering
{
    // Lazily created singleton backing field.
    private static PXR_FoveationRendering instance = null;

    /// <summary>
    /// Singleton accessor; creates the instance on first use.
    /// </summary>
    public static PXR_FoveationRendering Instance
    {
        get
        {
            return instance ?? (instance = new PXR_FoveationRendering());
        }
    }

    /// <summary>
    /// Sets a foveated rendering level.
    /// </summary>
    /// <param name="level">Select a foveated rendering level:
    /// * `None`: disable foveated rendering
    /// * `Low`
    /// * `Med`
    /// * `High`
    /// * `TopHigh`
    /// </param>
    /// <param name="isETFR">
    /// Describe if the foveated rendering mode is eye tracked foveated rendering (ETFR):
    /// * `true`: ETFR
    /// * `false`: not ETFR
    /// </param>
    /// <returns>
    /// * `true`: success
    /// * `false`: failure
    /// </returns>
    public static bool SetFoveationLevel(FoveationLevel level, bool isETFR)
    {
        // ETFR and fixed foveation are configured through different plugin entry points.
        return isETFR
            ? PXR_Plugin.Render.UPxr_SetEyeFoveationLevel(level)
            : PXR_Plugin.Render.UPxr_SetFoveationLevel(level);
    }

    /// <summary>
    /// Gets the current foveated rendering level.
    /// </summary>
    /// <returns>The current foveated rendering level:
    /// * `None` (`-1`): foveated rendering disabled
    /// * `Low`
    /// * `Med`
    /// * `High`
    /// * `TopHigh`
    /// </returns>
    public static FoveationLevel GetFoveationLevel()
    {
        return PXR_Plugin.Render.UPxr_GetFoveationLevel();
    }

    /// <summary>
    /// Sets foveated rendering parameters.
    /// </summary>
    /// <param name="foveationGainX">Set the reduction rate of peripheral pixels in the X-axis direction. Value range: [1.0, 10.0], the greater the value, the higher the reduction rate.</param>
    /// <param name="foveationGainY">Set the reduction rate of peripheral pixels in the Y-axis direction. Value range: [1.0, 10.0], the greater the value, the higher the reduction rate.</param>
    /// <param name="foveationArea">Set the range of foveated area whose resolution is not to be reduced. Value range: [0.0, 4.0], the higher the value, the bigger the high-quality central area.</param>
    /// <param name="foveationMinimum">Set the minimum pixel density. Recommended values: 1/32, 1/16, 1/8, 1/4, 1/2. The actual pixel density will be greater than or equal to the value set here.</param>
    public static void SetFoveationParameters(float foveationGainX, float foveationGainY, float foveationArea, float foveationMinimum)
    {
        PXR_Plugin.Render.UPxr_SetFoveationParameters(foveationGainX, foveationGainY, foveationArea, foveationMinimum);
    }
}
}

View File

@@ -1,11 +0,0 @@
fileFormatVersion: 2
guid: 9d0598bf64df1a34e9a5ec19775188d5
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,423 +0,0 @@
/*******************************************************************************
Copyright © 2015-2022 PICO Technology Co., Ltd. All rights reserved.
NOTICE: All information contained herein is, and remains, the property of
PICO Technology Co., Ltd. The intellectual and technical concepts
contained herein are proprietary to PICO Technology Co., Ltd. and may be
covered by patents, patents in process, and are protected by trade secret or
copyright law. Dissemination of this information or reproduction of this
material is strictly forbidden unless prior written permission is obtained from
PICO Technology Co., Ltd.
*******************************************************************************/
using System.Runtime.InteropServices;
using UnityEngine;
#if PICO_LIVE_PREVIEW && UNITY_EDITOR
using Unity.XR.PICO.LivePreview;
#endif
namespace Unity.XR.PXR
{
/// <summary>
/// Identifies which hand a hand-tracking query refers to.
/// </summary>
public enum HandType
{
/// <summary>
/// Left hand.
/// </summary>
HandLeft = 0,
/// <summary>
/// Right hand.
/// </summary>
HandRight = 1,
}
/// <summary>
/// The input device currently driving interaction.
/// </summary>
public enum ActiveInputDevice
{
/// <summary>
/// Head-mounted display (HMD).
/// </summary>
HeadActive = 0,
/// <summary>
/// Motion controllers.
/// </summary>
ControllerActive = 1,
/// <summary>
/// Tracked hands.
/// </summary>
HandTrackingActive = 2,
}
/// <summary>
/// A three-component vector as delivered by the native runtime.
/// </summary>
public struct Vector3f
{
    public float x;
    public float y;
    public float z;

    /// <summary>
    /// Converts to a UnityEngine.Vector3, negating z — presumably to translate between
    /// the native and Unity coordinate conventions (confirm against the native SDK docs).
    /// </summary>
    public Vector3 ToVector3()
    {
        return new Vector3(x, y, -z);
    }
}
/// <summary>
/// A quaternion as delivered by the native runtime.
/// </summary>
public struct Quatf
{
    public float x;
    public float y;
    public float z;
    public float w;

    /// <summary>
    /// Converts to a UnityEngine.Quaternion, negating z and w — presumably to translate
    /// between the native and Unity coordinate conventions (confirm against the native SDK docs).
    /// </summary>
    public Quaternion ToQuat()
    {
        return new Quaternion(x, y, -z, -w);
    }
}
/// <summary>
/// The location (orientation and position) of a hand joint.
/// </summary>
public struct Posef
{
    /// <summary>
    /// The orientation of the hand joint.
    /// </summary>
    public Quatf Orientation;
    /// <summary>
    /// The position of the hand joint.
    /// </summary>
    public Vector3f Position;

    /// <summary>
    /// Human-readable dump of the raw orientation and position components.
    /// </summary>
    public override string ToString()
    {
        return $"Orientation :{Orientation.x}, {Orientation.y}, {Orientation.z}, {Orientation.w} Position: {Position.x}, {Position.y}, {Position.z}";
    }
}
/// <summary>
/// The status of ray and fingers.
/// Used as a bitmask (values are distinct powers of two).
/// NOTE(review): consider adding the [Flags] attribute.
/// </summary>
public enum HandAimStatus : ulong
{
/// <summary>
/// Whether the data is valid.
/// </summary>
AimComputed = 0x00000001,
/// <summary>
/// Whether the ray appears.
/// </summary>
AimRayValid = 0x00000002,
/// <summary>
/// Whether the index finger pinches.
/// </summary>
AimIndexPinching = 0x00000004,
/// <summary>
/// Whether the middle finger pinches.
/// </summary>
AimMiddlePinching = 0x00000008,
/// <summary>
/// Whether the ring finger pinches.
/// </summary>
AimRingPinching = 0x00000010,
/// <summary>
/// Whether the little finger pinches.
/// </summary>
AimLittlePinching = 0x00000020,
/// <summary>
/// Whether the ray touches.
/// </summary>
AimRayTouched = 0x00000200
}
/// <summary>
/// The data about the poses of ray and fingers.
/// Appears to be an interop struct filled by the native plugin (see PXR_HandTracking.GetAimState).
/// </summary>
public struct HandAimState
{
/// <summary>
/// The status of hand tracking. If it is not `tracked`, confidence will be `0`.
/// </summary>
public HandAimStatus aimStatus;
/// <summary>
/// The pose of the ray.
/// </summary>
public Posef aimRayPose;
/// <summary>
/// The strength of index finger's pinch.
/// NOTE(review): this and the three fields below are private, so callers cannot read
/// them despite being documented — confirm whether they should be public.
/// </summary>
private float pinchStrengthIndex;
/// <summary>
/// The strength of middle finger's pinch.
/// </summary>
private float pinchStrengthMiddle;
/// <summary>
/// The strength of ring finger's pinch.
/// </summary>
private float pinchStrengthRing;
/// <summary>
/// The strength of little finger's pinch.
/// </summary>
private float pinchStrengthLittle;
/// <summary>
/// The strength of ray's touch.
/// </summary>
public float touchStrengthRay;
}
/// <summary>
/// The data about the status of hand joint location.
/// Used as a bitmask (values are distinct powers of two).
/// NOTE(review): consider adding the [Flags] attribute.
/// </summary>
public enum HandLocationStatus : ulong
{
/// <summary>
/// Whether the joint's orientation is valid.
/// </summary>
OrientationValid = 0x00000001,
/// <summary>
/// Whether the joint's position is valid.
/// </summary>
PositionValid = 0x00000002,
/// <summary>
/// Whether the joint's orientation is being tracked.
/// </summary>
OrientationTracked = 0x00000004,
/// <summary>
/// Whether the joint's position is being tracked.
/// </summary>
PositionTracked = 0x00000008
}
/// <summary>
/// Indices of the tracked hand joints, ordered palm/wrist first, then each finger
/// from metacarpal to tip. `JointMax` is the element count (26), used to size
/// the joint-location array in <see cref="HandJointLocations"/>.
/// </summary>
public enum HandJoint
{
JointPalm = 0,
JointWrist = 1,
JointThumbMetacarpal = 2,
JointThumbProximal = 3,
JointThumbDistal = 4,
JointThumbTip = 5,
JointIndexMetacarpal = 6,
JointIndexProximal = 7,
JointIndexIntermediate = 8,
JointIndexDistal = 9,
JointIndexTip = 10,
JointMiddleMetacarpal = 11,
JointMiddleProximal = 12,
JointMiddleIntermediate = 13,
JointMiddleDistal = 14,
JointMiddleTip = 15,
JointRingMetacarpal = 16,
JointRingProximal = 17,
JointRingIntermediate = 18,
JointRingDistal = 19,
JointRingTip = 20,
JointLittleMetacarpal = 21,
JointLittleProximal = 22,
JointLittleIntermediate = 23,
JointLittleDistal = 24,
JointLittleTip = 25,
JointMax = 26
}
/// <summary>
/// The data about the location of a single hand joint.
/// </summary>
public struct HandJointLocation
{
/// <summary>
/// The status of hand joint location (validity/tracking bit flags).
/// </summary>
public HandLocationStatus locationStatus;
/// <summary>
/// The orientation and position of hand joint.
/// </summary>
public Posef pose;
/// <summary>
/// The radius of hand joint.
/// NOTE(review): units are not stated in this file — presumably meters; confirm with the SDK docs.
/// </summary>
public float radius;
}
/// <summary>
/// The data about hand tracking (all joints of one hand).
/// </summary>
public struct HandJointLocations
{
/// <summary>
/// The quality level of hand tracking:
/// `0`: low
/// `1`: high
/// NOTE(review): documented as a quality level despite the name `isActive` — confirm.
/// </summary>
public uint isActive;
/// <summary>
/// The number of hand joints that the SDK supports. Currently returns `26`.
/// </summary>
public uint jointCount;
/// <summary>
/// The scale of the hand.
/// </summary>
public float handScale;
/// <summary>
/// The locations (orientation and position) of hand joints.
/// Fixed-size for native marshaling: exactly HandJoint.JointMax (26) entries.
/// </summary>
[MarshalAs(UnmanagedType.ByValArray, SizeConst = (int)HandJoint.JointMax)]
public HandJointLocation[] jointLocations;
}
/// <summary>
/// Identifies a finger of a tracked hand.
/// </summary>
public enum HandFinger
{
Thumb = 0,
Index = 1,
Middle = 2,
Ring = 3,
Pinky = 4
}
public static class PXR_HandTracking
{
/// <summary>Gets whether hand tracking is enabled or disabled.</summary>
/// <returns>
/// * `true`: enabled
/// * `false`: disabled
/// </returns>
public static bool GetSettingState()
{
#if PICO_LIVE_PREVIEW && UNITY_EDITOR
// Editor live-preview path; when this symbol is defined the device query below is unreachable.
return PXR_PTApi.UPxr_GetSettingState();
#endif
return PXR_Plugin.HandTracking.UPxr_GetHandTrackerSettingState();
}
/// <summary>Gets the current active input device.</summary>
/// <returns>The current active input device:
/// * `HeadActive`: HMD
/// * `ControllerActive`: controllers
/// * `HandTrackingActive`: hands
/// </returns>
public static ActiveInputDevice GetActiveInputDevice()
{
#if PICO_LIVE_PREVIEW && UNITY_EDITOR
// Editor live-preview path can only distinguish hands vs. controllers (never HeadActive).
return PXR_PTApi.UPxr_GetGetHandTrackerActiveState() ? ActiveInputDevice.HandTrackingActive : ActiveInputDevice.ControllerActive;
#endif
return PXR_Plugin.HandTracking.UPxr_GetHandTrackerActiveInputType();
}
/// <summary>Gets the data about the pose of a specified hand, including the status of the ray and fingers, the strength of finger pinch and ray touch.</summary>
/// <param name="hand">The hand to get data for:
/// * `HandLeft`: left hand
/// * `HandRight`: right hand
/// </param>
/// <param name="aimState">`HandAimState` contains the data about the poses of ray and fingers.
/// If you use PICO hand prefabs without changing any of their default settings, you will get the following data:
/// ```csharp
/// public class PXR_Hand
/// {
/// // Whether the data is valid.
/// public bool Computed { get; private set; }
/// // The ray pose.
/// public Posef RayPose { get; private set; }
/// // Whether the ray was displayed.
/// public bool RayValid { get; private set; }
/// // Whether the ray pinched.
/// public bool Pinch { get; private set; }
/// // The strength of ray pinch.
/// public float PinchStrength { get; private set; }
/// ```
/// </param>
/// <returns>
/// * `true`: success
/// * `false`: failure
/// </returns>
public static bool GetAimState(HandType hand, ref HandAimState aimState)
{
// Hand tracking must be enabled in the project settings for any data to be available.
if (!PXR_ProjectSetting.GetProjectConfig().handTracking)
return false;
#if PICO_LIVE_PREVIEW && UNITY_EDITOR
// Editor live-preview path: copy the preview SDK's aim state field-by-field into
// this assembly's HandAimState. Note the private pinch-strength fields are NOT
// copied here — only aimStatus, touchStrengthRay, and the ray pose.
PICO.LivePreview.HandAimState lPHandAimState = new PICO.LivePreview.HandAimState();
PXR_PTApi.UPxr_GetHandTrackerAimState((int)hand, ref lPHandAimState);
aimState.aimStatus = (HandAimStatus)lPHandAimState.aimStatus;
aimState.touchStrengthRay = lPHandAimState.touchStrengthRay;
aimState.aimRayPose.Position.x = lPHandAimState.aimRayPose.Position.x;
aimState.aimRayPose.Position.y = lPHandAimState.aimRayPose.Position.y;
aimState.aimRayPose.Position.z = lPHandAimState.aimRayPose.Position.z;
aimState.aimRayPose.Orientation.x = lPHandAimState.aimRayPose.Orientation.x;
aimState.aimRayPose.Orientation.y = lPHandAimState.aimRayPose.Orientation.y;
aimState.aimRayPose.Orientation.z = lPHandAimState.aimRayPose.Orientation.z;
aimState.aimRayPose.Orientation.w = lPHandAimState.aimRayPose.Orientation.w;
// NOTE(review): live-preview path always reports success regardless of the native call's result.
return true;
#endif
return PXR_Plugin.HandTracking.UPxr_GetHandTrackerAimState(hand, ref aimState);
}
/// <summary>Gets the locations of joints for a specified hand.</summary>
/// <param name="hand">The hand to get joint locations for:
/// * `HandLeft`: left hand
/// * `HandRight`: right hand
/// </param>
/// <param name="jointLocations">Contains data about the locations of the joints in the specified hand.</param>
/// <returns>
/// * `true`: success
/// * `false`: failure
/// </returns>
public static bool GetJointLocations(HandType hand, ref HandJointLocations jointLocations)
{
if (!PXR_ProjectSetting.GetProjectConfig().handTracking)
return false;
#if PICO_LIVE_PREVIEW && UNITY_EDITOR
PICO.LivePreview.HandJointLocations lPHandJointLocations = new PICO.LivePreview.HandJointLocations();
PXR_PTApi.UPxr_GetHandTrackerJointLocations((int)hand,ref lPHandJointLocations);
jointLocations.handScale = lPHandJointLocations.handScale;
jointLocations.isActive = lPHandJointLocations.isActive;
jointLocations.jointCount = lPHandJointLocations.jointCount;
jointLocations.jointLocations = new HandJointLocation[lPHandJointLocations.jointCount];
for (int i = 0; i < lPHandJointLocations.jointCount; i++)
{
jointLocations.jointLocations[i].locationStatus = (HandLocationStatus)lPHandJointLocations.jointLocations[i].locationStatus;
jointLocations.jointLocations[i].radius = lPHandJointLocations.jointLocations[i].radius;
jointLocations.jointLocations[i].pose.Position.x = lPHandJointLocations.jointLocations[i].pose.Position.x;
jointLocations.jointLocations[i].pose.Position.y = lPHandJointLocations.jointLocations[i].pose.Position.y;
jointLocations.jointLocations[i].pose.Position.z = lPHandJointLocations.jointLocations[i].pose.Position.z;
jointLocations.jointLocations[i].pose.Orientation.x = lPHandJointLocations.jointLocations[i].pose.Orientation.x;
jointLocations.jointLocations[i].pose.Orientation.y = lPHandJointLocations.jointLocations[i].pose.Orientation.y;
jointLocations.jointLocations[i].pose.Orientation.z = lPHandJointLocations.jointLocations[i].pose.Orientation.z;
jointLocations.jointLocations[i].pose.Orientation.w = lPHandJointLocations.jointLocations[i].pose.Orientation.w;
}
return true;
#endif
return PXR_Plugin.HandTracking.UPxr_GetHandTrackerJointLocations(hand, ref jointLocations);
}
/// <summary>
/// Gets the scaling ratio of the hand model.
/// </summary>
/// <param name="hand">Specifies the hand to get scaling ratio for:
/// * `HandLeft`: left hand
/// * `HandRight`: right hand
/// </param>
/// <param name="scale">Returns the scaling ratio for the specified hand.</param>
/// <returns>
/// * `true`: success
/// * `false`: failure
/// </returns>
public static bool GetHandScale(HandType hand,ref float scale)
{
return PXR_Plugin.HandTracking.UPxr_GetHandScale((int)hand, ref scale);
}
}
}

View File

@@ -1,11 +0,0 @@
fileFormatVersion: 2
guid: a6243041ddd1c214c84a8c5abef6c24a
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,11 +0,0 @@
fileFormatVersion: 2
guid: ce57129cda8427d4eaacb7990f5f40de
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,86 +0,0 @@
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR;
namespace Unity.XR.PXR
{
[Serializable]
/// <summary>
/// Marks the attached camera's head transform for late latching via the XR display
/// subsystem, reducing perceived head-pose latency. Requires Unity 2020.3+.
/// Fixes: null guards added in OnPreRender/OnPostRender (the subsystem may not be
/// resolved yet, unlike Update which already guarded); removed the unused
/// s_DisplaySubsystems field and the empty OnDisable/FixedUpdate/LateUpdate
/// callbacks (Unity invokes empty magic methods, costing per-frame overhead).
/// </summary>
public class PXR_LateLatching : MonoBehaviour
{
#if UNITY_2020_3_OR_NEWER
    // Camera whose transform is late-latched each frame.
    private Camera m_LateLatchingCamera;
    // Cached XR display subsystem; resolved in OnEnable and lazily in Update.
    static XRDisplaySubsystem s_DisplaySubsystem = null;

    private void Awake()
    {
        m_LateLatchingCamera = GetComponent<Camera>();
    }

    private void OnEnable()
    {
        List<XRDisplaySubsystem> displaySubsystems = new List<XRDisplaySubsystem>();
        SubsystemManager.GetInstances(displaySubsystems);
        Debug.Log("PXR_U OnEnable() displaySubsystems.Count = " + displaySubsystems.Count);
        if (displaySubsystems.Count > 0)
        {
            // Same outcome as the original loop: keep the last subsystem found.
            s_DisplaySubsystem = displaySubsystems[displaySubsystems.Count - 1];
        }
    }

    void Update()
    {
        // Lazily resolve the subsystem in case it was not available in OnEnable.
        if (s_DisplaySubsystem == null)
        {
            List<XRDisplaySubsystem> displaySubsystems = new List<XRDisplaySubsystem>();
            SubsystemManager.GetInstances(displaySubsystems);
            if (displaySubsystems.Count > 0)
            {
                s_DisplaySubsystem = displaySubsystems[0];
            }
        }
        if (null == s_DisplaySubsystem)
            return;
        s_DisplaySubsystem.MarkTransformLateLatched(m_LateLatchingCamera.transform, XRDisplaySubsystem.LateLatchNode.Head);
    }

#if !UNITY_EDITOR
    private void OnPreRender()
    {
        // Guard against a not-yet-resolved subsystem (original would NRE here).
        if (null == s_DisplaySubsystem || null == m_LateLatchingCamera)
            return;
        s_DisplaySubsystem.BeginRecordingIfLateLatched(m_LateLatchingCamera);
    }

    private void OnPostRender()
    {
        if (null == s_DisplaySubsystem || null == m_LateLatchingCamera)
            return;
        s_DisplaySubsystem.EndRecordingIfLateLatched(m_LateLatchingCamera);
    }
#endif
#endif
}
}

View File

@@ -1,11 +0,0 @@
fileFormatVersion: 2
guid: b0d77cf58f760874892e934648a878f2
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,11 +0,0 @@
fileFormatVersion: 2
guid: 2d6132037afd3fe4abfa4282efd18bd4
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,11 +0,0 @@
fileFormatVersion: 2
guid: 575693a8c0449a04f82b773bf343dcef
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,842 +0,0 @@
/*******************************************************************************
Copyright © 2015-2022 PICO Technology Co., Ltd.All rights reserved.
NOTICE: All information contained herein is, and remains the property of
PICO Technology Co., Ltd. The intellectual and technical concepts
contained herein are proprietary to PICO Technology Co., Ltd. and may be
covered by patents, patents in process, and are protected by trade secret or
copyright law. Dissemination of this information or reproduction of this
material is strictly forbidden unless prior written permission is obtained from
PICO Technology Co., Ltd.
*******************************************************************************/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using UnityEditor;
using UnityEngine;
using UnityEngine.Experimental.Rendering;
using UnityEngine.Rendering;
using UnityEngine.XR;
namespace Unity.XR.PXR
{
/// <summary>
/// A compositor layer (overlay/underlay) rendered by the PXR runtime outside the
/// normal eye-buffer path. Manages the native layer's lifetime: creation
/// (InitializeBuffer), per-frame texture copies (CopyRT), pose updates
/// (UpdateCoords) and destruction (DestroyLayer). Layers can share swapchain
/// images via the "clone" mechanism (isClones/originalOverLay), or be backed by
/// an external Android Surface for video playback.
/// NOTE(review): this class's swapchain/marshaling lifecycle is order-sensitive;
/// comments below describe only what the visible code does.
/// </summary>
public class PXR_OverLay : MonoBehaviour, IComparable<PXR_OverLay>
{
    private const string TAG = "[PXR_CompositeLayers]";

    // All live overlay instances; sorted/iterated externally (see CompareTo).
    public static List<PXR_OverLay> Instances = new List<PXR_OverLay>();
    // Monotonically increasing source for unique native layer ids.
    private static int overlayID = 0;

    [NonSerialized]
    public int overlayIndex;            // this instance's native layer id (assigned in InitializeBuffer)
    public int layerDepth;              // sort key for compositing order
    public int imageIndex = 0;          // current swapchain image index
    public OverlayType overlayType = OverlayType.Overlay;
    public OverlayShape overlayShape = OverlayShape.Quad;
    public TextureType textureType = TextureType.ExternalSurface;
    public Transform overlayTransform;  // transform driving the layer pose
    public Camera xrRig;                // main camera, captured in Awake
    public Texture[] layerTextures = new Texture[2] { null, null };  // [left, right] source textures
    public bool isPremultipliedAlpha = false;
    public bool isDynamic = false;      // true: re-copy the texture every frame
    public int[] overlayTextureIds = new int[2];
    public Matrix4x4[] mvMatrixs = new Matrix4x4[2];
    public Vector3[] modelScales = new Vector3[2];
    public Quaternion[] modelRotations = new Quaternion[2];
    public Vector3[] modelTranslations = new Vector3[2];
    public Quaternion[] cameraRotations = new Quaternion[2];
    public Vector3[] cameraTranslations = new Vector3[2];
    public Camera[] overlayEyeCamera = new Camera[2];  // [left, right] eye cameras
    public bool overrideColorScaleAndOffset = false;
    public Vector4 colorScale = Vector4.one;
    public Vector4 colorOffset = Vector4.zero;

    // EAC (equi-angular cubemap) per-eye pose offsets.
    public Vector3 offsetPosLeft = Vector3.zero;
    public Vector3 offsetPosRight = Vector3.zero;
    public Vector4 offsetRotLeft = new Vector4(0, 0, 0, 1);
    public Vector4 offsetRotRight = new Vector4(0, 0, 0, 1);
    public EACModelType eacModelType = EACModelType.Eac360;
    public float overlapFactor = 1.0f;
    public ulong timestamp = 0;

    private Vector4 overlayLayerColorScaleDefault = Vector4.one;
    private Vector4 overlayLayerColorOffsetDefault = Vector4.zero;

    // External Android Surface backing (e.g. for DRM video).
    public bool isExternalAndroidSurface = false;
    public bool isExternalAndroidSurfaceDRM = false;
    public Surface3DType externalAndroidSurface3DType = Surface3DType.Single;

    #region Blurred Quad
    public BlurredQuadMode blurredQuadMode = BlurredQuadMode.SmallWindow;
    public float blurredQuadScale = 0.5f;
    public float blurredQuadShift = 0.01f;
    public float blurredQuadFOV = 70.0f;
    public float blurredQuadIPD = 0.064f;
    #endregion

    public IntPtr externalAndroidSurfaceObject = IntPtr.Zero;
    public delegate void ExternalAndroidSurfaceObjectCreated();
    // Invoked once the native Android Surface handle becomes available.
    public ExternalAndroidSurfaceObjectCreated externalAndroidSurfaceObjectCreated = null;

    // 360 shapes: sphere radius, expected > 0.
    public float radius = 0; // >0

    // ImageRect: optional source/destination sub-rect cropping.
    public bool useImageRect = false;
    public TextureRect textureRect = TextureRect.StereoScopic;
    public DestinationRect destinationRect = DestinationRect.Default;
    public Rect srcRectLeft = new Rect(0, 0, 1, 1);
    public Rect srcRectRight = new Rect(0, 0, 1, 1);
    public Rect dstRectLeft = new Rect(0, 0, 1, 1);
    public Rect dstRectRight = new Rect(0, 0, 1, 1);
    public PxrRecti imageRectLeft;
    public PxrRecti imageRectRight;

    // LayerBlend: optional custom blend factors for layer composition.
    public bool useLayerBlend = false;
    public PxrBlendFactor srcColor = PxrBlendFactor.PxrBlendFactorOne;
    public PxrBlendFactor dstColor = PxrBlendFactor.PxrBlendFactorOne;
    public PxrBlendFactor srcAlpha = PxrBlendFactor.PxrBlendFactorOne;
    public PxrBlendFactor dstAlpha = PxrBlendFactor.PxrBlendFactorOne;

    // Two 3x3 identity color matrices, one per eye (left then right).
    public float[] colorMatrix = new float[18] {
        1,0,0, // left
        0,1,0,
        0,0,1,
        1,0,0, // right
        0,1,0,
        0,0,1,
    };

    // Clone support: a clone shares swapchain images with originalOverLay.
    public bool isClones = false;
    public bool isClonesToNew = false;  // transient flag while the original is being re-created
    public bool enableSubmitLayer = true;
    public PXR_OverLay originalOverLay;
    public IntPtr layerSubmitPtr = IntPtr.Zero;

    public APIExecutionStatus Quad2Status = APIExecutionStatus.None;
    public APIExecutionStatus Cylinder2Status = APIExecutionStatus.None;
    public APIExecutionStatus Equirect2Status = APIExecutionStatus.None;

    // Internal state machine: create swapchain -> copy RT -> copied.
    private bool toCreateSwapChain = false;
    private bool toCopyRT = false;
    private bool copiedRT = false;
    private int eyeCount = 2;           // 1 for mono layout, 2 for stereo
    private UInt32 imageCounts = 0;     // swapchain images per eye
    private PxrLayerParam overlayParam = new PxrLayerParam();

    // Wraps the per-eye array of external textures created over native swapchain images.
    private struct NativeTexture
    {
        public Texture[] textures;
    };
    private NativeTexture[] nativeTextures;

    private static Material cubeM;      // blit material for cubemap faces
    private IntPtr leftPtr = IntPtr.Zero;   // marshaled shared-image id (clones only)
    private IntPtr rightPtr = IntPtr.Zero;  // marshaled shared-image id (clones only)
    private static Material textureM;   // blit material for 2D textures
    public HDRFlags hdr = HDRFlags.None;

    /// <summary>Orders overlays by layerDepth for compositing.</summary>
    public int CompareTo(PXR_OverLay other)
    {
        return layerDepth.CompareTo(other.layerDepth);
    }

    /// <summary>
    /// Registers this overlay, ensures a PXR_OverlayManager exists on the main
    /// camera, hides the placeholder MeshRenderer on device, and (for non-clones)
    /// creates the native layer immediately.
    /// </summary>
    protected void Awake()
    {
        xrRig = Camera.main;
        Instances.Add(this);
        if (null == xrRig.gameObject.GetComponent<PXR_OverlayManager>())
        {
            xrRig.gameObject.AddComponent<PXR_OverlayManager>();
        }
        overlayEyeCamera[0] = xrRig;
        overlayEyeCamera[1] = xrRig;
        overlayTransform = GetComponent<Transform>();
#if UNITY_ANDROID && !UNITY_EDITOR
        // On device the layer is composited natively; hide the in-scene mesh.
        if (overlayTransform != null)
        {
            MeshRenderer render = overlayTransform.GetComponent<MeshRenderer>();
            if (render != null)
            {
                render.enabled = false;
            }
        }
#endif
        if (!isClones)
        {
            InitializeBuffer();
        }
    }

    /// <summary>
    /// Clones initialize in Start (after the original's Awake). Also binds the
    /// per-eye cameras from PXR_Manager when available.
    /// </summary>
    private void Start()
    {
        if (isClones)
        {
            InitializeBuffer();
        }
        if (PXR_Manager.Instance == null)
        {
            return;
        }
        Camera[] cam = PXR_Manager.Instance.GetEyeCamera();
        // cam[0] appears to be a combined camera; cam[1]/cam[2] per-eye — TODO confirm against PXR_Manager.
        if (cam[0] != null && cam[0].enabled)
        {
            RefreshCamera(cam[0], cam[0]);
        }
        else if (cam[1] != null && cam[2] != null)
        {
            RefreshCamera(cam[1], cam[2]);
        }
    }

    /// <summary>Sets the cameras used for per-eye pose computation in UpdateCoords.</summary>
    public void RefreshCamera(Camera leftCamera, Camera rightCamera)
    {
        overlayEyeCamera[0] = leftCamera;
        overlayEyeCamera[1] = rightCamera;
    }

    /// <summary>
    /// Builds the native layer creation parameters (size, format, flags, layout)
    /// and creates the native layer. External-surface layers are created
    /// synchronously; texture layers defer swapchain texture creation to
    /// CreateTexture().
    /// </summary>
    private void InitializeBuffer()
    {
        if (!isExternalAndroidSurface && !isClones)
        {
            // Mirror a missing eye texture from the other eye; bail if both are missing.
            if (null == layerTextures[0] && null == layerTextures[1])
            {
                PLog.e(TAG, " The left and right images are all empty!");
                return;
            }
            else if (null == layerTextures[0] && null != layerTextures[1])
            {
                layerTextures[0] = layerTextures[1];
            }
            else if (null != layerTextures[0] && null == layerTextures[1])
            {
                layerTextures[1] = layerTextures[0];
            }
            overlayParam.width = (uint)layerTextures[1].width;
            overlayParam.height = (uint)layerTextures[1].height;
        }
        else
        {
            // No source texture to size from; use a fixed default.
            overlayParam.width = 1024;
            overlayParam.height = 1024;
        }

        overlayID++;
        overlayIndex = overlayID;
        overlayParam.layerId = overlayIndex;
        overlayParam.layerShape = overlayShape == 0 ? OverlayShape.Quad : overlayShape;
        overlayParam.layerType = overlayType;
        overlayParam.arraySize = 1;
        overlayParam.mipmapCount = 1;
        overlayParam.sampleCount = 1;
        overlayParam.layerFlags = 0;

        // Pick an sRGB format in linear color space, per graphics API.
        if (GraphicsDeviceType.Vulkan == SystemInfo.graphicsDeviceType)
        {
            overlayParam.format = QualitySettings.activeColorSpace == ColorSpace.Linear ? (UInt64)ColorForamt.VK_FORMAT_R8G8B8A8_SRGB : (UInt64)RenderTextureFormat.Default;
        }
        else
        {
            overlayParam.format = QualitySettings.activeColorSpace == ColorSpace.Linear ? (UInt64)ColorForamt.GL_SRGB8_ALPHA8 : (UInt64)RenderTextureFormat.Default;
        }

        if (OverlayShape.Cubemap == overlayShape)
        {
            overlayParam.faceCount = 6;
            if (cubeM == null)
                cubeM = new Material(Shader.Find("PXR_SDK/PXR_CubemapBlit"));
        }
        else
        {
            overlayParam.faceCount = 1;
            if (textureM == null)
                textureM = new Material(Shader.Find("PXR_SDK/PXR_Texture2DBlit"));
        }

        if (isClones)
        {
            if (null != originalOverLay)
            {
                // Share swapchain images with the original; the native side reads
                // the original's layer id from the marshaled pointers.
                overlayParam.layerFlags |= (UInt32)PxrLayerCreateFlags.PxrLayerFlagSharedImagesBetweenLayers;
                leftPtr = Marshal.AllocHGlobal(Marshal.SizeOf(originalOverLay.overlayIndex));
                rightPtr = Marshal.AllocHGlobal(Marshal.SizeOf(originalOverLay.overlayIndex));
                Marshal.WriteInt64(leftPtr, originalOverLay.overlayIndex);
                Marshal.WriteInt64(rightPtr, originalOverLay.overlayIndex);
                overlayParam.leftExternalImages = leftPtr;
                overlayParam.rightExternalImages = rightPtr;
                isExternalAndroidSurface = originalOverLay.isExternalAndroidSurface;
                isDynamic = originalOverLay.isDynamic;
                overlayParam.width = (UInt32)Mathf.Min(overlayParam.width, originalOverLay.overlayParam.width);
                overlayParam.height = (UInt32)Mathf.Min(overlayParam.height, originalOverLay.overlayParam.height);
            }
            else
            {
                PLog.e(TAG, "In clone state, originalOverLay cannot be empty!");
            }
        }

        if (isExternalAndroidSurface)
        {
            if (isExternalAndroidSurfaceDRM)
            {
                overlayParam.layerFlags |= (UInt32)(PxrLayerCreateFlags.PxrLayerFlagAndroidSurface | PxrLayerCreateFlags.PxrLayerFlagProtectedContent);
            }
            else
            {
                overlayParam.layerFlags |= (UInt32)PxrLayerCreateFlags.PxrLayerFlagAndroidSurface;
            }
            if (Surface3DType.LeftRight == externalAndroidSurface3DType)
            {
                overlayParam.layerFlags |= (UInt32)PxrLayerCreateFlags.PxrLayerFlag3DLeftRightSurface;
            }
            else if (Surface3DType.TopBottom == externalAndroidSurface3DType)
            {
                overlayParam.layerFlags |= (UInt32)PxrLayerCreateFlags.PxrLayerFlag3DTopBottomSurface;
            }
            overlayParam.layerLayout = LayerLayout.Mono;
            PLog.i(TAG, $"UPxr_CreateLayer() overlayParam.layerId={overlayParam.layerId}, layerShape={overlayParam.layerShape}, layerType={overlayParam.layerType}, width={overlayParam.width}, height={overlayParam.height}, layerFlags={overlayParam.layerFlags}, format={overlayParam.format}, layerLayout={overlayParam.layerLayout}.");
            // Marshal the param struct to unmanaged memory for the native call, then free it.
            IntPtr layerParamPtr = Marshal.AllocHGlobal(Marshal.SizeOf(overlayParam));
            Marshal.StructureToPtr(overlayParam, layerParamPtr, false);
            PXR_Plugin.Render.UPxr_CreateLayer(layerParamPtr);
            Marshal.FreeHGlobal(layerParamPtr);
        }
        else
        {
            if (!isDynamic)
            {
                overlayParam.layerFlags |= (UInt32)PxrLayerCreateFlags.PxrLayerFlagStaticImage;
            }
            // Same texture for both eyes (or only one provided) -> mono layout, single eye.
            if ((layerTextures[0] != null && layerTextures[1] != null && layerTextures[0] == layerTextures[1]) || null == layerTextures[1])
            {
                eyeCount = 1;
                overlayParam.layerLayout = LayerLayout.Mono;
            }
            else
            {
                eyeCount = 2;
                overlayParam.layerLayout = LayerLayout.Stereo;
            }
            PXR_Plugin.Render.UPxr_CreateLayerParam(overlayParam);
            toCreateSwapChain = true;
            CreateTexture();
        }
    }

    /// <summary>
    /// (Android device only) Fetches the native Android Surface handle for an
    /// external-surface layer and invokes the creation callback, if any.
    /// No-op if the handle already exists.
    /// </summary>
    public void CreateExternalSurface(PXR_OverLay overlayInstance)
    {
#if UNITY_ANDROID && !UNITY_EDITOR
        if (IntPtr.Zero != overlayInstance.externalAndroidSurfaceObject)
        {
            return;
        }
        PXR_Plugin.Render.UPxr_GetLayerAndroidSurface(overlayInstance.overlayIndex, 0, ref overlayInstance.externalAndroidSurfaceObject);
        PLog.i(TAG, string.Format("CreateExternalSurface: Overlay Type:{0}, LayerDepth:{1}, SurfaceObject:{2}", overlayInstance.overlayType, overlayInstance.overlayIndex, overlayInstance.externalAndroidSurfaceObject));
        if (IntPtr.Zero == overlayInstance.externalAndroidSurfaceObject || null == overlayInstance.externalAndroidSurfaceObjectCreated)
        {
            return;
        }
        overlayInstance.externalAndroidSurfaceObjectCreated();
#endif
    }

    /// <summary>
    /// Recomputes per-eye model-view matrices, model scale/rotation/translation
    /// and camera poses from the overlay transform. RectTransforms use the UI
    /// rect (scaled by lossyScale) as the model size.
    /// </summary>
    public void UpdateCoords()
    {
        if (null == overlayTransform || !overlayTransform.gameObject.activeSelf || null == overlayEyeCamera[0] || null == overlayEyeCamera[1])
        {
            return;
        }
        for (int i = 0; i < mvMatrixs.Length; i++)
        {
            mvMatrixs[i] = overlayEyeCamera[i].worldToCameraMatrix * overlayTransform.localToWorldMatrix;
            if (overlayTransform is RectTransform uiTransform)
            {
                var rect = uiTransform.rect;
                var lossyScale = overlayTransform.lossyScale;
                modelScales[i] = new Vector3(rect.width * lossyScale.x,
                    rect.height * lossyScale.y, 1);
                modelTranslations[i] = uiTransform.TransformPoint(rect.center);
            }
            else
            {
                modelScales[i] = overlayTransform.lossyScale;
                modelTranslations[i] = overlayTransform.position;
            }
            modelRotations[i] = overlayTransform.rotation;
            cameraRotations[i] = overlayEyeCamera[i].transform.rotation;
            cameraTranslations[i] = overlayEyeCamera[i].transform.position;
        }
    }

    /// <summary>
    /// Wraps the native swapchain images in Unity external Textures (one array
    /// per eye). Returns false and leaves state pending if any image is not yet
    /// available, so it can be retried.
    /// </summary>
    public bool CreateTexture()
    {
        if (!toCreateSwapChain)
        {
            return false;
        }
        if (null == nativeTextures)
            nativeTextures = new NativeTexture[eyeCount];
        for (int i = 0; i < eyeCount; i++)
        {
            int ret = PXR_Plugin.Render.UPxr_GetLayerImageCount(overlayIndex, (EyeType)i, ref imageCounts);
            if (ret != 0 || imageCounts < 1)
            {
                return false;
            }
            if (null == nativeTextures[i].textures)
            {
                nativeTextures[i].textures = new Texture[imageCounts];
            }
            for (int j = 0; j < imageCounts; j++)
            {
                IntPtr ptr = IntPtr.Zero;
                PXR_Plugin.Render.UPxr_GetLayerImagePtr(overlayIndex, (EyeType)i, j, ref ptr);
                if (IntPtr.Zero == ptr)
                {
                    return false;
                }
                Texture texture;
                if (OverlayShape.Cubemap == overlayShape)
                {
                    texture = Cubemap.CreateExternalTexture((int)overlayParam.width, TextureFormat.RGBA32, false, ptr);
                }
                else
                {
                    texture = Texture2D.CreateExternalTexture((int)overlayParam.width, (int)overlayParam.height, TextureFormat.RGBA32, false, true, ptr);
                }
                if (null == texture)
                {
                    return false;
                }
                nativeTextures[i].textures[j] = texture;
            }
        }
        toCreateSwapChain = false;
        toCopyRT = true;
        copiedRT = false;
        FreePtr();
        return true;
    }

    /// <summary>
    /// Copies the source layerTextures into the current native swapchain image.
    /// Static layers copy once; dynamic layers copy every call. Uses a direct
    /// CopyTexture in gamma space for ARGB32 render textures, otherwise blits
    /// through a temporary sRGB render texture (handles premultiplied alpha and
    /// cubemap faces).
    /// </summary>
    public bool CopyRT()
    {
        if (isClones)
        {
            // Clones share the original's images; nothing to copy.
            return true;
        }
        if (!toCopyRT)
        {
            return copiedRT;
        }
        if (!isDynamic && copiedRT)
        {
            // Static layer already uploaded.
            return copiedRT;
        }
        if (null == nativeTextures)
        {
            return false;
        }
        if (GraphicsDeviceType.Vulkan != SystemInfo.graphicsDeviceType)
        {
            // On GL the next image index is advanced from the render thread.
            if (enableSubmitLayer)
            {
                PXR_Plugin.Render.UPxr_GetLayerNextImageIndexByRender(overlayIndex, ref imageIndex);
            }
        }
        for (int i = 0; i < eyeCount; i++)
        {
            Texture nativeTexture = nativeTextures[i].textures[imageIndex];
            if (null == nativeTexture || null == layerTextures[i])
                continue;
            RenderTexture texture = layerTextures[i] as RenderTexture;
            if (OverlayShape.Cubemap == overlayShape && null == layerTextures[i] as Cubemap)
            {
                return false;
            }
            for (int f = 0; f < (int)overlayParam.faceCount; f++)
            {
                if (QualitySettings.activeColorSpace == ColorSpace.Gamma && texture != null && texture.format == RenderTextureFormat.ARGB32)
                {
                    Graphics.CopyTexture(layerTextures[i], f, 0, nativeTexture, f, 0);
                }
                else
                {
                    RenderTextureDescriptor rtDes = new RenderTextureDescriptor((int)overlayParam.width, (int)overlayParam.height, RenderTextureFormat.ARGB32, 0);
                    rtDes.msaaSamples = (int)overlayParam.sampleCount;
                    rtDes.useMipMap = true;
                    rtDes.autoGenerateMips = false;
                    rtDes.sRGB = true;
                    RenderTexture renderTexture = RenderTexture.GetTemporary(rtDes);
                    if (!renderTexture.IsCreated())
                    {
                        renderTexture.Create();
                    }
                    renderTexture.DiscardContents();
                    if (OverlayShape.Cubemap == overlayShape)
                    {
                        cubeM.SetInt("_d", f);  // select cubemap face in the blit shader
                        Graphics.Blit(layerTextures[i], renderTexture, cubeM);
                    }
                    else
                    {
                        textureM.mainTexture = texture;
                        textureM.SetPass(0);
                        textureM.SetInt("_premultiply", isPremultipliedAlpha ? 1 : 0);
                        Graphics.Blit(layerTextures[i], renderTexture, textureM);
                    }
                    Graphics.CopyTexture(renderTexture, 0, 0, nativeTexture, f, 0);
                    RenderTexture.ReleaseTemporary(renderTexture);
                }
            }
            copiedRT = true;
        }
        return copiedRT;
    }

    /// <summary>
    /// Replaces the layer's source texture (both eyes) and re-creates the native
    /// layer. Clones that reference this layer are destroyed and re-initialized
    /// against the new layer. Not supported for external-surface layers; no-op
    /// when called on a clone.
    /// </summary>
    /// <param name="texture">New source texture for both eyes.</param>
    /// <param name="dynamic">Whether the texture is updated every frame.</param>
    public void SetTexture(Texture texture, bool dynamic)
    {
        if (isExternalAndroidSurface)
        {
            PLog.w(TAG, "Not support setTexture !");
            return;
        }
        if (isClones)
        {
            return;
        }
        else
        {
            // Tear down clones of this layer; they are rebuilt after re-creation below.
            foreach (PXR_OverLay overlay in PXR_OverLay.Instances)
            {
                if (overlay.isClones && null != overlay.originalOverLay && overlay.originalOverLay.overlayIndex == overlayIndex)
                {
                    overlay.DestroyLayer();
                    overlay.isClonesToNew = true;
                }
            }
        }
        toCopyRT = false;
        PXR_Plugin.Render.UPxr_DestroyLayerByRender(overlayIndex);
        ClearTexture();
        for (int i = 0; i < layerTextures.Length; i++)
        {
            layerTextures[i] = texture;
        }
        isDynamic = dynamic;
        InitializeBuffer();
        if (!isClones)
        {
            // Re-attach clones that were flagged above to the newly created layer.
            foreach (PXR_OverLay overlay in PXR_OverLay.Instances)
            {
                if (overlay.isClones && overlay.isClonesToNew)
                {
                    overlay.originalOverLay = this;
                    overlay.InitializeBuffer();
                    overlay.isClonesToNew = false;
                }
            }
        }
    }

    /// <summary>Frees all unmanaged memory this overlay allocated (idempotent).</summary>
    private void FreePtr()
    {
        if (leftPtr != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(leftPtr);
            leftPtr = IntPtr.Zero;
        }
        if (rightPtr != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(rightPtr);
            rightPtr = IntPtr.Zero;
        }
        if (layerSubmitPtr != IntPtr.Zero)
        {
            Marshal.FreeHGlobal(layerSubmitPtr);
            layerSubmitPtr = IntPtr.Zero;
        }
    }

    /// <summary>Destroys the native layer and unregisters this instance.</summary>
    public void OnDestroy()
    {
        DestroyLayer();
        Instances.Remove(this);
    }

    /// <summary>
    /// Destroys this layer's native resources. For an original layer, all clones
    /// referencing it are destroyed first (they share its images). For a clone,
    /// the native layer is destroyed only while the original still exists.
    /// </summary>
    public void DestroyLayer()
    {
        if (isExternalAndroidSurface)
        {
            PXR_Plugin.Render.UPxr_DestroyLayer(overlayIndex);
            externalAndroidSurfaceObject = IntPtr.Zero;
            ClearTexture();
            return;
        }
        if (!isClones)
        {
            List<PXR_OverLay> toDestroyClones = new List<PXR_OverLay>();
            foreach (PXR_OverLay overlay in Instances)
            {
                if (overlay.isClones && null != overlay.originalOverLay && overlay.originalOverLay.overlayIndex == overlayIndex)
                {
                    toDestroyClones.Add(overlay);
                }
            }
            foreach (PXR_OverLay overLay in toDestroyClones)
            {
                PXR_Plugin.Render.UPxr_DestroyLayerByRender(overLay.overlayIndex);
                // NOTE(review): this calls the *current* instance's ClearTexture,
                // not overLay.ClearTexture() — possibly intentional since clones
                // own no textures, but worth confirming.
                ClearTexture();
            }
            PXR_Plugin.Render.UPxr_DestroyLayerByRender(overlayIndex);
        }
        else
        {
            if (null != originalOverLay && Instances.Contains(originalOverLay))
            {
                PXR_Plugin.Render.UPxr_DestroyLayerByRender(overlayIndex);
            }
        }
        ClearTexture();
    }

    /// <summary>
    /// Frees unmanaged pointers and destroys the wrapper Texture objects.
    /// Skips texture destruction for external-surface layers and clones
    /// (which own no native images).
    /// </summary>
    private void ClearTexture()
    {
        FreePtr();
        if (isExternalAndroidSurface || null == nativeTextures || isClones)
        {
            return;
        }
        for (int i = 0; i < eyeCount; i++)
        {
            if (null == nativeTextures[i].textures)
            {
                continue;
            }
            for (int j = 0; j < imageCounts; j++)
                DestroyImmediate(nativeTextures[i].textures[j]);
        }
        nativeTextures = null;
    }

    /// <summary>Sets the color scale and offset applied during composition.</summary>
    public void SetLayerColorScaleAndOffset(Vector4 scale, Vector4 offset)
    {
        colorScale = scale;
        colorOffset = offset;
    }

    /// <summary>Sets per-eye position/rotation offsets for EAC layers.</summary>
    public void SetEACOffsetPosAndRot(Vector3 leftPos, Vector3 rightPos, Vector4 leftRot, Vector4 rightRot)
    {
        offsetPosLeft = leftPos;
        offsetPosRight = rightPos;
        offsetRotLeft = leftRot;
        offsetRotRight = rightRot;
    }

    /// <summary>Sets the EAC overlap factor.</summary>
    public void SetEACFactor(float factor)
    {
        overlapFactor = factor;
    }

    /// <summary>Returns the effective color scale (default unless overridden).</summary>
    public Vector4 GetLayerColorScale()
    {
        if (!overrideColorScaleAndOffset)
        {
            return overlayLayerColorScaleDefault;
        }
        return colorScale;
    }

    /// <summary>Returns the effective color offset (default unless overridden).</summary>
    public Vector4 GetLayerColorOffset()
    {
        if (!overrideColorScaleAndOffset)
        {
            return overlayLayerColorOffsetDefault;
        }
        return colorOffset;
    }

    /// <summary>
    /// Converts the normalized source rect of the requested eye into pixel
    /// coordinates, clamping width/height so the rect stays inside the texture.
    /// </summary>
    /// <param name="left">true for the left-eye rect, false for the right-eye rect.</param>
    public PxrRecti getPxrRectiLeft(bool left)
    {
        if (left)
        {
            imageRectLeft.x = (int)(overlayParam.width * srcRectLeft.x);
            imageRectLeft.y = (int)(overlayParam.height * srcRectLeft.y);
            imageRectLeft.width = (int)(overlayParam.width * Mathf.Min(srcRectLeft.width, 1 - srcRectLeft.x));
            imageRectLeft.height = (int)(overlayParam.height * Mathf.Min(srcRectLeft.height, 1 - srcRectLeft.y));
            return imageRectLeft;
        }
        else
        {
            imageRectRight.x = (int)(overlayParam.width * srcRectRight.x);
            imageRectRight.y = (int)(overlayParam.height * srcRectRight.y);
            imageRectRight.width = (int)(overlayParam.width * Mathf.Min(srcRectRight.width, 1 - srcRectRight.x));
            imageRectRight.height = (int)(overlayParam.height * Mathf.Min(srcRectRight.height, 1 - srcRectRight.y));
            return imageRectRight;
        }
    }

    /// <summary>
    /// Maps the hdr field to native submit flags. Returns 0 for layers that are
    /// not backed by an external Android Surface.
    /// </summary>
    public UInt32 getHDRFlags()
    {
        UInt32 hdrFlags = 0;
        if (!isExternalAndroidSurface)
        {
            return hdrFlags;
        }
        switch (hdr)
        {
            case HDRFlags.HdrPQ:
                hdrFlags |= (UInt32)PxrLayerSubmitFlags.PxrLayerFlagColorSpaceHdrPQ;
                break;
            case HDRFlags.HdrHLG:
                hdrFlags |= (UInt32)PxrLayerSubmitFlags.PxrLayerFlagColorSpaceHdrHLG;
                break;
            default:
                break;
        }
        return hdrFlags;
    }

    /// <summary>HDR transfer function of an external-surface video layer.</summary>
    public enum HDRFlags
    {
        None,
        HdrPQ,
        HdrHLG,
    }

    /// <summary>Geometric shape of the compositor layer. Values match the native enum.</summary>
    public enum OverlayShape
    {
        Quad = 1,
        Cylinder = 2,
        Equirect = 3,
        Cubemap = 5,
        Eac = 6,
        Fisheye = 7,
        BlurredQuad = 9
    }

    /// <summary>Whether the layer composites above or below the eye buffer.</summary>
    public enum OverlayType
    {
        Overlay = 0,
        Underlay = 1
    }

    /// <summary>How the layer's image content is sourced.</summary>
    public enum TextureType
    {
        ExternalSurface,
        DynamicTexture,
        StaticTexture
    }

    /// <summary>Swapchain image layout. Values match the native enum.</summary>
    public enum LayerLayout
    {
        Stereo = 0,
        DoubleWide = 1,
        Array = 2,
        Mono = 3
    }

    /// <summary>Stereo packing of an external Android Surface.</summary>
    public enum Surface3DType
    {
        Single = 0,
        LeftRight,
        TopBottom
    }

    /// <summary>Source-rect mode.</summary>
    public enum TextureRect
    {
        MonoScopic,
        StereoScopic,
        Custom
    }

    /// <summary>Destination-rect mode.</summary>
    public enum DestinationRect
    {
        Default,
        Custom
    }

    /// <summary>EAC projection model. Values match the native enum.</summary>
    public enum EACModelType
    {
        Eac360 = 0,
        Eac360ViewPort = 1,
        Eac180 = 4,
        Eac180ViewPort = 5,
    }

    /// <summary>Native color formats (Vulkan / OpenGL constants).</summary>
    public enum ColorForamt
    {
        VK_FORMAT_R8G8B8A8_UNORM = 37,
        VK_FORMAT_R8G8B8A8_SRGB = 43,
        GL_SRGB8_ALPHA8 = 0x8c43,
        GL_RGBA8 = 0x8058
    }

    /// <summary>Tri-state result of a native API call.</summary>
    public enum APIExecutionStatus
    {
        None,
        True,
        False
    }

    /// <summary>Presentation mode of a blurred-quad layer.</summary>
    public enum BlurredQuadMode
    {
        SmallWindow,
        Immersion
    }
}
}

View File

@@ -1,13 +0,0 @@
fileFormatVersion: 2
guid: daeec670ce18c8d488f9f5b2e51c817b
timeCreated: 1590405833
licenseType: Free
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,214 +0,0 @@
/*******************************************************************************
Copyright © 2015-2022 PICO Technology Co., Ltd.All rights reserved.
NOTICE: All information contained herein is, and remains the property of
PICO Technology Co., Ltd. The intellectual and technical concepts
contained herein are proprietary to PICO Technology Co., Ltd. and may be
covered by patents, patents in process, and are protected by trade secret or
copyright law. Dissemination of this information or reproduction of this
material is strictly forbidden unless prior written permission is obtained from
PICO Technology Co., Ltd.
*******************************************************************************/
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace Unity.XR.PXR
{
/// <summary>
/// Fades the screen in by rendering an inward-facing sphere-like mesh (built from
/// six subdivided cube faces whose vertices are normalized) around the camera and
/// lerping its alpha from 1 to 0 over <see cref="gradientTime"/> each time the
/// component is enabled. Fix: renamed the private method `DestoryGradientMesh`
/// to `DestroyGradientMesh` (typo; private, so no external callers are affected).
/// </summary>
public class PXR_ScreenFade : MonoBehaviour
{
    [Tooltip("The gradient of time.")]
    public float gradientTime = 5.0f;

    [Tooltip("Basic color.")]
    public Color fadeColor = new Color(0.0f, 0.0f, 0.0f, 1.0f);

    [Tooltip("The default value is 4000.")]
    private int renderQueue = 4000;  // overlay queue so the fade draws over the scene

    private MeshRenderer gradientMeshRenderer;
    private MeshFilter gradientMeshFilter;
    private Material gradientMaterial = null;
    private bool isGradient = false;          // renderer enabled only while alpha > 0
    private float currentAlpha;               // externally requested alpha floor
    private float nowFadeAlpha;               // alpha driven by the fade coroutine
    private List<Vector3> verts;
    private List<int> indices;
    private int N = 5;                        // subdivisions per cube face

    void Awake()
    {
        CreateFadeMesh();
        SetCurrentAlpha(0);
    }

    void OnEnable()
    {
        // Restart the fade-in whenever the component is (re)enabled.
        StartCoroutine(ScreenFade());
    }

    void OnDestroy()
    {
        DestroyGradientMesh();
    }

    /// <summary>Creates the material, mesh components, and the fade geometry.</summary>
    private void CreateFadeMesh()
    {
        verts = new List<Vector3>();
        indices = new List<int>();
        gradientMaterial = new Material(Shader.Find("PXR_SDK/PXR_Fade"));
        gradientMeshFilter = gameObject.AddComponent<MeshFilter>();
        gradientMeshRenderer = gameObject.AddComponent<MeshRenderer>();
        CreateModel();
    }

    /// <summary>
    /// Sets an explicit alpha floor; the displayed alpha is the max of this and
    /// the coroutine-driven fade alpha.
    /// </summary>
    public void SetCurrentAlpha(float alpha)
    {
        currentAlpha = alpha;
        SetAlpha();
    }

    /// <summary>Coroutine: lerps the fade alpha from 1 to 0 over gradientTime seconds.</summary>
    IEnumerator ScreenFade()
    {
        float nowTime = 0.0f;
        while (nowTime < gradientTime)
        {
            nowTime += Time.deltaTime;
            nowFadeAlpha = Mathf.Lerp(1.0f, 0, Mathf.Clamp01(nowTime / gradientTime));
            SetAlpha();
            yield return null;
        }
    }

    /// <summary>Applies the current alpha to the material and toggles the renderer.</summary>
    private void SetAlpha()
    {
        Color color = fadeColor;
        color.a = Mathf.Max(currentAlpha, nowFadeAlpha);
        isGradient = color.a > 0;
        if (gradientMaterial != null)
        {
            gradientMaterial.color = color;
            gradientMaterial.renderQueue = renderQueue;
            gradientMeshRenderer.material = gradientMaterial;
            gradientMeshRenderer.enabled = isGradient;
        }
    }

    /// <summary>
    /// Builds six (N+1)x(N+1) grids of vertices (one per cube face), normalizes
    /// them onto a sphere of radius 0.7, triangulates each face, then flips
    /// normals and winding so the mesh faces inward (viewed from the center).
    /// </summary>
    void CreateModel()
    {
        for (float i = -N / 2f; i <= N / 2f; i++)
        {
            for (float j = -N / 2f; j <= N / 2f; j++)
            {
                verts.Add(new Vector3(i, j, -N / 2f));
            }
        }
        for (float i = -N / 2f; i <= N / 2f; i++)
        {
            for (float j = -N / 2f; j <= N / 2f; j++)
            {
                verts.Add(new Vector3(N / 2f, j, i));
            }
        }
        for (float i = -N / 2f; i <= N / 2f; i++)
        {
            for (float j = -N / 2f; j <= N / 2f; j++)
            {
                verts.Add(new Vector3(i, N / 2f, j));
            }
        }
        for (float i = -N / 2f; i <= N / 2f; i++)
        {
            for (float j = -N / 2f; j <= N / 2f; j++)
            {
                verts.Add(new Vector3(-N / 2f, j, i));
            }
        }
        for (float i = -N / 2f; i <= N / 2f; i++)
        {
            for (float j = -N / 2f; j <= N / 2f; j++)
            {
                verts.Add(new Vector3(i, j, N / 2f));
            }
        }
        for (float i = -N / 2f; i <= N / 2f; i++)
        {
            for (float j = -N / 2f; j <= N / 2f; j++)
            {
                verts.Add(new Vector3(i, -N / 2f, j));
            }
        }
        // Project the cube vertices onto a sphere around the camera.
        for (int i = 0; i < verts.Count; i++)
        {
            verts[i] = verts[i].normalized * 0.7f;
        }
        CreateMakePos(0);
        CreateMakePos(1);
        CreateMakePos(2);
        OtherMakePos(3);
        OtherMakePos(4);
        OtherMakePos(5);
        Mesh mesh = new Mesh();
        mesh.vertices = verts.ToArray();
        mesh.triangles = indices.ToArray();
        mesh.RecalculateNormals();
        mesh.RecalculateBounds();
        // Invert normals and winding so the sphere is visible from the inside.
        Vector3[] normals = mesh.normals;
        for (int i = 0; i < normals.Length; i++)
        {
            normals[i] = -normals[i];
        }
        mesh.normals = normals;
        int[] triangles = mesh.triangles;
        for (int i = 0; i < triangles.Length; i += 3)
        {
            int t = triangles[i];
            triangles[i] = triangles[i + 2];
            triangles[i + 2] = t;
        }
        mesh.triangles = triangles;
        gradientMeshFilter.mesh = mesh;
    }

    /// <summary>Triangulates face <paramref name="num"/> with one winding order.</summary>
    public void CreateMakePos(int num)
    {
        for (int i = 0; i < N; i++)
        {
            for (int j = 0; j < N; j++)
            {
                int index = j * (N + 1) + (N + 1) * (N + 1) * num + i;
                int up = (j + 1) * (N + 1) + (N + 1) * (N + 1) * num + i;
                indices.AddRange(new int[] { index, index + 1, up + 1 });
                indices.AddRange(new int[] { index, up + 1, up });
            }
        }
    }

    /// <summary>Triangulates face <paramref name="num"/> with the opposite winding order.</summary>
    public void OtherMakePos(int num)
    {
        for (int i = 0; i < N + 1; i++)
        {
            for (int j = 0; j < N + 1; j++)
            {
                if (i != N && j != N)
                {
                    int index = j * (N + 1) + (N + 1) * (N + 1) * num + i;
                    int up = (j + 1) * (N + 1) + (N + 1) * (N + 1) * num + i;
                    indices.AddRange(new int[] { index, up + 1, index + 1 });
                    indices.AddRange(new int[] { index, up, up + 1 });
                }
            }
        }
    }

    /// <summary>Destroys the renderer, material and mesh filter created by this component.</summary>
    private void DestroyGradientMesh()
    {
        if (gradientMeshRenderer != null)
            Destroy(gradientMeshRenderer);
        if (gradientMaterial != null)
            Destroy(gradientMaterial);
        if (gradientMeshFilter != null)
            Destroy(gradientMeshFilter);
    }
}
}

View File

@@ -1,11 +0,0 @@
fileFormatVersion: 2
guid: 9ee8dd1042c84fa4fa2411e8c4ebcc01
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

View File

@@ -1,421 +0,0 @@
/*******************************************************************************
Copyright © 2015-2022 PICO Technology Co., Ltd.All rights reserved.
NOTICEAll information contained herein is, and remains the property of
PICO Technology Co., Ltd. The intellectual and technical concepts
contained herein are proprietary to PICO Technology Co., Ltd. and may be
covered by patents, patents in process, and are protected by trade secret or
copyright law. Dissemination of this information or reproduction of this
material is strictly forbidden unless prior written permission is obtained from
PICO Technology Co., Ltd.
*******************************************************************************/
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace Unity.XR.PXR
{
/// <summary>
/// Delegate type for receiving a <see cref="PxrEventDataBuffer"/> by reference.
/// The buffer is passed by ref, so the callee reads (and may modify) the caller's copy.
/// </summary>
/// <param name="dataBuffer">The event data buffer, passed by reference.</param>
public delegate void EventDataBufferCallBack(ref PxrEventDataBuffer dataBuffer);
/// <summary>
/// Static facade over <see cref="PXR_Plugin.System"/> (plus one render call) exposing
/// system-level PICO device features: display, sensors, face tracking, tracking origin,
/// battery/volume services, brightness, and volume control. Every member simply
/// forwards to the native plugin layer.
/// </summary>
public class PXR_System
{
    /// <summary>Gets the SDK version.</summary>
    /// <returns>The SDK version string.</returns>
    public static string GetSDKVersion() => PXR_Plugin.System.UPxr_GetSDKVersion();

    /// <summary>Gets the predicted time a frame will be displayed after being rendered.</summary>
    /// <returns>The predicted display time (in milliseconds).</returns>
    public static double GetPredictedDisplayTime() => PXR_Plugin.System.UPxr_GetPredictedDisplayTime();

    /// <summary>
    /// Sets the extra latency mode. Note: call this function once only.
    /// </summary>
    /// <param name="mode">The latency mode:
    /// * `0`: ExtraLatencyModeOff (display the latest rendered frame)
    /// * `1`: ExtraLatencyModeOn (display one frame prior to the latest rendered frame)
    /// * `2`: ExtraLatencyModeDynamic (use the system default setup)
    /// </param>
    /// <returns>`true` on success, `false` on failure.</returns>
    public static bool SetExtraLatencyMode(int mode) => PXR_Plugin.System.UPxr_SetExtraLatencyMode(mode);

    /// <summary>Gets the sensor's status.</summary>
    /// <returns>The sensor's status: `0`: null; `1`: 3DoF; `3`: 6DoF.</returns>
    public static int GetSensorStatus() => PXR_Plugin.System.UPxr_GetSensorStatus();

    /// <summary>Sets the system display frequency rate.</summary>
    /// <param name="rate">The frequency rate: `72`, `90`, or `120`. Other values are invalid.</param>
    public static void SetSystemDisplayFrequency(float rate) => PXR_Plugin.System.UPxr_SetSystemDisplayFrequency(rate);

    /// <summary>Gets the system display frequency rate.</summary>
    /// <returns>The system display frequency rate.</returns>
    public static float GetSystemDisplayFrequency() => PXR_Plugin.System.UPxr_GetSystemDisplayFrequency();

    /// <summary>Gets the predicted status of the sensor.</summary>
    /// <param name="sensorState">Sensor's coordinate:
    /// * `pose`: in-app coordinate
    /// * `globalPose`: global coordinate
    /// </param>
    /// <param name="sensorFrameIndex">Sensor frame index.</param>
    /// <returns>The predicted status of the sensor.</returns>
    public static int GetPredictedMainSensorStateNew(ref PxrSensorState2 sensorState, ref int sensorFrameIndex) =>
        PXR_Plugin.System.UPxr_GetPredictedMainSensorStateNew(ref sensorState, ref sensorFrameIndex);

    /// <summary>Enables/disables content protection.</summary>
    /// <param name="data">`0`: disable; `1`: enable.</param>
    /// <returns>`0`: success; `1`: failure.</returns>
    public static int ContentProtect(int data) => PXR_Plugin.System.UPxr_ContentProtect(data);

    /// <summary>
    /// Enables/disables face tracking.
    /// @note Only supported by PICO 4 Pro and PICO 4 Enterprise.
    /// </summary>
    /// <param name="enable">`true`: enable; `false`: disable.</param>
    public static void EnableFaceTracking(bool enable) => PXR_Plugin.System.UPxr_EnableFaceTracking(enable);

    /// <summary>
    /// Enables/disables lipsync.
    /// @note Only supported by PICO 4 Pro and PICO 4 Enterprise.
    /// </summary>
    /// <param name="enable">`true`: enable; `false`: disable.</param>
    public static void EnableLipSync(bool enable) => PXR_Plugin.System.UPxr_EnableLipSync(enable);

    /// <summary>
    /// Gets face tracking data.
    /// @note Only supported by PICO 4 Pro and PICO 4 Enterprise.
    /// </summary>
    /// <param name="ts">(Optional) A reserved parameter, pass `0`.</param>
    /// <param name="flags">The face tracking mode to retrieve data for. Enumerations:
    /// * `PXR_GET_FACE_DATA_DEFAULT` (invalid, kept only for compatibility with older SDK versions)
    /// * `PXR_GET_FACE_DATA`: face only
    /// * `PXR_GET_LIP_DATA`: lipsync only
    /// * `PXR_GET_FACELIP_DATA`: hybrid (both face and lipsync)
    /// </param>
    /// <param name="faceTrackingInfo">Returns the `PxrFaceTrackingInfo` struct containing:
    /// * `timestamp`: Int64, reserved field
    /// * `blendShapeWeight`: float[], pass `0`.
    /// * `videoInputValid`: float[], input validity of the upper and lower parts of the face.
    /// * `laughingProb`: float[], the coefficient of laughter.
    /// * `emotionProb`: float[], the emotion factor.
    /// * `reserved`: float[], reserved field.
    /// </param>
    public static void GetFaceTrackingData(Int64 ts, GetDataType flags, ref PxrFaceTrackingInfo faceTrackingInfo) =>
        PXR_Plugin.System.UPxr_GetFaceTrackingData(ts, (int)flags, ref faceTrackingInfo);

    /// <summary>Sets a GPU or CPU level for the device.</summary>
    /// <param name="which">Choose to set a GPU or CPU level: `CPU` or `GPU`.</param>
    /// <param name="level">One of:
    /// * `POWER_SAVINGS`: power-saving level
    /// * `SUSTAINED_LOW`: low level
    /// * `SUSTAINED_HIGH`: high level
    /// * `BOOST`: top-high level, use with care
    /// </param>
    /// <returns>`0`: success; `1`: failure.</returns>
    public static int SetPerformanceLevels(PxrPerfSettings which, PxrSettingsLevel level) =>
        PXR_Plugin.System.UPxr_SetPerformanceLevels(which, level);

    /// <summary>Gets the device's GPU or CPU level.</summary>
    /// <param name="which">Choose to get the GPU or CPU level: `CPU` or `GPU`.</param>
    /// <returns>One of `POWER_SAVINGS`, `SUSTAINED_LOW`, `SUSTAINED_HIGH`, `BOOST`.</returns>
    public static PxrSettingsLevel GetPerformanceLevels(PxrPerfSettings which) =>
        PXR_Plugin.System.UPxr_GetPerformanceLevels(which);

    /// <summary>Sets FOV in four directions (left, right, up, and down) for specified eye(s).</summary>
    /// <param name="eye">The eye to set FOV for: `LeftEye`, `RightEye`, or `BothEye`.</param>
    /// <param name="fovLeft">The horizontal FOV (in degrees) for the left part of the eye, e.g., `47.5`.</param>
    /// <param name="fovRight">The horizontal FOV (in degrees) for the right part of the eye.</param>
    /// <param name="fovUp">The vertical FOV (in degrees) for the upper part of the eye.</param>
    /// <param name="fovDown">The vertical FOV (in degrees) for the lower part of the eye.</param>
    /// <returns>`0`: success; `1`: failure.</returns>
    public static int SetEyeFOV(EyeType eye, float fovLeft, float fovRight, float fovUp, float fovDown) =>
        PXR_Plugin.Render.UPxr_SetEyeFOV(eye, fovLeft, fovRight, fovUp, fovDown);

    /// <summary>
    /// Switches the face tracking mode.
    /// @note Only supported by PICO 4 Pro and PICO 4 Enterprise.
    /// </summary>
    /// <param name="value">
    /// `STOP_FT`: stop the "Face Only" mode.
    /// `STOP_LIPSYNC`: stop the "Lipsync Only" mode.
    /// `START_FT`: start the "Face Only" mode.
    /// `START_LIPSYNC`: start the "Lipsync Only" mode.
    /// </param>
    /// <returns>`0`: success; `1`: failure.</returns>
    public static int SetFaceTrackingStatus(PxrFtLipsyncValue value) =>
        PXR_Plugin.System.UPxr_SetFaceTrackingStatus(value);

    /// <summary>
    /// Sets a tracking origin mode for the app. When the user moves in the virtual scene,
    /// the system tracks and calculates the user's positional changes based on this origin.
    /// </summary>
    /// <param name="originMode">One of:
    /// * `TrackingOriginModeFlags.Device`: the device's initial position is the origin; height from the floor is not calculated.
    /// * `TrackingOriginModeFlags.Floor`: the origin is based on the device's original position and its height from the floor.
    /// </param>
    public static void SetTrackingOrigin(PxrTrackingOrigin originMode) =>
        PXR_Plugin.System.UPxr_SetTrackingOrigin(originMode);

    /// <summary>Gets the tracking origin mode of the app.</summary>
    /// <param name="originMode">Returns the app's tracking origin mode:
    /// * `TrackingOriginModeFlags.Device`: Device mode
    /// * `TrackingOriginModeFlags.Floor`: Floor mode
    /// For the description of each mode, refer to <see cref="SetTrackingOrigin"/>.
    /// </param>
    public static void GetTrackingOrigin(out PxrTrackingOrigin originMode)
    {
        // Seed the out parameter so it is definitely assigned before the plugin
        // call fills it in by reference.
        originMode = PxrTrackingOrigin.Eye;
        PXR_Plugin.System.UPxr_GetTrackingOrigin(ref originMode);
    }

    /// <summary>Turns on the power (battery) service for a specified object.</summary>
    /// <param name="objName">The name of the object to turn on the power service for.</param>
    /// <returns>`true`: success; `false`: failure.</returns>
    public static bool StartBatteryReceiver(string objName) =>
        PXR_Plugin.System.UPxr_StartBatteryReceiver(objName);

    /// <summary>Turns off the power (battery) service.</summary>
    /// <returns>`true`: success; `false`: failure.</returns>
    public static bool StopBatteryReceiver() => PXR_Plugin.System.UPxr_StopBatteryReceiver();

    /// <summary>Sets the brightness for the current HMD.</summary>
    /// <param name="brightness">Target brightness. Value range: [0,255].</param>
    /// <returns>`true`: success; `false`: failure.</returns>
    public static bool SetCommonBrightness(int brightness) => PXR_Plugin.System.UPxr_SetBrightness(brightness);

    /// <summary>Gets the brightness of the current HMD.</summary>
    /// <returns>An int value indicating the brightness. Value range: [0,255].</returns>
    public static int GetCommonBrightness() => PXR_Plugin.System.UPxr_GetCurrentBrightness();

    /// <summary>Gets the brightness level of the current screen.</summary>
    /// <returns>An int array: first element is the total number of supported brightness levels,
    /// second is the current level, and the remaining elements are the level interval values.</returns>
    public static int[] GetScreenBrightnessLevel() => PXR_Plugin.System.UPxr_GetScreenBrightnessLevel();

    /// <summary>Sets a brightness level for the current screen.</summary>
    /// <param name="brightness">Brightness mode:
    /// * `0`: system default brightness setting.
    /// * `1`: custom brightness setting; `level` then applies.
    /// </param>
    /// <param name="level">Brightness level. Value range: [1,255].</param>
    public static void SetScreenBrightnessLevel(int brightness, int level) =>
        PXR_Plugin.System.UPxr_SetScreenBrightnessLevel(brightness, level);

    /// <summary>Turns on the volume service for a specified object.</summary>
    /// <param name="objName">The name of the object to turn on the volume service for.</param>
    /// <returns>`true`: success; `false`: failure.</returns>
    public static bool StartAudioReceiver(string objName) => PXR_Plugin.System.UPxr_StartAudioReceiver(objName);

    /// <summary>Turns off the volume service.</summary>
    /// <returns>`true`: success; `false`: failure.</returns>
    public static bool StopAudioReceiver() => PXR_Plugin.System.UPxr_StopAudioReceiver();

    /// <summary>Gets the maximum volume.</summary>
    /// <returns>An int value indicating the maximum volume.</returns>
    public static int GetMaxVolumeNumber() => PXR_Plugin.System.UPxr_GetMaxVolumeNumber();

    /// <summary>Gets the current volume.</summary>
    /// <returns>An int value indicating the current volume. Value range: [0,15].</returns>
    public static int GetCurrentVolumeNumber() => PXR_Plugin.System.UPxr_GetCurrentVolumeNumber();

    /// <summary>Increases the volume.</summary>
    /// <returns>`true`: success; `false`: failure.</returns>
    public static bool VolumeUp() => PXR_Plugin.System.UPxr_VolumeUp();

    /// <summary>Decreases the volume.</summary>
    /// <returns>`true`: success; `false`: failure.</returns>
    public static bool VolumeDown() => PXR_Plugin.System.UPxr_VolumeDown();

    /// <summary>Sets a volume.</summary>
    /// <param name="volume">The target volume. Value range: [0,15].</param>
    /// <returns>`true`: success; `false`: failure.</returns>
    public static bool SetVolumeNum(int volume) => PXR_Plugin.System.UPxr_SetVolumeNum(volume);

    /// <summary>Gets the product name of the current device.</summary>
    /// <returns>The product name reported by the plugin layer.</returns>
    public static string GetProductName() => PXR_Plugin.System.ProductName;
}
}

View File

@@ -1,11 +0,0 @@
fileFormatVersion: 2
guid: 00e4f0103b06c774b9ba07b7c06221b6
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: