Commit 29de0c28 authored by BlackAngle233's avatar BlackAngle233
Browse files

10.19 learned

parent 912976bb
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.SpatialAwareness
{
public partial class BaseSpatialAwarenessObject : IMixedRealitySpatialAwarenessObject
{
    /// <inheritdoc />
    public int Id { get; set; }

    /// <inheritdoc />
    public GameObject GameObject { get; set; }

    /// <inheritdoc />
    public MeshRenderer Renderer { get; set; }

    /// <summary>
    /// The MeshFilter associated with this spatial object's renderer.
    /// </summary>
    public MeshFilter Filter { get; set; }

    /// <inheritdoc />
    public virtual void CleanObject()
    {
        // No-op in the base class; derived types may override to release resources.
        // todo: consider if this should be virtual, and what params it should contain
    }

    /// <summary>
    /// Constructor.
    /// </summary>
    protected BaseSpatialAwarenessObject()
    {
    }
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.Toolkit.Utilities;
using System;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.SpatialAwareness
{
/// <summary>
/// Abstract class that provides base profile information for Spatial Awareness Observers and their configuration
/// </summary>
[Serializable]
public abstract class BaseSpatialAwarenessObserverProfile : BaseMixedRealityProfile
{
    [SerializeField]
    [Tooltip("How should the observer behave at startup?")]
    private AutoStartBehavior startupBehavior = AutoStartBehavior.AutoStart;

    /// <summary>
    /// Indicates if the observer is to start immediately or wait for manual startup.
    /// </summary>
    public AutoStartBehavior StartupBehavior
    {
        get { return startupBehavior; }
    }

    [SerializeField]
    [Tooltip("Should the spatial observer remain in a fixed location?")]
    private bool isStationaryObserver = false;

    /// <summary>
    /// Indicates whether or not the spatial observer is to remain in a fixed location.
    /// </summary>
    public bool IsStationaryObserver
    {
        get { return isStationaryObserver; }
    }

    [SerializeField]
    [Tooltip("The dimensions of the spatial observer volume, in meters.")]
    private Vector3 observationExtents = Vector3.one * 3;

    /// <summary>
    /// The size of the volume, in meters per axis, from which individual observations will be made.
    /// </summary>
    public Vector3 ObservationExtents
    {
        get { return observationExtents; }
    }

    [SerializeField]
    [Tooltip("The shape of observation volume")]
    private VolumeType observerVolumeType = VolumeType.AxisAlignedCube;

    /// <summary>
    /// The shape (ex: axis aligned cube) of the observation volume.
    /// </summary>
    public VolumeType ObserverVolumeType
    {
        get { return observerVolumeType; }
    }

    [SerializeField]
    [Tooltip("How often, in seconds, should the spatial observer update?")]
    private float updateInterval = 3.5f;

    /// <summary>
    /// The frequency, in seconds, at which the spatial observer updates.
    /// </summary>
    public float UpdateInterval
    {
        get { return updateInterval; }
    }
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.Toolkit.Physics;
using Microsoft.MixedReality.Toolkit.Utilities;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.SpatialAwareness
{
/// <summary>
/// Configuration profile settings for spatial awareness mesh observers.
/// </summary>
[CreateAssetMenu(menuName = "Mixed Reality Toolkit/Profiles/Mixed Reality Spatial Awareness Mesh Observer Profile", fileName = "MixedRealitySpatialAwarenessMeshObserverProfile", order = (int)CreateProfileMenuItemIndices.SpatialAwarenessMeshObserver)]
[MixedRealityServiceProfile(typeof(IMixedRealitySpatialAwarenessMeshObserver))]
[HelpURL("https://microsoft.github.io/MixedRealityToolkit-Unity/Documentation/SpatialAwareness/ConfiguringSpatialAwarenessMeshObserver.html")]
public class MixedRealitySpatialAwarenessMeshObserverProfile : BaseSpatialAwarenessObserverProfile
{
    #region IMixedRealitySpatialAwarenessMeshObserver settings

    [PhysicsLayer]
    [SerializeField]
    [Tooltip("Physics layer on which to set observed meshes.")]
    private int meshPhysicsLayer = BaseSpatialObserver.DefaultSpatialAwarenessLayer;

    /// <summary>
    /// The Unity Physics Layer on which to set observed meshes.
    /// </summary>
    public int MeshPhysicsLayer
    {
        get { return meshPhysicsLayer; }
    }

    [SerializeField]
    [Tooltip("Level of detail used when creating the mesh")]
    private SpatialAwarenessMeshLevelOfDetail levelOfDetail = SpatialAwarenessMeshLevelOfDetail.Coarse;

    /// <summary>
    /// The level of detail used when creating the mesh.
    /// </summary>
    public SpatialAwarenessMeshLevelOfDetail LevelOfDetail
    {
        get { return levelOfDetail; }
    }

    [SerializeField]
    [Tooltip("Level of detail, in triangles per cubic meter.\nIgnored unless LevelOfDetail is set to Custom.")]
    private int trianglesPerCubicMeter = 0;

    /// <summary>
    /// The level of detail, in triangles per cubic meter, for the returned spatial mesh.
    /// </summary>
    /// <remarks>This value is ignored, unless <see cref="LevelOfDetail"/> is set to Custom.</remarks>
    public int TrianglesPerCubicMeter
    {
        get { return trianglesPerCubicMeter; }
    }

    [SerializeField]
    [Tooltip("Should normals be recalculated when a mesh is added or updated?")]
    private bool recalculateNormals = true;

    /// <summary>
    /// Indicates if the spatial awareness system to generate normal for the returned meshes
    /// as some platforms may not support returning normal along with the spatial mesh.
    /// </summary>
    public bool RecalculateNormals
    {
        get { return recalculateNormals; }
    }

    [SerializeField]
    [Tooltip("How should spatial meshes be displayed?")]
    private SpatialAwarenessMeshDisplayOptions displayOption = SpatialAwarenessMeshDisplayOptions.Visible;

    /// <summary>
    /// Indicates how the mesh subsystem is to display surface meshes within the application.
    /// </summary>
    public SpatialAwarenessMeshDisplayOptions DisplayOption
    {
        get { return displayOption; }
    }

    [SerializeField]
    [Tooltip("Material to use when displaying observed meshes")]
    private Material visibleMaterial = null;

    /// <summary>
    /// The material to be used when displaying observed meshes.
    /// </summary>
    public Material VisibleMaterial
    {
        get { return visibleMaterial; }
    }

    [SerializeField]
    [Tooltip("Material to use when observed meshes should occlude other objects")]
    private Material occlusionMaterial = null;

    /// <summary>
    /// The material to be used when observed meshes should occlude other objects.
    /// </summary>
    public Material OcclusionMaterial
    {
        get { return occlusionMaterial; }
    }

    [SerializeField]
    [Tooltip("Optional physics material to apply to spatial mesh")]
    private PhysicMaterial physicsMaterial = null;

    /// <summary>
    /// The optional physics material to apply to the spatial mesh's collider.
    /// </summary>
    public PhysicMaterial PhysicsMaterial
    {
        get { return physicsMaterial; }
    }

    #endregion IMixedRealitySpatialAwarenessMeshObserver settings
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.Toolkit.Utilities;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.SpatialAwareness
{
/// <summary>
/// Configuration profile settings for the spatial awareness system, listing the observers to register.
/// </summary>
[CreateAssetMenu(menuName = "Mixed Reality Toolkit/Profiles/Mixed Reality Spatial Awareness System Profile", fileName = "MixedRealitySpatialAwarenessSystemProfile", order = (int)CreateProfileMenuItemIndices.SpatialAwareness)]
[MixedRealityServiceProfile(typeof(IMixedRealitySpatialAwarenessSystem))]
[HelpURL("https://microsoft.github.io/MixedRealityToolkit-Unity/Documentation/SpatialAwareness/SpatialAwarenessGettingStarted.html")]
public class MixedRealitySpatialAwarenessSystemProfile : BaseMixedRealityProfile
{
    [SerializeField]
    private MixedRealitySpatialObserverConfiguration[] observerConfigurations = System.Array.Empty<MixedRealitySpatialObserverConfiguration>();

    /// <summary>
    /// The configured spatial observers (settable internally, e.g. by the profile inspector).
    /// </summary>
    public MixedRealitySpatialObserverConfiguration[] ObserverConfigurations
    {
        get => observerConfigurations;
        internal set => observerConfigurations = value;
    }
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit.SpatialAwareness
{
/// <summary>
/// Enumeration defining levels of detail for the spatial awareness mesh subsystem.
/// </summary>
public enum SpatialAwarenessMeshLevelOfDetail
{
    /// <summary>
    /// Application-specified detail: uses a custom TrianglesPerCubicMeter value.
    /// </summary>
    Custom = -1,

    /// <summary>
    /// Low resolution; well suited for identifying large environmental features,
    /// such as floors and walls.
    /// </summary>
    Coarse = 0,

    /// <summary>
    /// Mid resolution; often useful for experiences that continually scan the
    /// environment (ex: a virtual pet).
    /// </summary>
    Medium = 1,

    /// <summary>
    /// High resolution; well suited for use as an occlusion mesh.
    /// </summary>
    Fine = 2
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.SpatialAwareness
{
/// <summary>
/// Object encapsulating the components of a spatial awareness mesh object.
/// </summary>
public class SpatialAwarenessMeshObject : BaseSpatialAwarenessObject
{
    /// <summary>
    /// When a mesh is created we will need to create a game object with a minimum
    /// set of components to contain the mesh. These are the required component types.
    /// </summary>
    private static readonly Type[] requiredMeshComponents =
    {
        typeof(MeshFilter),
        typeof(MeshRenderer),
        typeof(MeshCollider)
    };

    /// <summary>
    /// The collider for the mesh object.
    /// </summary>
    public MeshCollider Collider { get; set; }

    /// <summary>
    /// Constructor. Private so that instances are created via <see cref="Create"/>.
    /// </summary>
    private SpatialAwarenessMeshObject() : base() { }

    /// <summary>
    /// Creates a <see cref="SpatialAwarenessMeshObject"/>.
    /// </summary>
    /// <param name="mesh">Mesh data assigned to both the filter and the collider.</param>
    /// <param name="layer">Unity physics layer for the new game object.</param>
    /// <param name="name">Name for the new game object.</param>
    /// <param name="meshId">Identifier for this mesh object.</param>
    /// <param name="meshParent">Optional parent for the new game object; null leaves it at the scene root.</param>
    /// <returns>
    /// SpatialMeshObject containing the fields that describe the mesh.
    /// </returns>
    public static SpatialAwarenessMeshObject Create(
        Mesh mesh,
        int layer,
        string name,
        int meshId,
        GameObject meshParent = null)
    {
        SpatialAwarenessMeshObject newMesh = new SpatialAwarenessMeshObject
        {
            Id = meshId,
            GameObject = new GameObject(name, requiredMeshComponents)
            {
                layer = layer
            }
        };

        // Preserve local transform when attaching to parent.
        newMesh.GameObject.transform.SetParent(meshParent != null ? meshParent.transform : null, false);

        newMesh.Filter = newMesh.GameObject.GetComponent<MeshFilter>();
        newMesh.Filter.sharedMesh = mesh;

        newMesh.Renderer = newMesh.GameObject.GetComponent<MeshRenderer>();

        // Reset the surface mesh collider to fit the updated mesh.
        // Unity tribal knowledge indicates that to change the mesh assigned to a
        // mesh collider, the mesh must first be set to null. Presumably there
        // is a side effect in the setter when setting the shared mesh to null.
        newMesh.Collider = newMesh.GameObject.GetComponent<MeshCollider>();
        newMesh.Collider.sharedMesh = null;
        newMesh.Collider.sharedMesh = newMesh.Filter.sharedMesh;

        return newMesh;
    }

    /// <summary>
    /// Clean up the resources associated with the surface.
    /// </summary>
    /// <param name="meshObject">The <see cref="SpatialAwarenessMeshObject"/> whose resources will be cleaned up.</param>
    /// <param name="destroyGameObject">If true, destroy the whole game object (and skip individual mesh destruction).</param>
    /// <param name="destroyMeshes">If true (and the game object is kept), destroy the filter and collider meshes.</param>
    public static void Cleanup(SpatialAwarenessMeshObject meshObject, bool destroyGameObject = true, bool destroyMeshes = true)
    {
        // Guard against a null argument as well as an already-destroyed game object.
        if (meshObject == null || meshObject.GameObject == null)
        {
            return;
        }

        if (destroyGameObject)
        {
            UnityEngine.Object.Destroy(meshObject.GameObject);
            meshObject.GameObject = null;
            return;
        }

        if (destroyMeshes)
        {
            Mesh filterMesh = meshObject.Filter.sharedMesh;
            Mesh colliderMesh = meshObject.Collider.sharedMesh;

            if (filterMesh != null)
            {
                UnityEngine.Object.Destroy(filterMesh);
                meshObject.Filter.sharedMesh = null;
            }

            // Only destroy the collider mesh separately when it is a distinct asset.
            if ((colliderMesh != null) && (colliderMesh != filterMesh))
            {
                UnityEngine.Object.Destroy(colliderMesh);
                meshObject.Collider.sharedMesh = null;
            }
        }
    }
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.SpatialAwareness
{
public partial class SpatialAwarenessPlanarObject : BaseSpatialAwarenessObject
{
    /// <summary>
    /// The BoxCollider associated with this plane's GameObject.
    /// </summary>
    public BoxCollider Collider { get; set; }

    /// <summary>
    /// Constructor.
    /// </summary>
    public SpatialAwarenessPlanarObject() : base() { }

    /// <summary>
    /// Creates a <see cref="SpatialAwarenessPlanarObject"/>.
    /// </summary>
    /// <param name="size">Local scale applied to the plane's game object.</param>
    /// <param name="layer">Unity physics layer for the plane's game object.</param>
    /// <param name="name">Name for the plane's game object.</param>
    /// <param name="planeId">Identifier for this plane object.</param>
    /// <returns>
    /// SpatialAwarenessPlanarObject containing the fields that describe the plane.
    /// </returns>
    public static SpatialAwarenessPlanarObject CreateSpatialObject(Vector3 size, int layer, string name, int planeId)
    {
        SpatialAwarenessPlanarObject newPlane = new SpatialAwarenessPlanarObject();
        newPlane.Id = planeId;

        // A cube primitive provides the MeshFilter, MeshRenderer and BoxCollider
        // components cached below.
        newPlane.GameObject = GameObject.CreatePrimitive(PrimitiveType.Cube);
        // Fix: the name parameter was previously ignored, leaving the primitive's
        // default "Cube" name.
        newPlane.GameObject.name = name;
        newPlane.GameObject.layer = layer;
        newPlane.GameObject.transform.localScale = size;

        newPlane.Filter = newPlane.GameObject.GetComponent<MeshFilter>();
        newPlane.Renderer = newPlane.GameObject.GetComponent<MeshRenderer>();
        newPlane.Collider = newPlane.GameObject.GetComponent<BoxCollider>();
        return newPlane;
    }
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit.SpatialAwareness
{
/// <summary>
/// Enumeration defining the types of planar surfaces that are supported by the spatial awareness surface finding subsystem.
/// </summary>
[System.Flags]
public enum SpatialAwarenessSurfaceTypes
{
    /// <summary>
    /// An unknown / unsupported type of surface.
    /// </summary>
    Unknown = 1,

    /// <summary>
    /// The environment's floor.
    /// </summary>
    Floor = 2,

    /// <summary>
    /// The environment's ceiling.
    /// </summary>
    Ceiling = 4,

    /// <summary>
    /// A wall within the user's space.
    /// </summary>
    Wall = 8,

    /// <summary>
    /// A raised, horizontal surface such as a shelf.
    /// </summary>
    /// <remarks>
    /// Platforms, like floors, that can be used for placing objects
    /// requiring a horizontal surface.
    /// </remarks>
    Platform = 16
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit.SpatialAwareness
{
/// <summary>
/// Options for how the spatial mesh is to be displayed by the spatial awareness system.
/// </summary>
public enum SpatialAwarenessMeshDisplayOptions
{
    /// <summary>Do not display the spatial mesh.</summary>
    None = 0,

    /// <summary>Display the spatial mesh using the configured material.</summary>
    Visible = 1,

    /// <summary>Display the spatial mesh using the configured occlusion material.</summary>
    Occlusion = 2
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.Utilities
{
/// <summary>
/// A copy of the <see href="https://docs.unity3d.com/ScriptReference/AnimatorControllerParameter.html">AnimatorControllerParameter</see> because that class is not Serializable and cannot be modified in the editor.
/// </summary>
[Serializable]
public struct AnimatorParameter
{
    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="name">Name of the animation parameter to modify.</param>
    /// <param name="parameterType">Type of the animation parameter to modify.</param>
    /// <param name="defaultInt">If the animation parameter type is an int, value to set. Ignored otherwise.</param>
    /// <param name="defaultFloat">If the animation parameter type is a float, value to set. Ignored otherwise.</param>
    /// <param name="defaultBool">If the animation parameter type is a bool, value to set. Ignored otherwise.</param>
    public AnimatorParameter(string name, AnimatorControllerParameterType parameterType, int defaultInt = 0, float defaultFloat = 0f, bool defaultBool = false)
    {
        this.parameterType = parameterType;
        this.defaultInt = defaultInt;
        this.defaultFloat = defaultFloat;
        this.defaultBool = defaultBool;
        this.name = name;
        // Hash is computed lazily on first access of NameHash.
        nameStringHash = null;
    }

    [SerializeField]
    [Tooltip("Type of the animation parameter to modify.")]
    private AnimatorControllerParameterType parameterType;

    /// <summary>
    /// Type of the animation parameter to modify.
    /// </summary>
    public AnimatorControllerParameterType ParameterType => parameterType;

    [SerializeField]
    [Tooltip("If the animation parameter type is an int, value to set. Ignored otherwise.")]
    private int defaultInt;

    /// <summary>
    /// If the animation parameter type is an int, value to set. Ignored otherwise.
    /// </summary>
    public int DefaultInt => defaultInt;

    [SerializeField]
    [Tooltip("If the animation parameter type is a float, value to set. Ignored otherwise.")]
    private float defaultFloat;

    /// <summary>
    /// If the animation parameter type is a float, value to set. Ignored otherwise.
    /// </summary>
    public float DefaultFloat => defaultFloat;

    [SerializeField]
    [Tooltip("If the animation parameter type is a bool, value to set. Ignored otherwise.")]
    private bool defaultBool;

    /// <summary>
    /// If the animation parameter type is a bool, value to set. Ignored otherwise.
    /// </summary>
    public bool DefaultBool => defaultBool;

    [SerializeField]
    [Tooltip("Name of the animation parameter to modify.")]
    private string name;

    /// <summary>
    /// Name of the animation parameter to modify.
    /// </summary>
    public string Name => name;

    // Cached hash of Name; null until NameHash is first read.
    private int? nameStringHash;

    /// <summary>
    /// Animator Name String to Hash.
    /// </summary>
    /// <remarks>
    /// Computed lazily via Animator.StringToHash and cached.
    /// NOTE(review): this is a struct, so the cache lives in each copy; the getter
    /// asserts (in Editor/Development builds) and then throws if Name is null or empty.
    /// </remarks>
    public int NameHash
    {
        get
        {
            if (!nameStringHash.HasValue && !string.IsNullOrEmpty(Name))
            {
                nameStringHash = Animator.StringToHash(Name);
            }
            Debug.Assert(nameStringHash != null);
            return nameStringHash.Value;
        }
    }
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using UnityEngine;
using System;
using System.Collections.Generic;
using System.IO;
#if UNITY_EDITOR
using UnityEditor;
using Microsoft.MixedReality.Toolkit.Utilities.Editor;
#endif
namespace Microsoft.MixedReality.Toolkit.Utilities
{
/// <summary>
/// Shape of an articulated hand defined by joint poses.
/// </summary>
public class ArticulatedHandPose
{
    // One entry per value of the TrackedHandJoint enum.
    private static readonly int jointCount = Enum.GetNames(typeof(TrackedHandJoint)).Length;

    /// <summary>
    /// Joint poses are stored as right-hand poses in camera space.
    /// Output poses are computed in world space, and mirroring on the x axis for the left hand.
    /// </summary>
    private MixedRealityPose[] localJointPoses;

    /// <summary>
    /// Creates a pose with all joints initialized to the zero/identity pose.
    /// </summary>
    public ArticulatedHandPose()
    {
        localJointPoses = new MixedRealityPose[jointCount];
        SetZero();
    }

    /// <summary>
    /// Creates a pose from an existing joint-pose array (contents are copied).
    /// </summary>
    public ArticulatedHandPose(MixedRealityPose[] _localJointPoses)
    {
        localJointPoses = new MixedRealityPose[jointCount];
        Array.Copy(_localJointPoses, localJointPoses, jointCount);
    }

    /// <summary>
    /// Returns the stored camera-space pose for a joint, mirrored on the
    /// X axis when the left hand is requested.
    /// </summary>
    public MixedRealityPose GetLocalJointPose(TrackedHandJoint joint, Handedness handedness)
    {
        MixedRealityPose pose = localJointPoses[(int)joint];

        // Pose offset are for right hand, mirror on X axis if left hand is needed
        if (handedness == Handedness.Left)
        {
            pose = new MixedRealityPose(
                new Vector3(-pose.Position.x, pose.Position.y, pose.Position.z),
                new Quaternion(pose.Rotation.x, -pose.Rotation.y, -pose.Rotation.z, pose.Rotation.w));
        }
        return pose;
    }

    /// <summary>
    /// Compute world space poses from camera-space joint data.
    /// </summary>
    /// <param name="handedness">Handedness of the resulting pose</param>
    /// <param name="rotation">Rotational offset of the resulting pose</param>
    /// <param name="position">Translational offset of the resulting pose</param>
    /// <param name="jointsOut">Output array of joint poses</param>
    public void ComputeJointPoses(
        Handedness handedness,
        Quaternion rotation,
        Vector3 position,
        MixedRealityPose[] jointsOut)
    {
        for (int i = 0; i < jointCount; i++)
        {
            // Initialize from local offsets
            MixedRealityPose pose = GetLocalJointPose((TrackedHandJoint)i, handedness);
            Vector3 p = pose.Position;
            Quaternion r = pose.Rotation;

            // Apply external transform
            p = position + rotation * p;
            r = rotation * r;

            jointsOut[i] = new MixedRealityPose(p, r);
        }
    }

    /// <summary>
    /// Take world space joint poses from any hand and convert into right-hand, camera-space poses.
    /// </summary>
    /// <param name="joints">Input joint poses</param>
    /// <param name="handedness">Handedness of the input data</param>
    /// <param name="rotation">Rotational offset of the input data</param>
    /// <param name="position">Translational offset of the input data</param>
    public void ParseFromJointPoses(
        MixedRealityPose[] joints,
        Handedness handedness,
        Quaternion rotation,
        Vector3 position)
    {
        Quaternion invRotation = Quaternion.Inverse(rotation);
        // Camera space is defined by the main camera's rotation (via CameraCache).
        Quaternion invCameraRotation = Quaternion.Inverse(CameraCache.Main.transform.rotation);

        for (int i = 0; i < jointCount; i++)
        {
            Vector3 p = joints[i].Position;
            Quaternion r = joints[i].Rotation;

            // Apply inverse external transform
            p = invRotation * (p - position);
            r = invRotation * r;

            // To camera space
            p = invCameraRotation * p;
            r = invCameraRotation * r;

            // Pose offset are for right hand, mirror on X axis if left hand is given
            if (handedness == Handedness.Left)
            {
                p.x = -p.x;
                r.y = -r.y;
                r.z = -r.z;
            }

            localJointPoses[i] = new MixedRealityPose(p, r);
        }
    }

    /// <summary>
    /// Set all poses to zero.
    /// </summary>
    public void SetZero()
    {
        for (int i = 0; i < jointCount; i++)
        {
            localJointPoses[i] = MixedRealityPose.ZeroIdentity;
        }
    }

    /// <summary>
    /// Copy data from another articulated hand pose.
    /// </summary>
    public void Copy(ArticulatedHandPose other)
    {
        Array.Copy(other.localJointPoses, localJointPoses, jointCount);
    }

    /// <summary>
    /// Blend between two hand poses.
    /// </summary>
    /// <remarks>Position is lerped and rotation slerped per joint by <paramref name="value"/> (0 = poseA, 1 = poseB).</remarks>
    public void InterpolateOffsets(ArticulatedHandPose poseA, ArticulatedHandPose poseB, float value)
    {
        for (int i = 0; i < jointCount; i++)
        {
            var p = Vector3.Lerp(poseA.localJointPoses[i].Position, poseB.localJointPoses[i].Position, value);
            var r = Quaternion.Slerp(poseA.localJointPoses[i].Rotation, poseB.localJointPoses[i].Rotation, value);
            localJointPoses[i] = new MixedRealityPose(p, r);
        }
    }

    /// <summary>
    /// Supported hand gestures.
    /// </summary>
    public enum GestureId
    {
        /// <summary>
        /// Unspecified hand shape
        /// </summary>
        None = 0,
        /// <summary>
        /// Flat hand with fingers spread out
        /// </summary>
        Flat,
        /// <summary>
        /// Relaxed hand pose
        /// </summary>
        Open,
        /// <summary>
        /// Index finger and Thumb touching, grab point does not move
        /// </summary>
        Pinch,
        /// <summary>
        /// Index finger and Thumb touching, wrist does not move
        /// </summary>
        PinchSteadyWrist,
        /// <summary>
        /// Index finger stretched out
        /// </summary>
        Poke,
        /// <summary>
        /// Grab with whole hand, fist shape
        /// </summary>
        Grab,
        /// <summary>
        /// OK sign
        /// </summary>
        ThumbsUp,
        /// <summary>
        /// Victory sign
        /// </summary>
        Victory,
        /// <summary>
        /// Relaxed hand pose, grab point does not move
        /// </summary>
        OpenSteadyGrabPoint,
    }

    // Legacy cache of gesture poses, filled by LoadGesturePoses (Editor only).
    [Obsolete("Use SimulatedArticulatedHandPoses class or other custom class")]
    private static readonly Dictionary<GestureId, ArticulatedHandPose> handPoses = new Dictionary<GestureId, ArticulatedHandPose>();

    /// <summary>
    /// Get pose data for a supported gesture.
    /// </summary>
    /// <returns>The cached pose, or null when the gesture has not been loaded.</returns>
    [Obsolete("Use SimulatedArticulatedHandPoses.GetGesturePose() or other custom class")]
    public static ArticulatedHandPose GetGesturePose(GestureId gesture)
    {
        if (handPoses.TryGetValue(gesture, out ArticulatedHandPose pose))
        {
            return pose;
        }
        return null;
    }

#if UNITY_EDITOR
    /// <summary>
    /// Load pose data from files.
    /// </summary>
    [Obsolete("Use SimulatedArticulatedHandPoses or other custom class")]
    public static void LoadGesturePoses()
    {
        string[] gestureNames = Enum.GetNames(typeof(GestureId));
        string basePath = Path.Combine("InputSimulation", "ArticulatedHandPoses");
        for (int i = 0; i < gestureNames.Length; ++i)
        {
            // One JSON file per gesture, resolved through the MRTK module file mapping.
            string relPath = Path.Combine(basePath, String.Format("ArticulatedHandPose_{0}.json", gestureNames[i]));
            string absPath = MixedRealityToolkitFiles.MapRelativeFilePath(MixedRealityToolkitModuleType.Services, relPath);
            LoadGesturePose((GestureId)i, absPath);
        }
    }

    // Loads a single gesture pose file into the cache; returns null when the path is empty.
    [Obsolete("Use SimulatedArticulatedHandPoses class or other custom class")]
    private static ArticulatedHandPose LoadGesturePose(GestureId gesture, string filePath)
    {
        if (!string.IsNullOrEmpty(filePath))
        {
            var pose = new ArticulatedHandPose();
            pose.FromJson(File.ReadAllText(filePath));
            handPoses.Add(gesture, pose);
            return pose;
        }
        return null;
    }

    // Clears the cached gesture poses.
    [Obsolete("Use SimulatedArticulatedHandPoses class or other custom class")]
    public static void ResetGesturePoses()
    {
        handPoses.Clear();
    }
#endif

    /// Utility class to serialize hand pose as a dictionary with full joint names
    [Serializable]
    internal struct ArticulatedHandPoseItem
    {
        private static readonly string[] jointNames = Enum.GetNames(typeof(TrackedHandJoint));

        // Joint name as serialized in JSON.
        public string joint;
        // Pose associated with the joint.
        public MixedRealityPose pose;

        /// <summary>
        /// Maps the serialized joint name to/from its TrackedHandJoint value.
        /// Logs an error and returns TrackedHandJoint.None for unknown names.
        /// </summary>
        public TrackedHandJoint JointIndex
        {
            get
            {
                int nameIndex = Array.FindIndex(jointNames, IsJointName);
                if (nameIndex < 0)
                {
                    Debug.LogError($"Joint name {joint} not in TrackedHandJoint enum");
                    return TrackedHandJoint.None;
                }
                return (TrackedHandJoint)nameIndex;
            }
            set { joint = jointNames[(int)value]; }
        }

        // Predicate used by Array.FindIndex to match the serialized name.
        private bool IsJointName(string s)
        {
            return s == joint;
        }

        public ArticulatedHandPoseItem(TrackedHandJoint joint, MixedRealityPose pose)
        {
            this.joint = jointNames[(int)joint];
            this.pose = pose;
        }
    }

    /// Utility class to serialize hand pose as a dictionary with full joint names
    [Serializable]
    internal class ArticulatedHandPoseDictionary
    {
        private static readonly int jointCount = Enum.GetNames(typeof(TrackedHandJoint)).Length;

        public ArticulatedHandPoseItem[] items = null;

        // Builds the serializable item list from a flat joint-pose array.
        public void FromJointPoses(MixedRealityPose[] jointPoses)
        {
            items = new ArticulatedHandPoseItem[jointCount];
            for (int i = 0; i < jointCount; ++i)
            {
                items[i].JointIndex = (TrackedHandJoint)i;
                items[i].pose = jointPoses[i];
            }
        }

        // Fills a flat joint-pose array from the item list; joints missing
        // from the items keep the zero/identity pose.
        public void ToJointPoses(MixedRealityPose[] jointPoses)
        {
            for (int i = 0; i < jointCount; ++i)
            {
                jointPoses[i] = MixedRealityPose.ZeroIdentity;
            }
            foreach (var item in items)
            {
                jointPoses[(int)item.JointIndex] = item.pose;
            }
        }
    }

    /// <summary>
    /// Serialize pose data to JSON format.
    /// </summary>
    public string ToJson()
    {
        var dict = new ArticulatedHandPoseDictionary();
        dict.FromJointPoses(localJointPoses);
        return JsonUtility.ToJson(dict, true);
    }

    /// <summary>
    /// Deserialize pose data from JSON format.
    /// </summary>
    public void FromJson(string json)
    {
        var dict = JsonUtility.FromJson<ArticulatedHandPoseDictionary>(json);
        dict.ToJointPoses(localJointPoses);
    }
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit.Utilities
{
/// <summary>
/// This enumeration identifies two different ways to handle the startup behavior for a feature.
/// Both will warm up the component, ready for its use (e.g. connecting backend services or registering for events.
/// The first causes the feature to start immediately. The second allows the feature to be manually started at a later time.
/// </summary>
public enum AutoStartBehavior
{
    /// <summary>Automatically start the feature.</summary>
    AutoStart = 0,

    /// <summary>Delay the start of the feature until the user requests it to begin.</summary>
    ManualStart = 1
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit.Utilities
{
/// <summary>
/// Flags used to represent a set of 3D axes
/// </summary>
[System.Flags]
public enum AxisFlags
{
    /// <summary>The X axis.</summary>
    XAxis = 1,
    /// <summary>The Y axis.</summary>
    YAxis = 2,
    /// <summary>The Z axis.</summary>
    ZAxis = 4
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit.Utilities
{
/// <summary>
/// The AxisType identifies the type of button or input being sent to the framework from a controller.
/// This is mainly information only or for advanced users to understand the input coming directly from the controller.
/// </summary>
public enum AxisType
{
    /// <summary>No Specified type.</summary>
    None = 0,

    /// <summary>Raw stream from input (proxy only).</summary>
    Raw = 1,

    /// <summary>Digital On/Off input.</summary>
    Digital = 2,

    /// <summary>Single Axis analogue input.</summary>
    SingleAxis = 3,

    /// <summary>Dual Axis analogue input.</summary>
    DualAxis = 4,

    /// <summary>Position only Axis analogue input.</summary>
    ThreeDofPosition = 5,

    /// <summary>Rotation only Axis analogue input.</summary>
    ThreeDofRotation = 6,

    /// <summary>Position AND Rotation analogue input.</summary>
    SixDof = 7
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit.Utilities
{
/// <summary>
/// Collation order type used for sorting
/// </summary>
public enum CollationOrder
{
    /// <summary>Don't sort, just display in order received.</summary>
    None = 0,

    /// <summary>Sort by child order of parent.</summary>
    ChildOrder = 1,

    /// <summary>Sort by transform name.</summary>
    Alphabetical = 2,

    /// <summary>Sort by child order of parent, reversed.</summary>
    ChildOrderReversed = 3,

    /// <summary>Sort by transform name, reversed.</summary>
    AlphabeticalReversed = 4
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit.Utilities
{
/// <summary>
/// The ExperienceScale identifies the environment for which the experience is designed.
/// </summary>
[System.Serializable]
public enum ExperienceScale
{
    /// <summary>
    /// An experience which utilizes only the headset orientation and is gravity aligned. The coordinate system origin is at head level.
    /// </summary>
    OrientationOnly = 0,

    /// <summary>
    /// An experience designed for seated use. The coordinate system origin is at head level.
    /// </summary>
    Seated = 1,

    /// <summary>
    /// An experience designed for stationary standing use. The coordinate system origin is at floor level.
    /// </summary>
    Standing = 2,

    /// <summary>
    /// An experience designed to support movement throughout a room. The coordinate system origin is at floor level.
    /// </summary>
    Room = 3,

    /// <summary>
    /// An experience designed to utilize and move through the physical world. The coordinate system origin is at head level.
    /// </summary>
    World = 4
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
namespace Microsoft.MixedReality.Toolkit.Utilities
{
/// <summary>
/// The Handedness defines which hand a controller is currently operating in.
/// It is up to the developer to determine whether this affects the use of a controller or not.
/// "Other" defines potential controllers that will offer a "third" hand, e.g. a full body tracking suit.
/// </summary>
[Flags]
public enum Handedness : byte
{
    /// <summary>No hand specified by the SDK for the controller.</summary>
    None = 0,

    /// <summary>The controller is identified as being provided in a Left hand.</summary>
    Left = 1,

    /// <summary>The controller is identified as being provided in a Right hand.</summary>
    Right = 2,

    /// <summary>The controller is identified as being either left and/or right handed.</summary>
    Both = Left | Right,

    /// <summary>Reserved, for systems that provide alternate hand state.</summary>
    Other = 4,

    /// <summary>
    /// Global catchall, used to map actions to any controller (provided the controller supports it)
    /// </summary>
    /// <remarks>Note, by default the specific hand actions will override settings mapped as both</remarks>
    Any = Other | Left | Right,
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit.Utilities
{
/// <summary>
/// Flags used to represent the number of hands that can be used in manipulation
/// </summary>
[System.Flags]
public enum ManipulationHandFlags
{
    /// <summary>Manipulation with a single hand.</summary>
    OneHanded = 1,
    /// <summary>Manipulation with two hands.</summary>
    TwoHanded = 2,
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit.Utilities
{
/// <summary>
/// Flags used to represent whether manipulation can be far, near or both
/// </summary>
[System.Flags]
public enum ManipulationProximityFlags
{
    /// <summary>Near interaction manipulation.</summary>
    Near = 1,
    /// <summary>Far interaction manipulation.</summary>
    Far = 2,
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit
{
/// <summary>
/// Mixed reality platform capabilities.
/// </summary>
public enum MixedRealityCapability
{
    /// <summary>Articulated hand input.</summary>
    ArticulatedHand = 0,

    /// <summary>Gaze-Gesture-Voice hand input.</summary>
    GGVHand = 1,

    /// <summary>Motion controller input.</summary>
    MotionController = 2,

    /// <summary>Eye gaze targeting.</summary>
    EyeTracking = 3,

    /// <summary>Voice commands using app defined keywords.</summary>
    VoiceCommand = 4,

    /// <summary>Voice to text dictation.</summary>
    VoiceDictation = 5,

    /// <summary>Spatial meshes.</summary>
    SpatialAwarenessMesh = 6,

    /// <summary>Spatial planes.</summary>
    SpatialAwarenessPlane = 7,

    /// <summary>Spatial points.</summary>
    SpatialAwarenessPoint = 8
}
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment