Commit 29de0c28 authored by BlackAngle233's avatar BlackAngle233
Browse files

10.19 learned

parent 912976bb
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Generic Input Action Rule for raising actions based on specific criteria.
/// </summary>
[Serializable]
public struct InputActionRuleVectorAxis : IInputActionRule<Vector3>
{
    [SerializeField]
    [Tooltip("The Base Action that the rule will listen to.")]
    private MixedRealityInputAction baseAction;

    [SerializeField]
    [Tooltip("The Action to raise if the criteria is met.")]
    private MixedRealityInputAction ruleAction;

    [SerializeField]
    [Tooltip("The criteria to check against for determining if the action should be raised.")]
    private Vector3 criteria;

    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="baseAction">The Base Action that the rule will listen to.</param>
    /// <param name="ruleAction">The Action to raise if the criteria is met.</param>
    /// <param name="criteria">The criteria to check against for determining if the action should be raised.</param>
    public InputActionRuleVectorAxis(MixedRealityInputAction baseAction, MixedRealityInputAction ruleAction, Vector3 criteria)
    {
        this.baseAction = baseAction;
        this.ruleAction = ruleAction;
        this.criteria = criteria;
    }

    /// <inheritdoc />
    public MixedRealityInputAction BaseAction => baseAction;

    /// <inheritdoc />
    public MixedRealityInputAction RuleAction => ruleAction;

    /// <inheritdoc />
    public Vector3 Criteria => criteria;
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
using UnityEngine.Events;
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Keyword/UnityEvent pair that ties voice input to UnityEvents wired up in the inspector.
/// </summary>
[Serializable]
public struct KeywordAndResponse
{
    [SerializeField]
    [Tooltip("The keyword to listen for.")]
    private string keyword;

    [SerializeField]
    [Tooltip("The handler to be invoked when the keyword is recognized.")]
    private UnityEvent response;

    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="keyword">The keyword to listen for.</param>
    /// <param name="response">The handler to be invoked when the keyword is recognized.</param>
    public KeywordAndResponse(string keyword, UnityEvent response)
    {
        this.keyword = keyword;
        this.response = response;
    }

    /// <summary>
    /// The keyword to listen for.
    /// </summary>
    public string Keyword => keyword;

    /// <summary>
    /// The handler to be invoked when the keyword is recognized.
    /// </summary>
    public UnityEvent Response => response;
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Data structure for mapping gestures to <see cref="MixedRealityInputAction"/>s that can be raised by the Input System.
/// </summary>
[Serializable]
public struct MixedRealityGestureMapping
{
    [SerializeField]
    private string description;

    [SerializeField]
    private GestureInputType gestureType;

    [SerializeField]
    private MixedRealityInputAction action;

    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="description">Simple, human readable description of the gesture.</param>
    /// <param name="gestureType">Type of Gesture.</param>
    /// <param name="action">Action for the associated gesture.</param>
    public MixedRealityGestureMapping(string description, GestureInputType gestureType, MixedRealityInputAction action)
    {
        this.description = description;
        this.gestureType = gestureType;
        this.action = action;
    }

    /// <summary>
    /// Simple, human readable description of the gesture.
    /// </summary>
    public string Description => description;

    /// <summary>
    /// Type of Gesture.
    /// </summary>
    public GestureInputType GestureType => gestureType;

    /// <summary>
    /// Action for the associated gesture.
    /// </summary>
    public MixedRealityInputAction Action => action;
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.Toolkit.Utilities;
using Microsoft.MixedReality.Toolkit.Windows.Input;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Configuration profile settings for recognizing gestures and mapping them to input actions.
/// </summary>
[CreateAssetMenu(menuName = "Mixed Reality Toolkit/Profiles/Mixed Reality Gestures Profile", fileName = "MixedRealityGesturesProfile", order = (int)CreateProfileMenuItemIndices.Gestures)]
[HelpURL("https://microsoft.github.io/MixedRealityToolkit-Unity/Documentation/Input/Gestures.html")]
public class MixedRealityGesturesProfile : BaseMixedRealityProfile
{
[EnumFlags]
[SerializeField]
[Tooltip("The recognizable Manipulation Gestures.")]
private WindowsGestureSettings manipulationGestures = 0;
/// <summary>
/// The recognizable Manipulation Gestures.
/// </summary>
public WindowsGestureSettings ManipulationGestures => manipulationGestures;
[EnumFlags]
[SerializeField]
[Tooltip("The recognizable Navigation Gestures.")]
private WindowsGestureSettings navigationGestures = 0;
/// <summary>
/// The recognizable Navigation Gestures.
/// </summary>
public WindowsGestureSettings NavigationGestures => navigationGestures;
[SerializeField]
[Tooltip("Should the Navigation use Rails on start?\nNote: This can be changed at runtime to switch between the two Navigation settings.")]
private bool useRailsNavigation = false;
/// <summary>
/// Should Navigation use Rails on start?
/// Note: This can be changed at runtime to switch between the two Navigation settings.
/// </summary>
public bool UseRailsNavigation => useRailsNavigation;
[EnumFlags]
[SerializeField]
[Tooltip("The recognizable Rails Navigation Gestures.")]
private WindowsGestureSettings railsNavigationGestures = 0;
/// <summary>
/// The recognizable Rails Navigation Gestures.
/// </summary>
public WindowsGestureSettings RailsNavigationGestures => railsNavigationGestures;
[SerializeField]
private AutoStartBehavior windowsGestureAutoStart = AutoStartBehavior.AutoStart;
/// <summary>
/// The auto-start behavior for the Windows gesture recognizer (defaults to AutoStart).
/// </summary>
public AutoStartBehavior WindowsGestureAutoStart => windowsGestureAutoStart;
[SerializeField]
private MixedRealityGestureMapping[] gestures =
{
// Default mappings ship with no action assigned; actions are wired up per-application.
new MixedRealityGestureMapping("Hold", GestureInputType.Hold, MixedRealityInputAction.None),
new MixedRealityGestureMapping("Navigation", GestureInputType.Navigation, MixedRealityInputAction.None),
new MixedRealityGestureMapping("Manipulation", GestureInputType.Manipulation, MixedRealityInputAction.None),
};
/// <summary>
/// The currently configured gestures for the application.
/// </summary>
public MixedRealityGestureMapping[] Gestures => gestures;
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.Toolkit.Utilities;
using System;
using System.Collections;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// An Input Action for mapping an action to an Input Sources Button, Joystick, Sensor, etc.
/// </summary>
[Serializable]
public struct MixedRealityInputAction : IEqualityComparer
{
    [SerializeField]
    private uint id;

    [SerializeField]
    private string description;

    [SerializeField]
    private AxisType axisConstraint;

    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="id">The unique id of this action.</param>
    /// <param name="description">A short description of this action.</param>
    /// <param name="axisConstraint">The axis constraint, defaulting to <see cref="AxisType.None"/>.</param>
    public MixedRealityInputAction(uint id, string description, AxisType axisConstraint = AxisType.None)
    {
        this.id = id;
        this.description = description;
        this.axisConstraint = axisConstraint;
    }

    /// <summary>
    /// The "no action" sentinel value (id 0).
    /// </summary>
    public static MixedRealityInputAction None { get; } = new MixedRealityInputAction(0, "None");

    /// <summary>
    /// The Unique Id of this Input Action.
    /// </summary>
    public uint Id => id;

    /// <summary>
    /// A short description of the Input Action.
    /// </summary>
    public string Description => description;

    /// <summary>
    /// The Axis constraint for the Input Action.
    /// </summary>
    public AxisType AxisConstraint => axisConstraint;

    public static bool operator ==(MixedRealityInputAction left, MixedRealityInputAction right) => left.Equals(right);

    public static bool operator !=(MixedRealityInputAction left, MixedRealityInputAction right) => !left.Equals(right);

    #region IEqualityComparer Implementation

    /// <inheritdoc />
    bool IEqualityComparer.Equals(object left, object right)
    {
        // A null operand or one that is not a MixedRealityInputAction can never be equal.
        return left is MixedRealityInputAction leftAction
            && right is MixedRealityInputAction rightAction
            && leftAction.Equals(rightAction);
    }

    /// <summary>
    /// Actions are equal when their <see cref="Id"/> and <see cref="AxisConstraint"/> match;
    /// <see cref="Description"/> is not part of the comparison.
    /// </summary>
    public bool Equals(MixedRealityInputAction other)
    {
        return Id == other.Id && AxisConstraint == other.AxisConstraint;
    }

    /// <inheritdoc />
    public override bool Equals(object obj) => obj is MixedRealityInputAction other && Equals(other);

    /// <inheritdoc />
    int IEqualityComparer.GetHashCode(object obj) => obj is MixedRealityInputAction action ? action.GetHashCode() : 0;

    /// <inheritdoc />
    public override int GetHashCode() => $"{Id}.{AxisConstraint}".GetHashCode();

    #endregion IEqualityComparer Implementation
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.Toolkit.Utilities;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Configuration profile holding the Input Action Rules used to raise new actions based on criteria
/// applied to base <see cref="MixedRealityInputAction"/>s, grouped by input value type.
/// </summary>
[CreateAssetMenu(menuName = "Mixed Reality Toolkit/Profiles/Mixed Reality Input Action Rules Profile", fileName = "MixedRealityInputActionRulesProfile", order = (int)CreateProfileMenuItemIndices.InputActionRules)]
public class MixedRealityInputActionRulesProfile : BaseMixedRealityProfile
{
[SerializeField]
private InputActionRuleDigital[] inputActionRulesDigital = null;
/// <summary>
/// All the Input Action Rules for <see cref="bool"/> based <see cref="MixedRealityInputAction"/>s
/// </summary>
public InputActionRuleDigital[] InputActionRulesDigital => inputActionRulesDigital;
[SerializeField]
private InputActionRuleSingleAxis[] inputActionRulesSingleAxis = null;
/// <summary>
/// All the Input Action Rules for <see cref="float"/> based <see cref="MixedRealityInputAction"/>s
/// </summary>
public InputActionRuleSingleAxis[] InputActionRulesSingleAxis => inputActionRulesSingleAxis;
[SerializeField]
private InputActionRuleDualAxis[] inputActionRulesDualAxis = null;
/// <summary>
/// All the Input Action Rules for <see href="https://docs.unity3d.com/ScriptReference/Vector2.html">Vector2</see> based <see cref="MixedRealityInputAction"/>s
/// </summary>
public InputActionRuleDualAxis[] InputActionRulesDualAxis => inputActionRulesDualAxis;
[SerializeField]
private InputActionRuleVectorAxis[] inputActionRulesVectorAxis = null;
/// <summary>
/// All the Input Action Rules for <see href="https://docs.unity3d.com/ScriptReference/Vector3.html">Vector3</see> based <see cref="MixedRealityInputAction"/>s
/// </summary>
public InputActionRuleVectorAxis[] InputActionRulesVectorAxis => inputActionRulesVectorAxis;
[SerializeField]
private InputActionRuleQuaternionAxis[] inputActionRulesQuaternionAxis = null;
/// <summary>
/// All the Input Action Rules for <see href="https://docs.unity3d.com/ScriptReference/Quaternion.html">Quaternion</see> based <see cref="MixedRealityInputAction"/>s
/// </summary>
public InputActionRuleQuaternionAxis[] InputActionRulesQuaternionAxis => inputActionRulesQuaternionAxis;
[SerializeField]
private InputActionRulePoseAxis[] inputActionRulesPoseAxis = null;
/// <summary>
/// All the Input Action Rules for <see cref="Microsoft.MixedReality.Toolkit.Utilities.MixedRealityPose"/> based <see cref="MixedRealityInputAction"/>s
/// </summary>
public InputActionRulePoseAxis[] InputActionRulesPoseAxis => inputActionRulesPoseAxis;
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.Toolkit.Utilities;
using System.Collections.Generic;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Configuration profile settings for setting up and consuming Input Actions.
/// </summary>
[CreateAssetMenu(menuName = "Mixed Reality Toolkit/Profiles/Mixed Reality Input Actions Profile", fileName = "MixedRealityInputActionsProfile", order = (int)CreateProfileMenuItemIndices.InputActions)]
[HelpURL("https://microsoft.github.io/MixedRealityToolkit-Unity/Documentation/Input/InputActions.html")]
public class MixedRealityInputActionsProfile : BaseMixedRealityProfile
{
    // Default action names; index-aligned with defaultInputActionsAxis below.
    private readonly string[] defaultInputActions =
    {
        "Select",
        "Menu",
        "Grip",
        "Pointer",
        "Walk",
        "Look",
        "Interact",
        "Pickup",
        "Inventory",
        "ConversationSelect"
    }; // Examples only, to be refined later.

    // Axis constraint for each default action, by index.
    private readonly AxisType[] defaultInputActionsAxis =
    {
        AxisType.Digital,
        AxisType.Digital,
        AxisType.SixDof,
        AxisType.SixDof,
        AxisType.DualAxis,
        AxisType.DualAxis,
        AxisType.DualAxis,
        AxisType.Digital,
        AxisType.DualAxis,
        AxisType.DualAxis
    }; // Examples only, to be refined later

    [SerializeField]
    [Tooltip("The list of actions users can do in your application.")]
    private MixedRealityInputAction[] inputActions =
    {
        // 0 is reserved for "None"
        new MixedRealityInputAction(1, "Select"),
        new MixedRealityInputAction(2, "Menu"),
        new MixedRealityInputAction(3, "Grip")
    }; // Examples only, to be refined later

    /// <summary>
    /// The list of actions users can do in your application.
    /// </summary>
    /// <remarks>Input Actions are device agnostic and can be paired with any number of device inputs across all platforms.</remarks>
    public MixedRealityInputAction[] InputActions => inputActions;

    /// <summary>
    /// Reset the current InputActions definitions to the Mixed Reality Toolkit defaults
    /// If existing mappings exist, they will be preserved and pushed to the end of the array
    /// </summary>
    /// <returns>Default MRTK Actions plus any custom actions (if already configured)</returns>
    public MixedRealityInputAction[] LoadMixedRealityToolKitDefaults()
    {
        var defaultActions = new List<MixedRealityInputAction>();

        // NOTE(review): default ids start at 0, which collides with the id reserved for
        // MixedRealityInputAction.None (see the serialized defaults above) — confirm intended.
        for (uint i = 0; i < defaultInputActions.Length; i++)
        {
            defaultActions.Add(new MixedRealityInputAction(i, defaultInputActions[i], defaultInputActionsAxis[i]));
        }

        // Preserve any pre-existing custom actions by appending the ones not already
        // covered by the defaults (equality is by Id + AxisConstraint).
        for (int i = 0; i < inputActions.Length; i++)
        {
            if (!defaultActions.Contains(inputActions[i]))
            {
                defaultActions.Add(inputActions[i]);
            }
        }

        return inputActions = defaultActions.ToArray();
    }
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.Toolkit.Utilities;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.CompilerServices;
using UnityEngine;
[assembly: InternalsVisibleTo("Microsoft.MixedReality.Toolkit.Tests.EditModeTests")]
[assembly: InternalsVisibleTo("Microsoft.MixedReality.Toolkit.Tests.PlayModeTests")]
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Configuration profile for the Mixed Reality Input System and its data providers.
/// </summary>
[CreateAssetMenu(menuName = "Mixed Reality Toolkit/Profiles/Mixed Reality Input System Profile", fileName = "MixedRealityInputSystemProfile", order = (int)CreateProfileMenuItemIndices.Input)]
[MixedRealityServiceProfile(typeof(IMixedRealityInputSystem))]
[HelpURL("https://microsoft.github.io/MixedRealityToolkit-Unity/Documentation/Input/Overview.html")]
public class MixedRealityInputSystemProfile : BaseMixedRealityProfile
{
    [SerializeField]
    private MixedRealityInputDataProviderConfiguration[] dataProviderConfigurations = System.Array.Empty<MixedRealityInputDataProviderConfiguration>();

    /// <summary>
    /// List of input data provider configurations to initialize and manage by the Input System registrar
    /// </summary>
    public MixedRealityInputDataProviderConfiguration[] DataProviderConfigurations
    {
        get => dataProviderConfigurations;
        internal set => dataProviderConfigurations = value;
    }

    [SerializeField]
    [Tooltip("The focus provider service concrete type to use when raycasting.")]
    [Implements(typeof(IMixedRealityFocusProvider), TypeGrouping.ByNamespaceFlat)]
    private SystemType focusProviderType;

    /// <summary>
    /// The focus provider service concrete type to use when raycasting.
    /// </summary>
    public SystemType FocusProviderType
    {
        get => focusProviderType;
        internal set => focusProviderType = value;
    }

    [SerializeField]
    [Tooltip("The raycast provider service concrete type to use when raycasting.")]
    [Implements(typeof(IMixedRealityRaycastProvider), TypeGrouping.ByNamespaceFlat)]
    private SystemType raycastProviderType;

    /// <summary>
    /// The raycast provider service concrete type to use when raycasting.
    /// </summary>
    public SystemType RaycastProviderType
    {
        get => raycastProviderType;
        internal set => raycastProviderType = value;
    }

    [SerializeField]
    [Range(1, 2048)]
    [Tooltip("Maximum number of colliders that can be detected in a SphereOverlap scene query.")]
    private int focusQueryBufferSize = 128;

    /// <summary>
    /// Maximum number of colliders that can be detected in a SphereOverlap scene query.
    /// </summary>
    public int FocusQueryBufferSize => focusQueryBufferSize;

    [SerializeField]
    [Tooltip("In case of a compound collider, does the individual collider receive focus")]
    private bool focusIndividualCompoundCollider = false;

    /// <summary>
    /// In case of a compound collider, does the individual collider receive focus
    /// </summary>
    public bool FocusIndividualCompoundCollider
    {
        get => focusIndividualCompoundCollider;
        set => focusIndividualCompoundCollider = value;
    }

    [SerializeField]
    [Tooltip("Input System Action Mapping profile for wiring up Controller input to Actions.")]
    private MixedRealityInputActionsProfile inputActionsProfile;

    /// <summary>
    /// Input System Action Mapping profile for wiring up Controller input to Actions.
    /// </summary>
    public MixedRealityInputActionsProfile InputActionsProfile
    {
        get => inputActionsProfile;
        internal set => inputActionsProfile = value;
    }

    [SerializeField]
    [Tooltip("Input Action Rules Profile for raising actions based on specific criteria.")]
    private MixedRealityInputActionRulesProfile inputActionRulesProfile;

    /// <summary>
    /// Input Action Rules Profile for raising actions based on specific criteria.
    /// </summary>
    public MixedRealityInputActionRulesProfile InputActionRulesProfile
    {
        get => inputActionRulesProfile;
        internal set => inputActionRulesProfile = value;
    }

    [SerializeField]
    [Tooltip("Pointer Configuration options")]
    private MixedRealityPointerProfile pointerProfile;

    /// <summary>
    /// Pointer configuration options
    /// </summary>
    public MixedRealityPointerProfile PointerProfile
    {
        get => pointerProfile;
        internal set => pointerProfile = value;
    }

    [SerializeField]
    [Tooltip("Gesture Mapping Profile for recognizing gestures across all platforms.")]
    private MixedRealityGesturesProfile gesturesProfile;

    /// <summary>
    /// Gesture Mapping Profile for recognizing gestures across all platforms.
    /// </summary>
    public MixedRealityGesturesProfile GesturesProfile
    {
        get => gesturesProfile;
        internal set => gesturesProfile = value;
    }

    // The cultures for which speech recognition is supported.
    private List<CultureInfo> supportedVoiceCultures = new List<CultureInfo>
    {
        new CultureInfo("en-US"),
        new CultureInfo("en-CA"),
        new CultureInfo("fr-CA"),
        new CultureInfo("en-GB"),
        new CultureInfo("en-AU"),
        new CultureInfo("de-DE"),
        new CultureInfo("fr-FR"),
        new CultureInfo("zh-CN"),
        new CultureInfo("ja-JP"),
        new CultureInfo("es-ES"),
        new CultureInfo("it-IT")
    };

    /// <summary>
    /// Returns whether speech is supported for the current UI culture.
    /// </summary>
    public bool IsSpeechSupported => supportedVoiceCultures.Contains(CultureInfo.CurrentUICulture);

    [SerializeField]
    [Tooltip("Speech Command profile for wiring up Voice Input to Actions.")]
    private MixedRealitySpeechCommandsProfile speechCommandsProfile;

    /// <summary>
    /// Speech commands profile for configured speech commands, for use by the speech recognition system
    /// </summary>
    public MixedRealitySpeechCommandsProfile SpeechCommandsProfile
    {
        get => speechCommandsProfile;
        internal set => speechCommandsProfile = value;
    }

    [SerializeField]
    [Tooltip("Enable and configure the devices for your application.")]
    private bool enableControllerMapping = false;

    /// <summary>
    /// True only when controller mapping is enabled AND a controller mapping profile is assigned.
    /// </summary>
    public bool IsControllerMappingEnabled
    {
        get => controllerMappingProfile != null && enableControllerMapping;
        internal set => enableControllerMapping = value;
    }

    [SerializeField]
    [Tooltip("Device profile for wiring up physical inputs to Actions.")]
    private MixedRealityControllerMappingProfile controllerMappingProfile;

    /// <summary>
    /// Active profile for controller mapping configuration
    /// </summary>
    public MixedRealityControllerMappingProfile ControllerMappingProfile
    {
        get => controllerMappingProfile;
        internal set => controllerMappingProfile = value;
    }

    [SerializeField]
    [Tooltip("Device profile for rendering spatial controllers.")]
    private MixedRealityControllerVisualizationProfile controllerVisualizationProfile;

    /// <summary>
    /// Device profile for rendering spatial controllers.
    /// </summary>
    public MixedRealityControllerVisualizationProfile ControllerVisualizationProfile
    {
        get => controllerVisualizationProfile;
        internal set => controllerVisualizationProfile = value;
    }

    [SerializeField]
    [Tooltip("Profile for configuring Hands tracking.")]
    private MixedRealityHandTrackingProfile handTrackingProfile;

    /// <summary>
    /// Active profile for hands tracking
    /// </summary>
    public MixedRealityHandTrackingProfile HandTrackingProfile
    {
        get => handTrackingProfile;
        private set => handTrackingProfile = value;
    }
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.Toolkit.Utilities;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Configuration profile settings for setting up controller pointers.
/// </summary>
[CreateAssetMenu(menuName = "Mixed Reality Toolkit/Profiles/Mixed Reality Pointer Profile", fileName = "MixedRealityInputPointerProfile", order = (int)CreateProfileMenuItemIndices.Pointer)]
[HelpURL("https://microsoft.github.io/MixedRealityToolkit-Unity/Documentation/Input/Pointers.html")]
public class MixedRealityPointerProfile : BaseMixedRealityProfile
{
    [SerializeField]
    [Tooltip("Maximum distance at which all pointers can collide with a GameObject, unless it has an override extent.")]
    private float pointingExtent = 10f;

    /// <summary>
    /// Maximum distance at which all pointers can collide with a GameObject, unless it has an override extent.
    /// </summary>
    public float PointingExtent => pointingExtent;

    [SerializeField]
    [Tooltip("The LayerMasks, in prioritized order, that are used to determine the GazeTarget when raycasting.")]
    private LayerMask[] pointingRaycastLayerMasks = { UnityEngine.Physics.DefaultRaycastLayers };

    /// <summary>
    /// The LayerMasks, in prioritized order, that are used to determine the GazeTarget when raycasting.
    /// </summary>
    public LayerMask[] PointingRaycastLayerMasks => pointingRaycastLayerMasks;

    [SerializeField]
    private bool debugDrawPointingRays = false;

    /// <summary>
    /// Toggle to enable or disable debug pointing rays.
    /// </summary>
    public bool DebugDrawPointingRays => debugDrawPointingRays;

    [SerializeField]
    private Color[] debugDrawPointingRayColors = null;

    /// <summary>
    /// The colors to use when debugging pointer rays.
    /// </summary>
    public Color[] DebugDrawPointingRayColors => debugDrawPointingRayColors;

    [Prefab]
    [SerializeField]
    [Tooltip("The gaze cursor prefab to use on the Gaze pointer.")]
    private GameObject gazeCursorPrefab = null;

    /// <summary>
    /// The gaze cursor prefab to use on the Gaze pointer.
    /// </summary>
    public GameObject GazeCursorPrefab => gazeCursorPrefab;

    [SerializeField]
    [Tooltip("The concrete type of IMixedRealityGazeProvider to use.")]
    [Implements(typeof(IMixedRealityGazeProvider), TypeGrouping.ByNamespaceFlat)]
    private SystemType gazeProviderType;

    /// <summary>
    /// The concrete type of <see cref="IMixedRealityGazeProvider"/> to use.
    /// </summary>
    public SystemType GazeProviderType
    {
        get => gazeProviderType;
        internal set => gazeProviderType = value;
    }

    [SerializeField]
    [Tooltip("If true, platform-specific head gaze override is used, when available. Otherwise, the center of the camera frame is used by default.")]
    private bool useHeadGazeOverride = false;

    /// <summary>
    /// If true, platform-specific head gaze override is used, when available. Otherwise, the center of the camera frame is used by default.
    /// </summary>
    public bool UseHeadGazeOverride => useHeadGazeOverride;

    [SerializeField]
    [Tooltip("If true, eye-based tracking will be used as gaze input when available. Requires the 'Gaze Input' permission and device eye calibration to have been run.")]
    private bool isEyeTrackingEnabled = false;

    /// <summary>
    /// If true, eye-based tracking will be used as gaze input when available.
    /// </summary>
    public bool IsEyeTrackingEnabled
    {
        get => isEyeTrackingEnabled;
        internal set => isEyeTrackingEnabled = value;
    }

    [SerializeField]
    [Tooltip("The Pointer options for this profile.")]
    private PointerOption[] pointerOptions = System.Array.Empty<PointerOption>();

    /// <summary>
    /// The Pointer options for this profile.
    /// </summary>
    public PointerOption[] PointerOptions => pointerOptions;

    [SerializeField]
    [Implements(typeof(IMixedRealityPointerMediator), TypeGrouping.ByNamespaceFlat)]
    [Tooltip("The concrete Pointer Mediator component to use. This is a component that mediates all pointers in system, disabling / enabling them based on the state of other pointers.")]
    private SystemType pointerMediator = null;

    /// <summary>
    /// The concrete Pointer Mediator component to use.
    /// This is a component that mediates all pointers in system, disabling / enabling them based on the state of other pointers.
    /// </summary>
    public SystemType PointerMediator => pointerMediator;

    [SerializeField]
    [Implements(typeof(IMixedRealityPrimaryPointerSelector), TypeGrouping.ByNamespaceFlat)]
    [Tooltip("Primary pointer selector implementation to use. This is used by the focus provider to choose the primary pointer.")]
    private SystemType primaryPointerSelector = null;

    /// <summary>
    /// Primary pointer selector implementation to use. This is used by the focus provider to choose the primary pointer.
    /// </summary>
    public SystemType PrimaryPointerSelector => primaryPointerSelector;
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// The resulting hit information from an IMixedRealityRaycastProvider.
/// </summary>
public struct MixedRealityRaycastHit
{
    public Vector3 point;
    public Vector3 normal;
    public Vector3 barycentricCoordinate;
    public float distance;
    public int triangleIndex;
    public Vector2 textureCoord;
    public Vector2 textureCoord2;
    public Transform transform;
    public Vector2 lightmapCoord;
    public bool raycastValid;
    public Collider collider;

    /// <summary>
    /// Copies the relevant fields from a Unity <see cref="RaycastHit"/>.
    /// When <paramref name="raycastValid"/> is false, every field is left at its zero/null default.
    /// </summary>
    public MixedRealityRaycastHit(bool raycastValid, RaycastHit hitInfo)
    {
        // Zero-initialize all fields up front; this matches the explicit
        // zero/null assignments the invalid branch used to make.
        this = default(MixedRealityRaycastHit);
        this.raycastValid = raycastValid;

        if (raycastValid)
        {
            point = hitInfo.point;
            normal = hitInfo.normal;
            barycentricCoordinate = hitInfo.barycentricCoordinate;
            distance = hitInfo.distance;
            triangleIndex = hitInfo.triangleIndex;
            textureCoord = hitInfo.textureCoord;

            // Only read textureCoord2 when the collider is not a MeshCollider or
            // its shared mesh is readable; otherwise keep the zero default.
            MeshCollider meshCollider = hitInfo.collider as MeshCollider;
            if (meshCollider == null || meshCollider.sharedMesh.isReadable)
            {
                textureCoord2 = hitInfo.textureCoord2;
            }

            transform = hitInfo.transform;
            lightmapCoord = hitInfo.lightmapCoord;
            collider = hitInfo.collider;
        }
    }
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.Toolkit.Utilities;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Configuration profile settings for setting up and consuming Speech Commands.
/// </summary>
[CreateAssetMenu(menuName = "Mixed Reality Toolkit/Profiles/Mixed Reality Speech Commands Profile", fileName = "MixedRealitySpeechCommandsProfile", order = (int)CreateProfileMenuItemIndices.Speech)]
[HelpURL("https://microsoft.github.io/MixedRealityToolkit-Unity/Documentation/Input/Speech.html")]
public class MixedRealitySpeechCommandsProfile : BaseMixedRealityProfile
{
[SerializeField]
[Tooltip("Whether the recognizer should be activated on start.")]
private AutoStartBehavior startBehavior = AutoStartBehavior.AutoStart;
/// <summary>
/// Whether the speech recognizer should be activated on start.
/// </summary>
public AutoStartBehavior SpeechRecognizerStartBehavior => startBehavior;
[SerializeField]
[Tooltip("Select the minimum confidence level for recognized words")]
private RecognitionConfidenceLevel recognitionConfidenceLevel = RecognitionConfidenceLevel.Medium;
/// <summary>
/// The speech recognizer's minimum confidence level setting that will raise the action.
/// </summary>
public RecognitionConfidenceLevel SpeechRecognitionConfidenceLevel => recognitionConfidenceLevel;
[SerializeField]
[Tooltip("The list of Speech Commands users use in your application.")]
private SpeechCommands[] speechCommands = System.Array.Empty<SpeechCommands>();
/// <summary>
/// The list of Speech Commands users use in your application.
/// </summary>
public SpeechCommands[] SpeechCommands => speechCommands;
}
}
\ No newline at end of file

// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Specifies how a pointer in MRTK's default input system behaves.
/// </summary>
public enum PointerBehavior
{
/// <summary>
/// Pointer active state is managed by MRTK input system. If it is a near pointer (grab, poke), it
/// will be always enabled. If it is not a near pointer, it will get disabled if any near pointer on the
/// same hand is active. This is what allows rays to turn off when a hand is near a grabbable.
/// </summary>
Default = 0,
/// <summary>
/// Pointer is always on, regardless of what other pointers are active.
/// </summary>
AlwaysOn,
/// <summary>
/// Pointer is always off, regardless of what other pointers are active.
/// </summary>
AlwaysOff
};
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using Microsoft.MixedReality.Toolkit.Utilities;
using System;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Defines a pointer option to assign to a controller.
/// </summary>
[Serializable]
public struct PointerOption
{
    [EnumFlags]
    [SerializeField]
    [Tooltip("The type of Controller this pointer can be attached to at runtime.")]
    private SupportedControllerType controllerType;

    [SerializeField]
    [Tooltip("Defines valid hand(s) to create the pointer prefab on.")]
    private Handedness handedness;

    [SerializeField]
    [Tooltip("The prefab with an IMixedRealityPointer component to create when a valid controller becomes available.")]
    private GameObject pointerPrefab;

    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="controllerType">The type of Controller this pointer can be attached to at runtime.</param>
    /// <param name="handedness">Valid hand(s) to create the pointer prefab on.</param>
    /// <param name="pointerPrefab">The pointer prefab to create when a valid controller becomes available.</param>
    public PointerOption(SupportedControllerType controllerType, Handedness handedness, GameObject pointerPrefab)
    {
        this.controllerType = controllerType;
        this.handedness = handedness;
        this.pointerPrefab = pointerPrefab;
    }

    /// <summary>
    /// The type of Controller this pointer can be attached to at runtime.
    /// </summary>
    /// <remarks>NOTE(review): a prior remark here stated that <see cref="Microsoft.MixedReality.Toolkit.Utilities.Handedness.None"/> attaches to any controller type — confirm whether that note belongs on this property or on <see cref="Handedness"/>.</remarks>
    public SupportedControllerType ControllerType => controllerType;

    /// <summary>
    /// Defines valid hand(s) to create the pointer prefab on.
    /// </summary>
    public Handedness Handedness => handedness;

    /// <summary>
    /// The prefab with an <see cref="Microsoft.MixedReality.Toolkit.Input.IMixedRealityPointer"/> component to create when a valid controller becomes available.
    /// </summary>
    public GameObject PointerPrefab => pointerPrefab;
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
using UnityEngine;
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Data structure for mapping Voice and Keyboard input to <see cref="MixedRealityInputAction"/>s that can be raised by the Input System.
/// </summary>
[Serializable]
public struct SpeechCommands
{
    /// <summary>
    /// Constructor.
    /// </summary>
    /// <param name="keyword">The Keyword.</param>
    /// <param name="keyCode">The KeyCode.</param>
    /// <param name="action">The Action to perform when Keyword or KeyCode is recognized.</param>
    /// <param name="localizationKey">An optional key used to override the keyword with a localized version.</param>
    public SpeechCommands(string keyword, KeyCode keyCode, MixedRealityInputAction action, string localizationKey = "")
    {
        this.keyword = keyword;
        this.keyCode = keyCode;
        this.action = action;
        this.localizationKey = localizationKey;
        // Resolved lazily (and cached) by the LocalizedKeyword getter.
        this.localizedKeyword = null;
    }

    [SerializeField]
    [Tooltip("The key to use to find a localized keyword")]
    private string localizationKey;

    // Cache for the resolved localized keyword; populated on first successful lookup (UWP only).
    private string localizedKeyword;

    /// <summary>
    /// The localized version of the keyword. Falls back to <see cref="Keyword"/> when no
    /// localized value can be resolved.
    /// </summary>
    public string LocalizedKeyword
    {
        get
        {
            // Resource-based localization is only available on UWP; on all other
            // platforms this property always returns the fallback keyword.
#if WINDOWS_UWP
            // Only attempt the lookup when a key was provided and nothing is cached yet.
            if (!string.IsNullOrWhiteSpace(localizationKey) && string.IsNullOrWhiteSpace(localizedKeyword))
            {
                try
                {
                    var resourceLoader = global::Windows.ApplicationModel.Resources.ResourceLoader.GetForViewIndependentUse();
                    // NOTE(review): this assigns a field from inside a struct property getter; if the
                    // struct is read through a copy (e.g. a readonly field), the cached value is lost
                    // and the lookup repeats on every access — confirm callers hold a mutable instance.
                    localizedKeyword = resourceLoader.GetString(localizationKey);
                }
                catch(System.Exception e)
                {
                    // Ignore the exception and just use the fallback
                    Debug.LogError("GetLocalizedKeywordException: " + e.Message);
                }
            }
#endif
            // Use the serialized keyword whenever no localized value was resolved.
            return string.IsNullOrWhiteSpace(localizedKeyword) ? keyword : localizedKeyword;
        }
    }

    [SerializeField]
    [Tooltip("The Fallback keyword to listen for.")]
    private string keyword;

    /// <summary>
    /// The Fallback Keyword to listen for, or the localization key if no fallback keyword was set.
    /// </summary>
    public string Keyword
    {
        get
        {
            return string.IsNullOrWhiteSpace(keyword) ? localizationKey : keyword;
        }
    }

    [SerializeField]
    [Tooltip("The corresponding KeyCode that also raises the same action as the Localized Keyword.")]
    private KeyCode keyCode;

    /// <summary>
    /// The corresponding KeyCode that also raises the same action as the Keyword.
    /// </summary>
    public KeyCode KeyCode => keyCode;

    [SerializeField]
    [Tooltip("The Action that is raised by either the Localized Keyword or KeyCode.")]
    private MixedRealityInputAction action;

    /// <summary>
    /// The <see cref="MixedRealityInputAction"/> that is raised by either the Keyword or KeyCode.
    /// </summary>
    public MixedRealityInputAction Action => action;
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit.Input
{
/// <summary>
/// Type of Events to receive from a PokePointer.
/// </summary>
public enum TouchableEventType
{
    /// <summary>
    /// Raise touch events (touch started / updated / completed).
    /// </summary>
    Touch,
    /// <summary>
    /// Raise pointer events (pointer down / dragged / up / clicked).
    /// </summary>
    Pointer,
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System;
namespace Microsoft.MixedReality.Toolkit.Windows.Input
{
/// <summary>
/// Copy of Unity's <see href="https://docs.unity3d.com/ScriptReference/XR.WSA.Input.GestureSettings.html">GestureSettings</see>
/// </summary>
/// <remarks>
/// Flag values mirror Unity's enum and must stay in sync with it; do not renumber.
/// </remarks>
[Flags]
public enum WindowsGestureSettings
{
    /// <summary>
    /// <para>Enable support for the tap gesture.</para>
    /// </summary>
    Tap = 1 << 0, // HEX: 0x00000001 | Decimal: 1

    /// <summary>
    /// <para>Enable support for the double-tap gesture.</para>
    /// </summary>
    DoubleTap = 1 << 1, // HEX: 0x00000002 | Decimal: 2

    /// <summary>
    /// <para>Enable support for the hold gesture.</para>
    /// </summary>
    Hold = 1 << 2, // HEX: 0x00000004 | Decimal: 4

    /// <summary>
    /// <para>Enable support for the manipulation gesture which tracks changes to the hand's position. This gesture is relative to the start position of the gesture and measures an absolute movement through the world.</para>
    /// </summary>
    ManipulationTranslate = 1 << 3, // HEX: 0x00000008 | Decimal: 8

    /// <summary>
    /// <para>Enable support for the navigation gesture, in the horizontal axis.</para>
    /// </summary>
    NavigationX = 1 << 4, // HEX: 0x00000010 | Decimal: 16

    /// <summary>
    /// <para>Enable support for the navigation gesture, in the vertical axis.</para>
    /// </summary>
    NavigationY = 1 << 5, // HEX: 0x00000020 | Decimal: 32

    /// <summary>
    /// <para>Enable support for the navigation gesture, in the depth axis.</para>
    /// </summary>
    NavigationZ = 1 << 6, // HEX: 0x00000040 | Decimal: 64

    /// <summary>
    /// <para>Enable support for the navigation gesture, in the horizontal axis using rails (guides).</para>
    /// </summary>
    NavigationRailsX = 1 << 7, // HEX: 0x00000080 | Decimal: 128

    /// <summary>
    /// <para>Enable support for the navigation gesture, in the vertical axis using rails (guides).</para>
    /// </summary>
    NavigationRailsY = 1 << 8, // HEX: 0x00000100 | Decimal: 256

    /// <summary>
    /// <para>Enable support for the navigation gesture, in the depth axis using rails (guides).</para>
    /// </summary>
    NavigationRailsZ = 1 << 9, // HEX: 0x00000200 | Decimal: 512
}
}
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit
{
/// <summary>
/// How to apply the distortion along the line.
/// </summary>
public enum DistortionMode
{
    /// <summary>
    /// Use the normalized length of the line plus its distortion strength curve to determine distortion strength
    /// </summary>
    NormalizedLength = 0,
    /// <summary>
    /// Use a single value to determine distortion strength
    /// </summary>
    Uniform,
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit
{
/// <summary>
/// Default options for how to distribute interpolated points in a line renderer
/// </summary>
public enum InterpolationMode
{
    /// <summary>
    /// Specify the number of interpolation steps manually
    /// </summary>
    FromSteps = 0,
    /// <summary>
    /// Create steps based on total length of line + manually specified length
    /// </summary>
    FromLength,
    /// <summary>
    /// Create steps based on total length of line + animation curve
    /// </summary>
    FromCurve
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit
{
/// <summary>
/// Defines the type of interpolation to use when calculating a spline.
/// </summary>
public enum InterpolationType
{
    /// <summary>
    /// Bezier curve interpolation between control points.
    /// </summary>
    Bezier = 0,
    /// <summary>
    /// Catmull-Rom spline interpolation (curve passes through the control points).
    /// </summary>
    CatmullRom,
    /// <summary>
    /// Hermite spline interpolation (defined by point positions and tangents).
    /// </summary>
    Hermite,
}
}
\ No newline at end of file
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace Microsoft.MixedReality.Toolkit
{
/// <summary>
/// Defines how a base line data provider will transform its points
/// </summary>
public enum LinePointTransformMode
{
    /// <summary>
    /// Use the local line transform. More reliable but with a performance cost.
    /// </summary>
    UseTransform,
    /// <summary>
    /// Use a matrix. Lines that are not active and enabled will not update point positions.
    /// </summary>
    UseMatrix,
}
}
\ No newline at end of file
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment