Building on the latest XR Hands package to make an engaging and intuitive experience featuring advanced hand-tracking and complex entities you can interact with.
Unity's Gestures sample for the XR Hands package includes the StaticHandGesture component, which compares hand-tracking data with a hand shape or pose to notify event listeners when the gesture has been Performed or has Ended.
However, there are many tedious things about this system. For one, it uses a single XRHandTrackingEvents component per hand, meaning that tracking the same gesture for both hands requires two separate StaticHandGesture components with duplicated settings.
There's also no out-of-the-box way for instantiated objects to reference specific StaticHandGesture components in the scene.
As a solution, I developed a much more flexible event system in the form of a Scriptable Object asset that allows easy referencing of a specific hand gesture. Each asset handles events for both hands using the same settings. This also makes it easier to add new gestures and listen for their tracking in the future.
using UnityEngine;
using UnityEngine.Events;
using UnityEngine.XR.Hands.Gestures;
using UnityEngine.XR.Hands;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
/// <summary>
/// Scriptable Object asset that handles hand gesture detection for both hands
/// using a single shared set of detection settings.
/// </summary>
[CreateAssetMenu()]
public class HandGestureEvents : ScriptableObject
{
    /// <summary>
    /// Settings used in the hand detector.
    /// </summary>
    [Serializable]
    public class DetectionSettings
    {
        // Either an XRHandShape or an XRHandPose asset; resolved in Initialise().
        [SerializeField] ScriptableObject handShapeOrPose;
        // Target transform for a pose's relative orientation condition (may be null for shapes).
        [SerializeField] Transform targetTransform;
        public float minimumHoldTime = 0.2f;
        public float detectionInterval = 0.1f;
        public XRHandShape HandShape { get; private set; }
        public XRHandPose HandPose { get; private set; }

        /// <summary>
        /// Resolves the serialized asset into a shape or a pose and, for poses,
        /// wires the relative-orientation target transform.
        /// </summary>
        public void Initialise()
        {
            HandShape = handShapeOrPose as XRHandShape;
            HandPose = handShapeOrPose as XRHandPose;
            if (HandPose != null && HandPose.relativeOrientation != null)
            {
                HandPose.relativeOrientation.targetTransform = targetTransform;
            }
        }
    }

    /// <summary>
    /// Captures specific hand gesture events on a single hand using detection settings.
    /// </summary>
    public class HandDetector
    {
        DetectionSettings settings;
        XRHandJointsUpdatedEventArgs handJointsUpdatedEventArgs;
        bool wasDetected;            // Whether the gesture matched on the previous check
        bool performedTriggered;     // Whether OnPerformed has fired for the current hold
        float timeOfLastConditionCheck;
        float holdStartTime;         // When the current gesture hold began

        public UnityEvent OnPerformed { get; private set; } = new();
        public UnityEvent OnEnded { get; private set; } = new();

        public void Initialise(DetectionSettings settings)
        {
            this.settings = settings;
            handJointsUpdatedEventArgs = new XRHandJointsUpdatedEventArgs();
        }

        /// <summary>
        /// Checks for hand gesture Performed and Ended events.
        /// </summary>
        /// <param name="hand">Hand compared against the target gesture hand pose/shape.</param>
        public void CheckGestureEvents(XRHand hand)
        {
            // Update event args
            handJointsUpdatedEventArgs.hand = hand;
            // Throttle checks to the configured detection interval
            if (Time.timeSinceLevelLoad < timeOfLastConditionCheck + settings.detectionInterval)
                return;
            // Compare updated hand against conditions of hand shape or hand pose
            var detected =
                settings.HandShape != null && settings.HandShape.CheckConditions(handJointsUpdatedEventArgs) ||
                settings.HandPose != null && settings.HandPose.CheckConditions(handJointsUpdatedEventArgs);
            if (!wasDetected && detected)
            {
                // Gesture just started matching; record when the hold began
                holdStartTime = Time.timeSinceLevelLoad;
            }
            else if (wasDetected && !detected)
            {
                // Gesture ends after being performed
                performedTriggered = false;
                OnEnded?.Invoke();
            }
            wasDetected = detected;
            if (!performedTriggered && detected)
            {
                // Wait until gesture is held for minimumHoldTime duration before firing Performed event
                var holdTimer = Time.timeSinceLevelLoad - holdStartTime;
                if (holdTimer > settings.minimumHoldTime)
                {
                    OnPerformed?.Invoke();
                    performedTriggered = true;
                }
            }
            timeOfLastConditionCheck = Time.timeSinceLevelLoad;
        }

        /// <summary>
        /// Removes all listeners from both gesture events.
        /// </summary>
        public void Reset()
        {
            OnPerformed.RemoveAllListeners();
            OnEnded.RemoveAllListeners();
        }
    }

    [SerializeField] DetectionSettings detectionSettings;
    public HandDetector leftHandDetector = new(), rightHandDetector = new();
    XRHandSubsystem handSubsystem;
    bool leftHandIsTracked, rightHandIsTracked;
    // Cancels the async subsystem search if this asset is disabled mid-search.
    bool subsystemSearchActive;

    protected void OnEnable()
    {
        ResetDetectors();
        leftHandDetector = new();
        rightHandDetector = new();
        detectionSettings.Initialise();
        leftHandDetector.Initialise(detectionSettings);
        rightHandDetector.Initialise(detectionSettings);
        GetHandSubsystem();
    }

    protected void OnDisable()
    {
        ResetDetectors();
        // Stop any in-flight subsystem search so it can't subscribe after disable
        subsystemSearchActive = false;
        // Unsubscribe from hand subsystem events
        if (handSubsystem != null)
        {
            handSubsystem.trackingAcquired -= OnTrackingAcquired;
            handSubsystem.trackingLost -= OnTrackingLost;
            handSubsystem.updatedHands -= OnUpdatedHands;
            handSubsystem = null;
        }
    }

    /// <summary>
    /// Polls each frame until a running XRHandSubsystem is found, then subscribes to its events.
    /// Aborts if the asset is disabled while still searching.
    /// </summary>
    async void GetHandSubsystem()
    {
        subsystemSearchActive = true;
        List<XRHandSubsystem> handSubsystems = new();
        while (handSubsystem == null)
        {
            // Bail out if OnDisable ran while we were waiting
            if (!subsystemSearchActive)
                return;
            SubsystemManager.GetSubsystems(handSubsystems);
            for (var i = 0; i < handSubsystems.Count; ++i)
            {
                var candidate = handSubsystems[i];
                if (candidate.running)
                {
                    handSubsystem = candidate;
                    break;
                }
            }
            await Task.Yield();
        }
        if (!subsystemSearchActive)
            return;
        // Subscribe to events
        handSubsystem.trackingAcquired += OnTrackingAcquired;
        handSubsystem.trackingLost += OnTrackingLost;
        handSubsystem.updatedHands += OnUpdatedHands;
    }

    void OnTrackingAcquired(XRHand hand)
    {
        OnTrackingAcquiredOrLost(hand, true);
    }

    void OnTrackingLost(XRHand hand)
    {
        OnTrackingAcquiredOrLost(hand, false);
    }

    void OnTrackingAcquiredOrLost(XRHand hand, bool isTracked)
    {
        if (hand.handedness == Handedness.Left)
        {
            leftHandIsTracked = isTracked;
        }
        else if (hand.handedness == Handedness.Right)
        {
            rightHandIsTracked = isTracked;
        }
    }

    /// <summary>
    /// Passes newly updated hand data to the detectors to process.
    /// </summary>
    /// <param name="subsystem">Subsystem providing the updated hand data.</param>
    /// <param name="updateSuccessFlags">Flags indicating which hand data updated successfully.</param>
    /// <param name="updateType">Update phase; only Dynamic updates are processed.</param>
    void OnUpdatedHands(XRHandSubsystem subsystem, XRHandSubsystem.UpdateSuccessFlags updateSuccessFlags, XRHandSubsystem.UpdateType updateType)
    {
        if (updateType != XRHandSubsystem.UpdateType.Dynamic)
            return;
        if (((updateSuccessFlags & XRHandSubsystem.UpdateSuccessFlags.LeftHandJoints)
            != XRHandSubsystem.UpdateSuccessFlags.None) &&
            leftHandIsTracked)
        {
            leftHandDetector.CheckGestureEvents(subsystem.leftHand);
        }
        if (((updateSuccessFlags & XRHandSubsystem.UpdateSuccessFlags.RightHandJoints)
            != XRHandSubsystem.UpdateSuccessFlags.None) &&
            rightHandIsTracked)
        {
            rightHandDetector.CheckGestureEvents(subsystem.rightHand);
        }
    }

    /// <summary>Event fired when the gesture is performed on the given hand.</summary>
    public UnityEvent Performed(Handedness handedness) => GetHandDetector(handedness).OnPerformed;

    /// <summary>Event fired when the gesture ends on the given hand.</summary>
    public UnityEvent Ended(Handedness handedness) => GetHandDetector(handedness).OnEnded;

    HandDetector GetHandDetector(Handedness handedness) =>
        handedness switch
        {
            Handedness.Left => leftHandDetector,
            Handedness.Right => rightHandDetector,
            _ => null
        };

    void ResetDetectors()
    {
        leftHandDetector?.Reset();
        rightHandDetector?.Reset();
    }
}
Aside from grabbing enemies and props, special hand gestures are used to perform special abilities.
The Hand Gun ability uses motion tracking to fire bullets with the flick of your wrist. Aim is also stabilised using smoothing and a linked list of previously tracked poses before firing.
The Laser Fingers ability fires bolts of lasers from finger tips.
They both use the direction from the distal finger joint to the tip to get a more stable direction.
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR.Hands;
/// <summary>
/// Scriptable Object which handles the hand gun ability:
/// aiming along the index finger and shooting via a wrist-flick gesture.
/// </summary>
[CreateAssetMenu(menuName = "Hand Special/Hand Gun")]
public class HandGun : HandSpecial
{
    [Header("Aiming")]
    // Fire ray visual
    [SerializeField] LineRenderer firepointPrefab;
    LineRenderer firepoint;
    [SerializeField] float firepointRayLength = 1f;
    Vector3 indexTipPos;
    Vector3 aimDir;
    Vector3 aimDirStabilised;
    Vector3 aimDirSmoothVelocity;
    [SerializeField] float aimDirSmoothTime;

    // Used to select more stable fire orientations from past frames
    struct Aim
    {
        public Vector3 Point { get; private set; }
        public Vector3 Dir { get; private set; }
        public Vector3 Up { get; private set; }
        public Aim(Vector3 point, Vector3 dir, Vector3 up)
        {
            Point = point;
            Dir = dir;
            Up = up;
        }
    }

    [SerializeField] int aimHistoryLength = 3;
    LinkedList<Aim> aimHistory = new();
    // Fire from the OLDEST recorded aim (tail of the list) so the shot uses the
    // stable pre-flick orientation rather than the mid-flick one.
    Vector3 FirePos => aimHistory.Last.Value.Point;
    Vector3 FireDir => aimHistory.Last.Value.Dir;
    Vector3 FireUp => aimHistory.Last.Value.Up;

    [Header("Shooting")]
    [SerializeField] float shootEnterAngularSpeedThreshold;
    [SerializeField] Projectile bulletPrefab;
    bool needToReload;

    [Header("Reloading")]
    [SerializeField, Range(-1, 1f)] float reloadEnterPointForwardThreshold;
    [SerializeField] float timeToReload;
    float timerToReload;

    [Header("SFX")]
    [SerializeField] AudioClipAsset shootAudioClipAsset;
    [SerializeField] AudioClipAsset reloadAudioClipAsset;

    /// <summary>
    /// Initialises the hand gun.
    /// </summary>
    /// <param name="trackingEvents">Used to subscribe to tracking events of the corresponding hand.</param>
    public override void Startup(XRHandTrackingEvents trackingEvents)
    {
        base.Startup(trackingEvents);
        firepoint = objectPooler.Get(firepointPrefab);
        AddOnJointsUpdatedListener();
    }

    /// <summary>
    /// Called by the HandSpecialController each frame.
    /// </summary>
    public override void Update()
    {
        base.Update();
        // Smooth aim dir visual
        aimDirStabilised = Vector3.SmoothDamp(aimDirStabilised, aimDir, ref aimDirSmoothVelocity, aimDirSmoothTime);
        UpdateFirepointRendering();
        timerToReload += Time.deltaTime;
    }

    protected override void OnJointsUpdated(XRHandJointsUpdatedEventArgs eventArgs)
    {
        HandleShooting(eventArgs.hand);
    }

    /// <summary>
    /// Handles shooting and reloading logic.
    /// </summary>
    /// <param name="hand">Hand used to check for shooting.</param>
    void HandleShooting(XRHand hand)
    {
        // Capture wrist for flick detection
        XRHandJoint wristJoint = hand.GetJoint(XRHandJointID.Wrist);
        wristJoint.TryGetPose(out Pose wristPose);
        // Capture finger joints (tip & distal) for fire orientation;
        // distal-to-tip gives a more stable direction than the tip pose alone
        indexTipPos = GetJointGlobalPos(XRHandJointID.IndexTip);
        Vector3 indexDistalPos = GetJointGlobalPos(XRHandJointID.IndexDistal);
        aimDir = (indexTipPos - indexDistalPos).normalized;
        if (!needToReload)
        {
            // Check for wrist flick input for shooting
            wristJoint.TryGetAngularVelocity(out Vector3 wristAngularVelocity);
            // Require at least one recorded aim so FirePos/FireDir/FireUp are valid
            if (aimHistory.Count > 0 &&
                wristAngularVelocity.magnitude >= shootEnterAngularSpeedThreshold && // Flicking fast enough
                // Flicking vertically and upwards
                ((Mathf.Abs(wristAngularVelocity.x) > Mathf.Abs(wristAngularVelocity.y) && wristAngularVelocity.x < 0f) ||
                (Mathf.Abs(wristAngularVelocity.z) > Mathf.Abs(wristAngularVelocity.y) && wristAngularVelocity.z < 0f)))
            {
                Shoot();
            }
            else
            {
                // Keep track of previous fire orientations for shooting
                AddToAimHistory(indexTipPos, aimDir, -wristPose.right);
            }
        }
        else if (timerToReload > timeToReload)
        {
            // No need to localise directions as they are still relative to each other and we are not seeing it
            if (Vector3.Dot(-wristPose.right, FireUp) >= reloadEnterPointForwardThreshold)
            {
                Reload();
            }
        }

        // Helpers
        Vector3 GetJointGlobalPos(XRHandJointID jointId)
        {
            hand.GetJoint(jointId).TryGetPose(out Pose jointPose);
            return originManager.LocalisePos(jointPose.position);
        }
    }

    void UpdateFirepointRendering()
    {
        firepoint.SetPosition(0, indexTipPos);
        firepoint.SetPosition(1, indexTipPos + aimDirStabilised * firepointRayLength);
    }

    void AddToAimHistory(Vector3 point, Vector3 dir, Vector3 up)
    {
        // Linked list with the newest recording at the start and the oldest at the end
        Aim latestAim = new(point, dir, up);
        aimHistory.AddFirst(latestAim);
        // Enforce length
        if (aimHistory.Count > aimHistoryLength)
        {
            // Shift newer items towards the end by removing the last (oldest) node
            aimHistory.RemoveLast();
        }
    }

    void Shoot()
    {
        needToReload = true;
        timerToReload = 0f;
        Projectile newBullet = objectPooler.Get(bulletPrefab, FirePos);
        newBullet.Initialise(FireDir);
        ApplyCost();
        audioManager.PlaySound(shootAudioClipAsset, FirePos);
    }

    void Reload()
    {
        needToReload = false;
        // NOTE(review): aimDir is a unit direction, not a world position, unlike the
        // FirePos passed in Shoot() — confirm PlaySound's second parameter; this may
        // have been intended to be indexTipPos.
        audioManager.PlaySound(reloadAudioClipAsset, aimDir);
    }

    /// <summary>
    /// Disposal: returns the pooled firepoint visual.
    /// </summary>
    protected override void OnDestroy()
    {
        base.OnDestroy();
        if (firepoint)
        {
            objectPooler.Return(firepoint);
        }
    }
}
I made a script which extends the XRDirectInteractor to allow grabbing enemies and props using the grab hand gesture.
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR.Hands;
using UnityEngine.XR.Interaction.Toolkit;
/// <summary>
/// Interactor that grabs interactables with a grab gesture while handling collisions with the physical hand.
/// </summary>
public class PhysicalHandInteractor : XRDirectInteractor
{
    [SerializeField] Handedness handedness; // Getting the corresponding physical hand and its selection events
    PhysicalHand physicalHand; // For toggling hand collisions ignore

    [Header("Grabbing")]
    public HandGestureEvents grabGesture; // For special select input

    [Header("Dropping")]
    // Ignore hand collisions on drop for exit duration
    [SerializeField] float dropExitTime;
    float dropExitTimer;
    HashSet<IXRSelectInteractable> droppingInteractables = new();
    // Map previously grabbed interactables with their colliders to avoid repeating get component operations
    Dictionary<IXRInteractable, Collider[]> grabbedCollidersDict = new();

    protected override void OnEnable()
    {
        base.OnEnable();
        grabGesture.Performed(handedness).AddListener(TryGrab);
        grabGesture.Ended(handedness).AddListener(Drop);
    }

    protected override void OnDisable()
    {
        base.OnDisable();
        grabGesture.Performed(handedness).RemoveListener(TryGrab);
        grabGesture.Ended(handedness).RemoveListener(Drop);
    }

    protected override void Start()
    {
        base.Start();
        physicalHand = Player.Instance.GetHand(handedness).physical;
    }

    private void Update()
    {
        if (droppingInteractables.Count > 0)
        {
            UpdateDroppingInteractables();
        }
    }

    /// <summary>
    /// Attempts to grab the closest valid target in select range.
    /// </summary>
    void TryGrab()
    {
        // Get interactables in select range
        List<IXRInteractable> interactables = new(); // Buffer to fill
        GetValidTargets(interactables);
        // Check if there are any interactables found
        if (interactables.Count > 0)
        {
            // Check if there is any match between interaction layer masks
            if ((interactables[0].interactionLayers & interactionLayers) != 0)
            {
                Grab(interactables[0]);
            }
        }
    }

    /// <summary>
    /// Grabs the given interactable, dropping any current selection first.
    /// </summary>
    public void Grab(IXRInteractable interactable)
    {
        // Drop current selection
        if (interactablesSelected.Count > 0)
        {
            Drop(true);
        }
        SetIgnoreHandCollisionsWithInteractable(interactable, true); // Ignore hand collisions
        StartManualInteraction((IXRSelectInteractable)interactable);
    }

    public void Drop() => Drop(true);

    /// <summary>
    /// Drops the current selection.
    /// </summary>
    /// <param name="trackDropping">Whether to keep ignoring hand collisions for the drop exit duration.</param>
    public void Drop(bool trackDropping)
    {
        // Check if there is a selection
        if (interactablesSelected.Count == 0)
            return;
        IXRSelectInteractable currentSelection = interactablesSelected[0];
        if (trackDropping)
        {
            droppingInteractables.Add(currentSelection);
        }
        EndManualInteraction();
    }

    // Re-enables hand collisions with dropped interactables once the exit timer elapses.
    void UpdateDroppingInteractables()
    {
        dropExitTimer += Time.deltaTime;
        if (dropExitTimer >= dropExitTime)
        {
            foreach (var droppingInteractable in droppingInteractables)
            {
                SetIgnoreHandCollisionsWithInteractable(droppingInteractable, false);
            }
            droppingInteractables.Clear();
            dropExitTimer = 0f;
        }
    }

    // Sets ignore with hand colliders via Physics.IgnoreCollision()
    void SetIgnoreHandCollisionsWithInteractable(IXRInteractable interactable, bool ignore)
        => physicalHand.SetIgnoreCollisionWith(GetInteractableColliders(interactable), ignore);

    // Returns the interactable's colliders, caching them on first lookup so the
    // GetComponentsInChildren call only happens once per interactable.
    Collider[] GetInteractableColliders(IXRInteractable interactable)
    {
        if (!grabbedCollidersDict.TryGetValue(interactable, out Collider[] colliders))
        {
            colliders = interactable.transform.GetComponentsInChildren<Collider>();
            grabbedCollidersDict[interactable] = colliders;
        }
        return colliders;
    }
}
The enemy AI logic is split into separate modules for movement, attack and physics which is managed by the master, EnemyController.
Context-based steering is used to guide the movement of enemies around the map in a way that's more organic compared to rigid path-finding algorithms like A*, while also being much more performant.
It uses steering behaviours to weigh a set of directions based on nearby obstacles and target locations. The best direction is then selected, whether it's the largest weighted direction or the average of multiple.
This makes the movement more natural and present in the moment, as it doesn't require heavy context processing of the whole map, only of its detected surroundings.
using System;
using System.Collections;
using UnityEngine;
/// <summary>
/// Processes context steering behaviours to select a final best direction.
/// </summary>
[RequireComponent(typeof(ContextSteeringData))]
public class ContextSteeringSolver : MonoBehaviour
{
    // Data container holding weighted directions, detected obstacles and targets
    ContextSteeringData data;
    [SerializeField] SteeringBehaviour[] behaviours;
    // Detects nearby obstacles to add to the steering data
    [SerializeField] OverlapDetector obstacleDetector;
    [SerializeField] float solveDelay;
    Vector3 selectedDirection = Vector3.zero;
    bool updatingDirection;
    public event Action<Vector3> OnDirectionSelect;
    public event Action OnDirectionEnd;

    [Header("Gizmos")]
    [SerializeField] bool drawGizmos = true;
    // Set once the first target has been added, so EndDirection isn't fired before any target existed
    bool firstTarget;

    private void Awake()
    {
        data = GetComponent<ContextSteeringData>();
        // Copy steering behaviour Scriptable Objects so per-instance state doesn't leak between enemies
        for (int i = 0; i < behaviours.Length; i++)
        {
            behaviours[i] = Instantiate(behaviours[i]);
        }
    }

    private void OnEnable()
    {
        obstacleDetector.OnDetectedColliders += OnDetectedObstacles;
    }

    private void OnDisable()
    {
        EndDirection();
        obstacleDetector.OnDetectedColliders -= OnDetectedObstacles;
    }

    /// <summary>
    /// Initialises steering behaviours with the character radius to use in detection.
    /// </summary>
    public void Initialise(float charRadius)
    {
        for (int i = 0; i < behaviours.Length; i++)
        {
            behaviours[i].Initialise(charRadius);
        }
    }

    private void Update()
    {
        if (data.CurrentTargetPoint != null)
        {
            if (!updatingDirection)
            {
                updatingDirection = true;
                StartCoroutine(DirectionUpdate());
            }
        }
        else if (data.TargetsCount > 0)
        {
            data.CurrentTargetPoint = data.TargetPoints[0];
        }
        else if (firstTarget)
        {
            EndDirection();
        }
    }

    // Periodically re-solves the best direction while a target point exists.
    // A loop is used instead of the coroutine re-starting itself recursively,
    // which would nest a new coroutine every cycle.
    IEnumerator DirectionUpdate()
    {
        while (data.CurrentTargetPoint != null)
        {
            SelectDirection(SolveDirection());
            yield return new WaitForSeconds(solveDelay);
        }
        EndDirection();
    }

    /// <summary>
    /// Applies all steering behaviours to the weights and returns the strongest interest direction.
    /// </summary>
    public Vector3 SolveDirection()
    {
        data.ResetWeights();
        // Apply influence of context steering behaviours to weights
        for (int i = 0; i < behaviours.Length; i++)
        {
            behaviours[i].ApplySteering(ref data);
        }
        // Subtract avoid values from interest values to get final interest weights
        for (int i = 0; i < data.weights.Length; i++)
        {
            data.weights[i].interest = Mathf.Clamp01(data.weights[i].interest - data.weights[i].avoid);
        }
        // Select the strongest interest direction
        int strongestInterestIdx = 0;
        for (int i = 0; i < data.weights.Length; i++)
        {
            if (data.weights[i].interest > data.weights[strongestInterestIdx].interest)
            {
                strongestInterestIdx = i;
            }
        }
        return selectedDirection = data.weights[strongestInterestIdx].direction;
    }

    void SelectDirection(Vector3 newDirection)
    {
        OnDirectionSelect?.Invoke(newDirection);
    }

    void EndDirection()
    {
        updatingDirection = false;
        OnDirectionEnd?.Invoke();
    }

    /// <summary>
    /// Adds a target point for the steering data to move towards.
    /// </summary>
    public void AddTarget(Vector3 point)
    {
        firstTarget = true;
        data.TargetPoints.Add(point);
    }

    void OnDetectedObstacles(Collider[] obstacleColliders)
    {
        data.Obstacles = obstacleColliders;
    }

    private void OnDrawGizmosSelected()
    {
        if (!drawGizmos || !Application.isPlaying)
            return;
        // Selected Direction
        Gizmos.color = Color.yellow;
        Gizmos.DrawLine(transform.position, transform.position + selectedDirection * ContextSteeringData.GIZMOS_WEIGHT_LENGTH);
        // Weights
        for (int i = 0; i < data.weights.Length; i++)
        {
            // Avoid
            Gizmos.color = Color.red;
            Gizmos.DrawLine(transform.position, transform.position + data.weights[i].AvoidVelocity * ContextSteeringData.GIZMOS_WEIGHT_LENGTH);
            // Interest
            Gizmos.color = Color.green;
            Gizmos.DrawLine(transform.position, transform.position + data.weights[i].InterestVelocity * ContextSteeringData.GIZMOS_WEIGHT_LENGTH);
        }
        for (int i = 0; i < behaviours.Length; i++)
        {
            behaviours[i].OnDrawGizmosSelected();
        }
    }
}
Common steering behaviours include Obstacle Avoidance and Seek (for moving toward targets). We can make things more interesting by adding more unique behaviours like Strafing, which can then be replaced with Seek for charging towards the target during combat.