/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* Licensed under the Oculus SDK License Agreement (the "License");
* you may not use the Oculus SDK except in compliance with the License,
* which is provided at the time of installation or download, or which
* otherwise accompanies this software in either electronic or hard copy form.
*
* You may obtain a copy of the License at
*
* https://developer.oculus.com/licenses/oculussdk/
*
* Unless required by applicable law or agreed to in writing, the Oculus SDK
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using Meta.XR.Util;
using UnityEngine;
/// <summary>
/// This class manages the face expressions data provided per frame, and is responsible for stopping and
/// starting face tracking. Use this class to read face tracking data, accessible via
/// <see cref="FaceExpression"/>, and to drive the blend shapes on a <see cref="SkinnedMeshRenderer"/>.
/// For more information, see [Face Tracking for Movement SDK for Unity](https://developer.oculus.com/documentation/unity/move-face-tracking/).
/// </summary>
/// <remarks>
/// Refer to the <see cref="FaceExpression"/> enum for the list of face expressions that contain
/// weights that can be applied to blend shapes.
/// </remarks>
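/// <example>
/// A minimal usage sketch (the serialized field and the component hosting it are assumptions for
/// illustration, not part of this API):
/// <code>
/// // On some MonoBehaviour, assign an OVRFaceExpressions instance in the Inspector.
/// [SerializeField] private OVRFaceExpressions _faceExpressions;
///
/// private void Update()
/// {
///     // Check validity each frame before reading any weights.
///     if (_faceExpressions != null &amp;&amp; _faceExpressions.ValidExpressions)
///     {
///         float jawDrop = _faceExpressions[OVRFaceExpressions.FaceExpression.JawDrop];
///         // Drive a blend shape on a SkinnedMeshRenderer with jawDrop (0.0 to 1.0).
///     }
/// }
/// </code>
/// </example>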
[HelpURL("https://developer.oculus.com/documentation/unity/move-face-tracking/")]
[Feature(Feature.FaceTracking)]
public class OVRFaceExpressions : MonoBehaviour, IReadOnlyCollection<float>, OVRFaceExpressions.WeightProvider
{
/// <summary>
/// The interface that <see cref="OVRFaceExpressions"/> implements to expose information
/// about the face expression weights available from face tracking.
/// </summary>
public interface WeightProvider
{
float GetWeight(FaceExpression expression);
}
/// <summary>
/// This will be true if face tracking is enabled, otherwise false. This returns the current face tracking
/// enabled state from <see cref="OVRPlugin.faceTracking2Enabled"/>. To enable or disable face tracking, refer to
/// <see cref="OVRPlugin.StartFaceTracking2"/> and <see cref="OVRPlugin.StopFaceTracking2"/>.
/// </summary>
public bool FaceTrackingEnabled => OVRPlugin.faceTracking2Enabled;
/// <summary>
/// True if the facial expressions returned from the current face tracking data are valid, otherwise false. This
/// is equivalent to checking whether the underlying <see cref="OVRPlugin.FaceState"/> is valid on this frame.
/// </summary>
/// <remarks>
/// This value gets updated every frame. You should check this
/// value before querying for face expressions.
/// </remarks>
public bool ValidExpressions { get; private set; }
/// <summary>
/// True if the eye look-related blend shapes are valid, otherwise false.
/// </summary>
/// <remarks>
/// This property affects the behavior of two sets of blend shapes.
///
/// **EyesLook:**
/// - <see cref="FaceExpression.EyesLookDownL"/>
/// - <see cref="FaceExpression.EyesLookDownR"/>
/// - <see cref="FaceExpression.EyesLookLeftL"/>
/// - <see cref="FaceExpression.EyesLookLeftR"/>
/// - <see cref="FaceExpression.EyesLookRightL"/>
/// - <see cref="FaceExpression.EyesLookRightR"/>
/// - <see cref="FaceExpression.EyesLookUpL"/>
/// - <see cref="FaceExpression.EyesLookUpR"/>
///
/// **EyesClosed:**
/// - <see cref="FaceExpression.EyesClosedL"/>
/// - <see cref="FaceExpression.EyesClosedR"/>
///
/// **When <see cref="EyeFollowingBlendshapesValid"/> is `false`:**
/// - The `EyesLook` blend shapes are set to zero.
/// - The `EyesClosed` blend shapes range from 0..1 and represent the true state of the eyelids.
///
/// **When <see cref="EyeFollowingBlendshapesValid"/> is `true`:**
/// - The `EyesLook` blend shapes are valid.
/// - The `EyesClosed` blend shapes are modified so that the sum of the `EyesClosedX` and `EyesLookDownX` blend shapes
/// ranges from 0..1. This helps avoid double deformation of the avatar's eyelids when they may be driven by both
/// the `EyesClosed` and `EyesLookDown` blend shapes. To recover the true `EyesClosed` values, add the
/// minimum of the `EyesLookDownL` and `EyesLookDownR` blend shapes back using the following formula:
/// `EyesClosedL += min(EyesLookDownL, EyesLookDownR)`
/// `EyesClosedR += min(EyesLookDownL, EyesLookDownR)`
/// </remarks>
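/// <example>
/// A sketch of the recovery formula above (the <c>expressions</c> reference is illustrative):
/// <code>
/// // Only meaningful while EyeFollowingBlendshapesValid is true.
/// float lookDownL = expressions[OVRFaceExpressions.FaceExpression.EyesLookDownL];
/// float lookDownR = expressions[OVRFaceExpressions.FaceExpression.EyesLookDownR];
/// float minLookDown = Mathf.Min(lookDownL, lookDownR);
/// // Add the shared look-down component back to recover the true eyelid closure.
/// float trueEyesClosedL = expressions[OVRFaceExpressions.FaceExpression.EyesClosedL] + minLookDown;
/// float trueEyesClosedR = expressions[OVRFaceExpressions.FaceExpression.EyesClosedR] + minLookDown;
/// </code>
/// </example>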
public bool EyeFollowingBlendshapesValid { get; private set; }
private OVRPlugin.FaceState _currentFaceState;
/// <summary>
/// True if the visemes are valid, otherwise false.
/// </summary>
/// <remarks>
/// This value gets updated every frame. You should check this
/// value before querying for visemes.
/// If you query visemes while it's false,
/// an InvalidOperationException will be thrown.
/// </remarks>
public bool AreVisemesValid { get; private set; }
private OVRPlugin.FaceVisemesState _currentFaceVisemesState;
private const OVRPermissionsRequester.Permission FaceTrackingPermission =
OVRPermissionsRequester.Permission.FaceTracking;
private const OVRPermissionsRequester.Permission RecordAudioPermission =
OVRPermissionsRequester.Permission.RecordAudio;
private Action _onPermissionGranted;
private static int _trackingInstanceCount;
private void Awake()
{
_onPermissionGranted = OnPermissionGranted;
}
private void OnEnable()
{
_trackingInstanceCount++;
if (!StartFaceTracking())
{
enabled = false;
}
}
private void OnPermissionGranted(string permissionId)
{
if (permissionId == OVRPermissionsRequester.GetPermissionId(FaceTrackingPermission) ||
permissionId == OVRPermissionsRequester.GetPermissionId(RecordAudioPermission))
{
OVRPermissionsRequester.PermissionGranted -= _onPermissionGranted;
enabled = true;
}
}
private OVRPlugin.FaceTrackingDataSource[] GetRequestedFaceTrackingDataSources()
{
var runtimeSettings = OVRRuntimeSettings.GetRuntimeSettings();
if (runtimeSettings.RequestsAudioFaceTracking && runtimeSettings.RequestsVisualFaceTracking)
{
return new OVRPlugin.FaceTrackingDataSource[] { OVRPlugin.FaceTrackingDataSource.Visual, OVRPlugin.FaceTrackingDataSource.Audio };
}
else if (runtimeSettings.RequestsVisualFaceTracking)
{
return new OVRPlugin.FaceTrackingDataSource[] { OVRPlugin.FaceTrackingDataSource.Visual };
}
else if (runtimeSettings.RequestsAudioFaceTracking)
{
return new OVRPlugin.FaceTrackingDataSource[] { OVRPlugin.FaceTrackingDataSource.Audio };
}
else
{
return new OVRPlugin.FaceTrackingDataSource[] { };
}
}
private bool StartFaceTracking()
{
if (!OVRPermissionsRequester.IsPermissionGranted(FaceTrackingPermission) &&
!OVRPermissionsRequester.IsPermissionGranted(RecordAudioPermission))
{
OVRPermissionsRequester.PermissionGranted -= _onPermissionGranted;
OVRPermissionsRequester.PermissionGranted += _onPermissionGranted;
return false;
}
if (!OVRPlugin.StartFaceTracking2(GetRequestedFaceTrackingDataSources()))
{
Debug.LogWarning($"[{nameof(OVRFaceExpressions)}] Failed to start face tracking.");
return false;
}
OVRPlugin.SetFaceTrackingVisemesEnabled(OVRRuntimeSettings.GetRuntimeSettings().EnableFaceTrackingVisemesOutput);
return true;
}
private void OnDisable()
{
if (--_trackingInstanceCount == 0)
{
OVRPlugin.StopFaceTracking2();
}
}
private void OnDestroy()
{
OVRPermissionsRequester.PermissionGranted -= _onPermissionGranted;
}
private void Update()
{
ValidExpressions =
OVRPlugin.GetFaceState2(OVRPlugin.Step.Render, -1, ref _currentFaceState)
&& _currentFaceState.Status.IsValid;
EyeFollowingBlendshapesValid = ValidExpressions && _currentFaceState.Status.IsEyeFollowingBlendshapesValid;
AreVisemesValid =
OVRPlugin.GetFaceVisemesState(OVRPlugin.Step.Render, ref _currentFaceVisemesState) == OVRPlugin.Result.Success
&& _currentFaceVisemesState.IsValid;
}
/// <summary>
/// This will return the weight of the specified <see cref="FaceExpression"/> present in the expression weights array.
/// </summary>
/// <returns>The weight of the specified <see cref="FaceExpression"/>,
/// which will be within the range of 0.0f to 1.0f inclusive.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when <see cref="ValidExpressions"/> is false.
/// </exception>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when the <paramref name="expression"/> value is not in range.
/// </exception>
public float this[FaceExpression expression]
{
get
{
CheckValidity();
if (expression < 0 || expression >= FaceExpression.Max)
{
throw new ArgumentOutOfRangeException(nameof(expression),
expression,
$"Value must be between 0 to {(int)FaceExpression.Max}");
}
return _currentFaceState.ExpressionWeights[(int)expression];
}
}
/// <summary>
/// Returns the weight of the specified <see cref="FaceExpression"/> by accessing the expression weights array
/// present through <see cref="this[FaceExpression]"/>.
/// </summary>
/// <param name="expression">The specified <see cref="FaceExpression"/> to get the weight for.</param>
/// <returns>The weight of the specified <see cref="FaceExpression"/>.</returns>
public float GetWeight(FaceExpression expression) => this[expression];
/// <summary>
/// This method will try to get the weight of the specified <see cref="FaceExpression"/> if it's
/// valid. Use this when it isn't certain that the specified <see cref="FaceExpression"/>
/// is a valid expression, or that the facial expressions on this frame are valid.
/// </summary>
/// <param name="expression">The expression to get the weight of.</param>
/// <param name="weight">The output argument that will contain the expression weight, or 0.0 if it's not valid.</param>
/// <returns>True if the expression weight is valid, false otherwise.</returns>
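/// <example>
/// A sketch of the try-pattern, which avoids the exceptions the indexer can throw
/// (the <c>expressions</c> reference is illustrative):
/// <code>
/// if (expressions.TryGetFaceExpressionWeight(OVRFaceExpressions.FaceExpression.JawDrop, out float weight))
/// {
///     // weight is valid for this frame; otherwise it is 0.0 and this branch is skipped.
/// }
/// </code>
/// </example>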
public bool TryGetFaceExpressionWeight(FaceExpression expression, out float weight)
{
if (!ValidExpressions || expression < 0 || expression >= FaceExpression.Max)
{
weight = 0;
return false;
}
weight = _currentFaceState.ExpressionWeights[(int)expression];
return true;
}
/// <summary>
/// This will return the weight of the given viseme.
/// </summary>
/// <returns>The weight of the viseme, in the range 0.0 to 1.0.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when <see cref="AreVisemesValid"/> is false.
/// </exception>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when the <paramref name="viseme"/> value is not in range.
/// </exception>
public float GetViseme(FaceViseme viseme)
{
CheckVisemesValidity();
if (viseme < 0 || viseme >= FaceViseme.Count)
{
throw new ArgumentOutOfRangeException(nameof(viseme),
viseme,
$"Value must be between 0 to {(int)FaceViseme.Count}");
}
return _currentFaceVisemesState.Visemes[(int)viseme];
}
/// <summary>
/// This method tries to get the weight of the given viseme if it's available.
/// </summary>
/// <param name="viseme">The viseme to get the weight of.</param>
/// <param name="weight">The output argument that will contain the viseme weight, or 0.0 if it's not available.</param>
/// <returns>True if the viseme weight is valid, false otherwise.</returns>
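/// <example>
/// A sketch (the <c>expressions</c> reference is illustrative):
/// <code>
/// if (expressions.TryGetFaceViseme(OVRFaceExpressions.FaceViseme.AA, out float aaWeight))
/// {
///     // aaWeight (0.0 to 1.0) can drive an "aa" mouth blend shape.
/// }
/// </code>
/// </example>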
public bool TryGetFaceViseme(FaceViseme viseme, out float weight)
{
if (!AreVisemesValid || viseme < 0 || viseme >= FaceViseme.Count)
{
weight = 0;
return false;
}
weight = _currentFaceVisemesState.Visemes[(int)viseme];
return true;
}
/// <summary>
/// Copies visemes to a pre-allocated array.
/// </summary>
/// <param name="array">Pre-allocated destination array for visemes.</param>
/// <param name="startIndex">Starting index in the destination array.</param>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="array"/> is null.
/// </exception>
/// <exception cref="ArgumentException">
/// Thrown when there is not enough capacity in <paramref name="array"/> to copy weights to at <paramref name="startIndex"/>.
/// </exception>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when the <paramref name="startIndex"/> value is out of bounds.
/// </exception>
/// <exception cref="InvalidOperationException">
/// Thrown when <see cref="AreVisemesValid"/> is false.
/// </exception>
public void CopyVisemesTo(float[] array, int startIndex = 0)
{
if (array == null)
{
throw new ArgumentNullException(nameof(array));
}
if (startIndex < 0 || startIndex >= array.Length)
{
throw new ArgumentOutOfRangeException(nameof(startIndex),
startIndex,
$"Value must be between 0 to {array.Length - 1}");
}
if (array.Length - startIndex < (int)FaceViseme.Count)
{
throw new ArgumentException(
$"Capacity is too small - required {(int)FaceViseme.Count}, available {array.Length - startIndex}.",
nameof(array));
}
CheckVisemesValidity();
for (int i = 0; i < (int)FaceViseme.Count; i++)
{
array[i + startIndex] = _currentFaceVisemesState.Visemes[i];
}
}
/// <summary>
/// The face part type used for getting the face tracking confidence weight in <see cref="TryGetWeightConfidence"/>.
/// </summary>
public enum FaceRegionConfidence
{
/// <summary>
/// Represents the lower part of the face. It includes the mouth, chin, and a portion of the nose and cheek.
/// </summary>
Lower = OVRPlugin.FaceRegionConfidence.Lower,
/// <summary>
/// Represents the upper part of the face. It includes the eyes, eyebrows, and a portion of the nose and cheek.
/// </summary>
Upper = OVRPlugin.FaceRegionConfidence.Upper,
/// <summary>
/// Used to determine the size of the enum.
/// </summary>
Max = OVRPlugin.FaceRegionConfidence.Max
}
/// <summary>
/// This method tries to get the confidence weight of the given face part if it's available. Use this
/// when it isn't certain that the facial expressions on this frame are valid.
/// </summary>
/// <param name="region">The part of the face to get the confidence weight of.</param>
/// <param name="weightConfidence">The output argument that will contain the confidence weight, or 0.0 if it's not valid.</param>
/// <returns>True if the confidence weight is valid, false otherwise.</returns>
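/// <example>
/// One possible use, sketched here: attenuate applied weights when regional tracking confidence is low
/// (the <c>expressions</c> reference is illustrative):
/// <code>
/// if (expressions.TryGetWeightConfidence(OVRFaceExpressions.FaceRegionConfidence.Lower, out float confidence))
/// {
///     float raw = expressions[OVRFaceExpressions.FaceExpression.JawDrop];
///     float applied = raw * confidence; // fade toward zero as confidence drops
/// }
/// </code>
/// </example>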
public bool TryGetWeightConfidence(FaceRegionConfidence region, out float weightConfidence)
{
if (!ValidExpressions || region < 0 || region >= FaceRegionConfidence.Max)
{
weightConfidence = 0;
return false;
}
weightConfidence = _currentFaceState.ExpressionWeightConfidences[(int)region];
return true;
}
/// <summary>
/// The source type that the face tracking data is currently based on. This is part of the data contained
/// in <see cref="OVRPlugin.FaceState"/>.
/// </summary>
public enum FaceTrackingDataSource
{
/// <summary>
/// Represents visual based face tracking. This is the case if the face tracking data came from
/// visual based face tracking.
/// </summary>
Visual = OVRPlugin.FaceTrackingDataSource.Visual,
/// <summary>
/// Represents audio based face tracking. This is the case if the face tracking data came from
/// audio based face tracking.
/// </summary>
Audio = OVRPlugin.FaceTrackingDataSource.Audio,
/// <summary>
/// Used to determine the size of the enum.
/// </summary>
[InspectorName(null)]
Count = OVRPlugin.FaceTrackingDataSource.Count,
}
/// <summary>
/// This method tries to get the data source used for the face tracking data on the current frame. Use this
/// when it isn't certain that the facial expressions on this frame are valid.
/// </summary>
/// <param name="dataSource">The output argument that will contain the face tracking
/// data source.</param>
/// <returns>True if the face tracking data source is valid, false otherwise.</returns>
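/// <example>
/// A sketch that branches on the active data source (the <c>expressions</c> reference is illustrative):
/// <code>
/// if (expressions.TryGetFaceTrackingDataSource(out var source))
/// {
///     bool audioDriven = source == OVRFaceExpressions.FaceTrackingDataSource.Audio;
///     // For example, an application might skip eye-region logic while the data is audio-driven.
/// }
/// </code>
/// </example>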
public bool TryGetFaceTrackingDataSource(out FaceTrackingDataSource dataSource)
{
dataSource = (FaceTrackingDataSource)_currentFaceState.DataSource;
return ValidExpressions;
}
internal void CheckValidity()
{
if (!ValidExpressions)
{
throw new InvalidOperationException(
$"Face expressions are not valid at this time. Use {nameof(ValidExpressions)} to check for validity.");
}
}
internal void CheckVisemesValidity()
{
if (!AreVisemesValid)
{
throw new InvalidOperationException(
$"Face visemes are not valid at this time. Use {nameof(AreVisemesValid)} to check for validity.");
}
}
/// <summary>
/// Copies expression weights to a pre-allocated array.
/// </summary>
/// <param name="array">Pre-allocated destination array for expression weights.</param>
/// <param name="startIndex">Starting index in the destination array.</param>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="array"/> is null.
/// </exception>
/// <exception cref="ArgumentException">
/// Thrown when there is not enough capacity in <paramref name="array"/> to copy weights to at <paramref name="startIndex"/>.
/// </exception>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when the <paramref name="startIndex"/> value is out of bounds.
/// </exception>
/// <exception cref="InvalidOperationException">
/// Thrown when <see cref="ValidExpressions"/> is false.
/// </exception>
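/// <example>
/// A sketch using a buffer allocated once to avoid per-frame garbage
/// (the <c>expressions</c> reference is illustrative):
/// <code>
/// // FaceExpression.Max is the required capacity.
/// float[] buffer = new float[(int)OVRFaceExpressions.FaceExpression.Max];
/// if (expressions.ValidExpressions)
/// {
///     expressions.CopyTo(buffer); // copies all weights starting at index 0
/// }
/// </code>
/// </example>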
public void CopyTo(float[] array, int startIndex = 0)
{
if (array == null)
{
throw new ArgumentNullException(nameof(array));
}
if (startIndex < 0 || startIndex >= array.Length)
{
throw new ArgumentOutOfRangeException(nameof(startIndex),
startIndex,
$"Value must be between 0 to {array.Length - 1}");
}
if (array.Length - startIndex < (int)FaceExpression.Max)
{
throw new ArgumentException(
$"Capacity is too small - required {(int)FaceExpression.Max}, available {array.Length - startIndex}.",
nameof(array));
}
CheckValidity();
for (int i = 0; i < (int)FaceExpression.Max; i++)
{
array[i + startIndex] = _currentFaceState.ExpressionWeights[i];
}
}
/// <summary>
/// Allocates a float array and copies the expression weights to it.
/// </summary>
/// <returns>A new array containing all expression weights.</returns>
public float[] ToArray()
{
var array = new float[(int)FaceExpression.Max];
CopyTo(array);
return array;
}
/// <summary>
/// List of face expressions, based on the Facial Action Coding System (FACS).
/// </summary>
public enum FaceExpression
{
[InspectorName("None")]
Invalid = OVRPlugin.FaceExpression2.Invalid,
BrowLowererL = OVRPlugin.FaceExpression2.Brow_Lowerer_L,
BrowLowererR = OVRPlugin.FaceExpression2.Brow_Lowerer_R,
CheekPuffL = OVRPlugin.FaceExpression2.Cheek_Puff_L,
CheekPuffR = OVRPlugin.FaceExpression2.Cheek_Puff_R,
CheekRaiserL = OVRPlugin.FaceExpression2.Cheek_Raiser_L,
CheekRaiserR = OVRPlugin.FaceExpression2.Cheek_Raiser_R,
CheekSuckL = OVRPlugin.FaceExpression2.Cheek_Suck_L,
CheekSuckR = OVRPlugin.FaceExpression2.Cheek_Suck_R,
ChinRaiserB = OVRPlugin.FaceExpression2.Chin_Raiser_B,
ChinRaiserT = OVRPlugin.FaceExpression2.Chin_Raiser_T,
DimplerL = OVRPlugin.FaceExpression2.Dimpler_L,
DimplerR = OVRPlugin.FaceExpression2.Dimpler_R,
EyesClosedL = OVRPlugin.FaceExpression2.Eyes_Closed_L,
EyesClosedR = OVRPlugin.FaceExpression2.Eyes_Closed_R,
EyesLookDownL = OVRPlugin.FaceExpression2.Eyes_Look_Down_L,
EyesLookDownR = OVRPlugin.FaceExpression2.Eyes_Look_Down_R,
EyesLookLeftL = OVRPlugin.FaceExpression2.Eyes_Look_Left_L,
EyesLookLeftR = OVRPlugin.FaceExpression2.Eyes_Look_Left_R,
EyesLookRightL = OVRPlugin.FaceExpression2.Eyes_Look_Right_L,
EyesLookRightR = OVRPlugin.FaceExpression2.Eyes_Look_Right_R,
EyesLookUpL = OVRPlugin.FaceExpression2.Eyes_Look_Up_L,
EyesLookUpR = OVRPlugin.FaceExpression2.Eyes_Look_Up_R,
InnerBrowRaiserL = OVRPlugin.FaceExpression2.Inner_Brow_Raiser_L,
InnerBrowRaiserR = OVRPlugin.FaceExpression2.Inner_Brow_Raiser_R,
JawDrop = OVRPlugin.FaceExpression2.Jaw_Drop,
JawSidewaysLeft = OVRPlugin.FaceExpression2.Jaw_Sideways_Left,
JawSidewaysRight = OVRPlugin.FaceExpression2.Jaw_Sideways_Right,
JawThrust = OVRPlugin.FaceExpression2.Jaw_Thrust,
LidTightenerL = OVRPlugin.FaceExpression2.Lid_Tightener_L,
LidTightenerR = OVRPlugin.FaceExpression2.Lid_Tightener_R,
LipCornerDepressorL = OVRPlugin.FaceExpression2.Lip_Corner_Depressor_L,
LipCornerDepressorR = OVRPlugin.FaceExpression2.Lip_Corner_Depressor_R,
LipCornerPullerL = OVRPlugin.FaceExpression2.Lip_Corner_Puller_L,
LipCornerPullerR = OVRPlugin.FaceExpression2.Lip_Corner_Puller_R,
LipFunnelerLB = OVRPlugin.FaceExpression2.Lip_Funneler_LB,
LipFunnelerLT = OVRPlugin.FaceExpression2.Lip_Funneler_LT,
LipFunnelerRB = OVRPlugin.FaceExpression2.Lip_Funneler_RB,
LipFunnelerRT = OVRPlugin.FaceExpression2.Lip_Funneler_RT,
LipPressorL = OVRPlugin.FaceExpression2.Lip_Pressor_L,
LipPressorR = OVRPlugin.FaceExpression2.Lip_Pressor_R,
LipPuckerL = OVRPlugin.FaceExpression2.Lip_Pucker_L,
LipPuckerR = OVRPlugin.FaceExpression2.Lip_Pucker_R,
LipStretcherL = OVRPlugin.FaceExpression2.Lip_Stretcher_L,
LipStretcherR = OVRPlugin.FaceExpression2.Lip_Stretcher_R,
LipSuckLB = OVRPlugin.FaceExpression2.Lip_Suck_LB,
LipSuckLT = OVRPlugin.FaceExpression2.Lip_Suck_LT,
LipSuckRB = OVRPlugin.FaceExpression2.Lip_Suck_RB,
LipSuckRT = OVRPlugin.FaceExpression2.Lip_Suck_RT,
LipTightenerL = OVRPlugin.FaceExpression2.Lip_Tightener_L,
LipTightenerR = OVRPlugin.FaceExpression2.Lip_Tightener_R,
LipsToward = OVRPlugin.FaceExpression2.Lips_Toward,
LowerLipDepressorL = OVRPlugin.FaceExpression2.Lower_Lip_Depressor_L,
LowerLipDepressorR = OVRPlugin.FaceExpression2.Lower_Lip_Depressor_R,
MouthLeft = OVRPlugin.FaceExpression2.Mouth_Left,
MouthRight = OVRPlugin.FaceExpression2.Mouth_Right,
NoseWrinklerL = OVRPlugin.FaceExpression2.Nose_Wrinkler_L,
NoseWrinklerR = OVRPlugin.FaceExpression2.Nose_Wrinkler_R,
OuterBrowRaiserL = OVRPlugin.FaceExpression2.Outer_Brow_Raiser_L,
OuterBrowRaiserR = OVRPlugin.FaceExpression2.Outer_Brow_Raiser_R,
UpperLidRaiserL = OVRPlugin.FaceExpression2.Upper_Lid_Raiser_L,
UpperLidRaiserR = OVRPlugin.FaceExpression2.Upper_Lid_Raiser_R,
UpperLipRaiserL = OVRPlugin.FaceExpression2.Upper_Lip_Raiser_L,
UpperLipRaiserR = OVRPlugin.FaceExpression2.Upper_Lip_Raiser_R,
TongueTipInterdental = OVRPlugin.FaceExpression2.Tongue_Tip_Interdental,
TongueTipAlveolar = OVRPlugin.FaceExpression2.Tongue_Tip_Alveolar,
TongueFrontDorsalPalate = OVRPlugin.FaceExpression2.Tongue_Front_Dorsal_Palate,
TongueMidDorsalPalate = OVRPlugin.FaceExpression2.Tongue_Mid_Dorsal_Palate,
TongueBackDorsalVelar = OVRPlugin.FaceExpression2.Tongue_Back_Dorsal_Velar,
TongueOut = OVRPlugin.FaceExpression2.Tongue_Out,
TongueRetreat = OVRPlugin.FaceExpression2.Tongue_Retreat,
[InspectorName(null)]
Max = OVRPlugin.FaceExpression2.Max,
}
#region Face expressions enumerator
/// <summary>
/// Gets the face expressions enumerator, used for enumerating over <see cref="OVRFaceExpressions"/>
/// as a collection to read the facial expression weights via <see cref="FaceExpressionsEnumerator.Current"/>.
/// </summary>
/// <returns>The enumerator over the face expression weights.</returns>
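/// <example>
/// Because the class is enumerable, the weights can also be read with foreach; the index-to-expression
/// mapping shown is a sketch (the <c>expressions</c> reference is illustrative):
/// <code>
/// int index = 0;
/// foreach (float weight in expressions)
/// {
///     var expression = (OVRFaceExpressions.FaceExpression)index++;
///     // weight is the value for expression on this frame
/// }
/// </code>
/// </example>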
public FaceExpressionsEnumerator GetEnumerator() =>
new FaceExpressionsEnumerator(_currentFaceState.ExpressionWeights);
IEnumerator<float> IEnumerable<float>.GetEnumerator() => GetEnumerator();
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
/// <summary>
/// The number of items in the collection (facial expression weights).
/// </summary>
public int Count => _currentFaceState.ExpressionWeights?.Length ?? 0;
/// <summary>
/// The implementation of IEnumerator for the face expression weights, used for enumerating directly over
/// <see cref="OVRFaceExpressions"/>. This is used when reading this data as a collection of facial expressions
/// by accessing <see cref="Current"/>.
/// </summary>
public struct FaceExpressionsEnumerator : IEnumerator<float>
{
private float[] _faceExpressions;
private int _index;
private int _count;
internal FaceExpressionsEnumerator(float[] array)
{
_faceExpressions = array;
_index = -1;
_count = _faceExpressions?.Length ?? 0;
}
/// <summary>
/// Advances the enumerator to the next element of the collection.
/// </summary>
/// <returns>True if the enumerator was successfully advanced to the next element in this collection,
/// false otherwise.</returns>
public bool MoveNext() => ++_index < _count;
/// <summary>
/// Gets the element of this collection at the current position of the enumerator.
/// </summary>
public float Current => _faceExpressions[_index];
object IEnumerator.Current => Current;
/// <summary>
/// Sets the enumerator to its initial position, which is before the first element in the collection.
/// </summary>
public void Reset() => _index = -1;
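/// <summary>
/// Disposes of the enumerator. The enumerator holds no unmanaged resources, so this is a no-op.
/// </summary>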
public void Dispose()
{
}
}
#endregion
/// <summary>
/// List of face visemes.
/// </summary>
public enum FaceViseme
{
[InspectorName("None")]
Invalid = OVRPlugin.FaceViseme.Invalid,
/// <summary>The viseme representing silence.</summary>
SIL = OVRPlugin.FaceViseme.SIL,
/// <summary>The viseme representing p, b, and m.</summary>
PP = OVRPlugin.FaceViseme.PP,
/// <summary>The viseme representing f and v.</summary>
FF = OVRPlugin.FaceViseme.FF,
/// <summary>The viseme representing th.</summary>
TH = OVRPlugin.FaceViseme.TH,
/// <summary>The viseme representing t and d.</summary>
DD = OVRPlugin.FaceViseme.DD,
/// <summary>The viseme representing k and g.</summary>
KK = OVRPlugin.FaceViseme.KK,
/// <summary>The viseme representing tS, dZ, and S.</summary>
CH = OVRPlugin.FaceViseme.CH,
/// <summary>The viseme representing s and z.</summary>
SS = OVRPlugin.FaceViseme.SS,
/// <summary>The viseme representing n and l.</summary>
NN = OVRPlugin.FaceViseme.NN,
/// <summary>The viseme representing r.</summary>
RR = OVRPlugin.FaceViseme.RR,
/// <summary>The viseme representing a:.</summary>
AA = OVRPlugin.FaceViseme.AA,
/// <summary>The viseme representing e.</summary>
E = OVRPlugin.FaceViseme.E,
/// <summary>The viseme representing ih.</summary>
IH = OVRPlugin.FaceViseme.IH,
/// <summary>The viseme representing oh.</summary>
OH = OVRPlugin.FaceViseme.OH,
/// <summary>The viseme representing ou.</summary>
OU = OVRPlugin.FaceViseme.OU,
[InspectorName(null)]
Count = OVRPlugin.FaceViseme.Count,
}
}