Fixed lips tracking

SDraw 2024-01-28 18:59:20 +03:00
parent e84bbacb8d
commit c2a219dfa3
No known key found for this signature in database
GPG key ID: BB95B4DAB2BB8BB5
6 changed files with 103 additions and 81 deletions

ml_dht/DataParser.cs (new file, +29)

@@ -0,0 +1,29 @@
namespace ml_dht
{
    class DataParser
    {
        MemoryMapReader m_mapReader = null;
        byte[] m_buffer = null;
        TrackingData m_trackingData;

        public DataParser()
        {
            m_buffer = new byte[1024];
            m_mapReader = new MemoryMapReader();
            m_mapReader.Open("head/data");
        }
        ~DataParser()
        {
            m_mapReader.Close();
            m_mapReader = null;
        }

        public void Update()
        {
            if(m_mapReader.Read(ref m_buffer))
                m_trackingData = TrackingData.ToObject(m_buffer);
        }

        public ref TrackingData GetLatestTrackingData() => ref m_trackingData;
    }
}

ml_dht/HeadTracked.cs

@@ -1,7 +1,9 @@
using ABI.CCK.Components;
using ABI_RC.Core.Player;
using ABI_RC.Core.Player.EyeMovement;
using ABI_RC.Systems.FaceTracking;
using RootMotion.FinalIK;
using System;
using System.Reflection;
using UnityEngine;
using ViveSR.anipal.Lip;
@@ -26,10 +28,22 @@ namespace ml_dht
        Quaternion m_headRotation;
        Vector2 m_gazeDirection;
        float m_blinkProgress = 0f;
        LipData_v2 m_lipData;
        bool m_lipDataSent = false;

        Quaternion m_bindRotation;
        Quaternion m_lastHeadRotation;

        internal HeadTracked()
        {
            m_lipData = new LipData_v2();
            m_lipData.frame = 0;
            m_lipData.time = 0;
            m_lipData.image = IntPtr.Zero;
            m_lipData.prediction_data = new PredictionData_v2();
            m_lipData.prediction_data.blend_shape_weight = new float[(int)LipShape_v2.Max];
        }

        // Unity events
        void Start()
        {
@@ -49,6 +63,12 @@ namespace ml_dht
            Settings.SmoothingChange -= this.SetSmoothing;
        }

        void Update()
        {
            if(m_enabled && Settings.FaceTracking)
                m_lipDataSent = false;
        }

        // Tracking updates
        public void UpdateTrackingData(ref TrackingData p_data)
        {
@@ -56,6 +76,12 @@ namespace ml_dht
            m_headRotation.Set(p_data.m_headRotationX, p_data.m_headRotationY * (Settings.Mirrored ? -1f : 1f), p_data.m_headRotationZ * (Settings.Mirrored ? -1f : 1f), p_data.m_headRotationW);
            m_gazeDirection.Set(Settings.Mirrored ? (1f - p_data.m_gazeX) : p_data.m_gazeX, p_data.m_gazeY);
            m_blinkProgress = p_data.m_blink;

            float l_weight = Mathf.Clamp01(Mathf.InverseLerp(0.25f, 1f, Mathf.Abs(p_data.m_mouthShape)));
            m_lipData.prediction_data.blend_shape_weight[(int)LipShape_v2.Jaw_Open] = p_data.m_mouthOpen;
            m_lipData.prediction_data.blend_shape_weight[(int)LipShape_v2.Mouth_Pout] = ((p_data.m_mouthShape > 0f) ? l_weight : 0f);
            m_lipData.prediction_data.blend_shape_weight[(int)LipShape_v2.Mouth_Smile_Left] = ((p_data.m_mouthShape < 0f) ? l_weight : 0f);
            m_lipData.prediction_data.blend_shape_weight[(int)LipShape_v2.Mouth_Smile_Right] = ((p_data.m_mouthShape < 0f) ? l_weight : 0f);
        }

        void OnLookIKPostUpdate()
@@ -113,6 +139,24 @@ namespace ml_dht
            }
        }

        internal bool UpdateFaceTracking(CVRFaceTracking p_component)
        {
            bool l_result = false;
            if(m_enabled && Settings.FaceTracking)
            {
                p_component.LipSyncWasUpdated = true;
                if(!m_lipDataSent)
                {
                    FaceTrackingManager.Instance.SubmitNewFacialData(m_lipData);
                    m_lipDataSent = true;
                }

                p_component.UpdateShapesLocal_Private();
                l_result = true;
            }
            return l_result;
        }

        // Settings
        void SetEnabled(bool p_state)
        {

ml_dht/Main.cs

@@ -1,7 +1,7 @@
using ABI.CCK.Components;
using ABI_RC.Core.Player;
using ABI_RC.Core.Player.EyeMovement;
using ABI_RC.Core.Savior;
using ABI_RC.Systems.FaceTracking;
using System.Reflection;
namespace ml_dht
@@ -10,7 +10,7 @@ namespace ml_dht
    {
        static DesktopHeadTracking ms_instance = null;

        TrackingModule m_trackingModule = null;
        DataParser m_dataParser = null;
        HeadTracked m_localTracked = null;

        public override void OnInitializeMelon()
@@ -20,8 +20,6 @@ namespace ml_dht
            Settings.Init();

            m_trackingModule = new TrackingModule();

            // Patches
            HarmonyInstance.Patch(
                typeof(PlayerSetup).GetMethod(nameof(PlayerSetup.ClearAvatar)),
@@ -45,8 +43,8 @@ namespace ml_dht
            while(PlayerSetup.Instance == null)
                yield return null;

            m_dataParser = new DataParser();
            m_localTracked = PlayerSetup.Instance.gameObject.AddComponent<HeadTracked>();
            FaceTrackingManager.Instance.RegisterModule(m_trackingModule);

            // If you think it's a joke to put patch here, go on, try to put it in OnInitializeMelon, you melon :>
            HarmonyInstance.Patch(
@@ -54,6 +52,10 @@ namespace ml_dht
                null,
                new HarmonyLib.HarmonyMethod(typeof(DesktopHeadTracking).GetMethod(nameof(OnEyeControllerUpdate_Postfix), BindingFlags.Static | BindingFlags.NonPublic))
            );

            HarmonyInstance.Patch(
                typeof(CVRFaceTracking).GetMethod("UpdateLocalData", BindingFlags.Instance | BindingFlags.NonPublic),
                new HarmonyLib.HarmonyMethod(typeof(DesktopHeadTracking).GetMethod(nameof(OnFaceTrackingLocalUpdate_Prefix), BindingFlags.Static | BindingFlags.NonPublic))
            );
        }

        public override void OnDeinitializeMelon()
@@ -61,17 +63,17 @@ namespace ml_dht
            if(ms_instance == this)
                ms_instance = null;

            m_trackingModule = null;
            m_dataParser = null;
            m_localTracked = null;
        }

        public override void OnUpdate()
        {
            if(Settings.Enabled && (m_trackingModule != null))
            if(Settings.Enabled && (m_dataParser != null))
            {
                m_trackingModule.Update();
                m_dataParser.Update();

                if(m_localTracked != null)
                    m_localTracked.UpdateTrackingData(ref m_trackingModule.GetLatestTrackingData());
                    m_localTracked.UpdateTrackingData(ref m_dataParser.GetLatestTrackingData());
            }
        }
@@ -116,5 +118,18 @@ namespace ml_dht
                MelonLoader.MelonLogger.Error(e);
            }
        }

        static bool OnFaceTrackingLocalUpdate_Prefix(ref CVRFaceTracking __instance)
        {
            bool? l_result = ms_instance?.OnFaceTrackingLocalUpdate(__instance);
            return l_result.GetValueOrDefault(true);
        }
        bool OnFaceTrackingLocalUpdate(CVRFaceTracking p_component)
        {
            bool l_result = true;
            if(p_component.UseFacialTracking && (m_localTracked != null))
                l_result = !m_localTracked.UpdateFaceTracking(p_component);
            return l_result;
        }
    }
}

README.md

@@ -21,7 +21,6 @@ Available mod's settings in `Settings - Implementation - Desktop Head Tracking`:
* **Use head tracking:** enables head tracking; default value - `true`.
* **Use eyes tracking:** enables eye tracking; default value - `true`.
* **Use face tracking:** enables mouth shape tracking; default value - `true`.
* **Note:** You need to enable desktop tracking of `Vive Face tracking` on the `Settings - Implementation` menu page.
* **Note:** Your avatar should have a configured `CVR Face Tracking` component.
* **Use blinking:** uses blinking from tracking data; default value - `true`.
* **Mirrored movement:** mirrors movement and gaze along the 0YZ plane; default value - `false`.

ml_dht/TrackingModule.cs (deleted file)

@@ -1,70 +0,0 @@
using ABI_RC.Systems.FaceTracking;
using System;
using UnityEngine;
using ViveSR.anipal.Lip;

namespace ml_dht
{
    class TrackingModule : ITrackingModule
    {
        bool m_registered = false;
        bool m_activeAsModule = false;
        MemoryMapReader m_mapReader = null;
        byte[] m_buffer = null;

        TrackingData m_trackingData;
        LipData_v2 m_lipData;

        public TrackingModule()
        {
            m_lipData = new LipData_v2();
            m_lipData.frame = 0;
            m_lipData.time = 0;
            m_lipData.image = IntPtr.Zero;
            m_lipData.prediction_data = new PredictionData_v2();
            m_lipData.prediction_data.blend_shape_weight = new float[(int)LipShape_v2.Max];

            m_buffer = new byte[1024];
            m_mapReader = new MemoryMapReader();
            m_mapReader.Open("head/data");
        }
        ~TrackingModule()
        {
            m_mapReader.Close();
            m_mapReader = null;
        }

        public (bool, bool) Initialize(bool useEye, bool useLip)
        {
            m_registered = true;
            m_activeAsModule = true;
            return (false, true);
        }

        public void Shutdown()
        {
            m_activeAsModule = false;
        }

        public bool IsEyeDataAvailable() => false;
        public bool IsLipDataAvailable() => true;

        internal void Update()
        {
            if(m_mapReader.Read(ref m_buffer))
            {
                m_trackingData = TrackingData.ToObject(m_buffer);

                float l_weight = Mathf.Clamp01(Mathf.InverseLerp(0.25f, 1f, Mathf.Abs(m_trackingData.m_mouthShape)));
                m_lipData.prediction_data.blend_shape_weight[(int)LipShape_v2.Jaw_Open] = m_trackingData.m_mouthOpen;
                m_lipData.prediction_data.blend_shape_weight[(int)LipShape_v2.Mouth_Pout] = ((m_trackingData.m_mouthShape > 0f) ? l_weight : 0f);
                m_lipData.prediction_data.blend_shape_weight[(int)LipShape_v2.Mouth_Smile_Left] = ((m_trackingData.m_mouthShape < 0f) ? l_weight : 0f);
                m_lipData.prediction_data.blend_shape_weight[(int)LipShape_v2.Mouth_Smile_Right] = ((m_trackingData.m_mouthShape < 0f) ? l_weight : 0f);

                if(m_registered && m_activeAsModule && Settings.FaceTracking)
                    FaceTrackingManager.Instance.SubmitNewFacialData(m_lipData);
            }
        }
        internal ref TrackingData GetLatestTrackingData() => ref m_trackingData;
    }
}

ml_dht/Utils.cs

@@ -1,4 +1,5 @@
using ABI_RC.Core.UI;
using ABI.CCK.Components;
using ABI_RC.Core.UI;
using System.Reflection;
using UnityEngine;
@@ -6,10 +7,14 @@ namespace ml_dht
{
    static class Utils
    {
        static readonly object[] ms_emptyArray = new object[0];

        static readonly FieldInfo ms_view = typeof(CohtmlControlledViewWrapper).GetField("_view", BindingFlags.NonPublic | BindingFlags.Instance);
        static readonly MethodInfo ms_updateShapesLocal = typeof(CVRFaceTracking).GetMethod("UpdateShapesLocal", BindingFlags.NonPublic | BindingFlags.Instance);

        static public void ExecuteScript(this CohtmlControlledViewWrapper p_instance, string p_script) => ((cohtml.Net.View)ms_view.GetValue(p_instance)).ExecuteScript(p_script);
        static public void UpdateShapesLocal_Private(this CVRFaceTracking p_instance) => ms_updateShapesLocal?.Invoke(p_instance, ms_emptyArray);

        public static Matrix4x4 GetMatrix(this Transform p_transform, bool p_pos = true, bool p_rot = true, bool p_scl = false)
        {
            return Matrix4x4.TRS(p_pos ? p_transform.position : Vector3.zero, p_rot ? p_transform.rotation : Quaternion.identity, p_scl ? p_transform.localScale : Vector3.one);