diff --git a/js/config/config-manager.js b/js/config/config-manager.js
index 405916d..7ff90df 100644
--- a/js/config/config-manager.js
+++ b/js/config/config-manager.js
@@ -58,6 +58,8 @@ let defaultConfig = {
     'POSITION_Z_RATIO': 0.12,
     'UI_TRACKING_MODE_COLLAPSE': false,
     'UI_MOOD_COLLAPSE': false,
+    'USE_IFACIALMOCAP': false,
+    'IFACIALMOCAP_IP': "192.168.1.x",
     // system parameters
     'VERSION': VERSION,
     'DEV_DATE': DEV_DATE,
@@ -327,7 +329,7 @@ function getBinaryCM() {
         'MOOD_FUN', 'MOOD_JOY', 'MOOD_NEUTRAL', 'MOOD_AUTO',
         'MULTI_THREAD',
         'UI_TRACKING_MODE_COLLAPSE', 'UI_MOOD_COLLAPSE',
-        'POSITION_TRACKING'];
+        'POSITION_TRACKING', 'USE_IFACIALMOCAP'];
 }
 
 function getSelectCM() {
diff --git a/js/control/hotkey.js b/js/control/hotkey.js
index e8888e8..bde06db 100644
--- a/js/control/hotkey.js
+++ b/js/control/hotkey.js
@@ -10,6 +10,11 @@ function setMoodWithHotkey(moodId) {
 }
 
 function userInputKey(e) {
+    // Prevent hotkeys while the user is typing in an input field
+    if (document.activeElement && (document.activeElement.tagName === 'INPUT' || document.activeElement.tagName === 'TEXTAREA')) {
+        return;
+    }
+
     let key = String.fromCharCode(e.which);
     switch (key) {
         case 'H':
diff --git a/js/interpolator/interpolator.js b/js/interpolator/interpolator.js
index 8f55989..d8e675b 100644
--- a/js/interpolator/interpolator.js
+++ b/js/interpolator/interpolator.js
@@ -11,30 +11,42 @@ let lasTimeDiff = 30;
 
 function weightedAvg(o1, o2, w1, w2) {
     let obj = {};
-    Object.keys(o1).forEach(function(key) {
-        obj[key] = o1[key] + (o2[key] - o1[key]) * w1 / (w1 + w2);
+    Object.keys(o1).forEach(function (key) {
+        let v1 = o1[key] || 0;
+        let v2 = o2[key] || 0;
+        if (isNaN(v1)) v1 = 0;
+        if (isNaN(v2)) v2 = 0;
+        obj[key] = v1 + (v2 - v1) * w1 / (w1 + w2);
     });
     return obj;
 }
 
 function calInfoDiff(o1, o2) {
     let obj = {};
-    Object.keys(o1).forEach(function(key) {
-        obj[key] = o2[key] - o1[key];
+    Object.keys(o1).forEach(function (key) {
+        let v1 = o1[key] || 0;
+        let v2 = o2[key] || 0;
+        if (isNaN(v1)) v1 = 0;
+        if (isNaN(v2)) v2 = 0;
+        obj[key] = v2 - v1;
    });
     return obj;
 }
 
 function calScaleDiff(o1, w1) {
-    Object.keys(o1).forEach(function(key) {
+    Object.keys(o1).forEach(function (key) {
         o1[key] *= w1;
     });
 }
 
 function sumInfoDiff(o1, o2) {
     let obj = {};
-    Object.keys(o1).forEach(function(key) {
-        obj[key] = o2[key] + o1[key];
+    Object.keys(o1).forEach(function (key) {
+        let v1 = o1[key] || 0;
+        let v2 = o2[key] || 0;
+        if (isNaN(v1)) v1 = 0;
+        if (isNaN(v2)) v2 = 0;
+        obj[key] = v2 + v1;
     });
     return obj;
 }
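A quick worked example of the patched weightedAvg (numbers invented for illustration): blending o1 = { yaw: 0.2, mouth: 0.4 } toward o2 = { yaw: 0.6 } with w1 = w2 = 1 now yields { yaw: 0.4, mouth: 0.2 }; the key missing from o2 is treated as 0 instead of propagating NaN into the interpolated pose.

    // sketch, assuming weightedAvg from interpolator.js is in scope
    let blended = weightedAvg({ yaw: 0.2, mouth: 0.4 }, { yaw: 0.6 }, 1, 1);
    // -> { yaw: 0.4, mouth: 0.2 }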
diff --git a/js/ml/ifacialmocap-client.js b/js/ml/ifacialmocap-client.js
new file mode 100644
index 0000000..722eca6
--- /dev/null
+++ b/js/ml/ifacialmocap-client.js
@@ -0,0 +1,185 @@
+const dgram = require('dgram');
+
+class IFacialMocapClient {
+    constructor() {
+        this.socket = null;
+        this.port = 49983; // Default iFacialMocap port
+        this.targetIP = null;
+        this.faceData = {};
+        this.onDataCallback = null;
+        this.listening = false;
+    }
+
+    /**
+     * Initialize the UDP socket and start listening for data.
+     * @param {string} targetIP - The IP address of the iPhone running iFacialMocap.
+     * @param {number} port - The port to listen on (default 49983).
+     */
+    connect(targetIP, port = 49983) {
+        if (this.socket) {
+            this.disconnect();
+        }
+
+        this.targetIP = targetIP;
+        this.port = port;
+        this.socket = dgram.createSocket('udp4');
+
+        this.socket.on('error', (err) => {
+            console.error(`[iFacialMocap] Socket error:\n${err.stack}`);
+            this.socket.close();
+            this.listening = false; // Ensure listening status is updated on error
+            if (this.onConnectCallback) {
+                this.onConnectCallback(false); // Indicate connection failure
+                this.onConnectCallback = null;
+            }
+        });
+
+        this.socket.on('message', (msg, rinfo) => {
+            const dataStr = msg.toString();
+            // console.log(`[iFacialMocap] Raw: ${dataStr.substring(0, 100)}...`); // Log start of message
+            this.parseData(dataStr);
+            if (this.onConnectCallback) {
+                this.onConnectCallback(true);
+                this.onConnectCallback = null; // Only trigger once per connection attempt/success
+            }
+        });
+
+        this.socket.on('listening', () => {
+            const address = this.socket.address();
+            console.log(`[iFacialMocap] Client listening on ${address.address}:${address.port}`);
+            this.listening = true;
+            // Send initiation packet
+            this.sendInitiation();
+        });
+
+        this.socket.bind(this.port);
+    }
+
+    /**
+     * Send the initiation string to the iPhone to start the stream.
+     */
+    sendInitiation() {
+        if (!this.socket || !this.targetIP) return;
+
+        const message = Buffer.from('iFacialMocap_sahuasouryya9218sauhuiayeta91555dy3719');
+        this.socket.send(message, this.port, this.targetIP, (err) => {
+            if (err) {
+                console.error('Failed to send iFacialMocap initiation packet:', err);
+            } else {
+                console.log('Sent iFacialMocap initiation packet to ' + this.targetIP);
+            }
+        });
+    }
+
+    /**
+     * Stop listening and close the socket.
+     */
+    disconnect() {
+        if (this.socket) {
+            this.socket.close();
+            this.socket = null;
+            this.listening = false;
+            console.log('IFacialMocap client disconnected');
+        }
+    }
+
+    /**
+     * Parse the pipe-delimited data string from iFacialMocap.
+     * Format: blendShape1-value1|blendShape2-value2|...|=head#...|rightEye#...|leftEye#...
+     * @param {string} dataStr
+     */
+    parseData(dataStr) {
+        // Debug raw data (throttled to the first few packets)
+        if (!this.logCounter) this.logCounter = 0;
+        if (this.logCounter++ < 5) {
+            console.log('[iFacialMocap] Raw DataStr:', dataStr);
+        }
+
+        const parts = dataStr.split('|');
+        const newData = {};
+
+        parts.forEach(part => {
+            if (!part) return;
+
+            if (part.startsWith('=head#')) {
+                // Parse head data. Based on logs:
+                // vals[0..2] appear to be rotation (degrees),
+                // vals[3..5] appear to be position (meters?).
+                const vals = part.substring(6).split(',');
+                if (vals.length >= 6) {
+                    newData['headPitch'] = parseFloat(vals[0]);
+                    newData['headYaw'] = parseFloat(vals[1]);
+                    newData['headRoll'] = parseFloat(vals[2]);
+                    newData['headX'] = parseFloat(vals[3]);
+                    newData['headY'] = parseFloat(vals[4]);
+                    newData['headZ'] = parseFloat(vals[5]);
+                }
+            } else if (part.startsWith('rightEye#')) {
+                // Parse right eye: rightEye#val1,val2,val3
+                const vals = part.substring(9).split(',');
+                if (vals.length >= 3) {
+                    newData['rightEyeRotX'] = parseFloat(vals[0]);
+                    newData['rightEyeRotY'] = parseFloat(vals[1]);
+                    newData['rightEyeRotZ'] = parseFloat(vals[2]);
+                }
+            } else if (part.startsWith('leftEye#')) {
+                // Parse left eye: leftEye#val1,val2,val3
+                const vals = part.substring(8).split(',');
+                if (vals.length >= 3) {
+                    newData['leftEyeRotX'] = parseFloat(vals[0]);
+                    newData['leftEyeRotY'] = parseFloat(vals[1]);
+                    newData['leftEyeRotZ'] = parseFloat(vals[2]);
+                }
+            } else {
+                // Assume blendshape: key-value
+                const lastDashIndex = part.lastIndexOf('-');
+                if (lastDashIndex !== -1) {
+                    const key = part.substring(0, lastDashIndex);
+                    const valStr = part.substring(lastDashIndex + 1);
+                    const value = parseFloat(valStr);
+                    if (!isNaN(value)) {
+                        newData[key] = value;
+                    }
+                }
+            }
+        });
+
+        // Update internal state
+        Object.assign(this.faceData, newData);
+
+        if (this.onDataCallback) {
+            this.onDataCallback(this.faceData);
+        }
+    }
+
+    /**
+     * Set a callback to be called whenever new face data is parsed.
+     * @param {function} callback
+     */
+    onFaceData(callback) {
+        this.onDataCallback = callback;
+    }
+
+    /**
+     * Get the current face data.
+     * @returns {object}
+     */
+    getFaceData() {
+        return this.faceData;
+    }
+
+    /**
+     * Set a callback to be called when connection status changes.
+     * @param {function} callback
+     */
+    onConnect(callback) {
+        this.onConnectCallback = callback;
+    }
+}
+
+// Export as a global or module depending on the environment
+if (typeof module !== 'undefined' && module.exports) {
+    module.exports = IFacialMocapClient;
+} else {
+    window.IFacialMocapClient = IFacialMocapClient;
+}
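The client above is consumed through its callbacks. One possible wiring (a sketch only; the actual UI/settings call site is not part of this diff, and getCMV and applyIFacialMocapData come from the ml-manager.js changes below):

    // sketch: start the iFacialMocap stream and feed parsed frames into the avatar pipeline
    const mocapClient = new IFacialMocapClient();
    mocapClient.onConnect((ok) => console.log('[iFacialMocap] first packet received:', ok));
    mocapClient.onFaceData((faceData) => window.applyIFacialMocapData(faceData));
    if (getCMV('USE_IFACIALMOCAP')) {
        mocapClient.connect(getCMV('IFACIALMOCAP_IP')); // the iPhone must be reachable on the local network
    }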
diff --git a/js/ml/ml-manager.js b/js/ml/ml-manager.js
index c70203b..0020a3c 100644
--- a/js/ml/ml-manager.js
+++ b/js/ml/ml-manager.js
@@ -11,7 +11,7 @@ let tmpResult = null;
 // face landmark resolver
 function onFaceLandmarkResult(keyPoints, faceInfo) {
     if (faceInfo) {
-        Object.keys(faceInfo).forEach(function(key) {
+        Object.keys(faceInfo).forEach(function (key) {
             let sr = getSR(getKeyType(key)) / getCMV("SENSITIVITY_SCALE");
             let v = (1 - sr) * faceInfo[key] + sr * tmpInfo[key];
             tmpInfo[key] = isNaN(v) ? 0 : v;
@@ -23,7 +23,7 @@
 // pose landmark resolver
 function onPoseLandmarkResult(keyPoints, poseInfo) {
     if (poseInfo) {
-        Object.keys(poseInfo).forEach(function(key) {
+        Object.keys(poseInfo).forEach(function (key) {
             let sr = getSR(getKeyType(key)) / getCMV("SENSITIVITY_SCALE");
             let v = (1 - sr) * poseInfo[key] + sr * tmpInfo[key];
             tmpInfo[key] = isNaN(v) ? 0 : v;
@@ -39,7 +39,7 @@ function onHandLandmarkResult(keyPoints, handInfo, leftright) {
     let preRate = 1 - leftright * 2;
     if (handInfo) {
         handTrackers[leftright] = new Date().getTime();
-        Object.keys(handInfo).forEach(function(key) {
+        Object.keys(handInfo).forEach(function (key) {
             let sr = getSR('hand') / getCMV("SENSITIVITY_SCALE");
             if (key in tmpInfo) {
                 let v = (1 - sr) * handInfo[key] + sr * tmpInfo[key];
@@ -52,7 +52,7 @@
 function noHandLandmarkResult(leftright) {
     let prefix = ["left", "right"][leftright];
     let tmpHandInfo = getDefaultHandInfo(leftright);
-    Object.keys(tmpHandInfo).forEach(function(key) {
+    Object.keys(tmpHandInfo).forEach(function (key) {
         let sr = getSR(getKeyType(key));
         if (key in tmpInfo) {
             let v = (1 - sr) * tmpHandInfo[key] + sr * tmpInfo[key];
@@ -64,7 +64,7 @@
 
 // parse inference result
 function mergePoints(PoI, tPoI) {
-    Object.keys(tPoI).forEach(function(key) {
+    Object.keys(tPoI).forEach(function (key) {
         PoI[key] = tPoI[key];
     });
 }
@@ -78,7 +78,7 @@ async function onHolisticResults(results) {
     let allPoI = {};
     let allLog = {};
 
-    if (results.faceLandmarks) {
+    if (results.faceLandmarks && !getCMV('USE_IFACIALMOCAP')) {
         let keyPoints = packFaceHolistic(results.faceLandmarks);
         mergePoints(allPoI, keyPoints);
         let faceInfo = face2Info(keyPoints);
@@ -113,11 +113,107 @@
     postCoreLog(allLog);
     postPoI(allPoI);
 
-    if (results.faceLandmarks) {
+    if (results.faceLandmarks && !getCMV('USE_IFACIALMOCAP')) {
         pushInfo(tmpInfo);
     }
 }
 
+window.applyIFacialMocapData = function (data) {
+    let useMocap = getCMV('USE_IFACIALMOCAP');
+    if (useMocap === undefined) useMocap = true;
+
+    if (!useMocap) return;
+
+    // --- Extended Shapes & Mood Logic ---
+    const allowTracking = getCMV("MOOD") !== "fun";
+
+    // Helper to get value (0-100 -> 0-1) with safety
+    const getVal = (key) => {
+        if (data[key] !== undefined) {
+            let v = data[key] / 100.0;
+            return isNaN(v) ? 0 : v;
+        }
+        return 0;
+    };
+
+    // --- Head Rotation ---
+    const toRad = Math.PI / 180;
+
+    if (data.headPitch !== undefined && !isNaN(data.headPitch)) tmpInfo['pitch'] = -data.headPitch * toRad;
+    if (data.headYaw !== undefined && !isNaN(data.headYaw)) tmpInfo['yaw'] = data.headYaw * toRad;
+    if (data.headRoll !== undefined && !isNaN(data.headRoll)) tmpInfo['roll'] = data.headRoll * toRad;
+
+    // Optional: Head Position (scaled)
+    // if (data.headX !== undefined && !isNaN(data.headX)) tmpInfo['x'] = data.headX * 0.5; // Scale TBD
+
+    // --- Smoothing Helper ---
+    // Smooth factor: 0.0 = no move, 1.0 = instant move.
+    // Lower = smoother but more latency. 0.5 is a good balance.
+    const SMOOTH_FACTOR = 0.5;
+    const MOUTH_SMOOTH = 0.8; // Faster response for speech
+
+    const applySmoothVal = (key, targetVal, factor = SMOOTH_FACTOR) => {
+        let oldVal = tmpInfo[key] || 0;
+        let newVal = oldVal * (1 - factor) + targetVal * factor;
+        tmpInfo[key] = newVal;
+    };
+
+    applySmoothVal('leftEyeOpen', 1 - getVal('eyeBlink_L'));
+    applySmoothVal('rightEyeOpen', 1 - getVal('eyeBlink_R'));
+
+    // Iris X (Yaw)
+    let irisX = (getVal('eyeLookIn_L') + getVal('eyeLookOut_R')) - (getVal('eyeLookOut_L') + getVal('eyeLookIn_R'));
+    let targetIrisX = isNaN(irisX) ? 0 : irisX * 0.2;
+    applySmoothVal('irisPos', targetIrisX);
+
+    // Iris Y (Pitch)
+    let irisY = (getVal('eyeLookUp_L') + getVal('eyeLookUp_R')) - (getVal('eyeLookDown_L') + getVal('eyeLookDown_R'));
+    let targetIrisY = isNaN(irisY) ? 0 : irisY * 0.2;
+    applySmoothVal('irisYPos', targetIrisY);
+
+    // --- Mouth ---
+    const AMP = 1.0;
+    const MOUTH_OFFSET = 0.1;
+    const applyOffset = (v) => Math.max(0, v - MOUTH_OFFSET) * (1 / (1 - MOUTH_OFFSET));
+
+    let jawVal = applyOffset(getVal('jawOpen'));
+    applySmoothVal('mouth', Math.min(1, jawVal * AMP), MOUTH_SMOOTH);
+
+    // Apply to other shapes
+    applySmoothVal('mouthFunnel', Math.min(1, applyOffset(getVal('mouthFunnel')) * AMP), MOUTH_SMOOTH);
+    applySmoothVal('mouthPucker', Math.min(1, applyOffset(getVal('mouthPucker')) * AMP), MOUTH_SMOOTH);
+
+    // Stretch
+    let stretchVal = Math.min(1, applyOffset((getVal('mouthStretch_L') * 0.5 + getVal('mouthStretch_R') * 0.5)));
+    applySmoothVal('mouthStretch', stretchVal, MOUTH_SMOOTH);
+
+    applySmoothVal('mouthSmile', Math.min(1, (getVal('mouthSmile_L') * 0.5 + getVal('mouthSmile_R') * 0.5) * 1.2), MOUTH_SMOOTH);
+
+    // New Mouth Shapes
+    applySmoothVal('mouthFrown', Math.min(1, (getVal('mouthFrown_L') + getVal('mouthFrown_R')) * 0.5 * AMP), MOUTH_SMOOTH);
+    applySmoothVal('mouthDimple', Math.min(1, (getVal('mouthDimple_L') + getVal('mouthDimple_R')) * 0.5 * AMP), MOUTH_SMOOTH);
+    applySmoothVal('mouthPress', Math.min(1, (getVal('mouthPress_L') + getVal('mouthPress_R')) * 0.5 * AMP), MOUTH_SMOOTH);
+    applySmoothVal('mouthShrug', Math.min(1, (getVal('mouthShrugLower') + getVal('mouthShrugUpper')) * 0.5 * AMP), MOUTH_SMOOTH);
+    applySmoothVal('mouthRoll', Math.min(1, (getVal('mouthRollLower') + getVal('mouthRollUpper')) * 0.5 * AMP), MOUTH_SMOOTH);
+
+    // Tongue
+    applySmoothVal('tongueOut', getVal('tongueOut'), MOUTH_SMOOTH);
+
+    // --- Brows ---
+    const BROW_AMP = 2.5;
+
+    applySmoothVal('brows', Math.min(1, getVal('browInnerUp') * BROW_AMP));
+    applySmoothVal('browOuterUp', Math.min(1, (getVal('browOuterUp_L') + getVal('browOuterUp_R')) * 0.5 * BROW_AMP));
+    applySmoothVal('browDown', Math.min(1, (getVal('browDown_L') + getVal('browDown_R')) * 0.5 * BROW_AMP));
+
+    // --- Cheeks ---
+    applySmoothVal('cheekPuff', getVal('cheekPuff'));
+    applySmoothVal('cheekSquint', (getVal('cheekSquint_L') + getVal('cheekSquint_R')) * 0.5);
+
+    // Force update
+    pushInfo(tmpInfo);
+}
+
 // call worker with image
 async function postImage() {
     let modelConfig = {
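applySmoothVal is a plain exponential moving average over tmpInfo, and getVal rescales iFacialMocap's 0-100 blendshape values to 0-1. A worked step (numbers invented): with a stored leftEyeOpen of 0.2, an incoming eyeBlink_L of 60, and the default SMOOTH_FACTOR of 0.5:

    // target = 1 - 60 / 100 = 0.4
    // new leftEyeOpen = 0.2 * (1 - 0.5) + 0.4 * 0.5 = 0.3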
@@ -139,7 +235,7 @@ async function onWorkerResults(e) {
     if (e.data && e.data['metakey'] == getMetaKey()) {
         try {
             correctMeta();
-            setTimeout(function() {
+            setTimeout(function () {
                 postImage();
             }, getCMV("DYNA_ML_DURATION"));
         } catch (err) {
@@ -173,49 +269,108 @@ function extractMouthEyes(keys) {
         'p': {},
         'e': {}
     };
-    if (getCMV("MOOD") != "fun") {
-        // mouth
-        let mouthRatio = ratioLimit((keys['mouth'] - getCMV("MOUTH_OPEN_OFFSET")) * getCMV('MOUTH_RATIO'));
+
+    // --- Extended Shapes & Mood Logic ---
+    const allowTracking = getCMV("MOOD") !== "fun";
+
+    // --- Mouth ---
+    // Basic Open (A)
+    let mouthRatio;
+    if (getCMV('USE_IFACIALMOCAP')) {
+        // iFacialMocap data is already 0-1, use directly
+        mouthRatio = keys['mouth'];
+    } else {
+        // Webcam data needs scaling
+        mouthRatio = ratioLimit((keys['mouth'] - getCMV("MOUTH_OPEN_OFFSET")) * getCMV('MOUTH_RATIO'));
+    }
+    if (allowTracking) {
         meinfo['b']['aa'] = mouthRatio;
-        // irises
-        let irispos = keys['irisPos'];
-        let irisY = (irispos - getCMV('IRIS_POS_OFFSET')) * getCMV('IRIS_POS_RATIO');
-        meinfo['r']['rightEye'] = [0, irisY, 0];
-        meinfo['r']['leftEye'] = [0, irisY, 0];
-        // brows
-        let browspos = Math.min(1, Math.max(0, keys['brows'] - getCMV("BROWS_OFFSET")) * getCMV("BROWS_RATIO"));
-        meinfo['b']['Brows up'] = browspos;
-        // auto
-        let happyThresholdForEyes = 1;
-        if (getCMV("MOOD") == "auto") {
-            let autoV = Math.max(-1, Math.min(1, keys["auto"] * getCMV("MOOD_AUTO_RATIO")));
-            let absauto = Math.max(0, Math.abs(autoV) - getCMV("MOOD_AUTO_OFFSET"));
-            let browspos = Math.min(1, Math.max(0, keys['brows'] - getCMV("BROWS_OFFSET")) * getCMV("BROWS_RATIO"));
-            let browslimit = 0.1;
-            let balFun = Math.min(browslimit, Math.max(0, browspos));
-            let balSor = Math.min(browslimit / 2, Math.max(0, (browslimit - balFun) / 2));
-            let balAng = Math.min(browslimit / 2, Math.max(0, (browslimit - balFun) / 2));
-            if (autoV < 0) {
-                meinfo['b']['angry'] = balAng;
-                meinfo['b']['sad'] = absauto + balSor;
-                meinfo['b']['happy'] = balFun;
-                meinfo['b']['ee'] = 0;
-            } else {
-                happyThresholdForEyes = 1 - absauto;
-                meinfo['b']['angry'] = balAng;
-                meinfo['b']['sad'] = balSor;
-                meinfo['b']['happy'] = absauto + balFun;
-                meinfo['b']['ee'] = absauto;
-            }
-        }
-        // eyes
-        let leo = keys['leftEyeOpen'];
-        let reo = keys['rightEyeOpen'];
-        if (getCMV("EYE_SYNC") || Math.abs(reo - leo) < getCMV('EYE_LINK_THRESHOLD')) {
-            let avgEye = (reo + leo) / 2;
-            leo = avgEye;
-            reo = avgEye;
+    }
+
+    // Extended Shapes (if available from iFacialMocap)
+    if (keys['mouthFunnel'] !== undefined) meinfo['b']['oh'] = keys['mouthFunnel']; // O
+    if (keys['mouthPucker'] !== undefined) meinfo['b']['ou'] = keys['mouthPucker']; // U
+    if (keys['mouthStretch'] !== undefined) meinfo['b']['ih'] = keys['mouthStretch']; // I / E
+
+    // Frown -> Sorrow/Sad
+    if (allowTracking && keys['mouthFrown'] !== undefined && getCMV("MOOD") !== "sorrow") {
+        let frownVal = keys['mouthFrown'] > 0.1 ? keys['mouthFrown'] : 0;
+        // Mix with existing sad if any
+        let currentSad = meinfo['b']['sad'] || 0;
+        meinfo['b']['sad'] = Math.max(currentSad, frownVal);
+    }
+
+    // Tongue
+    if (allowTracking && keys['tongueOut'] !== undefined) {
+        meinfo['b']['tongue'] = keys['tongueOut'] > 0.1 ? keys['tongueOut'] : 0;
+    }
+
+    // Cheek Puff
+    if (allowTracking && keys['cheekPuff'] !== undefined) {
+        meinfo['b']['puff'] = keys['cheekPuff'] > 0.1 ? keys['cheekPuff'] : 0;
+    }
+
+    // --- Brows ---
+    let browInner = keys['brows'] || 0;
+    let browOuter = keys['browOuterUp'] || 0;
+    // Combine inner and outer for general "Brows up"
+    let browspos = Math.min(1, Math.max(0, Math.max(browInner, browOuter) - getCMV("BROWS_OFFSET")) * getCMV("BROWS_RATIO"));
+    meinfo['b']['Brows up'] = browspos;
+
+    if (allowTracking && keys['browDown'] !== undefined && getCMV("MOOD") !== "angry") {
+        meinfo['b']['angry'] = keys['browDown'] > 0.1 ? keys['browDown'] : 0;
+    }
+
+    // --- Iris Rotation ---
+    let irispos = keys['irisPos'];
+    if (isNaN(irispos)) irispos = 0;
+
+    let irisY = (irispos - getCMV('IRIS_POS_OFFSET')) * getCMV('IRIS_POS_RATIO'); // Yaw (Left/Right)
+
+    let irisYPos = keys['irisYPos'] || 0;
+    if (isNaN(irisYPos)) irisYPos = 0;
+
+    let irisX = -irisYPos * getCMV('IRIS_POS_RATIO'); // Pitch (Up/Down)
+
+    // Safety check for array values
+    if (isNaN(irisX)) irisX = 0;
+    if (isNaN(irisY)) irisY = 0;
+
+    meinfo['r']['rightEye'] = [irisX, irisY, 0];
+    meinfo['r']['leftEye'] = [irisX, irisY, 0];
+
+    // --- Auto Mood / Smile ---
+    if (allowTracking && keys['mouthSmile'] !== undefined) {
+        meinfo['b']['happy'] = keys['mouthSmile'] > 0.1 ? keys['mouthSmile'] : 0;
+    }
+
+    // Keep existing Auto Mood logic as fallback
+    if (getCMV("MOOD") == "auto" && keys['mouthSmile'] === undefined) {
+        let autoV = Math.max(-1, Math.min(1, keys["auto"] * getCMV("MOOD_AUTO_RATIO")));
+        let absauto = Math.max(0, Math.abs(autoV) - getCMV("MOOD_AUTO_OFFSET"));
+        if (autoV < 0) {
+            meinfo['b']['angry'] = Math.max(meinfo['b']['angry'] || 0, absauto); // Mix
+        } else {
+            meinfo['b']['happy'] = Math.max(meinfo['b']['happy'] || 0, absauto); // Mix
         }
+    }
+
+    // --- Eyes Blink ---
+    let happyThresholdForEyes = 1;
+    // If happy is high, eyes might squint.
+    if (meinfo['b']['happy'] > 0.5) happyThresholdForEyes = 1 - (meinfo['b']['happy'] - 0.5);
+
+    let leo = keys['leftEyeOpen'];
+    let reo = keys['rightEyeOpen'];
+
+    if (getCMV("EYE_SYNC") || Math.abs(reo - leo) < getCMV('EYE_LINK_THRESHOLD')) {
+        let avgEye = (reo + leo) / 2;
+        leo = avgEye;
+        reo = avgEye;
+    }
+
+    // Right Eye
+    if (allowTracking) {
         if (reo < getCMV('RIGHT_EYE_CLOSE_THRESHOLD')) {
             meinfo['b']['blinkRight'] = happyThresholdForEyes;
         } else if (reo < getCMV('RIGHT_EYE_OPEN_THRESHOLD')) {
@@ -224,6 +379,8 @@ function extractMouthEyes(keys) {
         } else {
             meinfo['b']['blinkRight'] = 0;
         }
+
+        // Left Eye
         if (leo < getCMV('LEFT_EYE_CLOSE_THRESHOLD')) {
             meinfo['b']['blinkLeft'] = happyThresholdForEyes;
         } else if (leo < getCMV('LEFT_EYE_OPEN_THRESHOLD')) {
@@ -232,7 +389,13 @@ function extractMouthEyes(keys) {
         } else {
             meinfo['b']['blinkLeft'] = 0;
         }
+    } else {
+        // Force eyes open (0) if tracking is disabled (e.g. Fun mood takes over),
+        // or let them stay at default. Usually 0 is safe to avoid conflict.
+        meinfo['b']['blinkRight'] = 0;
+        meinfo['b']['blinkLeft'] = 0;
     }
+
     return meinfo;
 }
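The extended keys (mouthFunnel, mouthPucker, tongueOut, cheekPuff, browDown, ...) only exist in tmpInfo when applyIFacialMocapData has filled them, so the undefined checks keep webcam-only tracking on its previous behaviour. As a summary of the mapping used above (key names as they appear in this diff):

    // tmpInfo key (ARKit-derived)   -> VRM blendshape set by extractMouthEyes
    // mouth (from jawOpen)          -> 'aa'
    // mouthFunnel                   -> 'oh'
    // mouthPucker                   -> 'ou'
    // mouthStretch                  -> 'ih'
    // mouthSmile / mouthFrown       -> 'happy' / 'sad'
    // browDown                      -> 'angry'
    // brows / browOuterUp           -> 'Brows up'
    // tongueOut / cheekPuff         -> 'tongue' / 'puff'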
@@ -265,7 +428,7 @@ function extractHandLandmark(keys) {
     for (let leftright of [0, 1]) {
         let prefix = ["left", "right"][leftright];
         let preRate = 1 - leftright * 2;
-        Object.keys(fingerRates).forEach(function(finger) {
+        Object.keys(fingerRates).forEach(function (finger) {
             let fingerRate = fingerRates[finger] * getCMV("FINGER_GRIP_RATIO");
             let spreadRate = spreadRates[finger] * getCMV("FINGER_SPREAD_RATIO");
             let preRatio = keys[prefix + finger];
@@ -309,16 +472,16 @@ function extractBody(keys) {
     let leanRatio = Math.min(1, Math.max(-1, keys['lean'])) * 0.6;
     // head
     binfo['r']['head'] = [radLimit(keys['pitch'] * getCMV('HEAD_RATIO')) * getCMV('VRM_XR'),
-        radLimit(keys['yaw'] * getCMV('HEAD_RATIO') - leanRatio * 0.3) * getCMV('VRM_YR'),
-        radLimit(keys['roll'] * getCMV('HEAD_RATIO') - tiltRatio * 0.3) * getCMV('VRM_ZR')]
+    radLimit(keys['yaw'] * getCMV('HEAD_RATIO') - leanRatio * 0.3) * getCMV('VRM_YR'),
+    radLimit(keys['roll'] * getCMV('HEAD_RATIO') - tiltRatio * 0.3) * getCMV('VRM_ZR')]
     // neck
     binfo['r']['neck'] = [radLimit(keys['pitch'] * getCMV('NECK_RATIO')) * getCMV('VRM_XR'),
-        radLimit(keys['yaw'] * getCMV('NECK_RATIO') - leanRatio * 0.7) * getCMV('VRM_YR'),
-        radLimit(keys['roll'] * getCMV('NECK_RATIO') - tiltRatio * 0.7) * getCMV('VRM_ZR')];
+    radLimit(keys['yaw'] * getCMV('NECK_RATIO') - leanRatio * 0.7) * getCMV('VRM_YR'),
+    radLimit(keys['roll'] * getCMV('NECK_RATIO') - tiltRatio * 0.7) * getCMV('VRM_ZR')];
     // chest
     binfo['r']['spine'] = [radLimit(keys['pitch'] * getCMV('CHEST_RATIO')) * getCMV('VRM_XR'),
-        radLimit(keys['yaw'] * getCMV('CHEST_RATIO') + leanRatio) * getCMV('VRM_YR'),
-        radLimit(keys['roll'] * getCMV('CHEST_RATIO') + tiltRatio) * getCMV('VRM_ZR')];
+    radLimit(keys['yaw'] * getCMV('CHEST_RATIO') + leanRatio) * getCMV('VRM_YR'),
+    radLimit(keys['roll'] * getCMV('CHEST_RATIO') + tiltRatio) * getCMV('VRM_ZR')];
     // left right arm
     if (getCMV('TRACKING_MODE') == "Upper-Body") {
         for (let i = 0; i < 2; i++) {
@@ -330,7 +493,7 @@
             let hr = keys[prefix + 'Roll'];
             let hp = keys[prefix + 'Pitch'];
             let armEuler = armMagicEuler(wx, wy, hy, hr, hp, i);
-            Object.keys(armEuler).forEach(function(armkey) {
+            Object.keys(armEuler).forEach(function (armkey) {
                 binfo['e'][prefix + armkey] = armEuler[armkey];
             });
         }
@@ -339,9 +502,9 @@
 }
 
 function mergeInfo(minfo, tinfo) {
-    Object.keys(tinfo).forEach(function(key0) {
+    Object.keys(tinfo).forEach(function (key0) {
         if (key0 in minfo) {
-            Object.keys(tinfo[key0]).forEach(function(key1) {
+            Object.keys(tinfo[key0]).forEach(function (key1) {
                 minfo[key0][key1] = tinfo[key0][key1];
             });
         } else {