Mirror of https://github.com/overte-org/community-apps.git, synced 2025-08-17 18:23:58 +02:00
Merge pull request #41 from overte-org/feature/face_tracking
Desktop mode face tracking app based on Mediapipe
Commit fca892de14
2 changed files with 1096 additions and 0 deletions
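
The app has two parts: index.html runs MediaPipe's FaceLandmarker on the webcam feed, lets the user reshape each blendshape's response curve on a Chart.js graph, and posts the mapped scores plus head pose to the tablet; emocam1.js receives those events and applies them to the avatar. Each web event is a JSON string of roughly this shape (field names are taken from the code below; the sample values are illustrative only):

    {
        "type": "trackingmotion",   // "tracking" when head navigation is off
        "data": { "jawOpen": 0.42, "eyeBlinkLeft": 0.07 /* one 0..1 score per landmark */ },
        "yaw": -3.1, "pitch": 12.8, "roll": 0.4   // degrees, relative to the first calibrated frame
    }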
applications/emocam/emocam1.js (new file, 209 lines)
@@ -0,0 +1,209 @@
//
// emocam.js
//
// Created by George Deac, October 21st 2023.
// Copyright 2023 George Deac.
// Copyright 2023 Overte e.V.
//
// Overte application for MediaPipe face tracking in Desktop mode.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

(function() {
    var TABLET_BUTTON_NAME = "EMOTIONS";
    var TRANSITION_TIME_SECONDS = 0.25;
    var onEmoteScreen = false;
    var button;
    var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
    var icon = "face.png";
    var activeIcon = "face.png";
    var isActive = true;
    // Cache-bust the web page so edits are picked up on reload.
    var EMOTE_APP_BASE = "index.html?" + Date.now();
    var EMOTE_APP_URL = Script.resolvePath(EMOTE_APP_BASE);
    var EMOTE_APP_SORT_ORDER = 12;
    var EMOTE_LABEL = "FACE";
    var pitchValue = 0;
    var yawValue = 0;
    var forwardValue = 0;
    var sideValue = 0;

    button = tablet.addButton({
        icon: "https://metaverse.8agora.com/facedetection/face.png",
        activeIcon: "https://metaverse.8agora.com/facedetection/facei.png",
        text: EMOTE_LABEL,
        sortOrder: EMOTE_APP_SORT_ORDER
    });

    function onClicked() {
        if (onEmoteScreen) {
            tablet.gotoHomeScreen();
        } else {
            onEmoteScreen = true;
            tablet.gotoWebScreen(EMOTE_APP_URL);
            //webWindow = new OverlayWebWindow(' ', EMOTE_APP_URL, 480, 810, false);
        }
    }

    function onScreenChanged(type, url) {
        onEmoteScreen = type === "Web" && (url.indexOf(EMOTE_APP_BASE) === url.length - EMOTE_APP_BASE.length);
        button.editProperties({ isActive: onEmoteScreen });
    }

    // Head navigation: Overte polls these callbacks every frame, so updating
    // pitchValue/yawValue/forwardValue/sideValue is enough to steer the avatar.
    var mapping = Controller.newMapping();
    var yawBinding = mapping.from(function() { return yawValue; }).to(Controller.Actions.DeltaYaw);
    var pitchBinding = mapping.from(function() { return pitchValue; }).to(Controller.Actions.DeltaPitch);
    var forwardBinding = mapping.from(function() { return forwardValue; }).to(Controller.Actions.TranslateZ);
    var sideBinding = mapping.from(function() { return sideValue; }).to(Controller.Actions.TranslateX);
    mapping.enable();

    function onWebEventReceived(event) {
        var parsed = JSON.parse(event);

        if (parsed.type === "tracking" || parsed.type === "trackingmotion") {
            var emotion = parsed.data;
            MyAvatar.hasScriptedBlendshapes = true;
            // Map the ARKit-style blendshape names coming from MediaPipe to
            // Overte blendshape names, with per-shape gain tweaks.
            var bend = {
                "EyeOpen_L": emotion["eyeWideLeft"] * 4,
                "EyeOpen_R": emotion["eyeWideRight"] * 4,
                "EyeBlink_L": emotion["eyeBlinkLeft"],
                "EyeBlink_R": emotion["eyeBlinkRight"],
                "EyeSquint_L": emotion["eyeSquintLeft"],
                "EyeSquint_R": emotion["eyeSquintRight"],
                "BrowsD_L": emotion["browDownLeft"],
                "BrowsD_R": emotion["browDownRight"],
                "BrowsU_C": emotion["browInnerUp"],
                "BrowsU_L": emotion["browOuterUpLeft"],
                "BrowsU_R": emotion["browOuterUpRight"],
                "JawOpen": emotion["jawOpen"],
                "MouthOpen": emotion["jawOpen"],
                "JawFwd": emotion["jawForward"],
                "MouthFrown_L": emotion["mouthFrownLeft"],
                "MouthFrown_R": emotion["mouthFrownRight"],
                "MouthSmile_L": emotion["mouthSmileLeft"] * 0.8,
                "MouthSmile_R": emotion["mouthSmileRight"] * 0.8,
                "NoseSneer_L": emotion["noseSneerLeft"],
                "NoseSneer_R": emotion["noseSneerRight"],
                "Puff": emotion["cheekPuff"] * 1.3,
                "CheekSquint_L": emotion["cheekSquintLeft"],
                "CheekSquint_R": emotion["cheekSquintRight"],
                "EyeDown_L": emotion["eyeLookDownLeft"] * 1.2,
                "EyeDown_R": emotion["eyeLookDownRight"] * 1.2,
                "EyeIn_L": emotion["eyeLookInLeft"],
                "EyeIn_R": emotion["eyeLookInRight"],
                "EyeOut_L": emotion["eyeLookOutLeft"],
                "EyeOut_R": emotion["eyeLookOutRight"],
                "EyeUp_L": emotion["eyeLookUpLeft"],
                "EyeUp_R": emotion["eyeLookUpRight"],
                "TongueOut": emotion["jawForward"],
                "JawLeft": emotion["jawLeft"] * 3,
                "JawRight": emotion["jawRight"] * 3,
                "MouthClose": emotion["mouthClose"],
                "MouthDimple_L": emotion["mouthDimpleLeft"],
                "MouthDimple_R": emotion["mouthDimpleRight"],
                "LipsFunnel": emotion["mouthFunnel"],
                "MouthLeft": emotion["mouthLeft"],
                "MouthLowerDown_L": emotion["mouthLowerDownLeft"],
                "MouthLowerDown_R": emotion["mouthLowerDownRight"],
                "MouthPress_L": emotion["mouthPressLeft"],
                "MouthPress_R": emotion["mouthPressRight"],
                "LipsPucker": emotion["mouthPucker"],
                "MouthRight": emotion["mouthRight"],
                "MouthRollLower": emotion["mouthRollLower"],
                "MouthRollUpper": emotion["mouthRollUpper"],
                "MouthShrugLower": emotion["mouthShrugLower"],
                "MouthShrugUpper": emotion["mouthShrugUpper"],
                "MouthUpperUp_L": emotion["mouthUpperUpLeft"],
                "MouthUpperUp_R": emotion["mouthUpperUpRight"]
            };
            if (parsed.type === "trackingmotion") {
                print("pitch: " + parsed.pitch + " yaw: " + parsed.yaw);
                // Nodding past the dead zone (-15..5 degrees) pitches the view.
                if (parsed.pitch <= -15 || parsed.pitch >= 5) {
                    forwardValue = 0;
                    yawValue = 0;
                    if (parsed.pitch > 5) {
                        pitchValue = 0.3;
                    }
                    if (parsed.pitch < -15) {
                        pitchValue = -0.3;
                    }
                    if (parsed.pitch >= -15 && parsed.pitch <= 5) {
                        pitchValue = 0;
                    }
                } else {
                    pitchValue = 0;
                    // Turning the head past +/-10 degrees yaws the avatar.
                    if (parsed.yaw <= -10 || parsed.yaw >= 10) {
                        forwardValue = 0;
                        if (parsed.yaw > 10) {
                            yawValue = parsed.yaw / 20;
                        }
                        if (parsed.yaw < -10) {
                            yawValue = parsed.yaw / 20;
                        }
                        if (parsed.yaw >= -10 && parsed.yaw <= 10) {
                            yawValue = 0;
                        }
                    } else {
                        yawValue = 0;
                        // With the head centered, brow gestures drive movement.
                        if (emotion["browInnerUp"] > 0.1) {
                            forwardValue = -1;
                        }
                        if (emotion["browDownLeft"] > 0.4) {
                            forwardValue = 1;
                        }
                        if (emotion["browInnerUp"] <= 0.1 && emotion["browDownLeft"] <= 0.4) {
                            forwardValue = 0;
                        }
                    }
                }
            }
            // Aim the avatar's head where the tracked head is pointing.
            var direction = Vec3.multiplyQbyV(Quat.fromPitchYawRollDegrees(parsed.pitch, parsed.yaw, 0), {x: 0, y: 0, z: 100});
            direction = Vec3.multiplyQbyV(MyAvatar.orientation, direction);
            direction = Vec3.sum(direction, MyAvatar.position);
            MyAvatar.setHeadLookAt(direction);
            print("YAW=" + MyAvatar.headYaw);
            for (var blendshape in bend) {
                MyAvatar.setBlendshape(blendshape, bend[blendshape]);
            }
        }
    }

    // Apparently unused helper; the state variables are declared here so the
    // function is self-contained.
    var emotion, lastEmotionUsed, changingEmotionPercentage, isChangingEmotion;
    function setEmotion(currentEmotion) {
        if (emotion !== lastEmotionUsed) {
            lastEmotionUsed = emotion;
        }
        if (currentEmotion !== lastEmotionUsed) {
            changingEmotionPercentage = 0.0;
            emotion = currentEmotion;
            isChangingEmotion = true;
            MyAvatar.hasScriptedBlendshapes = true;
        }
    }

    button.clicked.connect(onClicked);
    tablet.screenChanged.connect(onScreenChanged);
    tablet.webEventReceived.connect(onWebEventReceived);

    Script.scriptEnding.connect(function () {
        if (onEmoteScreen) {
            tablet.gotoHomeScreen();
        }
        button.clicked.disconnect(onClicked);
        tablet.screenChanged.disconnect(onScreenChanged);
        tablet.webEventReceived.disconnect(onWebEventReceived);
        if (tablet) {
            tablet.removeButton(button);
        }

        MyAvatar.restoreAnimation();
        mapping.disable();
    });
}());
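
In short, when head navigation is enabled the script applies dead zones around the neutral pose: pitching the head outside roughly -15 to +5 degrees tilts the view, turning past +/-10 degrees of yaw rotates the avatar (scaled by yaw / 20), and with the head centered, raising the inner brows (browInnerUp > 0.1) and furrowing them (browDownLeft > 0.4) drive TranslateZ in opposite directions to move the avatar.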
applications/emocam/index.html (new file, 887 lines)
@@ -0,0 +1,887 @@
<!--
    index.html

    Created by George Deac, October 21st 2023.
    Copyright 2023 George Deac.
    Copyright 2023 The MediaPipe Authors.

    Overte application for MediaPipe face tracking in Desktop mode.

    Distributed under the Apache License, Version 2.0.
    See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-->

<!DOCTYPE html>
<html lang="en">

<head>
    <meta charset="UTF-8">
    <title>MediaPipe Face Landmarker</title>

    <style>
        body {
            font-family: helvetica, arial, sans-serif;
            margin: 2em;
            background-color: white;
            color: #3d3d3d;
            --mdc-theme-primary: #007f8b;
            --mdc-theme-on-primary: #f1f3f4;
        }

        h1 {
            font-style: italic;
            color: #007f8b;
        }

        h2 {
            clear: both;
        }

        em {
            font-weight: bold;
        }

        video {
            clear: both;
            display: block;
            transform: rotateY(180deg);
            -webkit-transform: rotateY(180deg);
            -moz-transform: rotateY(180deg);
        }

        section {
            opacity: 1;
            transition: opacity 500ms ease-in-out;
        }

        header,
        footer {
            clear: both;
        }

        .removed {
            display: none;
        }

        .invisible {
            opacity: 0.2;
        }

        .note {
            font-style: italic;
            font-size: 130%;
        }

        .videoView,
        .detectOnClick,
        .blend-shapes {
            position: relative;
            width: 48%;
            margin: 2% 1%;
            cursor: pointer;
        }

        .videoView p,
        .detectOnClick p {
            position: absolute;
            padding: 5px;
            background-color: #007f8b;
            color: #fff;
            border: 1px dashed rgba(255, 255, 255, 0.7);
            z-index: 2;
            font-size: 12px;
            margin: 0;
        }

        .highlighter {
            background: rgba(0, 255, 0, 0.25);
            border: 1px dashed #fff;
            z-index: 1;
            position: absolute;
        }

        .canvas {
            z-index: 1;
            position: absolute;
            pointer-events: none;
        }

        .output_canvas {
            transform: rotateY(180deg);
            -webkit-transform: rotateY(180deg);
            -moz-transform: rotateY(180deg);
        }

        .detectOnClick {
            z-index: 0;
        }

        .detectOnClick img {
            width: 100%;
        }

        .blend-shapes-item {
            display: flex;
            align-items: center;
            height: 20px;
        }

        .blend-shapes-label {
            display: flex;
            width: 120px;
            justify-content: flex-end;
            align-items: center;
            margin-right: 4px;
        }

        .blend-shapes-value {
            display: flex;
            height: 16px;
            align-items: center;
            background-color: #007f8b;
        }

        .wrapper {
            max-width: 800px;
            margin: 50px auto;
        }

        h1 {
            font-weight: 200;
            font-size: 3em;
            margin: 0 0 0.1em 0;
        }

        h2 {
            font-weight: 200;
            font-size: 0.9em;
            margin: 0 0 50px;
            color: #555;
        }

        a {
            margin-top: 50px;
            display: block;
            color: #3e95cd;
        }

        .switch {
            position: relative;
            display: inline-block;
            width: 60px;
            height: 34px;
        }

        /* Hide default HTML checkbox */
        .switch input {
            opacity: 0;
            width: 0;
            height: 0;
        }

        /* The slider */
        .slider {
            position: absolute;
            cursor: pointer;
            top: 0;
            left: 0;
            right: 0;
            bottom: 0;
            background-color: #ccc;
            -webkit-transition: .4s;
            transition: .4s;
        }

        .slider:before {
            position: absolute;
            content: "";
            height: 26px;
            width: 26px;
            left: 4px;
            bottom: 4px;
            background-color: white;
            -webkit-transition: .4s;
            transition: .4s;
        }

        input:checked + .slider {
            background-color: #007db8;
        }

        input:focus + .slider {
            box-shadow: 0 0 1px #007db8;
        }

        input:checked + .slider:before {
            -webkit-transform: translateX(26px);
            -ms-transform: translateX(26px);
            transform: translateX(26px);
        }

        /* Rounded sliders */
        .slider.round {
            border-radius: 34px;
        }

        .slider.round:before {
            border-radius: 50%;
        }
    </style>

    <script>
        window.console = window.console || function(t) {};
    </script>

    <meta http-equiv="Cache-control" content="no-cache, no-store, must-revalidate">
    <meta http-equiv="Pragma" content="no-cache">
    <meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">

    <link href="https://unpkg.com/material-components-web@latest/dist/material-components-web.min.css" rel="stylesheet">
    <script src="https://unpkg.com/material-components-web@latest/dist/material-components-web.min.js"></script>
    <script src="https://cdn.jsdelivr.net/npm/chart.js@2.9.3/dist/Chart.min.js"></script>
</head>
<body>

    <div id="liveView" class="videoView">
        <table width="320">
            <tr>
                <td>
                    <button id="webcamButton" class="mdc-button mdc-button--raised">
                        <span class="mdc-button__ripple"></span>
                        <span class="mdc-button__label">ENABLE PREDICTIONS</span>
                    </button>
                </td>
                <td>
                </td>
                <td>
                    Use Head Navigation:
                </td>
                <td>
                    <label class="switch">
                        <input type="checkbox" id="switchmove" class="switchmove">
                        <span class="slider"></span>
                    </label>
                </td>
            </tr>
        </table>
        <div style="position: relative;">
            <video id="webcam" autoplay playsinline></video>
            <canvas class="output_canvas" id="output_canvas" style="position: absolute; left: 0px; top: 0px;"></canvas>
        </div>
    </div>

    <div>
        <select id="landmarkDropdown" onchange="handleLandmarkChange()">
            <!-- Options for the dropdown will be populated in the script -->
        </select>
    </div>

    <div>
        <input type="text" id="maxY" value="100" />
        <button onclick="increaseY()">Increase</button>
        <button onclick="decreaseY()">Decrease</button>
    </div>

    <!-- canvas element in a container -->
    <div class="wrapper">
        <canvas id="canvas" width="480" height="480"></canvas>
    </div>

    <p id="output"></p>
    <div class="blend-shapes">
        <ul class="blend-shapes-list" id="video-blend-shapes"></ul>
    </div>

    <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.1/jquery.min.js"></script>
    <script>

    // Graph max range adjust
    var maxY = 100; // Default Y max value

    function increaseY() {
        if (maxY < 100) {
            maxY += 25;
        } else {
            maxY += 100;
        }
        document.getElementById('maxY').value = maxY;
        updateChartMaxY();
    }

    function decreaseY() {
        if (maxY <= 100) {
            maxY -= 25;
        } else {
            maxY -= 100;
        }
        if (maxY < 25) {
            maxY = 25; // keep a usable positive range
        }
        document.getElementById('maxY').value = maxY;
        updateChartMaxY();
    }

    function updateChartMaxY() {
        var dropdown = document.getElementById("landmarkDropdown");
        var selectedLandmark = dropdown.options[dropdown.selectedIndex].value;

        // update the data for the chart
        var mapping = landmarkMappings[selectedLandmark];

        // Clip mapping.y_data_1 to maxY before updating the chart
        for (var i = 0; i < mapping.y_data_1.length; i++) {
            if (mapping.y_data_1[i] > maxY) {
                mapping.y_data_1[i] = maxY;
            }
        }

        window.myChart.data.labels = mapping.x_data;
        window.myChart.data.datasets[0].data = mapping.y_data_1;
        // update label for the line to reflect the currently selected landmark
        window.myChart.data.datasets[0].label = selectedLandmark + " morph mapping";

        window.myChart.options.scales.yAxes[0].ticks.max = maxY;

        window.myChart.update();
    }

    function generateControlPoints(numPoints) {
        var x_data = [];
        var y_data_1 = [];

        var stepSize = 100 / (numPoints - 1); // determine the step size based on the desired number of points

        for (var i = 0; i < numPoints; i++) {
            var val = stepSize * i;
            x_data.push(val);
            y_data_1.push(val);
        }

        return {x_data, y_data_1};
    }

    // Initialize landmarks
    var landmarks = [
        "browDownLeft",
        "browDownRight",
        "browInnerUp",
        "browOuterUpLeft",
        "browOuterUpRight",
        "cheekPuff",
        "cheekSquintLeft",
        "cheekSquintRight",
        "eyeBlinkLeft",
        "eyeBlinkRight",
        "eyeLookDownLeft",
        "eyeLookDownRight",
        "eyeLookInLeft",
        "eyeLookInRight",
        "eyeLookOutLeft",
        "eyeLookOutRight",
        "eyeLookUpLeft",
        "eyeLookUpRight",
        "eyeSquintLeft",
        "eyeSquintRight",
        "eyeWideLeft",
        "eyeWideRight",
        "jawForward",
        "jawLeft",
        "jawOpen",
        "jawRight",
        "mouthClose",
        "mouthDimpleLeft",
        "mouthDimpleRight",
        "mouthFrownLeft",
        "mouthFrownRight",
        "mouthFunnel",
        "mouthLeft",
        "mouthLowerDownLeft",
        "mouthLowerDownRight",
        "mouthPressLeft",
        "mouthPressRight",
        "mouthPucker",
        "mouthRight",
        "mouthRollLower",
        "mouthRollUpper",
        "mouthShrugLower",
        "mouthShrugUpper",
        "mouthSmileLeft",
        "mouthSmileRight",
        "mouthStretchLeft",
        "mouthStretchRight",
        "mouthUpperUpLeft",
        "mouthUpperUpRight",
        "noseSneerLeft",
        "noseSneerRight"
    ];

    var landmarkMappings = {};
    var testInput = 50;
    var activePoint = null;

    // Generate default linear mappings for each landmark morph with 6 control
    // points each; a worked example of the resulting shape follows this loop.
    for (var i = 0; i < landmarks.length; i++) {
        landmarkMappings[landmarks[i]] = generateControlPoints(5 + 1);
    }
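
    // What the default mapping looks like (illustrative, not part of the
    // original file): six control points on the identity line, which the
    // user can then drag on the chart to reshape a landmark's response.
    //
    //   generateControlPoints(6);
    //   // => { x_data:   [0, 20, 40, 60, 80, 100],
    //   //      y_data_1: [0, 20, 40, 60, 80, 100] }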

    // Handle dropdown change
    function handleLandmarkChange() {
        var dropdown = document.getElementById("landmarkDropdown");
        var selectedLandmark = dropdown.options[dropdown.selectedIndex].value;

        // update the data for the chart
        var mapping = landmarkMappings[selectedLandmark];

        // Clip mapping.y_data_1 to maxY before updating the chart
        for (var i = 0; i < mapping.y_data_1.length; i++) {
            if (mapping.y_data_1[i] > maxY) {
                mapping.y_data_1[i] = maxY;
            }
        }

        window.myChart.data.labels = mapping.x_data;
        window.myChart.data.datasets[0].data = mapping.y_data_1;
        // update label for the line to reflect the currently selected landmark
        window.myChart.data.datasets[0].label = selectedLandmark + " morph mapping";

        window.myChart.update();
    }

    // Functions to handle pointer events
    function down_handler(event) {
        // check for data point near event location
        const points = window.myChart.getElementAtEvent(event, {intersect: false});
        if (points.length > 0) {
            // grab nearest point, start dragging
            activePoint = points[0];
            canvas.onpointermove = move_handler;
        }
    }

    function up_handler(event) {
        // release grabbed point, stop dragging
        activePoint = null;
        canvas.onpointermove = null;
    }

    function move_handler(event) {
        // locate grabbed point in chart data
        if (activePoint != null) {
            var data = activePoint._chart.data;
            var datasetIndex = activePoint._datasetIndex;

            // read mouse position
            const helpers = Chart.helpers;
            var position = helpers.getRelativePosition(event, myChart);

            // convert mouse position to chart y axis value
            var chartArea = window.myChart.chartArea;
            var yAxis = window.myChart.scales["y-axis-0"];
            var yValue = map(position.y, chartArea.bottom, chartArea.top, yAxis.min, yAxis.max);

            // Prevent yValue from exceeding bounds
            yValue = Math.min(yValue, maxY);
            yValue = Math.max(yValue, 0);

            // update y value of active data point
            data.datasets[datasetIndex].data[activePoint._index] = yValue;
            window.myChart.update();
        }
    }

    // Draw a line chart on the canvas context
    window.onload = function () {
        var ctx = document.getElementById("canvas").getContext("2d");
        var canvas = document.getElementById("canvas");
        var mapping = landmarkMappings[landmarks[0]]; // start with first landmark
        window.myChart = Chart.Line(ctx, {
            data: {
                labels: mapping.x_data,
                datasets: [
                    {
                        data: mapping.y_data_1,
                        label: landmarks[0] + " morph mapping",
                        borderColor: "#3e95cd",
                        fill: false,
                        pointRadius: 8, // Adjust this for point size
                        pointHoverRadius: 10 // Adjust this for point size on hover
                    }
                ]
            },
            options: {
                animation: {
                    duration: 0
                },
                tooltips: {
                    mode: 'nearest'
                },
                scales: {
                    yAxes: [{
                        ticks: {
                            min: 0,
                            max: maxY
                        }
                    }]
                }
            }
        });

        // set pointer event handlers for canvas element
        canvas.onpointerdown = down_handler;
        canvas.onpointerup = up_handler;
        canvas.onpointermove = null;

        // Populate dropdown with landmarks
        var dropdown = document.getElementById("landmarkDropdown");
        for (var i = 0; i < landmarks.length; i++) {
            var option = document.createElement("option");
            option.text = landmarks[i];
            option.value = landmarks[i];
            dropdown.add(option);
        }
    };

    // MinMax map value to other coordinate system
    function map(value, start1, stop1, start2, stop2) {
        return start2 + (stop2 - start2) * ((value - start1) / (stop1 - start1));
    }

    // Monotone cubic spline (Fritsch-Carlson-style tangent limiting): returns
    // an interpolant that passes through the control points without
    // overshooting between them. A short usage example follows the function.
    function monotoneCubicSplineInterpolation(x, y) {
        var size = x.length;
        var delta = [];
        for (var i = 0; i < size - 1; i++) {
            delta[i] = (y[i + 1] - y[i]) / (x[i + 1] - x[i]);
        }

        var m = [];
        m[0] = delta[0];
        for (var i = 1; i < size - 1; i++) {
            m[i] = (delta[i - 1] + delta[i]) * 0.5;
        }
        m[size - 1] = delta[size - 2];

        for (var i = 0; i < size - 1; i++) {
            if (delta[i] === 0) {
                m[i] = m[i + 1] = 0;
            } else {
                var alpha = m[i] / delta[i];
                var beta = m[i + 1] / delta[i];
                if (alpha * alpha + beta * beta > 9) {
                    var tau = 3 / Math.sqrt(alpha * alpha + beta * beta);
                    m[i] = tau * alpha * delta[i];
                    m[i + 1] = tau * beta * delta[i];
                }
            }
        }

        return function(val) {
            if (val < x[0] || val > x[size - 1]) {
                throw new Error("The input value is out of the data range.");
            }
            var i = 1;
            while (x[i] < val) i++;
            i--;

            var h = (x[i + 1] - x[i]);
            var t = (val - x[i]) / h;
            return (y[i] * (1 + 2 * t) + h * m[i] * t) * (1 - t) * (1 - t) +
                (y[i + 1] * (3 - 2 * t) + h * m[i + 1] * (t - 1)) * t * t;
        };
    }
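
    // Example (illustrative): the returned interpolant is a function of x and
    // passes through every control point exactly.
    //   var f = monotoneCubicSplineInterpolation([0, 50, 100], [0, 80, 100]);
    //   f(50);   // => 80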

    function applyMapping(value, x_data, y_data_1) {
        var cubicSpline = monotoneCubicSplineInterpolation(x_data, y_data_1);
        var yValue = cubicSpline(value);
        return yValue;
    }

    function applyLandmarksMapping(values) {
        // Object to hold the mapped values
        var mappedValues = {};

        // Loop through each key-value pair in the provided JSON object
        for (var landmark in values) {
            // Skip the neutral landmark
            if (landmark === "_neutral") {
                continue;
            }
            if (values.hasOwnProperty(landmark)) {
                // Check if there is a mapping for this landmark
                if (landmarkMappings.hasOwnProperty(landmark)) {
                    // Apply the mapping: scores are 0..1, the editor curves
                    // live on a 0..100 scale (see the sketch below)
                    var value = values[landmark];
                    value *= 100;
                    var mapping = landmarkMappings[landmark];
                    var mappedValue = monotoneCubicSplineInterpolation(mapping.x_data, mapping.y_data_1)(value);
                    mappedValue /= 100;

                    // Store the mapped value
                    mappedValues[landmark] = mappedValue;
                }
            }
        }

        return mappedValues;
    }
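
    // Sketch of the pipeline above (illustrative numbers, not from the
    // original file): a score is scaled up to 0..100, run through the spline,
    // and scaled back down. With the default identity curve a value passes
    // through unchanged:
    //
    //   applyLandmarksMapping({ jawOpen: 0.42 });
    //   // => { jawOpen: 0.42 }   (0.42 -> 42 -> spline -> 42 -> 0.42)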

    </script>
    <script id="rendered-js" type="module">
    // Copyright 2023 The MediaPipe Authors.
    // Licensed under the Apache License, Version 2.0 (the "License");
    // you may not use this file except in compliance with the License.
    // You may obtain a copy of the License at
    //      http://www.apache.org/licenses/LICENSE-2.0
    // Unless required by applicable law or agreed to in writing, software
    // distributed under the License is distributed on an "AS IS" BASIS,
    // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    // See the License for the specific language governing permissions and
    // limitations under the License.

    import vision from "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.0";
    const { FaceLandmarker, FilesetResolver, DrawingUtils } = vision;
    const demosSection = document.getElementById("demos");
    const videoBlendShapes = document.getElementById("video-blend-shapes");
    let faceLandmarker;
    let runningMode = "IMAGE";
    let enableWebcamButton;
    let webcamRunning = false;
    const videoWidth = 400;
    var checkedValue = null;
    var yawDegrees;
    var pitchDegrees;
    var rollDegrees;
    // The first detected pose becomes the neutral calibration baseline.
    var calibration = 0;
    var yawC = 0;
    var pitchC = 0;
    var rollC = 0;

    // Before we can use the FaceLandmarker class we must wait for it to finish
    // loading. Machine Learning models can be large and take a moment to
    // get everything needed to run.
    async function runDemo() {
        const filesetResolver = await FilesetResolver.forVisionTasks("https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.0/wasm");
        faceLandmarker = await FaceLandmarker.createFromOptions(filesetResolver, {
            baseOptions: {
                modelAssetPath: "https://storage.googleapis.com/mediapipe-models/face_landmarker/face_landmarker/float16/1/face_landmarker.task",
                delegate: "GPU"
            },
            outputFaceBlendshapes: true,
            outputFacialTransformationMatrixes: true,
            runningMode,
            numFaces: 1
        });
        //demosSection.classList.remove("invisible");
    }
    runDemo();

    const video = document.getElementById("webcam");
    const canvasElement = document.getElementById("output_canvas");
    const canvasCtx = canvasElement.getContext("2d");

    // Check if webcam access is supported.
    function hasGetUserMedia() {
        return !!(navigator.mediaDevices && navigator.mediaDevices.getUserMedia);
    }

    // If webcam supported, add event listener to button for when user
    // wants to activate it.
    if (hasGetUserMedia()) {
        enableWebcamButton = document.getElementById("webcamButton");
        enableWebcamButton.addEventListener("click", enableCam);
    } else {
        console.warn("getUserMedia() is not supported by your browser");
    }

    // Enable the live webcam view and start detection.
    function enableCam(event) {
        if (!faceLandmarker) {
            console.log("Wait! faceLandmarker not loaded yet.");
            return;
        }
        if (webcamRunning === true) {
            webcamRunning = false;
            enableWebcamButton.innerText = "ENABLE PREDICTIONS";
        } else {
            webcamRunning = true;
            enableWebcamButton.innerText = "DISABLE PREDICTIONS";
        }
        // getUserMedia parameters.
        const constraints = {
            video: true
        };
        // Activate the webcam stream.
        navigator.mediaDevices.getUserMedia(constraints).then(function (stream) {
            video.srcObject = stream;
            video.addEventListener("loadeddata", predictWebcam);
        });
    }

    let lastVideoTime = -1;
    let results = undefined;
    const drawingUtils = new DrawingUtils(canvasCtx);

    async function predictWebcam() {
        const ratio = video.videoHeight / video.videoWidth;
        video.style.width = videoWidth + "px";
        video.style.height = videoWidth * ratio + "px";
        canvasElement.style.width = videoWidth + "px";
        canvasElement.style.height = videoWidth * ratio + "px";
        canvasElement.width = video.videoWidth;
        canvasElement.height = video.videoHeight;
        // Now let's start detecting the stream.
        if (runningMode === "IMAGE") {
            runningMode = "LIVE_STREAM";
            await faceLandmarker.setOptions({ runningMode: runningMode });
        }
        let nowInMs = Date.now();
        if (lastVideoTime !== video.currentTime) {
            lastVideoTime = video.currentTime;
            results = faceLandmarker.detectForVideo(video, nowInMs);
        }
        if (results.faceLandmarks && results.faceLandmarks.length > 0) {

            // Define multiple reference points, distributed evenly across the
            // face: one each for the top, bottom, left, and right of the face.
            const referencePoints = [
                results.faceLandmarks[0][10],
                results.faceLandmarks[0][152],
                results.faceLandmarks[0][33],
                results.faceLandmarks[0][263]
                // Add more reference points if desired...
            ];

            // Filter out undefined or null points (if they are not detected by the model at that time)
            const validPoints = referencePoints.filter(point => point !== undefined && point !== null);

            // Check if there are enough valid points to proceed (we need at least 3 points to build a plane normal)
            if (validPoints.length >= 3) {
                // Calculate the centroid of the valid points
                const centroid = validPoints.reduce((acc, point) => {
                    return { x: acc.x + point.x, y: acc.y + point.y, z: acc.z + point.z };
                }, { x: 0, y: 0, z: 0 });

                centroid.x /= validPoints.length;
                centroid.y /= validPoints.length;
                centroid.z /= validPoints.length;

                // Calculate the normal vector by averaging cross products
                let normalVector = { x: 0, y: 0, z: 0 };
                for (let i = 0; i < validPoints.length - 1; i++) {
                    const vector1 = { x: validPoints[i].x - centroid.x, y: validPoints[i].y - centroid.y, z: validPoints[i].z - centroid.z };
                    const vector2 = { x: validPoints[i + 1].x - centroid.x, y: validPoints[i + 1].y - centroid.y, z: validPoints[i + 1].z - centroid.z };

                    const crossProduct = {
                        x: vector1.y * vector2.z - vector1.z * vector2.y,
                        y: vector1.z * vector2.x - vector1.x * vector2.z,
                        z: vector1.x * vector2.y - vector1.y * vector2.x
                    };

                    normalVector.x += crossProduct.x;
                    normalVector.y += crossProduct.y;
                    normalVector.z += crossProduct.z;
                }

                normalVector.x /= validPoints.length - 1;
                normalVector.y /= validPoints.length - 1;
                normalVector.z /= validPoints.length - 1;

                // Normalize the normal vector (optional but can give better results)
                const magnitude = Math.sqrt(normalVector.x ** 2 + normalVector.y ** 2 + normalVector.z ** 2);
                const normalizedNormal = {
                    x: normalVector.x / magnitude,
                    y: normalVector.y / magnitude,
                    z: normalVector.z / magnitude
                };

                // Calculate the yaw, pitch, and roll angles from the face-plane normal
                const yaw = Math.atan2(normalizedNormal.x, normalizedNormal.z);
                const pitch = Math.atan2(-normalizedNormal.y, Math.sqrt(normalizedNormal.x ** 2 + normalizedNormal.z ** 2));
                const roll = Math.atan2(normalizedNormal.y, normalizedNormal.x);

                // Convert to degrees; the first frame's pose is the zero reference.
                if (calibration === 0) {
                    yawC = yaw * (180 / Math.PI);
                    pitchC = pitch * (180 / Math.PI);
                    rollC = roll * (180 / Math.PI);
                    calibration = 1;
                }
                yawDegrees = yaw * (180 / Math.PI) - yawC;
                pitchDegrees = pitch * (180 / Math.PI) - pitchC;
                rollDegrees = roll * (180 / Math.PI) - rollC;

                console.log("Yaw: ", yawDegrees, "Pitch: ", pitchDegrees, "Roll: ", rollDegrees);
            }

            // draw on canvas
            for (const landmarks of results.faceLandmarks) {
                drawingUtils.drawConnectors(landmarks, FaceLandmarker.FACE_LANDMARKS_TESSELATION, { color: "#C0C0C070", lineWidth: 1 });
                drawingUtils.drawConnectors(landmarks, FaceLandmarker.FACE_LANDMARKS_RIGHT_EYE, { color: "#FF3030" });
                drawingUtils.drawConnectors(landmarks, FaceLandmarker.FACE_LANDMARKS_RIGHT_EYEBROW, { color: "#FF3030" });
                drawingUtils.drawConnectors(landmarks, FaceLandmarker.FACE_LANDMARKS_LEFT_EYE, { color: "#30FF30" });
                drawingUtils.drawConnectors(landmarks, FaceLandmarker.FACE_LANDMARKS_LEFT_EYEBROW, { color: "#30FF30" });
                drawingUtils.drawConnectors(landmarks, FaceLandmarker.FACE_LANDMARKS_FACE_OVAL, { color: "#E0E0E0" });
                drawingUtils.drawConnectors(landmarks, FaceLandmarker.FACE_LANDMARKS_LIPS, { color: "#E0E0E0" });
                drawingUtils.drawConnectors(landmarks, FaceLandmarker.FACE_LANDMARKS_RIGHT_IRIS, { color: "#FF3030" });
                drawingUtils.drawConnectors(landmarks, FaceLandmarker.FACE_LANDMARKS_LEFT_IRIS, { color: "#30FF30" });
            }
        }
        drawBlendShapes(videoBlendShapes, results.faceBlendshapes);
        // Call this function again to keep predicting when the browser is ready.
        if (webcamRunning === true) {
            window.requestAnimationFrame(predictWebcam);
        }
    }

    function drawBlendShapes(el, blendShapes) {
        if (!blendShapes || !blendShapes.length) {
            return;
        }

        let mappedJson = blendShapes[0].categories.reduce((obj, item) => {
            obj[item.categoryName] = item.score;
            return obj;
        }, {});

        mappedJson = applyLandmarksMapping(mappedJson);

        // "trackingmotion" tells the script to also use the head pose for navigation.
        var inputElements = document.getElementsByClassName('switchmove');
        var send = {
            "type": inputElements[0].checked ? "trackingmotion" : "tracking",
            "data": mappedJson,
            "yaw": yawDegrees,
            "pitch": pitchDegrees,
            "roll": rollDegrees
        };

        EventBridge.emitWebEvent(JSON.stringify(send));
    }

    </script>

</body>

</html>