Mirror of https://github.com/lubosz/overte.git (synced 2025-08-08 03:27:48 +02:00)

Commit e47078727c: Merge branch 'instancing' into instancing_fbx

19 changed files with 1012 additions and 328 deletions
Binary file not shown.
Binary file not shown.
|
@ -1544,103 +1544,292 @@
|
||||||
"type": "randomSwitchStateMachine"
|
"type": "randomSwitchStateMachine"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"children": [
|
"children": [
|
||||||
{
|
{
|
||||||
"children": [
|
"children": [
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"endFrame": 21,
|
||||||
|
"loopFlag": false,
|
||||||
|
"startFrame": 1,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/sitting_emote_point_all.fbx"
|
||||||
|
},
|
||||||
|
"id": "seatedReactionPointIntro",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"endFrame": 100,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 21,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/sitting_emote_point_all.fbx"
|
||||||
|
},
|
||||||
|
"id": "seatedReactionPointLoop",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"endFrame": 134,
|
||||||
|
"loopFlag": false,
|
||||||
|
"mirrorFlag": false,
|
||||||
|
"startFrame": 100,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/sitting_emote_point_all.fbx"
|
||||||
|
},
|
||||||
|
"id": "seatedReactionPointOutro",
|
||||||
|
"type": "clip"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"currentState": "seatedReactionPointIntro",
|
||||||
|
"randomSwitchTimeMax": 10,
|
||||||
|
"randomSwitchTimeMin": 1,
|
||||||
|
"states": [
|
||||||
|
{
|
||||||
|
"easingType": "easeInOutQuad",
|
||||||
|
"id": "seatedReactionPointIntro",
|
||||||
|
"interpDuration": 18,
|
||||||
|
"interpTarget": 18,
|
||||||
|
"interpType": "evaluateBoth",
|
||||||
|
"priority": 1,
|
||||||
|
"resume": false,
|
||||||
|
"transitions": [
|
||||||
|
{
|
||||||
|
"randomSwitchState": "seatedReactionPointLoop",
|
||||||
|
"var": "seatedReactionPointIntroOnDone"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"easingType": "easeInOutQuad",
|
||||||
|
"id": "seatedReactionPointLoop",
|
||||||
|
"interpDuration": 18,
|
||||||
|
"interpTarget": 18,
|
||||||
|
"interpType": "evaluateBoth",
|
||||||
|
"priority": 0,
|
||||||
|
"resume": false,
|
||||||
|
"transitions": [
|
||||||
|
{
|
||||||
|
"randomSwitchState": "seatedReactionPointOutro",
|
||||||
|
"var": "reactionPointDisabled"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"easingType": "easeInOutQuad",
|
||||||
|
"id": "seatedReactionPointOutro",
|
||||||
|
"interpDuration": 18,
|
||||||
|
"interpTarget": 18,
|
||||||
|
"interpType": "evaluateBoth",
|
||||||
|
"priority": 0,
|
||||||
|
"resume": false,
|
||||||
|
"transitions": [
|
||||||
|
{
|
||||||
|
"randomSwitchState": "seatedReactionPointLoop",
|
||||||
|
"var": "reactionPointEnabled"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"triggerRandomSwitch": ""
|
||||||
|
},
|
||||||
|
"id": "seatedReactionPoint",
|
||||||
|
"type": "randomSwitchStateMachine"
|
||||||
|
}
|
||||||
],
|
],
|
||||||
"data": {
|
"data": {
|
||||||
"endFrame": 21,
|
"alpha": 0,
|
||||||
"loopFlag": false,
|
"alphaVar": "seatedPointBlendAlpha",
|
||||||
"startFrame": 1,
|
"blendType": "addAbsolute"
|
||||||
"timeScale": 1,
|
|
||||||
"url": "qrc:///avatar/animations/sitting_emote_point_all.fbx"
|
|
||||||
},
|
},
|
||||||
"id": "seatedReactionPointIntro",
|
"id": "seatedReactionPointBase",
|
||||||
"type": "clip"
|
"type": "blendLinear"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"children": [
|
"children": [
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 11,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 11,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "seatedPointLeft",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 30,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 30,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "seatedPointRight",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 50,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 50,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "seatedPointUp",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 70,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 70,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "seatedPointDown",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 90,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 90,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "seatedPointUpLeft",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 110,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 110,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "seatedPointUpRight",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 130,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 130,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "seatedPointDownLeft",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 150,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 150,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "seatedPointDownRight",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 3,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 3,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/sitting_emote_point_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "seatedPointCenter",
|
||||||
|
"type": "clip"
|
||||||
|
}
|
||||||
],
|
],
|
||||||
"data": {
|
"data": {
|
||||||
"endFrame": 100,
|
"alpha": [
|
||||||
"loopFlag": true,
|
0,
|
||||||
"startFrame": 21,
|
0,
|
||||||
"timeScale": 1,
|
0
|
||||||
"url": "qrc:///avatar/animations/sitting_emote_point_all.fbx"
|
],
|
||||||
|
"alphaVar": "pointAroundAlpha",
|
||||||
|
"centerId": "seatedPointCenter",
|
||||||
|
"downId": "seatedPointDown",
|
||||||
|
"downLeftId": "seatedPointDownLeft",
|
||||||
|
"downRightId": "seatedPointDownRight",
|
||||||
|
"leftId": "seatedPointLeft",
|
||||||
|
"rightId": "seatedPointRight",
|
||||||
|
"upId": "seatedPointUp",
|
||||||
|
"upLeftId": "seatedPointUpLeft",
|
||||||
|
"upRightId": "seatedPointUpRight"
|
||||||
},
|
},
|
||||||
"id": "seatedReactionPointLoop",
|
"id": "seatedPointAround",
|
||||||
"type": "clip"
|
"type": "blendDirectional"
|
||||||
},
|
|
||||||
{
|
|
||||||
"children": [
|
|
||||||
],
|
|
||||||
"data": {
|
|
||||||
"endFrame": 134,
|
|
||||||
"loopFlag": false,
|
|
||||||
"mirrorFlag": false,
|
|
||||||
"startFrame": 100,
|
|
||||||
"timeScale": 1,
|
|
||||||
"url": "qrc:///avatar/animations/sitting_emote_point_all.fbx"
|
|
||||||
},
|
|
||||||
"id": "seatedReactionPointOutro",
|
|
||||||
"type": "clip"
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"data": {
|
"data": {
|
||||||
"currentState": "seatedReactionPointIntro",
|
"alpha": 0,
|
||||||
"randomSwitchTimeMax": 10,
|
"alphaVar": "pointBlendAlpha",
|
||||||
"randomSwitchTimeMin": 1,
|
"blendType": "addAbsolute"
|
||||||
"states": [
|
|
||||||
{
|
|
||||||
"easingType": "easeInOutQuad",
|
|
||||||
"id": "seatedReactionPointIntro",
|
|
||||||
"interpDuration": 18,
|
|
||||||
"interpTarget": 18,
|
|
||||||
"interpType": "evaluateBoth",
|
|
||||||
"priority": 1,
|
|
||||||
"resume": false,
|
|
||||||
"transitions": [
|
|
||||||
{
|
|
||||||
"randomSwitchState": "seatedReactionPointLoop",
|
|
||||||
"var": "seatedReactionPointIntroOnDone"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"easingType": "easeInOutQuad",
|
|
||||||
"id": "seatedReactionPointLoop",
|
|
||||||
"interpDuration": 18,
|
|
||||||
"interpTarget": 18,
|
|
||||||
"interpType": "evaluateBoth",
|
|
||||||
"priority": 0,
|
|
||||||
"resume": false,
|
|
||||||
"transitions": [
|
|
||||||
{
|
|
||||||
"randomSwitchState": "seatedReactionPointOutro",
|
|
||||||
"var": "reactionPointDisabled"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"easingType": "easeInOutQuad",
|
|
||||||
"id": "seatedReactionPointOutro",
|
|
||||||
"interpDuration": 18,
|
|
||||||
"interpTarget": 18,
|
|
||||||
"interpType": "evaluateBoth",
|
|
||||||
"priority": 0,
|
|
||||||
"resume": false,
|
|
||||||
"transitions": [
|
|
||||||
{
|
|
||||||
"randomSwitchState": "seatedReactionPointLoop",
|
|
||||||
"var": "reactionPointEnabled"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"triggerRandomSwitch": ""
|
|
||||||
},
|
},
|
||||||
"id": "seatedReactionPoint",
|
"id": "seatedReactionPoint",
|
||||||
"type": "randomSwitchStateMachine"
|
"type": "blendLinear"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"data": {
|
"data": {
|
||||||
|
@ -3531,97 +3720,275 @@
|
||||||
"children": [
|
"children": [
|
||||||
{
|
{
|
||||||
"children": [
|
"children": [
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"endFrame": 21,
|
||||||
|
"loopFlag": false,
|
||||||
|
"startFrame": 1,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/emote_point01_all.fbx"
|
||||||
|
},
|
||||||
|
"id": "reactionPointIntro",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"endFrame": 100,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 21,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/emote_point01_all.fbx"
|
||||||
|
},
|
||||||
|
"id": "reactionPointLoop",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"endFrame": 134,
|
||||||
|
"loopFlag": false,
|
||||||
|
"startFrame": 100,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/emote_point01_all.fbx"
|
||||||
|
},
|
||||||
|
"id": "reactionPointOutro",
|
||||||
|
"type": "clip"
|
||||||
|
}
|
||||||
],
|
],
|
||||||
"data": {
|
"data": {
|
||||||
"endFrame": 21,
|
"currentState": "reactionPointIntro",
|
||||||
"loopFlag": false,
|
"randomSwitchTimeMax": 10,
|
||||||
"startFrame": 1,
|
"randomSwitchTimeMin": 1,
|
||||||
"timeScale": 1,
|
"states": [
|
||||||
"url": "qrc:///avatar/animations/emote_point01_all.fbx"
|
{
|
||||||
|
"easingType": "easeInOutQuad",
|
||||||
|
"id": "reactionPointIntro",
|
||||||
|
"interpDuration": 1,
|
||||||
|
"interpTarget": 1,
|
||||||
|
"interpType": "evaluateBoth",
|
||||||
|
"priority": 1,
|
||||||
|
"resume": false,
|
||||||
|
"transitions": [
|
||||||
|
{
|
||||||
|
"randomSwitchState": "reactionPointLoop",
|
||||||
|
"var": "reactionPointIntroOnDone"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "reactionPointLoop",
|
||||||
|
"interpDuration": 1,
|
||||||
|
"interpTarget": 1,
|
||||||
|
"priority": 0,
|
||||||
|
"resume": false,
|
||||||
|
"transitions": [
|
||||||
|
{
|
||||||
|
"randomSwitchState": "reactionPointOutro",
|
||||||
|
"var": "reactionPointDisabled"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"easingType": "easeInOutQuad",
|
||||||
|
"id": "reactionPointOutro",
|
||||||
|
"interpDuration": 6,
|
||||||
|
"interpTarget": 6,
|
||||||
|
"interpType": "evaluateBoth",
|
||||||
|
"priority": 0,
|
||||||
|
"resume": false,
|
||||||
|
"transitions": [
|
||||||
|
{
|
||||||
|
"randomSwitchState": "reactionPointLoop",
|
||||||
|
"var": "reactionPointEnabled"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"triggerRandomSwitch": ""
|
||||||
},
|
},
|
||||||
"id": "reactionPointIntro",
|
"id": "reactionPoint",
|
||||||
"type": "clip"
|
"type": "randomSwitchStateMachine"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"children": [
|
"children": [
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 11,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 11,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "idlePointLeft",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 30,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 30,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "idlePointRight",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 50,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 50,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "idlePointUp",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 70,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 70,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "idlePointDown",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 90,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 90,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "idlePointUpLeft",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 110,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 110,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "idlePointUpRight",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 130,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 130,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "idlePointDownLeft",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 150,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 150,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "idlePointDownRight",
|
||||||
|
"type": "clip"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"children": [
|
||||||
|
],
|
||||||
|
"data": {
|
||||||
|
"baseFrame": 1,
|
||||||
|
"baseURL": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx",
|
||||||
|
"blendType": "addAbsolute",
|
||||||
|
"endFrame": 3,
|
||||||
|
"loopFlag": true,
|
||||||
|
"startFrame": 3,
|
||||||
|
"timeScale": 1,
|
||||||
|
"url": "qrc:///avatar/animations/emote_point01_aimoffsets.fbx"
|
||||||
|
},
|
||||||
|
"id": "idlePointCenter",
|
||||||
|
"type": "clip"
|
||||||
|
}
|
||||||
],
|
],
|
||||||
"data": {
|
"data": {
|
||||||
"endFrame": 100,
|
"alpha": [
|
||||||
"loopFlag": true,
|
0,
|
||||||
"startFrame": 21,
|
0,
|
||||||
"timeScale": 1,
|
0
|
||||||
"url": "qrc:///avatar/animations/emote_point01_all.fbx"
|
],
|
||||||
|
"alphaVar": "pointAroundAlpha",
|
||||||
|
"centerId": "idlePointCenter",
|
||||||
|
"downId": "idlePointDown",
|
||||||
|
"downLeftId": "idlePointDownLeft",
|
||||||
|
"downRightId": "idlePointDownRight",
|
||||||
|
"leftId": "idlePointLeft",
|
||||||
|
"rightId": "idlePointRight",
|
||||||
|
"upId": "idlePointUp",
|
||||||
|
"upLeftId": "idlePointUpLeft",
|
||||||
|
"upRightId": "idlePointUpRight"
|
||||||
},
|
},
|
||||||
"id": "reactionPointLoop",
|
"id": "idlePointAround",
|
||||||
"type": "clip"
|
"type": "blendDirectional"
|
||||||
},
|
|
||||||
{
|
|
||||||
"children": [
|
|
||||||
],
|
|
||||||
"data": {
|
|
||||||
"endFrame": 134,
|
|
||||||
"loopFlag": false,
|
|
||||||
"startFrame": 100,
|
|
||||||
"timeScale": 1,
|
|
||||||
"url": "qrc:///avatar/animations/emote_point01_all.fbx"
|
|
||||||
},
|
|
||||||
"id": "reactionPointOutro",
|
|
||||||
"type": "clip"
|
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"data": {
|
"data": {
|
||||||
"currentState": "reactionPointIntro",
|
"alpha": 0,
|
||||||
"randomSwitchTimeMax": 10,
|
"alphaVar": "pointBlendAlpha",
|
||||||
"randomSwitchTimeMin": 1,
|
"blendType": "addAbsolute"
|
||||||
"states": [
|
|
||||||
{
|
|
||||||
"easingType": "easeInOutQuad",
|
|
||||||
"id": "reactionPointIntro",
|
|
||||||
"interpDuration": 1,
|
|
||||||
"interpTarget": 1,
|
|
||||||
"interpType": "evaluateBoth",
|
|
||||||
"priority": 1,
|
|
||||||
"resume": false,
|
|
||||||
"transitions": [
|
|
||||||
{
|
|
||||||
"randomSwitchState": "reactionPointLoop",
|
|
||||||
"var": "reactionPointIntroOnDone"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"id": "reactionPointLoop",
|
|
||||||
"interpDuration": 1,
|
|
||||||
"interpTarget": 1,
|
|
||||||
"priority": 0,
|
|
||||||
"resume": false,
|
|
||||||
"transitions": [
|
|
||||||
{
|
|
||||||
"randomSwitchState": "reactionPointOutro",
|
|
||||||
"var": "reactionPointDisabled"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"easingType": "easeInOutQuad",
|
|
||||||
"id": "reactionPointOutro",
|
|
||||||
"interpDuration": 6,
|
|
||||||
"interpTarget": 6,
|
|
||||||
"interpType": "evaluateBoth",
|
|
||||||
"priority": 0,
|
|
||||||
"resume": false,
|
|
||||||
"transitions": [
|
|
||||||
{
|
|
||||||
"randomSwitchState": "reactionPointLoop",
|
|
||||||
"var": "reactionPointEnabled"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"triggerRandomSwitch": ""
|
|
||||||
},
|
},
|
||||||
"id": "reactionPoint",
|
"id": "reactionPoint",
|
||||||
"type": "randomSwitchStateMachine"
|
"type": "blendLinear"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"data": {
|
"data": {
|
||||||
|
@@ -5683,16 +6050,16 @@
                     "upLeftId": "lookUpLeft",
                     "upRightId": "lookUpRight"
                 },
-                "id": "lookAround",
+                "id": "lookAroundBlend",
                 "type": "blendDirectional"
             }
         ],
         "data": {
             "alpha": 0,
-            "alphaVar": "additiveBlendAlpha",
+            "alphaVar": "lookBlendAlpha",
             "blendType": "addAbsolute"
         },
-        "id": "additiveBlend",
+        "id": "lookAround",
         "type": "blendLinear"
     }
 ],
|
||||||
"channels": [
|
"channels": [
|
||||||
{ "from": "Keyboard.A", "when": ["Keyboard.RightMouseButton", "!Keyboard.Control"], "to": "Actions.LATERAL_LEFT" },
|
{ "from": "Keyboard.A", "when": ["Keyboard.RightMouseButton", "!Keyboard.Control"], "to": "Actions.LATERAL_LEFT" },
|
||||||
{ "from": "Keyboard.D", "when": ["Keyboard.RightMouseButton", "!Keyboard.Control"], "to": "Actions.LATERAL_RIGHT" },
|
{ "from": "Keyboard.D", "when": ["Keyboard.RightMouseButton", "!Keyboard.Control"], "to": "Actions.LATERAL_RIGHT" },
|
||||||
{ "from": "Keyboard.E", "when": ["!Application.CameraSelfie", "!Application.CameraLookAt", "!Keyboard.Control"], "to": "Actions.LATERAL_RIGHT" },
|
{ "from": "Keyboard.E", "when": ["!Application.CameraSelfie", "!Keyboard.Control"], "to": "Actions.LATERAL_RIGHT" },
|
||||||
{ "from": "Keyboard.Q", "when": ["!Application.CameraSelfie", "!Application.CameraLookAt", "!Keyboard.Control"], "to": "Actions.LATERAL_LEFT" },
|
{ "from": "Keyboard.Q", "when": ["!Application.CameraSelfie", "!Keyboard.Control"], "to": "Actions.LATERAL_LEFT" },
|
||||||
|
{ "from": "Keyboard.Q", "when": ["Application.CameraSelfie", "!Keyboard.Control"], "to": "Actions.LATERAL_RIGHT" },
|
||||||
|
{ "from": "Keyboard.E", "when": ["Application.CameraSelfie", "!Keyboard.Control"], "to": "Actions.LATERAL_LEFT" },
|
||||||
{ "from": "Keyboard.T", "when": "!Keyboard.Control", "to": "Actions.TogglePushToTalk" },
|
{ "from": "Keyboard.T", "when": "!Keyboard.Control", "to": "Actions.TogglePushToTalk" },
|
||||||
|
|
||||||
{ "comment" : "Mouse turn need to be small continuous increments",
|
{ "comment" : "Mouse turn need to be small continuous increments",
|
||||||
|
@@ -122,18 +124,18 @@
         },
 
         { "from": { "makeAxis" : [
-                    ["Keyboard.Q"],
-                    ["Keyboard.E"]
+                    ["Keyboard.A"],
+                    ["Keyboard.D"]
                 ]
             },
             "when": ["Application.CameraLookAt", "!Keyboard.Control"],
             "to": "Actions.Yaw"
         },
 
         { "from": { "makeAxis" : [
-                    ["Keyboard.E"],
-                    ["Keyboard.Q"]
+                    ["Keyboard.A"],
+                    ["Keyboard.D"]
                 ]
             },
             "when": ["Application.CameraSelfie", "!Keyboard.Control"],
             "to": "Actions.Yaw"
@@ -215,10 +217,6 @@
         { "from": "Keyboard.S", "when": ["!Application.CameraSelfie", "!Keyboard.Control"], "to": "Actions.LONGITUDINAL_BACKWARD" },
         { "from": "Keyboard.S", "when": ["Application.CameraSelfie", "!Keyboard.Control"], "to": "Actions.LONGITUDINAL_FORWARD" },
         { "from": "Keyboard.W", "when": ["Application.CameraSelfie", "!Keyboard.Control"], "to": "Actions.LONGITUDINAL_BACKWARD" },
-        { "from": "Keyboard.A", "when": "Application.CameraLookAt", "to": "Actions.LATERAL_LEFT" },
-        { "from": "Keyboard.D", "when": "Application.CameraLookAt", "to": "Actions.LATERAL_RIGHT" },
-        { "from": "Keyboard.A", "when": "Application.CameraSelfie", "to": "Actions.LATERAL_RIGHT" },
-        { "from": "Keyboard.D", "when": "Application.CameraSelfie", "to": "Actions.LATERAL_LEFT" },
         { "from": "Keyboard.Shift", "when": ["!Keyboard.Left", "!Keyboard.Right"], "to": "Actions.SPRINT" },
         { "from": "Keyboard.C", "when": "!Keyboard.Control", "to": "Actions.VERTICAL_DOWN" },
         { "from": "Keyboard.Left", "when": "Keyboard.Shift", "to": "Actions.LATERAL_LEFT" },
@@ -99,10 +99,16 @@ static const QString USER_RECENTER_MODEL_FORCE_STAND = QStringLiteral("ForceStan
 static const QString USER_RECENTER_MODEL_AUTO = QStringLiteral("Auto");
 static const QString USER_RECENTER_MODEL_DISABLE_HMD_LEAN = QStringLiteral("DisableHMDLean");
 
-const QString HEAD_BLENDING_NAME = "lookAroundAlpha";
-const QString HEAD_ALPHA_NAME = "additiveBlendAlpha";
+const QString HEAD_BLEND_DIRECTIONAL_ALPHA_NAME = "lookAroundAlpha";
+const QString HEAD_BLEND_LINEAR_ALPHA_NAME = "lookBlendAlpha";
 const float HEAD_ALPHA_BLENDING = 1.0f;
 
+const QString POINT_REACTION_NAME = "point";
+const QString POINT_BLEND_DIRECTIONAL_ALPHA_NAME = "pointAroundAlpha";
+const QString POINT_BLEND_LINEAR_ALPHA_NAME = "pointBlendAlpha";
+const QString POINT_REF_JOINT_NAME = "RightShoulder";
+const float POINT_ALPHA_BLENDING = 1.0f;
+
 MyAvatar::SitStandModelType stringToUserRecenterModel(const QString& str) {
     if (str == USER_RECENTER_MODEL_FORCE_SIT) {
         return MyAvatar::ForceSit;
@@ -948,13 +954,16 @@ void MyAvatar::simulate(float deltaTime, bool inView) {
            qCDebug(interfaceapp) << "MyAvatar::simulate headPosition is NaN";
            headPosition = glm::vec3(0.0f);
        }
-
        head->setPosition(headPosition);
        head->setScale(getModelScale());
        head->simulate(deltaTime);
        CameraMode mode = qApp->getCamera().getMode();
        if (_scriptControlsHeadLookAt || mode == CAMERA_MODE_LOOK_AT || mode == CAMERA_MODE_SELFIE) {
-            updateHeadLookAt(deltaTime);
+            if (!_pointAtActive || !_isPointTargetValid) {
+                updateHeadLookAt(deltaTime);
+            } else {
+                resetHeadLookAt();
+            }
        } else if (_headLookAtActive){
            resetHeadLookAt();
            _headLookAtActive = false;
@@ -6113,6 +6122,10 @@ bool MyAvatar::beginReaction(QString reactionName) {
     if (reactionIndex >= 0 && reactionIndex < (int)NUM_AVATAR_BEGIN_END_REACTIONS) {
         std::lock_guard<std::mutex> guard(_reactionLock);
         _reactionEnabledRefCounts[reactionIndex]++;
+        if (reactionName == POINT_REACTION_NAME) {
+            _pointAtActive = true;
+            _isPointTargetValid = true;
+        }
         return true;
     }
     return false;
@@ -6122,13 +6135,18 @@ bool MyAvatar::endReaction(QString reactionName) {
     int reactionIndex = beginEndReactionNameToIndex(reactionName);
     if (reactionIndex >= 0 && reactionIndex < (int)NUM_AVATAR_BEGIN_END_REACTIONS) {
         std::lock_guard<std::mutex> guard(_reactionLock);
+        bool wasReactionActive = true;
         if (_reactionEnabledRefCounts[reactionIndex] > 0) {
             _reactionEnabledRefCounts[reactionIndex]--;
-            return true;
+            wasReactionActive = true;
         } else {
             _reactionEnabledRefCounts[reactionIndex] = 0;
-            return false;
+            wasReactionActive = false;
         }
+        if (reactionName == POINT_REACTION_NAME) {
+            _pointAtActive = _reactionEnabledRefCounts[reactionIndex] > 0;
+        }
+        return wasReactionActive;
     }
     return false;
 }
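[Editorial note, not part of the commit] _reactionEnabledRefCounts makes beginReaction()/endReaction() reference-counted: several scripts can hold the same reaction at once, and it only goes inactive when the last holder releases it, which is what the wasReactionActive return value above reports. A minimal standalone sketch of that contract, with hypothetical names:

    #include <array>

    static std::array<int, 4> refCounts {};              // one counter per begin/end reaction

    bool begin(int reactionIndex) {
        refCounts[reactionIndex]++;                      // another holder of the reaction
        return true;
    }

    bool end(int reactionIndex) {
        bool wasActive = refCounts[reactionIndex] > 0;
        if (wasActive) {
            refCounts[reactionIndex]--;                  // release one holder
        }
        return wasActive;                                // mirrors wasReactionActive above
    }
    // The reaction counts as enabled while refCounts[reactionIndex] > 0.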
@@ -6139,10 +6157,13 @@ void MyAvatar::updateRigControllerParameters(Rig::ControllerParameters& params)
     for (int i = 0; i < TRIGGER_REACTION_NAMES.size(); i++) {
         params.reactionTriggers[i] = _reactionTriggers[i];
     }
-
+    int pointReactionIndex = beginEndReactionNameToIndex("point");
     for (int i = 0; i < BEGIN_END_REACTION_NAMES.size(); i++) {
         // copy current state into params.
         params.reactionEnabledFlags[i] = _reactionEnabledRefCounts[i] > 0;
+        if (params.reactionEnabledFlags[i] && i == pointReactionIndex) {
+            params.reactionEnabledFlags[i] = _isPointTargetValid;
+        }
     }
 
     for (int i = 0; i < TRIGGER_REACTION_NAMES.size(); i++) {
@@ -6668,10 +6689,42 @@ void MyAvatar::updateLookAtPosition(FaceTracker* faceTracker, Camera& myCamera)
     getHead()->setLookAtPosition(lookAtSpot);
 }
 
+glm::vec3 MyAvatar::aimToBlendValues(const glm::vec3& aimVector, const glm::quat& frameOrientation) {
+    // This method computes the values for the directional blending animation node
+
+    glm::vec3 uVector = glm::normalize(frameOrientation * Vectors::UNIT_X);
+    glm::vec3 vVector = glm::normalize(frameOrientation * Vectors::UNIT_Y);
+
+    glm::vec3 aimDirection;
+    if (glm::length(aimVector) > EPSILON) {
+        aimDirection = glm::normalize(aimVector);
+    } else {
+        // aim vector is zero
+        return glm::vec3();
+    }
+
+    float xDot = glm::dot(uVector, aimDirection);
+    float yDot = glm::dot(vVector, aimDirection);
+
+    // Make sure dot products are in range to avoid acosf returning NaN
+    xDot = glm::min(glm::max(xDot, -1.0f), 1.0f);
+    yDot = glm::min(glm::max(yDot, -1.0f), 1.0f);
+
+    float xAngle = acosf(xDot);
+    float yAngle = acosf(yDot);
+
+    // xBlend and yBlend are the values from -1.0 to 1.0 that set the directional blending.
+    // We compute them using the angles (0 to PI/2) => (1.0 to 0.0) and (PI/2 to PI) => (0.0 to -1.0)
+    float xBlend = -(xAngle - 0.5f * PI) / (0.5f * PI);
+    float yBlend = -(yAngle - 0.5f * PI) / (0.5f * PI);
+    glm::vec3 blendValues = glm::vec3(xBlend, yBlend, 0.0f);
+    return blendValues;
+}
+
 void MyAvatar::resetHeadLookAt() {
     if (_skeletonModelLoaded) {
-        _skeletonModel->getRig().setDirectionalBlending(HEAD_BLENDING_NAME, glm::vec3(),
-            HEAD_ALPHA_NAME, HEAD_ALPHA_BLENDING);
+        _skeletonModel->getRig().setDirectionalBlending(HEAD_BLEND_DIRECTIONAL_ALPHA_NAME, glm::vec3(),
+            HEAD_BLEND_LINEAR_ALPHA_NAME, HEAD_ALPHA_BLENDING);
     }
 }
 
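[Editorial note, not part of the commit] aimToBlendValues() measures the angle between the aim direction and the avatar-frame X and Y axes and remaps it to the -1.0..1.0 range a blendDirectional animation node consumes: 0 maps to 1.0, PI/2 to 0.0 and PI to -1.0. A self-contained sketch of just that remapping (hypothetical helper, not engine code):

    #include <cassert>
    #include <cmath>

    // Same formula as xBlend/yBlend above: angle in [0, PI] -> blend value in [1, -1].
    float angleToBlend(float angle) {
        const float PI = 3.14159265358979f;
        return -(angle - 0.5f * PI) / (0.5f * PI);
    }

    int main() {
        const float PI = 3.14159265358979f;
        assert(std::fabs(angleToBlend(0.0f) - 1.0f) < 1e-5f);   // aiming along the axis
        assert(std::fabs(angleToBlend(0.5f * PI)) < 1e-5f);     // perpendicular, centre of the blend
        assert(std::fabs(angleToBlend(PI) + 1.0f) < 1e-5f);     // aiming the opposite way
        return 0;
    }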
@@ -6687,39 +6740,10 @@ void MyAvatar::resetLookAtRotation(const glm::vec3& avatarPosition, const glm::q
 void MyAvatar::updateHeadLookAt(float deltaTime) {
     if (_skeletonModelLoaded) {
         glm::vec3 lookAtTarget = _scriptControlsHeadLookAt ? _lookAtScriptTarget : _lookAtCameraTarget;
-        glm::vec3 avatarXVector = glm::normalize(getWorldOrientation() * Vectors::UNIT_X);
-        glm::vec3 avatarYVector = glm::normalize(getWorldOrientation() * Vectors::UNIT_Y);
-        glm::vec3 avatarZVector = glm::normalize(getWorldOrientation() * Vectors::UNIT_Z);
-        glm::vec3 headToTargetVector = lookAtTarget - getDefaultEyePosition();
-        if (glm::length(headToTargetVector) > EPSILON) {
-            headToTargetVector = glm::normalize(headToTargetVector);
-        } else {
-            // The target point is the avatar head
-            return;
-        }
-
-        float xDot = glm::dot(avatarXVector, headToTargetVector);
-        float yDot = glm::dot(avatarYVector, headToTargetVector);
-        float zDot = glm::dot(avatarZVector, headToTargetVector);
-        // Force the head to look at one of the sides when the look at point is behind the avatar
-        if (zDot > 0.0f && xDot != 0.0f) {
-            //xDot /= fabsf(xDot);
-        }
-
-        // Make sure dot products are in range to avoid acosf returning NaN
-        xDot = glm::min(glm::max(xDot, -1.0f), 1.0f);
-        yDot = glm::min(glm::max(yDot, -1.0f), 1.0f);
-
-        float xAngle = acosf(xDot);
-        float yAngle = acosf(yDot);
-
-        // xBlend and yBlend are the values from -1.0 to 1.0 that set the directional blending.
-        // We compute them using the angles (0 to PI/2) => (1.0 to 0.0) and (PI/2 to PI) => (0.0 to -1.0)
-        float xBlend = -(xAngle - 0.5f * PI) / (0.5f * PI);
-        float yBlend = -(yAngle - 0.5f * PI) / (0.5f * PI);
-        glm::vec3 lookAtBlend = glm::vec3(xBlend, yBlend, 0.0f);
-        _skeletonModel->getRig().setDirectionalBlending(HEAD_BLENDING_NAME, lookAtBlend,
-            HEAD_ALPHA_NAME, HEAD_ALPHA_BLENDING);
+        glm::vec3 aimVector = lookAtTarget - getDefaultEyePosition();
+        glm::vec3 lookAtBlend = MyAvatar::aimToBlendValues(aimVector, getWorldOrientation());
+        _skeletonModel->getRig().setDirectionalBlending(HEAD_BLEND_DIRECTIONAL_ALPHA_NAME, lookAtBlend,
+            HEAD_BLEND_LINEAR_ALPHA_NAME, HEAD_ALPHA_BLENDING);
 
         if (_scriptControlsHeadLookAt) {
             _scriptHeadControlTimer += deltaTime;
@@ -6743,3 +6767,30 @@ void MyAvatar::setHeadLookAt(const glm::vec3& lookAtTarget) {
     _scriptHeadControlTimer = 0.0f;
     _lookAtScriptTarget = lookAtTarget;
 }
+
+bool MyAvatar::setPointAt(const glm::vec3& pointAtTarget) {
+    if (QThread::currentThread() != thread()) {
+        bool result = false;
+        BLOCKING_INVOKE_METHOD(this, "setPointAt", Q_RETURN_ARG(bool, result),
+            Q_ARG(const glm::vec3&, pointAtTarget));
+        return result;
+    }
+    if (_skeletonModelLoaded && _pointAtActive) {
+        glm::vec3 aimVector = pointAtTarget - getJointPosition(POINT_REF_JOINT_NAME);
+        _isPointTargetValid = glm::dot(aimVector, getWorldOrientation() * Vectors::FRONT) > 0.0f;
+        if (_isPointTargetValid) {
+            glm::vec3 pointAtBlend = MyAvatar::aimToBlendValues(aimVector, getWorldOrientation());
+            _skeletonModel->getRig().setDirectionalBlending(POINT_BLEND_DIRECTIONAL_ALPHA_NAME, pointAtBlend,
+                POINT_BLEND_LINEAR_ALPHA_NAME, POINT_ALPHA_BLENDING);
+        }
+        return _isPointTargetValid;
+    }
+    return false;
+}
+
+void MyAvatar::resetPointAt() {
+    if (_skeletonModelLoaded) {
+        _skeletonModel->getRig().setDirectionalBlending(POINT_BLEND_DIRECTIONAL_ALPHA_NAME, glm::vec3(),
+            POINT_BLEND_LINEAR_ALPHA_NAME, POINT_ALPHA_BLENDING);
+    }
+}
@@ -1765,6 +1765,16 @@ public:
      */
     Q_INVOKABLE glm::vec3 getHeadLookAt() { return _lookAtCameraTarget; }
 
+    /**jsdoc
+     * Aims the pointing directional blending towards the provided target point.
+     * The "point" reaction should be triggered before using this method.
+     * <code>MyAvatar.beginReaction("point")</code>
+     * Returns <code>true</code> if the target point lays in front of the avatar.
+     * @function MyAvatar.setPointAt
+     * @param {Vec3} pointAtTarget - The target point in world coordinates.
+     */
+    Q_INVOKABLE bool setPointAt(const glm::vec3& pointAtTarget);
+
     glm::quat getLookAtRotation() { return _lookAtYaw * _lookAtPitch; }
 
     /**jsdoc
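[Editorial note, not part of the commit] The JSDoc above implies a call order: start the "point" reaction, feed it a world-space target while it is active, then end the reaction. A minimal sketch of a caller using only the methods added or documented in this commit (myAvatar is assumed to be a MyAvatar*, target a glm::vec3 in world coordinates):

    myAvatar->beginReaction("point");      // enables the point reaction and sets _pointAtActive

    // Whenever the target moves (for example once per frame while pointing):
    if (!myAvatar->setPointAt(target)) {
        // Target is behind the avatar; the directional blend is left unchanged.
    }

    myAvatar->endReaction("point");        // releases this holder of the reaction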
@@ -2653,6 +2663,8 @@ private:
     bool _shouldTurnToFaceCamera { false };
     bool _scriptControlsHeadLookAt { false };
     float _scriptHeadControlTimer { 0.0f };
+    bool _pointAtActive { false };
+    bool _isPointTargetValid { true };
 
     Setting::Handle<float> _realWorldFieldOfView;
     Setting::Handle<bool> _useAdvancedMovementControls;
@@ -2681,6 +2693,8 @@ private:
     void updateHeadLookAt(float deltaTime);
     void resetHeadLookAt();
     void resetLookAtRotation(const glm::vec3& avatarPosition, const glm::quat& avatarOrientation);
+    void resetPointAt();
+    static glm::vec3 aimToBlendValues(const glm::vec3& aimVector, const glm::quat& frameOrientation);
 
     // Avatar Preferences
     QUrl _fullAvatarURLFromPreferences;
@@ -282,9 +282,9 @@ void AudioDeviceList::onDevicesChanged(const QList<HifiAudioDeviceInfo>& devices
 
         if (deviceInfo.isDefault()) {
             if (deviceInfo.getMode() == QAudio::AudioInput) {
-                device.display = "Default microphone (recommended)";
+                device.display = "Computer's default microphone (recommended)";
             } else {
-                device.display = "Default audio (recommended)";
+                device.display = "Computer's default audio (recommended)";
             }
         } else {
             device.display = device.info.deviceName()
@@ -359,7 +359,7 @@ protected:
         A,
         B
     };
-    NetworkAnimState() : clipNodeEnum(NetworkAnimState::None) {}
+    NetworkAnimState() : clipNodeEnum(NetworkAnimState::None), fps(30.0f), loop(false), firstFrame(0.0f), lastFrame(0.0f), blendTime(FLT_MAX) {}
     NetworkAnimState(ClipNodeEnum clipNodeEnumIn, const QString& urlIn, float fpsIn, bool loopIn, float firstFrameIn, float lastFrameIn) :
         clipNodeEnum(clipNodeEnumIn), url(urlIn), fps(fpsIn), loop(loopIn), firstFrame(firstFrameIn), lastFrame(lastFrameIn) {}
 
@@ -494,7 +494,9 @@ HifiAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
             waveInGetDevCaps(WAVE_MAPPER, &wic, sizeof(wic));
             //Use the received manufacturer id to get the device's real name
             waveInGetDevCaps(wic.wMid, &wic, sizeof(wic));
+#if !defined(NDEBUG)
             qCDebug(audioclient) << "input device:" << wic.szPname;
+#endif
             deviceName = wic.szPname;
         } else {
             WAVEOUTCAPS woc;
@@ -502,7 +504,9 @@ HifiAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
             waveOutGetDevCaps(WAVE_MAPPER, &woc, sizeof(woc));
             //Use the received manufacturer id to get the device's real name
             waveOutGetDevCaps(woc.wMid, &woc, sizeof(woc));
+#if !defined(NDEBUG)
             qCDebug(audioclient) << "output device:" << woc.szPname;
+#endif
             deviceName = woc.szPname;
         }
     } else {
@@ -532,10 +536,10 @@ HifiAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
                 break;
             }
         }
+#if !defined(NDEBUG)
     qCDebug(audioclient) << "defaultAudioDeviceForMode mode: " << (mode == QAudio::AudioOutput ? "Output" : "Input")
         << " [" << deviceName << "] [" << foundDevice.deviceName() << "]";
+#endif
     return foundDevice;
 #endif
 
@@ -133,7 +133,21 @@ void Avatar::setShowNamesAboveHeads(bool show) {
     showNamesAboveHeads = show;
 }
 
+static const char* avatarTransitStatusToStringMap[] = {
+    "IDLE",
+    "STARTED",
+    "PRE_TRANSIT",
+    "START_TRANSIT",
+    "TRANSITING",
+    "END_TRANSIT",
+    "POST_TRANSIT",
+    "ENDED",
+    "ABORT_TRANSIT"
+};
+
 AvatarTransit::Status AvatarTransit::update(float deltaTime, const glm::vec3& avatarPosition, const AvatarTransit::TransitConfig& config) {
+    AvatarTransit::Status previousStatus = _status;
+
     float oneFrameDistance = _isActive ? glm::length(avatarPosition - _endPosition) : glm::length(avatarPosition - _lastPosition);
     if (oneFrameDistance > (config._minTriggerDistance * _scale)) {
         if (oneFrameDistance < (config._maxTriggerDistance * _scale)) {
|
||||||
reset();
|
reset();
|
||||||
_status = Status::ENDED;
|
_status = Status::ENDED;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (previousStatus != _status) {
|
||||||
|
qDebug(avatars_renderer) << "AvatarTransit " << avatarTransitStatusToStringMap[(int)previousStatus] << "->" << avatarTransitStatusToStringMap[_status];
|
||||||
|
}
|
||||||
return _status;
|
return _status;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@@ -13,6 +13,8 @@
 
 #include "BakerTypes.h"
 #include "ModelMath.h"
+#include "ReweightDeformersTask.h"
+#include "CollectShapeVerticesTask.h"
 #include "BuildGraphicsMeshTask.h"
 #include "CalculateMeshNormalsTask.h"
 #include "CalculateMeshTangentsTask.h"
|
||||||
|
|
||||||
class BuildModelTask {
|
class BuildModelTask {
|
||||||
public:
|
public:
|
||||||
using Input = VaryingSet6<hfm::Model::Pointer, std::vector<hfm::Mesh>, std::vector<hfm::Joint>, QMap<int, glm::quat>, QHash<QString, int>, FlowData>;
|
using Input = VaryingSet7<hfm::Model::Pointer, std::vector<hfm::Mesh>, std::vector<hfm::Joint>, QMap<int, glm::quat>, QHash<QString, int>, FlowData, std::vector<ShapeVertices>>;
|
||||||
using Output = hfm::Model::Pointer;
|
using Output = hfm::Model::Pointer;
|
||||||
using JobModel = Job::ModelIO<BuildModelTask, Input, Output>;
|
using JobModel = Job::ModelIO<BuildModelTask, Input, Output>;
|
||||||
|
|
||||||
|
@ -115,6 +117,9 @@ namespace baker {
|
||||||
hfmModelOut->jointRotationOffsets = input.get3();
|
hfmModelOut->jointRotationOffsets = input.get3();
|
||||||
hfmModelOut->jointIndices = input.get4();
|
hfmModelOut->jointIndices = input.get4();
|
||||||
hfmModelOut->flowData = input.get5();
|
hfmModelOut->flowData = input.get5();
|
||||||
|
hfmModelOut->shapeVertices = input.get6();
|
||||||
|
// These depend on the ShapeVertices
|
||||||
|
// TODO: Create a task for this rather than calculating it here
|
||||||
hfmModelOut->computeKdops();
|
hfmModelOut->computeKdops();
|
||||||
output = hfmModelOut;
|
output = hfmModelOut;
|
||||||
}
|
}
|
||||||
|
@ -151,8 +156,16 @@ namespace baker {
|
||||||
const auto calculateBlendshapeTangentsInputs = CalculateBlendshapeTangentsTask::Input(normalsPerBlendshapePerMesh, blendshapesPerMeshIn, meshesIn).asVarying();
|
const auto calculateBlendshapeTangentsInputs = CalculateBlendshapeTangentsTask::Input(normalsPerBlendshapePerMesh, blendshapesPerMeshIn, meshesIn).asVarying();
|
||||||
const auto tangentsPerBlendshapePerMesh = model.addJob<CalculateBlendshapeTangentsTask>("CalculateBlendshapeTangents", calculateBlendshapeTangentsInputs);
|
const auto tangentsPerBlendshapePerMesh = model.addJob<CalculateBlendshapeTangentsTask>("CalculateBlendshapeTangents", calculateBlendshapeTangentsInputs);
|
||||||
|
|
||||||
|
// Skinning weight calculations
|
||||||
|
// NOTE: Due to limitations in the current graphics::MeshPointer representation, the output list of ReweightedDeformers is per-mesh. An element is empty if there are no deformers for the mesh of the same index.
|
||||||
|
const auto reweightDeformersInputs = ReweightDeformersTask::Input(meshesIn, shapesIn, dynamicTransformsIn, deformersIn).asVarying();
|
||||||
|
const auto reweightedDeformers = model.addJob<ReweightDeformersTask>("ReweightDeformers", reweightDeformersInputs);
|
||||||
|
// Shape vertices are included/rejected based on skinning weight, and thus must use the reweighted deformers.
|
||||||
|
const auto collectShapeVerticesInputs = CollectShapeVerticesTask::Input(meshesIn, shapesIn, jointsIn, dynamicTransformsIn, reweightedDeformers).asVarying();
|
||||||
|
const auto shapeVerticesPerJoint = model.addJob<CollectShapeVerticesTask>("CollectShapeVertices", collectShapeVerticesInputs);
|
||||||
|
|
||||||
// Build the graphics::MeshPointer for each hfm::Mesh
|
// Build the graphics::MeshPointer for each hfm::Mesh
|
||||||
const auto buildGraphicsMeshInputs = BuildGraphicsMeshTask::Input(meshesIn, url, meshIndicesToModelNames, normalsPerMesh, tangentsPerMesh, shapesIn, dynamicTransformsIn, deformersIn).asVarying();
|
const auto buildGraphicsMeshInputs = BuildGraphicsMeshTask::Input(meshesIn, url, meshIndicesToModelNames, normalsPerMesh, tangentsPerMesh, shapesIn, dynamicTransformsIn, reweightedDeformers).asVarying();
|
||||||
const auto graphicsMeshes = model.addJob<BuildGraphicsMeshTask>("BuildGraphicsMesh", buildGraphicsMeshInputs);
|
const auto graphicsMeshes = model.addJob<BuildGraphicsMeshTask>("BuildGraphicsMesh", buildGraphicsMeshInputs);
|
||||||
|
|
||||||
// Prepare joint information
|
// Prepare joint information
|
||||||
|
@ -185,7 +198,7 @@ namespace baker {
|
||||||
const auto blendshapesPerMeshOut = model.addJob<BuildBlendshapesTask>("BuildBlendshapes", buildBlendshapesInputs);
|
const auto blendshapesPerMeshOut = model.addJob<BuildBlendshapesTask>("BuildBlendshapes", buildBlendshapesInputs);
|
||||||
const auto buildMeshesInputs = BuildMeshesTask::Input(meshesIn, graphicsMeshes, normalsPerMesh, tangentsPerMesh, blendshapesPerMeshOut).asVarying();
|
const auto buildMeshesInputs = BuildMeshesTask::Input(meshesIn, graphicsMeshes, normalsPerMesh, tangentsPerMesh, blendshapesPerMeshOut).asVarying();
|
||||||
const auto meshesOut = model.addJob<BuildMeshesTask>("BuildMeshes", buildMeshesInputs);
|
const auto meshesOut = model.addJob<BuildMeshesTask>("BuildMeshes", buildMeshesInputs);
|
||||||
const auto buildModelInputs = BuildModelTask::Input(hfmModelIn, meshesOut, jointsOut, jointRotationOffsets, jointIndices, flowData).asVarying();
|
const auto buildModelInputs = BuildModelTask::Input(hfmModelIn, meshesOut, jointsOut, jointRotationOffsets, jointIndices, flowData, shapeVerticesPerJoint).asVarying();
|
||||||
const auto hfmModelOut = model.addJob<BuildModelTask>("BuildModel", buildModelInputs);
|
const auto hfmModelOut = model.addJob<BuildModelTask>("BuildModel", buildModelInputs);
|
||||||
|
|
||||||
output = Output(hfmModelOut, materialMapping, dracoMeshes, dracoErrors, materialList);
|
output = Output(hfmModelOut, materialMapping, dracoMeshes, dracoErrors, materialList);
|
||||||
|
|
|
@ -36,6 +36,14 @@ namespace baker {
|
||||||
using TangentsPerBlendshape = std::vector<std::vector<glm::vec3>>;
|
using TangentsPerBlendshape = std::vector<std::vector<glm::vec3>>;
|
||||||
|
|
||||||
using MeshIndicesToModelNames = QHash<int, QString>;
|
using MeshIndicesToModelNames = QHash<int, QString>;
|
||||||
|
|
||||||
|
class ReweightedDeformers {
|
||||||
|
public:
|
||||||
|
std::vector<uint16_t> indices;
|
||||||
|
std::vector<uint16_t> weights;
|
||||||
|
uint16_t weightsPerVertex { 0 };
|
||||||
|
bool trimmedToMatch { false };
|
||||||
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
#endif // hifi_BakerTypes_h
|
#endif // hifi_BakerTypes_h
|
||||||
|
|
|
@ -2,8 +2,8 @@
|
||||||
// BuildGraphicsMeshTask.h
|
// BuildGraphicsMeshTask.h
|
||||||
// model-baker/src/model-baker
|
// model-baker/src/model-baker
|
||||||
//
|
//
|
||||||
// Created by Sabrina Shanman on 2018/12/06.
|
// Created by Sabrina Shanman on 2019/09/16.
|
||||||
// Copyright 2018 High Fidelity, Inc.
|
// Copyright 2019 High Fidelity, Inc.
|
||||||
//
|
//
|
||||||
// Distributed under the Apache License, Version 2.0.
|
// Distributed under the Apache License, Version 2.0.
|
||||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||||
|
@ -27,83 +27,7 @@ glm::vec3 normalizeDirForPacking(const glm::vec3& dir) {
|
||||||
return dir;
|
return dir;
|
||||||
}
|
}
|
||||||
|
|
||||||
class ReweightedDeformers {
|
void buildGraphicsMesh(const hfm::Mesh& hfmMesh, graphics::MeshPointer& graphicsMeshPointer, const baker::MeshNormals& meshNormals, const baker::MeshTangents& meshTangentsIn, uint16_t numDeformerControllers, const baker::ReweightedDeformers reweightedDeformers) {
|
||||||
public:
|
|
||||||
std::vector<uint16_t> indices;
|
|
||||||
std::vector<uint16_t> weights;
|
|
||||||
bool trimmedToMatch { false };
|
|
||||||
};
|
|
||||||
|
|
||||||
ReweightedDeformers getReweightedDeformers(size_t numMeshVertices, const hfm::DynamicTransform* dynamicTransform, const std::vector<const hfm::Deformer*> deformers, const uint16_t weightsPerVertex) {
|
|
||||||
size_t numClusterIndices = numMeshVertices * weightsPerVertex;
|
|
||||||
ReweightedDeformers reweightedDeformers;
|
|
||||||
// TODO: Consider having a rootCluster property in the DynamicTransform rather than appending the root to the end of the cluster list.
|
|
||||||
reweightedDeformers.indices.resize(numClusterIndices, (uint16_t)(deformers.size() - 1));
|
|
||||||
reweightedDeformers.weights.resize(numClusterIndices, 0);
|
|
||||||
|
|
||||||
std::vector<float> weightAccumulators;
|
|
||||||
weightAccumulators.resize(numClusterIndices, 0.0f);
|
|
||||||
for (uint16_t i = 0; i < (uint16_t)deformers.size(); ++i) {
|
|
||||||
const hfm::Deformer& deformer = *deformers[i];
|
|
||||||
|
|
||||||
if (deformer.indices.size() != deformer.weights.size()) {
|
|
||||||
reweightedDeformers.trimmedToMatch = true;
|
|
||||||
}
|
|
||||||
size_t numIndicesOrWeights = std::min(deformer.indices.size(), deformer.weights.size());
|
|
||||||
for (size_t j = 0; j < numIndicesOrWeights; ++j) {
|
|
||||||
uint32_t index = deformer.indices[j];
|
|
||||||
float weight = deformer.weights[j];
|
|
||||||
|
|
||||||
// look for an unused slot in the weights vector
|
|
||||||
uint32_t weightIndex = index * weightsPerVertex;
|
|
||||||
uint32_t lowestIndex = -1;
|
|
||||||
float lowestWeight = FLT_MAX;
|
|
||||||
uint16_t k = 0;
|
|
||||||
for (; k < weightsPerVertex; k++) {
|
|
||||||
if (weightAccumulators[weightIndex + k] == 0.0f) {
|
|
||||||
reweightedDeformers.indices[weightIndex + k] = i;
|
|
||||||
weightAccumulators[weightIndex + k] = weight;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if (weightAccumulators[weightIndex + k] < lowestWeight) {
|
|
||||||
lowestIndex = k;
|
|
||||||
lowestWeight = weightAccumulators[weightIndex + k];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (k == weightsPerVertex && weight > lowestWeight) {
|
|
||||||
// no space for an additional weight; we must replace the lowest
|
|
||||||
weightAccumulators[weightIndex + lowestIndex] = weight;
|
|
||||||
reweightedDeformers.indices[weightIndex + lowestIndex] = i;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// now that we've accumulated the most relevant weights for each vertex
|
|
||||||
// normalize and compress to 16-bits
|
|
||||||
for (size_t i = 0; i < numMeshVertices; ++i) {
|
|
||||||
size_t j = i * weightsPerVertex;
|
|
||||||
|
|
||||||
// normalize weights into uint16_t
|
|
||||||
float totalWeight = 0.0f;
|
|
||||||
for (size_t k = j; k < j + weightsPerVertex; ++k) {
|
|
||||||
totalWeight += weightAccumulators[k];
|
|
||||||
}
|
|
||||||
|
|
||||||
const float ALMOST_HALF = 0.499f;
|
|
||||||
if (totalWeight > 0.0f) {
|
|
||||||
float weightScalingFactor = (float)(UINT16_MAX) / totalWeight;
|
|
||||||
for (size_t k = j; k < j + weightsPerVertex; ++k) {
|
|
||||||
reweightedDeformers.weights[k] = (uint16_t)(weightScalingFactor * weightAccumulators[k] + ALMOST_HALF);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
reweightedDeformers.weights[j] = (uint16_t)((float)(UINT16_MAX) + ALMOST_HALF);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return reweightedDeformers;
|
|
||||||
}
|
|
||||||
|
|
||||||
void buildGraphicsMesh(const hfm::Mesh& hfmMesh, graphics::MeshPointer& graphicsMeshPointer, const baker::MeshNormals& meshNormals, const baker::MeshTangents& meshTangentsIn, const hfm::DynamicTransform* dynamicTransform, const std::vector<const hfm::Deformer*> meshDeformers) {
|
|
||||||
auto graphicsMesh = std::make_shared<graphics::Mesh>();
|
auto graphicsMesh = std::make_shared<graphics::Mesh>();
|
||||||
|
|
||||||
// Fill tangents with a dummy value to force tangents to be present if there are normals
|
// Fill tangents with a dummy value to force tangents to be present if there are normals
|
||||||
|
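The block removed above (and re-added in ReweightDeformersTask.cpp later in this diff) keeps at most four influences per vertex and renormalizes them to unsigned 16-bit values. The standalone sketch below shows just that normalization step; the weights and names are made up for illustration and nothing from the engine types is assumed.

// --- begin sketch (not part of this commit) ---
#include <algorithm>
#include <cstdint>
#include <functional>
#include <iostream>
#include <vector>

int main() {
    std::vector<float> raw { 0.5f, 0.3f, 0.15f, 0.04f, 0.01f }; // five influences on one vertex
    const int slots = 4;                                         // keep the strongest four

    std::sort(raw.begin(), raw.end(), std::greater<float>());
    raw.resize(slots);

    float total = 0.0f;
    for (float w : raw) { total += w; }

    const float ALMOST_HALF = 0.499f; // same round-to-nearest trick used by the code above
    for (float w : raw) {
        uint16_t packed = (uint16_t)((float)UINT16_MAX / total * w + ALMOST_HALF);
        std::cout << packed << "\n";
    }
    return 0;
}
// --- end sketch ---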
@@ -162,19 +86,16 @@ void buildGraphicsMesh(const hfm::Mesh& hfmMesh, graphics::MeshPointer& graphics
 
     // Support for 4 skinning clusters:
     // 4 Indices are uint8 ideally, uint16 if more than 256.
-    const auto clusterIndiceElement = ((meshDeformers.size() < (size_t)UINT8_MAX) ? gpu::Element(gpu::VEC4, gpu::UINT8, gpu::XYZW) : gpu::Element(gpu::VEC4, gpu::UINT16, gpu::XYZW));
+    const auto clusterIndiceElement = ((numDeformerControllers < (uint16_t)UINT8_MAX) ? gpu::Element(gpu::VEC4, gpu::UINT8, gpu::XYZW) : gpu::Element(gpu::VEC4, gpu::UINT16, gpu::XYZW));
     // 4 Weights are normalized 16bits
     const auto clusterWeightElement = gpu::Element(gpu::VEC4, gpu::NUINT16, gpu::XYZW);
 
-    // Calculate a more condensed view of all the deformer weights
-    const uint16_t NUM_CLUSTERS_PER_VERT = 4;
-    ReweightedDeformers reweightedDeformers = getReweightedDeformers(hfmMesh.vertices.size(), dynamicTransform, meshDeformers, NUM_CLUSTERS_PER_VERT);
     // Cluster indices and weights must be the same sizes
     if (reweightedDeformers.trimmedToMatch) {
         HIFI_FCDEBUG_ID(model_baker(), repeatMessageID, "BuildGraphicsMeshTask -- The number of indices and weights for a deformer had different sizes and have been trimmed to match");
     }
     // Record cluster sizes
-    const size_t numVertClusters = reweightedDeformers.indices.size() / NUM_CLUSTERS_PER_VERT;
+    const size_t numVertClusters = (reweightedDeformers.weightsPerVertex ? hfmMesh.clusterIndices.size() / reweightedDeformers.weightsPerVertex : 0);
     const size_t clusterIndicesSize = numVertClusters * clusterIndiceElement.getSize();
     const size_t clusterWeightsSize = numVertClusters * clusterWeightElement.getSize();
 
@@ -263,7 +184,7 @@ void buildGraphicsMesh(const hfm::Mesh& hfmMesh, graphics::MeshPointer& graphics
 
     // Clusters data
     if (clusterIndicesSize > 0) {
-        if (meshDeformers.size() < UINT8_MAX) {
+        if (numDeformerControllers < (uint16_t)UINT8_MAX) {
             // yay! we can fit the clusterIndices within 8-bits
             int32_t numIndices = (int32_t)reweightedDeformers.indices.size();
             std::vector<uint8_t> packedDeformerIndices;
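The change above swaps the 8-bit/16-bit decision from meshDeformers.size() to numDeformerControllers, but the rule is the same: strictly fewer than 255 controllers keeps uint8 cluster indices, otherwise uint16 is used. A small illustrative sketch of that decision follows; the example count is an assumption, not data from the commit.

// --- begin sketch (not part of this commit) ---
#include <cstdint>
#include <iostream>

int main() {
    uint16_t numDeformerControllers = 300;                  // assumed example count
    bool fitsIn8Bits = numDeformerControllers < UINT8_MAX;  // strictly below 255, matching the test above
    std::cout << "cluster index bytes per vertex: "
              << (fitsIn8Bits ? 4 * sizeof(uint8_t) : 4 * sizeof(uint16_t)) << "\n";
    return 0;
}
// --- end sketch ---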
@@ -461,7 +382,7 @@ void BuildGraphicsMeshTask::run(const baker::BakeContextPointer& context, const
     const auto& tangentsPerMesh = input.get4();
     const auto& shapes = input.get5();
     const auto& dynamicTransforms = input.get6();
-    const auto& deformers = input.get7();
+    const auto& reweightedDeformersPerMesh = input.get7();
 
     // Currently, there is only (at most) one dynamicTransform per mesh
     // An undefined shape.dynamicTransform has the value hfm::UNDEFINED_KEY
@@ -478,20 +399,19 @@ void BuildGraphicsMeshTask::run(const baker::BakeContextPointer& context, const
     for (int i = 0; i < n; i++) {
         graphicsMeshes.emplace_back();
         auto& graphicsMesh = graphicsMeshes[i];
+        const auto& reweightedDeformers = reweightedDeformersPerMesh[i];
 
-        auto dynamicTransformIndex = dynamicTransformPerMesh[i];
-        const hfm::DynamicTransform* dynamicTransform = nullptr;
-        std::vector<const hfm::Deformer*> meshDeformers;
-        if (dynamicTransformIndex != hfm::UNDEFINED_KEY) {
-            dynamicTransform = &dynamicTransforms[dynamicTransformIndex];
-            for (const auto& deformerIndex : dynamicTransform->deformers) {
-                const auto& deformer = deformers[deformerIndex];
-                meshDeformers.push_back(&deformer);
-            }
-        }
+        uint16_t numDeformerControllers = 0;
+        if (reweightedDeformers.weightsPerVertex != 0) {
+            uint32_t dynamicTransformIndex = dynamicTransformPerMesh[i];
+            if (dynamicTransformIndex != hfm::UNDEFINED_KEY) {
+                const hfm::DynamicTransform& dynamicTransform = dynamicTransforms[dynamicTransformIndex];
+                numDeformerControllers = (uint16_t)dynamicTransform.deformers.size();
+            }
+        }
 
         // Try to create the graphics::Mesh
-        buildGraphicsMesh(meshes[i], graphicsMesh, baker::safeGet(normalsPerMesh, i), baker::safeGet(tangentsPerMesh, i), dynamicTransform, meshDeformers);
+        buildGraphicsMesh(meshes[i], graphicsMesh, baker::safeGet(normalsPerMesh, i), baker::safeGet(tangentsPerMesh, i), numDeformerControllers, reweightedDeformers);
 
         // Choose a name for the mesh
         if (graphicsMesh) {
@@ -2,8 +2,8 @@
 // BuildGraphicsMeshTask.h
 // model-baker/src/model-baker
 //
-// Created by Sabrina Shanman on 2018/12/06.
-// Copyright 2018 High Fidelity, Inc.
+// Created by Sabrina Shanman on 2019/09/16.
+// Copyright 2019 High Fidelity, Inc.
 //
 // Distributed under the Apache License, Version 2.0.
 // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
@@ -20,7 +20,7 @@
 
 class BuildGraphicsMeshTask {
 public:
-    using Input = baker::VaryingSet8<std::vector<hfm::Mesh>, hifi::URL, baker::MeshIndicesToModelNames, baker::NormalsPerMesh, baker::TangentsPerMesh, std::vector<hfm::Shape>, std::vector<hfm::DynamicTransform>, std::vector<hfm::Deformer>>;
+    using Input = baker::VaryingSet8<std::vector<hfm::Mesh>, hifi::URL, baker::MeshIndicesToModelNames, baker::NormalsPerMesh, baker::TangentsPerMesh, std::vector<hfm::Shape>, std::vector<hfm::DynamicTransform>, std::vector<baker::ReweightedDeformers>>;
     using Output = std::vector<graphics::MeshPointer>;
     using JobModel = baker::Job::ModelIO<BuildGraphicsMeshTask, Input, Output>;
 
@@ -0,0 +1,91 @@
+//
+// CollectShapeVerticesTask.h
+// model-baker/src/model-baker
+//
+// Created by Sabrina Shanman on 2019/09/27.
+// Copyright 2019 High Fidelity, Inc.
+//
+// Distributed under the Apache License, Version 2.0.
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+#include "CollectShapeVerticesTask.h"
+
+#include <glm/gtx/transform.hpp>
+
+// Used to track and avoid duplicate shape vertices, as multiple shapes can have the same mesh and dynamicTransform
+class VertexSource {
+public:
+    uint32_t mesh;
+    uint32_t dynamicTransform;
+
+    bool operator==(const VertexSource& other) const {
+        return mesh == other.mesh &&
+            dynamicTransform == other.dynamicTransform;
+    }
+};
+
+void CollectShapeVerticesTask::run(const baker::BakeContextPointer& context, const Input& input, Output& output) {
+    const auto& meshes = input.get0();
+    const auto& shapes = input.get1();
+    const auto& joints = input.get2();
+    const auto& dynamicTransforms = input.get3();
+    const auto& reweightedDeformers = input.get4();
+    auto& shapeVerticesPerJoint = output;
+
+    shapeVerticesPerJoint.reserve(joints.size());
+    std::vector<std::vector<VertexSource>> vertexSourcesPerJoint;
+    vertexSourcesPerJoint.resize(joints.size());
+    for (size_t i = 0; i < shapes.size(); ++i) {
+        const auto& shape = shapes[i];
+        const uint32_t dynamicTransformKey = shape.dynamicTransform;
+        if (dynamicTransformKey == hfm::UNDEFINED_KEY) {
+            continue;
+        }
+
+        VertexSource vertexSource;
+        vertexSource.mesh = shape.mesh;
+        vertexSource.dynamicTransform = dynamicTransformKey;
+
+        const auto& dynamicTransform = dynamicTransforms[dynamicTransformKey];
+        for (size_t j = 0; j < dynamicTransform.clusters.size(); ++j) {
+            const auto& cluster = dynamicTransform.clusters[j];
+            const uint32_t jointIndex = cluster.jointIndex;
+
+            auto& vertexSources = vertexSourcesPerJoint[jointIndex];
+            if (std::find(vertexSources.cbegin(), vertexSources.cend(), vertexSource) == vertexSources.cend()) {
+                vertexSources.push_back(vertexSource);
+                auto& shapeVertices = shapeVerticesPerJoint[jointIndex];
+
+                const auto& mesh = meshes[shape.mesh];
+                const auto& vertices = mesh.vertices;
+                const auto& reweightedDeformer = reweightedDeformers[shape.mesh];
+                const glm::mat4 meshToJoint = cluster.inverseBindMatrix;
+
+                const uint16_t weightsPerVertex = reweightedDeformer.weightsPerVertex;
+                if (weightsPerVertex == 0) {
+                    for (int vertexIndex = 0; vertexIndex < (int)vertices.size(); ++vertexIndex) {
+                        const glm::mat4 vertexTransform = meshToJoint * glm::translate(vertices[vertexIndex]);
+                        shapeVertices.push_back(extractTranslation(vertexTransform));
+                    }
+                } else {
+                    for (int vertexIndex = 0; vertexIndex < (int)vertices.size(); ++vertexIndex) {
+                        for (uint16_t weightIndex = 0; weightIndex < weightsPerVertex; ++weightIndex) {
+                            const size_t index = vertexIndex*4 + weightIndex;
+                            const uint16_t clusterIndex = reweightedDeformer.indices[index];
+                            const uint16_t clusterWeight = reweightedDeformer.weights[index];
+                            // Remember vertices associated with this joint with at least 1/4 weight
+                            const uint16_t EXPANSION_WEIGHT_THRESHOLD = std::numeric_limits<uint16_t>::max() / 4;
+                            if (clusterIndex != j || clusterWeight < EXPANSION_WEIGHT_THRESHOLD) {
+                                continue;
+                            }
+
+                            const glm::mat4 vertexTransform = meshToJoint * glm::translate(vertices[vertexIndex]);
+                            shapeVertices.push_back(extractTranslation(vertexTransform));
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
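CollectShapeVerticesTask keeps a vertex for a joint only when its reweighted cluster weight is at least a quarter of the uint16 range. The tiny standalone sketch below reproduces just that filter; the sample weights are invented for illustration.

// --- begin sketch (not part of this commit) ---
#include <cstdint>
#include <iostream>
#include <limits>

int main() {
    const uint16_t EXPANSION_WEIGHT_THRESHOLD = std::numeric_limits<uint16_t>::max() / 4; // 16383
    const uint16_t sampleWeights[] = { 65535, 20000, 16384, 100 }; // invented normalized weights
    for (uint16_t weight : sampleWeights) {
        bool kept = weight >= EXPANSION_WEIGHT_THRESHOLD; // the task skips anything below 1/4
        std::cout << weight << (kept ? " -> kept" : " -> skipped") << "\n";
    }
    return 0;
}
// --- end sketch ---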
@@ -0,0 +1,30 @@
+//
+// CollectShapeVerticesTask.h
+// model-baker/src/model-baker
+//
+// Created by Sabrina Shanman on 2019/09/27.
+// Copyright 2019 High Fidelity, Inc.
+//
+// Distributed under the Apache License, Version 2.0.
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+#ifndef hifi_CollectShapeVerticesTask_h
+#define hifi_CollectShapeVerticesTask_h
+
+#include <hfm/HFM.h>
+
+#include "Engine.h"
+#include "BakerTypes.h"
+
+class CollectShapeVerticesTask {
+public:
+    using Input = baker::VaryingSet5<std::vector<hfm::Mesh>, std::vector<hfm::Shape>, std::vector<hfm::Joint>, std::vector<hfm::DynamicTransform>, std::vector<baker::ReweightedDeformers>>;
+    using Output = std::vector<ShapeVertices>;
+    using JobModel = baker::Job::ModelIO<CollectShapeVerticesTask, Input, Output>;
+
+    void run(const baker::BakeContextPointer& context, const Input& input, Output& output);
+};
+
+#endif // hifi_CollectShapeVerticesTask_h
123 libraries/model-baker/src/model-baker/ReweightDeformersTask.cpp Normal file
@@ -0,0 +1,123 @@
+//
+// ReweightDeformersTask.h
+// model-baker/src/model-baker
+//
+// Created by Sabrina Shanman on 2019/09/26.
+// Copyright 2019 High Fidelity, Inc.
+//
+// Distributed under the Apache License, Version 2.0.
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+#include "ReweightDeformersTask.h"
+
+baker::ReweightedDeformers getReweightedDeformers(size_t numMeshVertices, const std::vector<const hfm::Deformer*> deformers, const uint16_t weightsPerVertex) {
+    baker::ReweightedDeformers reweightedDeformers;
+    if (deformers.size() == 0) {
+        return reweightedDeformers;
+    }
+
+    size_t numClusterIndices = numMeshVertices * weightsPerVertex;
+    reweightedDeformers.weightsPerVertex = weightsPerVertex;
+    // TODO: Consider having a rootCluster property in the DynamicTransform rather than appending the root to the end of the cluster list.
+    reweightedDeformers.indices.resize(numClusterIndices, (uint16_t)(deformers.size() - 1));
+    reweightedDeformers.weights.resize(numClusterIndices, 0);
+
+    std::vector<float> weightAccumulators;
+    weightAccumulators.resize(numClusterIndices, 0.0f);
+    for (uint16_t i = 0; i < (uint16_t)deformers.size(); ++i) {
+        const hfm::Deformer& deformer = *deformers[i];
+
+        if (deformer.indices.size() != deformer.weights.size()) {
+            reweightedDeformers.trimmedToMatch = true;
+        }
+        size_t numIndicesOrWeights = std::min(deformer.indices.size(), deformer.weights.size());
+        for (size_t j = 0; j < numIndicesOrWeights; ++j) {
+            uint32_t index = deformer.indices[j];
+            float weight = deformer.weights[j];
+
+            // look for an unused slot in the weights vector
+            uint32_t weightIndex = index * weightsPerVertex;
+            uint32_t lowestIndex = -1;
+            float lowestWeight = FLT_MAX;
+            uint16_t k = 0;
+            for (; k < weightsPerVertex; k++) {
+                if (weightAccumulators[weightIndex + k] == 0.0f) {
+                    reweightedDeformers.indices[weightIndex + k] = i;
+                    weightAccumulators[weightIndex + k] = weight;
+                    break;
+                }
+                if (weightAccumulators[weightIndex + k] < lowestWeight) {
+                    lowestIndex = k;
+                    lowestWeight = weightAccumulators[weightIndex + k];
+                }
+            }
+            if (k == weightsPerVertex && weight > lowestWeight) {
+                // no space for an additional weight; we must replace the lowest
+                weightAccumulators[weightIndex + lowestIndex] = weight;
+                reweightedDeformers.indices[weightIndex + lowestIndex] = i;
+            }
+        }
+    }
+
+    // now that we've accumulated the most relevant weights for each vertex
+    // normalize and compress to 16-bits
+    for (size_t i = 0; i < numMeshVertices; ++i) {
+        size_t j = i * weightsPerVertex;
+
+        // normalize weights into uint16_t
+        float totalWeight = 0.0f;
+        for (size_t k = j; k < j + weightsPerVertex; ++k) {
+            totalWeight += weightAccumulators[k];
+        }
+
+        const float ALMOST_HALF = 0.499f;
+        if (totalWeight > 0.0f) {
+            float weightScalingFactor = (float)(UINT16_MAX) / totalWeight;
+            for (size_t k = j; k < j + weightsPerVertex; ++k) {
+                reweightedDeformers.weights[k] = (uint16_t)(weightScalingFactor * weightAccumulators[k] + ALMOST_HALF);
+            }
+        } else {
+            reweightedDeformers.weights[j] = (uint16_t)((float)(UINT16_MAX) + ALMOST_HALF);
+        }
+    }
+
+    return reweightedDeformers;
+}
+
+void ReweightDeformersTask::run(const baker::BakeContextPointer& context, const Input& input, Output& output) {
+    const uint16_t NUM_WEIGHTS_PER_VERTEX { 4 };
+
+    const auto& meshes = input.get0();
+    const auto& shapes = input.get1();
+    const auto& dynamicTransforms = input.get2();
+    const auto& deformers = input.get3();
+    auto& reweightedDeformers = output;
+
+    // Currently, there is only (at most) one dynamicTransform per mesh
+    // An undefined shape.dynamicTransform has the value hfm::UNDEFINED_KEY
+    std::vector<uint32_t> dynamicTransformPerMesh;
+    dynamicTransformPerMesh.resize(meshes.size(), hfm::UNDEFINED_KEY);
+    for (const auto& shape : shapes) {
+        uint32_t dynamicTransformIndex = shape.dynamicTransform;
+        dynamicTransformPerMesh[shape.mesh] = dynamicTransformIndex;
+    }
+
+    reweightedDeformers.reserve(meshes.size());
+    for (size_t i = 0; i < meshes.size(); ++i) {
+        const auto& mesh = meshes[i];
+        uint32_t dynamicTransformIndex = dynamicTransformPerMesh[i];
+
+        const hfm::DynamicTransform* dynamicTransform = nullptr;
+        std::vector<const hfm::Deformer*> meshDeformers;
+        if (dynamicTransformIndex != hfm::UNDEFINED_KEY) {
+            dynamicTransform = &dynamicTransforms[dynamicTransformIndex];
+            for (const auto& deformerIndex : dynamicTransform->deformers) {
+                const auto& deformer = deformers[deformerIndex];
+                meshDeformers.push_back(&deformer);
+            }
+        }
+
+        reweightedDeformers.push_back(getReweightedDeformers((size_t)mesh.vertices.size(), meshDeformers, NUM_WEIGHTS_PER_VERTEX));
+    }
+}
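The core of getReweightedDeformers() above is the per-vertex slot rule: take the first empty slot, otherwise evict the smallest stored weight when the incoming weight is larger. The standalone sketch below isolates just that rule, using fixed arrays in place of the flat per-vertex view; all values are made up.

// --- begin sketch (not part of this commit) ---
#include <cfloat>
#include <cstdint>
#include <iostream>

int main() {
    const int SLOTS = 4;
    float slotWeights[SLOTS] = { 0.4f, 0.3f, 0.2f, 0.1f }; // already full
    uint16_t slotIndices[SLOTS] = { 0, 1, 2, 3 };

    uint16_t incomingIndex = 7;   // a new deformer influence for this vertex
    float incomingWeight = 0.25f;

    int lowest = 0;
    float lowestWeight = FLT_MAX;
    int k = 0;
    for (; k < SLOTS; ++k) {
        if (slotWeights[k] == 0.0f) {            // unused slot: take it
            slotWeights[k] = incomingWeight;
            slotIndices[k] = incomingIndex;
            break;
        }
        if (slotWeights[k] < lowestWeight) {     // track the weakest occupant
            lowest = k;
            lowestWeight = slotWeights[k];
        }
    }
    if (k == SLOTS && incomingWeight > lowestWeight) { // no room: replace the weakest
        slotWeights[lowest] = incomingWeight;
        slotIndices[lowest] = incomingIndex;
    }

    for (int i = 0; i < SLOTS; ++i) {
        std::cout << slotIndices[i] << ":" << slotWeights[i] << "\n";
    }
    return 0;
}
// --- end sketch ---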
@@ -0,0 +1,29 @@
+//
+// ReweightDeformersTask.h
+// model-baker/src/model-baker
+//
+// Created by Sabrina Shanman on 2019/09/26.
+// Copyright 2019 High Fidelity, Inc.
+//
+// Distributed under the Apache License, Version 2.0.
+// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+#ifndef hifi_ReweightDeformersTask_h
+#define hifi_ReweightDeformersTask_h
+
+#include <hfm/HFM.h>
+
+#include "Engine.h"
+#include "BakerTypes.h"
+
+class ReweightDeformersTask {
+public:
+    using Input = baker::VaryingSet4<std::vector<hfm::Mesh>, std::vector<hfm::Shape>, std::vector<hfm::DynamicTransform>, std::vector<hfm::Deformer>>;
+    using Output = std::vector<baker::ReweightedDeformers>;
+    using JobModel = baker::Job::ModelIO<ReweightDeformersTask, Input, Output>;
+
+    void run(const baker::BakeContextPointer& context, const Input& input, Output& output);
+};
+
+#endif // hifi_ReweightDeformersTask_h
@@ -203,10 +203,20 @@ function maybeDeleteRemoteIndicatorTimeout() {
     }
 }
 
 
 var reactionsBegun = [];
 var pointReticle = null;
 var mouseMoveEventsConnected = false;
+var targetPointInterpolateConnected = false;
+var pointAtTarget = Vec3.ZERO;
+var isReticleVisible = true;
+
+function targetPointInterpolate() {
+    if (reticlePosition) {
+        pointAtTarget = Vec3.mix(pointAtTarget, reticlePosition, POINT_AT_MIX_ALPHA);
+        isReticleVisible = MyAvatar.setPointAt(pointAtTarget);
+    }
+}
 
 function beginReactionWrapper(reaction) {
     maybeDeleteRemoteIndicatorTimeout();
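targetPointInterpolate() above eases the point-at target toward the latest reticle position with a constant-alpha Vec3.mix on every script update. For reference, here is a standalone C++ sketch of the same lerp (the Vec3 type and the sample values are stand-ins, not the script API); POINT_AT_MIX_ALPHA = 0.15 is the constant introduced later in this diff.

// --- begin sketch (not part of this commit) ---
#include <iostream>

struct Vec3 { float x, y, z; };

Vec3 mix(const Vec3& a, const Vec3& b, float alpha) {
    // Linear interpolation: move a fixed fraction of the remaining distance toward b.
    return { a.x + (b.x - a.x) * alpha,
             a.y + (b.y - a.y) * alpha,
             a.z + (b.z - a.z) * alpha };
}

int main() {
    const float POINT_AT_MIX_ALPHA = 0.15f;
    Vec3 pointAtTarget { 0.0f, 0.0f, 0.0f };
    Vec3 reticlePosition { 1.0f, 2.0f, 0.5f };

    for (int frame = 0; frame < 5; ++frame) {
        pointAtTarget = mix(pointAtTarget, reticlePosition, POINT_AT_MIX_ALPHA);
        std::cout << pointAtTarget.x << ", " << pointAtTarget.y << ", " << pointAtTarget.z << "\n";
    }
    return 0;
}
// --- end sketch ---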
@@ -227,14 +237,18 @@ function beginReactionWrapper(reaction) {
             break;
         case ("point"):
             deleteOldReticles();
+            pointAtTarget = MyAvatar.getHeadLookAt();
             if (!mouseMoveEventsConnected) {
                 Controller.mouseMoveEvent.connect(mouseMoveEvent);
                 mouseMoveEventsConnected = true;
             }
+            if (!targetPointInterpolateConnected) {
+                Script.update.connect(targetPointInterpolate);
+                targetPointInterpolateConnected = true;
+            }
     }
 }
 
 // Checks to see if there are any reticle entities already to delete
 function deleteOldReticles() {
     MyAvatar.getAvatarEntitiesVariant()
@@ -250,6 +264,8 @@ function deleteOldReticles() {
 var MAX_INTERSECTION_DISTANCE_M = 50;
 var reticleUpdateRateLimiterTimer = false;
 var RETICLE_UPDATE_RATE_LIMITER_TIMER_MS = 75;
+var POINT_AT_MIX_ALPHA = 0.15;
+var reticlePosition = Vec3.ZERO;
 function mouseMoveEvent(event) {
     if (!reticleUpdateRateLimiterTimer) {
         reticleUpdateRateLimiterTimer = Script.setTimeout(function() {
@@ -261,11 +277,10 @@ function mouseMoveEvent(event) {
 
 
             var pickRay = Camera.computePickRay(event.x, event.y);
-            var avatarIntersectionData = AvatarManager.findRayIntersection(pickRay);
+            var avatarIntersectionData = AvatarManager.findRayIntersection(pickRay, [], [MyAvatar.sessionUUID], false);
             var entityIntersectionData = Entities.findRayIntersection(pickRay, true);
             var avatarIntersectionDistanceM = avatarIntersectionData.intersects && avatarIntersectionData.distance < MAX_INTERSECTION_DISTANCE_M ? avatarIntersectionData.distance : null;
             var entityIntersectionDistanceM = entityIntersectionData.intersects && entityIntersectionData.distance < MAX_INTERSECTION_DISTANCE_M ? entityIntersectionData.distance : null;
-            var reticlePosition;
 
             if (avatarIntersectionDistanceM && entityIntersectionDistanceM) {
                 if (avatarIntersectionDistanceM < entityIntersectionDistanceM) {
@@ -283,7 +298,7 @@ function mouseMoveEvent(event) {
             }
 
             if (pointReticle && reticlePosition) {
-                Entities.editEntity(pointReticle, { position: reticlePosition });
+                Entities.editEntity(pointReticle, { position: reticlePosition, visible: isReticleVisible });
             } else if (reticlePosition) {
                 pointReticle = Entities.addEntity({
                     type: "Box",
@@ -349,6 +364,10 @@ function endReactionWrapper(reaction) {
                 Controller.mouseMoveEvent.disconnect(mouseMoveEvent);
                 mouseMoveEventsConnected = false;
             }
+            if (targetPointInterpolateConnected) {
+                Script.update.disconnect(targetPointInterpolate);
+                targetPointInterpolateConnected = false;
+            }
             maybeClearReticleUpdateLimiterTimeout();
             deleteOldReticles();
             break;
@@ -757,7 +776,6 @@ function toggleEmojiApp() {
     emojiAPI.registerAvimojiQMLWindow(emojiAppWindow);
 }
 
-
 // #endregion
 // *************************************
 // END EMOJI_MAIN