Commit 668c5e298d: clean up the Transform class and the rendering path in Model.cpp
Mirror of https://github.com/overte-org/overte.git
26 changed files with 6196 additions and 3960 deletions
@@ -141,11 +141,6 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
         return 0;
     }
 
-    // if the stream should be muted, bail
-    if (shouldMute(streamToAdd->getQuietestFrameLoudness())) {
-        return 0;
-    }
-
     float bearingRelativeAngleToSource = 0.0f;
     float attenuationCoefficient = 1.0f;
     int numSamplesDelay = 0;
@@ -722,6 +717,29 @@ void AudioMixer::run() {
             // That's how the popped audio data will be read for mixing (but only if the pop was successful)
             nodeData->checkBuffersBeforeFrameSend();
 
+            // if the stream should be muted, send mute packet
+            if (shouldMute(nodeData->getAvatarAudioStream()->getQuietestFrameLoudness())) {
+                static const int TIME_BETWEEN_MUTES = 5; // in secs
+                if (usecTimestampNow() - nodeData->getAvatarAudioStream()->getLastMuted() >
+                    TIME_BETWEEN_MUTES * USECS_PER_SECOND) {
+                    int headerSize = numBytesForPacketHeaderGivenPacketType(PacketTypeMuteEnvironment);
+                    int packetSize = headerSize + sizeof(glm::vec3) + sizeof(float);
+
+                    // Fake data to force mute
+                    glm::vec3 position = nodeData->getAvatarAudioStream()->getPosition();
+                    float radius = 1.0f;
+
+                    char* packet = (char*)malloc(packetSize);
+                    populatePacketHeader(packet, PacketTypeMuteEnvironment);
+                    memcpy(packet + headerSize, &position, sizeof(glm::vec3));
+                    memcpy(packet + headerSize + sizeof(glm::vec3), &radius, sizeof(float));
+
+                    nodeList->writeDatagram(packet, packetSize, node);
+                    nodeData->getAvatarAudioStream()->setLastMutedNow();
+                    free(packet);
+                }
+            }
+
             if (node->getType() == NodeType::Agent && node->getActiveSocket()
                 && nodeData->getAvatarAudioStream()) {
@@ -14,7 +14,8 @@
 #include "AvatarAudioStream.h"
 
 AvatarAudioStream::AvatarAudioStream(bool isStereo, const InboundAudioStream::Settings& settings) :
-    PositionalAudioStream(PositionalAudioStream::Microphone, isStereo, settings)
+    PositionalAudioStream(PositionalAudioStream::Microphone, isStereo, settings),
+    _lastMuted(usecTimestampNow())
 {
 }
@@ -19,13 +19,18 @@
 class AvatarAudioStream : public PositionalAudioStream {
 public:
     AvatarAudioStream(bool isStereo, const InboundAudioStream::Settings& settings);
 
+    qint64 getLastMuted() const { return _lastMuted; }
+    void setLastMutedNow() { _lastMuted = usecTimestampNow(); }
+
 private:
     // disallow copying of AvatarAudioStream objects
     AvatarAudioStream(const AvatarAudioStream&);
     AvatarAudioStream& operator= (const AvatarAudioStream&);
 
     int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples);
 
+    qint64 _lastMuted;
+
 };
 
 #endif // hifi_AvatarAudioStream_h
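Note on the mute feature added above: the mixer now tells a client to mute itself when its stream is too loud, but throttles the packet to at most one every TIME_BETWEEN_MUTES seconds by stamping the stream with _lastMuted. The same rate-limiting pattern in isolation, sketched with the C++ standard library instead of the hifi helpers (usecTimestampNow, USECS_PER_SECOND), so every name below is illustrative rather than the actual API:

    #include <chrono>

    // Remembers when an event last fired and allows it again only after a fixed
    // interval has elapsed (the same idea as _lastMuted / setLastMutedNow()).
    class IntervalThrottle {
    public:
        explicit IntervalThrottle(std::chrono::microseconds interval)
            : _interval(interval), _last(std::chrono::steady_clock::now() - interval) {}

        // Returns true, and records the time, if enough time has passed since the last firing.
        bool tryFire() {
            auto now = std::chrono::steady_clock::now();
            if (now - _last >= _interval) {
                _last = now;
                return true;
            }
            return false;
        }

    private:
        std::chrono::microseconds _interval;
        std::chrono::steady_clock::time_point _last;
    };

    // Usage sketch: IntervalThrottle muteThrottle(std::chrono::seconds(5));
    //               if (tooLoud && muteThrottle.tryFire()) { /* send mute packet */ }

In the commit itself the timestamp lives on AvatarAudioStream (getLastMuted / setLastMutedNow), so the throttle is per avatar audio stream rather than global.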
examples/libraries/walkApi.js (new file, 412 lines)
//
// walkObjects.js
//
// version 1.001
//
// Created by David Wooldridge, Autumn 2014
//
// Motion, state and Transition objects for use by the walk.js script v1.1
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// constructor for the Motion object
Motion = function() {

    this.setGender = function(gender) {

        this.avatarGender = gender;

        switch(this.avatarGender) {

            case MALE:

                this.selWalk = walkAssets.maleStandardWalk;
                this.selStand = walkAssets.maleStandOne;
                this.selFlyUp = walkAssets.maleFlyingUp;
                this.selFly = walkAssets.maleFlying;
                this.selFlyDown = walkAssets.maleFlyingDown;
                this.selSideStepLeft = walkAssets.maleSideStepLeft;
                this.selSideStepRight = walkAssets.maleSideStepRight;
                this.curAnim = this.selStand;
                return;

            case FEMALE:

                this.selWalk = walkAssets.femaleStandardWalk;
                this.selStand = walkAssets.femaleStandOne;
                this.selFlyUp = walkAssets.femaleFlyingUp;
                this.selFly = walkAssets.femaleFlying;
                this.selFlyDown = walkAssets.femaleFlyingDown;
                this.selSideStepLeft = walkAssets.femaleSideStepLeft;
                this.selSideStepRight = walkAssets.femaleSideStepRight;
                this.curAnim = this.selStand;
                return;
        }
    }

    this.hydraCheck = function() {

        // function courtesy of Thijs Wenker, frisbee.js
        var numberOfButtons = Controller.getNumberOfButtons();
        var numberOfTriggers = Controller.getNumberOfTriggers();
        var numberOfSpatialControls = Controller.getNumberOfSpatialControls();
        var controllersPerTrigger = numberOfSpatialControls / numberOfTriggers;
        hydrasConnected = (numberOfButtons == 12 && numberOfTriggers == 2 && controllersPerTrigger == 2);
        return hydrasConnected;
    }

    // settings
    this.armsFree = this.hydraCheck(); // automatically sets true for Hydra support - temporary fix
    this.makesFootStepSounds = true;
    this.avatarGender = MALE;
    this.motionPitchMax = 60;
    this.motionRollMax = 40;

    // timing
    this.frameStartTime = 0; // used for measuring frame execution times
    this.frameExecutionTimeMax = 0; // keep track of the longest frame execution time
    this.cumulativeTime = 0.0;
    this.lastWalkStartTime = 0;

    // selected animations
    this.selWalk = walkAssets.maleStandardWalk;
    this.selStand = walkAssets.maleStandOne;
    this.selFlyUp = walkAssets.maleFlyingUp;
    this.selFly = walkAssets.maleFlying;
    this.selFlyDown = walkAssets.maleFlyingDown;
    this.selSideStepLeft = walkAssets.maleSideStepLeft;
    this.selSideStepRight = walkAssets.maleSideStepRight;

    // the currently selected animation, joint and transition
    this.curAnim = this.selStand;
    this.curJointIndex = 0;
    this.curTransition = null;

    // zero out avi's joints, curl the fingers nicely then take some measurements
    this.avatarJointNames = MyAvatar.getJointNames();
    if (!this.armsFree) {

        for (var i = 0; i < this.avatarJointNames.length; i++) {

            if (i > 17 || i < 34) {
                // left hand fingers
                MyAvatar.setJointData(this.avatarJointNames[i], Quat.fromPitchYawRollDegrees(16, 0, 0));
            } else if (i > 33 || i < 38) {
                // left hand thumb
                MyAvatar.setJointData(this.avatarJointNames[i], Quat.fromPitchYawRollDegrees(4, 0, 0));
            } else if (i > 41 || i < 58) {
                // right hand fingers
                MyAvatar.setJointData(this.avatarJointNames[i], Quat.fromPitchYawRollDegrees(16, 0, 0));
            } else if (i > 57 || i < 62) {
                // right hand thumb
                MyAvatar.setJointData(this.avatarJointNames[i], Quat.fromPitchYawRollDegrees(4, 0, 0));
            } else {
                // zero out the remaining joints
                MyAvatar.clearJointData(this.avatarJointNames[i]);
            }
        }
    }

    this.footRPos = MyAvatar.getJointPosition("RightFoot");
    this.hipsToFeet = MyAvatar.getJointPosition("Hips").y - this.footRPos.y;

    // walkwheel (foot / ground speed matching)
    this.direction = FORWARDS;
    this.nextStep = RIGHT;
    this.nFrames = 0;
    this.strideLength = this.selWalk.calibration.strideLengthForwards;
    this.walkWheelPos = 0;

    this.advanceWalkWheel = function(angle){
        this.walkWheelPos += angle;
        if (motion.walkWheelPos >= 360) {
            this.walkWheelPos = this.walkWheelPos % 360;
        }
    }

    // last frame history
    this.lastDirection = 0;
    this.lastVelocity = 0;
    this.lastStrideLength = 0; // kept for use during transitions

}; // end Motion constructor

// finite state machine
state = (function () {

    return {

        // the finite list of states
        STANDING: 1,
        WALKING: 2,
        SIDE_STEP: 3,
        FLYING: 4,
        EDIT_WALK_STYLES: 5,
        EDIT_WALK_TWEAKS: 6,
        EDIT_WALK_JOINTS: 7,
        EDIT_STANDING: 8,
        EDIT_FLYING: 9,
        EDIT_FLYING_UP: 10,
        EDIT_FLYING_DOWN: 11,
        EDIT_SIDESTEP_LEFT: 12,
        EDIT_SIDESTEP_RIGHT: 14,
        currentState: this.STANDING,

        // status vars
        powerOn: true,
        minimised: true,
        editing: false,
        editingTranslation: false,

        setInternalState: function(newInternalState) {

            switch (newInternalState) {

                case this.WALKING:

                    this.currentState = this.WALKING;
                    this.editing = false;
                    motion.lastWalkStartTime = new Date().getTime();
                    walkInterface.updateMenu();
                    return;

                case this.FLYING:

                    this.currentState = this.FLYING;
                    this.editing = false;
                    motion.lastWalkStartTime = 0;
                    walkInterface.updateMenu();
                    return;

                case this.SIDE_STEP:

                    this.currentState = this.SIDE_STEP;
                    this.editing = false;
                    motion.lastWalkStartTime = new Date().getTime();
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_WALK_STYLES:

                    this.currentState = this.EDIT_WALK_STYLES;
                    this.editing = true;
                    motion.lastWalkStartTime = new Date().getTime();
                    motion.curAnim = motion.selWalk;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_WALK_TWEAKS:

                    this.currentState = this.EDIT_WALK_TWEAKS;
                    this.editing = true;
                    motion.lastWalkStartTime = new Date().getTime();
                    motion.curAnim = motion.selWalk;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_WALK_JOINTS:

                    this.currentState = this.EDIT_WALK_JOINTS;
                    this.editing = true;
                    motion.lastWalkStartTime = new Date().getTime();
                    motion.curAnim = motion.selWalk;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_STANDING:

                    this.currentState = this.EDIT_STANDING;
                    this.editing = true;
                    motion.lastWalkStartTime = 0;
                    motion.curAnim = motion.selStand;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_SIDESTEP_LEFT:

                    this.currentState = this.EDIT_SIDESTEP_LEFT;
                    this.editing = true;
                    motion.lastWalkStartTime = new Date().getTime();
                    motion.curAnim = motion.selSideStepLeft;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_SIDESTEP_RIGHT:

                    this.currentState = this.EDIT_SIDESTEP_RIGHT;
                    this.editing = true;
                    motion.lastWalkStartTime = new Date().getTime();
                    motion.curAnim = motion.selSideStepRight;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_FLYING:

                    this.currentState = this.EDIT_FLYING;
                    this.editing = true;
                    motion.lastWalkStartTime = 0;
                    motion.curAnim = motion.selFly;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_FLYING_UP:

                    this.currentState = this.EDIT_FLYING_UP;
                    this.editing = true;
                    motion.lastWalkStartTime = 0;
                    motion.curAnim = motion.selFlyUp;
                    walkInterface.updateMenu();
                    return;

                case this.EDIT_FLYING_DOWN:

                    this.currentState = this.EDIT_FLYING_DOWN;
                    this.editing = true;
                    motion.lastWalkStartTime = 0;
                    motion.curAnim = motion.selFlyDown;
                    walkInterface.updateMenu();
                    return;

                case this.STANDING:
                default:

                    this.currentState = this.STANDING;
                    this.editing = false;
                    motion.lastWalkStartTime = 0;
                    motion.curAnim = motion.selStand;
                    walkInterface.updateMenu();

                    // initialisation - runs at script startup only
                    if (motion.strideLength === 0) {

                        motion.setGender(MALE);
                        if (motion.direction === BACKWARDS) {
                            motion.strideLength = motion.selWalk.calibration.strideLengthBackwards;
                        } else {
                            motion.strideLength = motion.selWalk.calibration.strideLengthForwards;
                        }
                    }
                    return;
            }
        }
    }
})(); // end state object literal

// constructor for animation Transition
Transition = function(lastAnimation, nextAnimation, reachPoses, transitionDuration, easingLower, easingUpper) {

    this.lastAnim = lastAnimation; // name of last animation
    this.nextAnimation = nextAnimation; // name of next animation
    if (lastAnimation === motion.selWalk ||
        nextAnimation === motion.selSideStepLeft ||
        nextAnimation === motion.selSideStepRight) {
        // boolean - is the last animation a walking animation?
        this.walkingAtStart = true;
    } else {
        this.walkingAtStart = false;
    }
    if (nextAnimation === motion.selWalk ||
        nextAnimation === motion.selSideStepLeft ||
        nextAnimation === motion.selSideStepRight) {
        // boolean - is the next animation a walking animation?
        this.walkingAtEnd = true;
    } else {
        this.walkingAtEnd = false;
    }
    this.reachPoses = reachPoses; // placeholder / stub: array of reach poses for squash and stretch techniques
    this.transitionDuration = transitionDuration; // length of transition (seconds)
    this.easingLower = easingLower; // Bezier curve handle (normalised)
    this.easingUpper = easingUpper; // Bezier curve handle (normalised)
    this.startTime = new Date().getTime(); // Starting timestamp (seconds)
    this.progress = 0; // how far are we through the transition?
    this.walkWheelIncrement = 3; // how much to turn the walkwheel each frame when transitioning to / from walking
    this.walkWheelAdvance = 0; // how many degrees the walk wheel has been advanced during the transition
    this.walkStopAngle = 0; // what angle should we stop the walk cycle?

}; // end Transition constructor


walkAssets = (function () {

    // path to the sounds used for the footsteps
    var _pathToSounds = 'https://s3.amazonaws.com/hifi-public/sounds/Footsteps/';

    // read in the sounds
    var _footsteps = [];
    _footsteps.push(new Sound(_pathToSounds+"FootstepW2Left-12db.wav"));
    _footsteps.push(new Sound(_pathToSounds+"FootstepW2Right-12db.wav"));
    _footsteps.push(new Sound(_pathToSounds+"FootstepW3Left-12db.wav"));
    _footsteps.push(new Sound(_pathToSounds+"FootstepW3Right-12db.wav"));
    _footsteps.push(new Sound(_pathToSounds+"FootstepW5Left-12db.wav"));
    _footsteps.push(new Sound(_pathToSounds+"FootstepW5Right-12db.wav"));

    // load the animation datafiles
    Script.include(pathToAssets+"animations/dd-female-standard-walk-animation.js");
    Script.include(pathToAssets+"animations/dd-female-flying-up-animation.js");
    Script.include(pathToAssets+"animations/dd-female-flying-animation.js");
    Script.include(pathToAssets+"animations/dd-female-flying-down-animation.js");
    Script.include(pathToAssets+"animations/dd-female-standing-one-animation.js");
    Script.include(pathToAssets+"animations/dd-female-sidestep-left-animation.js");
    Script.include(pathToAssets+"animations/dd-female-sidestep-right-animation.js");
    Script.include(pathToAssets+"animations/dd-male-standard-walk-animation.js");
    Script.include(pathToAssets+"animations/dd-male-flying-up-animation.js");
    Script.include(pathToAssets+"animations/dd-male-flying-animation.js");
    Script.include(pathToAssets+"animations/dd-male-flying-down-animation.js");
    Script.include(pathToAssets+"animations/dd-male-standing-one-animation.js");
    Script.include(pathToAssets+"animations/dd-male-sidestep-left-animation.js");
    Script.include(pathToAssets+"animations/dd-male-sidestep-right-animation.js");

    // read in the animation files
    var _FemaleStandardWalkFile = new FemaleStandardWalk();
    var _femaleStandardWalk = _FemaleStandardWalkFile.loadAnimation();
    var _FemaleFlyingUpFile = new FemaleFlyingUp();
    var _femaleFlyingUp = _FemaleFlyingUpFile.loadAnimation();
    var _FemaleFlyingFile = new FemaleFlying();
    var _femaleFlying = _FemaleFlyingFile.loadAnimation();
    var _FemaleFlyingDownFile = new FemaleFlyingDown();
    var _femaleFlyingDown = _FemaleFlyingDownFile.loadAnimation();
    var _FemaleStandOneFile = new FemaleStandingOne();
    var _femaleStandOne = _FemaleStandOneFile.loadAnimation();
    var _FemaleSideStepLeftFile = new FemaleSideStepLeft();
    var _femaleSideStepLeft = _FemaleSideStepLeftFile.loadAnimation();
    var _FemaleSideStepRightFile = new FemaleSideStepRight();
    var _femaleSideStepRight = _FemaleSideStepRightFile.loadAnimation();
    var _MaleStandardWalkFile = new MaleStandardWalk(filter);
    var _maleStandardWalk = _MaleStandardWalkFile.loadAnimation();
    var _MaleFlyingUpFile = new MaleFlyingUp();
    var _maleFlyingUp = _MaleFlyingUpFile.loadAnimation();
    var _MaleFlyingFile = new MaleFlying();
    var _maleFlying = _MaleFlyingFile.loadAnimation();
    var _MaleFlyingDownFile = new MaleFlyingDown();
    var _maleFlyingDown = _MaleFlyingDownFile.loadAnimation();
    var _MaleStandOneFile = new MaleStandingOne();
    var _maleStandOne = _MaleStandOneFile.loadAnimation();
    var _MaleSideStepLeftFile = new MaleSideStepLeft();
    var _maleSideStepLeft = _MaleSideStepLeftFile.loadAnimation();
    var _MaleSideStepRightFile = new MaleSideStepRight();
    var _maleSideStepRight = _MaleSideStepRightFile.loadAnimation();

    return {

        // expose the sound assets
        footsteps: _footsteps,

        // expose the animation assets
        femaleStandardWalk: _femaleStandardWalk,
        femaleFlyingUp: _femaleFlyingUp,
        femaleFlying: _femaleFlying,
        femaleFlyingDown: _femaleFlyingDown,
        femaleStandOne: _femaleStandOne,
        femaleSideStepLeft: _femaleSideStepLeft,
        femaleSideStepRight: _femaleSideStepRight,
        maleStandardWalk: _maleStandardWalk,
        maleFlyingUp: _maleFlyingUp,
        maleFlying: _maleFlying,
        maleFlyingDown: _maleFlyingDown,
        maleStandOne: _maleStandOne,
        maleSideStepLeft: _maleSideStepLeft,
        maleSideStepRight: _maleSideStepRight,
    }
})();
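walkApi.js drives everything from the state object literal above: a fixed list of numeric states plus a single setInternalState() switch that records the new state, sets the editing flag and refreshes the menu. Re-expressed in C++ purely as an illustration of that pattern (the enum, struct and onStateChanged hook below are invented for the sketch, not part of the script or of hifi):

    #include <cstdio>

    // A finite list of states, mirroring state.STANDING, state.WALKING, ...
    enum class WalkState { Standing, Walking, SideStep, Flying, EditStanding };

    struct WalkStateMachine {
        WalkState current = WalkState::Standing;
        bool editing = false;

        // Equivalent of state.setInternalState(): every transition funnels through
        // one place, so the bookkeeping (editing flag, UI refresh) cannot be missed.
        void setInternalState(WalkState next) {
            current = next;
            editing = (next == WalkState::EditStanding);
            onStateChanged();  // walkInterface.updateMenu() in the script
        }

        void onStateChanged() const {
            std::printf("state changed to %d (editing=%d)\n",
                        static_cast<int>(current), editing ? 1 : 0);
        }
    };

    int main() {
        WalkStateMachine fsm;
        fsm.setInternalState(WalkState::Walking);
        fsm.setInternalState(WalkState::EditStanding);
        return 0;
    }

Funnelling every transition through one method is what keeps the script's editing flag and menu in sync with the current state.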
examples/libraries/walkFilters.js (new file, 225 lines)
//
// walkFilters.js
//
// version 1.001
//
// Created by David Wooldridge, Autumn 2014
//
// Provides a variety of filters for use by the walk.js script v1.1
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

AveragingFilter = function(length) {

    //this.name = name;
    this.pastValues = [];

    for(var i = 0; i < length; i++) {
        this.pastValues.push(0);
    }

    // single arg is the nextInputValue
    this.process = function() {

        if (this.pastValues.length === 0 && arguments[0]) {
            return arguments[0];
        } else if (arguments[0]) {
            // apply quick and simple LP filtering
            this.pastValues.push(arguments[0]);
            this.pastValues.shift();
            var nextOutputValue = 0;
            for (var ea in this.pastValues) nextOutputValue += this.pastValues[ea];
            return nextOutputValue / this.pastValues.length;
        } else {
            return 0;
        }
    };
};

// 2nd order Butterworth LP filter - calculate coeffs here: http://www-users.cs.york.ac.uk/~fisher/mkfilter/trad.html
// provides LP filtering with a more stable frequency / phase response
ButterworthFilter = function(cutOff) {

    // cut off frequency = 5Hz
    this.gain = 20.20612010;
    this.coeffOne = -0.4775922501;
    this.coeffTwo = 1.2796324250;

    // initialise the arrays
    this.xv = [];
    this.yv = [];
    for(var i = 0; i < 3; i++) {
        this.xv.push(0);
        this.yv.push(0);
    }

    // process values
    this.process = function(nextInputValue) {

        this.xv[0] = this.xv[1];
        this.xv[1] = this.xv[2];
        this.xv[2] = nextInputValue / this.gain;

        this.yv[0] = this.yv[1];
        this.yv[1] = this.yv[2];
        this.yv[2] = (this.xv[0] + this.xv[2]) +
                     2 * this.xv[1] +
                     (this.coeffOne * this.yv[0]) +
                     (this.coeffTwo * this.yv[1]);

        return this.yv[2];
    };
}; // end Butterworth filter contructor

// Add harmonics to a given sine wave to form square, sawtooth or triangle waves
// Geometric wave synthesis fundamentals taken from: http://hyperphysics.phy-astr.gsu.edu/hbase/audio/geowv.html
WaveSynth = function(waveShape, numHarmonics, smoothing) {

    this.numHarmonics = numHarmonics;
    this.waveShape = waveShape;
    this.averagingFilter = new AveragingFilter(smoothing);

    // NB: frequency in radians
    this.shapeWave = function(frequency) {

        // make some shapes
        var harmonics = 0;
        var multiplier = 0;
        var iterations = this.numHarmonics * 2 + 2;
        if (this.waveShape === TRIANGLE) {
            iterations++;
        }

        for(var n = 2; n < iterations; n++) {

            switch(this.waveShape) {

                case SAWTOOTH: {

                    multiplier = 1 / n;
                    harmonics += multiplier * Math.sin(n * frequency);
                    break;
                }

                case TRIANGLE: {

                    if (n % 2 === 1) {
                        var mulitplier = 1 / (n * n);
                        // multiply (4n-1)th harmonics by -1
                        if (n === 3 || n === 7 || n === 11 || n === 15) {
                            mulitplier *= -1;
                        }
                        harmonics += mulitplier * Math.sin(n * frequency);
                    }
                    break;
                }

                case SQUARE: {

                    if (n % 2 === 1) {
                        multiplier = 1 / n;
                        harmonics += multiplier * Math.sin(n * frequency);
                    }
                    break;
                }
            }
        }

        // smooth the result and return
        return this.averagingFilter.process(harmonics);
    };
};

// Create a wave shape by summing pre-calcualted sinusoidal harmonics
HarmonicsFilter = function(magnitudes, phaseAngles) {

    this.magnitudes = magnitudes;
    this.phaseAngles = phaseAngles;

    this.calculate = function(twoPiFT) {

        var harmonics = 0;
        var numHarmonics = magnitudes.length;

        for(var n = 0; n < numHarmonics; n++) {
            harmonics += this.magnitudes[n] * Math.cos(n * twoPiFT - this.phaseAngles[n]);
        }
        return harmonics;
    };
};

// the main filter object literal
filter = (function() {

    // Bezier private functions
    function _B1(t) { return t * t * t };
    function _B2(t) { return 3 * t * t * (1 - t) };
    function _B3(t) { return 3 * t * (1 - t) * (1 - t) };
    function _B4(t) { return (1 - t) * (1 - t) * (1 - t) };

    return {

        // helper methods
        degToRad: function(degrees) {

            var convertedValue = degrees * Math.PI / 180;
            return convertedValue;
        },

        radToDeg: function(radians) {

            var convertedValue = radians * 180 / Math.PI;
            return convertedValue;
        },

        // these filters need instantiating, as they hold arrays of previous values
        createAveragingFilter: function(length) {

            var newAveragingFilter = new AveragingFilter(length);
            return newAveragingFilter;
        },

        createButterworthFilter: function(cutoff) {

            var newButterworthFilter = new ButterworthFilter(cutoff);
            return newButterworthFilter;
        },

        createWaveSynth: function(waveShape, numHarmonics, smoothing) {

            var newWaveSynth = new WaveSynth(waveShape, numHarmonics, smoothing);
            return newWaveSynth;
        },

        createHarmonicsFilter: function(magnitudes, phaseAngles) {

            var newHarmonicsFilter = new HarmonicsFilter(magnitudes, phaseAngles);
            return newHarmonicsFilter;
        },


        // the following filters do not need separate instances, as they hold no previous values
        bezier: function(percent, C1, C2, C3, C4) {

            // Bezier functions for more natural transitions
            // based on script by Dan Pupius (www.pupius.net) http://13thparallel.com/archive/bezier-curves/
            var pos = {x: 0, y: 0};
            pos.x = C1.x * _B1(percent) + C2.x * _B2(percent) + C3.x * _B3(percent) + C4.x * _B4(percent);
            pos.y = C1.y * _B1(percent) + C2.y * _B2(percent) + C3.y * _B3(percent) + C4.y * _B4(percent);
            return pos;
        },

        // simple clipping filter (clips bottom of wave only, special case for hips y-axis skeleton offset)
        clipTrough: function(inputValue, peak, strength) {

            var outputValue = inputValue * strength;
            if (outputValue < -peak) {
                outputValue = -peak;
            }
            return outputValue;
        }
    }

})();
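The ButterworthFilter above is a direct-form second-order low-pass section: each call shifts the input and output histories and computes y[n] = (x[n-2] + x[n]) + 2*x[n-1] + c1*y[n-2] + c2*y[n-1], with the input pre-divided by the filter gain; the script's comment gives the gain and feedback coefficients for a 5 Hz cut-off taken from the linked mkfilter page. The same recurrence in C++, as a minimal sketch (coefficients copied from the script, everything else illustrative):

    #include <cstdio>

    // 2nd-order Butterworth low-pass, same recurrence as walkFilters.js:
    //   y[n] = (x[n-2] + x[n]) + 2*x[n-1] + c1*y[n-2] + c2*y[n-1]
    // with each input sample first divided by the filter gain.
    class ButterworthLowPass {
    public:
        float process(float input) {
            // shift the input history and bring in the gain-normalised new sample
            _x[0] = _x[1]; _x[1] = _x[2]; _x[2] = input / GAIN;
            // shift the output history and compute the new output
            _y[0] = _y[1]; _y[1] = _y[2];
            _y[2] = (_x[0] + _x[2]) + 2.0f * _x[1] + C1 * _y[0] + C2 * _y[1];
            return _y[2];
        }

    private:
        // coefficients for a 5 Hz cut-off, as stated in the script's comment
        static constexpr float GAIN = 20.20612010f;
        static constexpr float C1 = -0.4775922501f;
        static constexpr float C2 = 1.2796324250f;
        float _x[3] = {0.0f, 0.0f, 0.0f};
        float _y[3] = {0.0f, 0.0f, 0.0f};
    };

    int main() {
        ButterworthLowPass lp;
        for (int n = 0; n < 5; n++) {
            std::printf("%f\n", lp.process(1.0f));  // step response settles towards 1.0
        }
        return 0;
    }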
examples/libraries/walkInterface.js (new file, 2690 lines): file diff suppressed because it is too large
examples/walk.js (6177 lines): file diff suppressed because it is too large
@@ -726,11 +726,11 @@ void Application::paintGL() {
         displaySide(*whichCamera);
         glPopMatrix();
 
-        if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
-            renderRearViewMirror(_mirrorViewRect);
-
-        } else if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
+        if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
             _rearMirrorTools->render(true);
+
+        } else if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
+            renderRearViewMirror(_mirrorViewRect);
         }
 
         _glowEffect.render();
@@ -788,7 +788,7 @@ void Application::updateProjectionMatrix(Camera& camera, bool updateViewFrustum)
     // Tell our viewFrustum about this change, using the application camera
     if (updateViewFrustum) {
         loadViewFrustum(camera, _viewFrustum);
-        computeOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);
+        _viewFrustum.computeOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);
 
         // If we're in Display Frustum mode, then we want to use the slightly adjust near/far clip values of the
         // _viewFrustumOffsetCamera, so that we can see more of the application content in the application's frustum
@@ -2011,25 +2011,17 @@ void Application::init() {
 
 void Application::closeMirrorView() {
     if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
-        Menu::getInstance()->triggerOption(MenuOption::Mirror);;
+        Menu::getInstance()->triggerOption(MenuOption::Mirror);
     }
 }
 
 void Application::restoreMirrorView() {
-    if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
-        Menu::getInstance()->triggerOption(MenuOption::Mirror);;
-    }
-
     if (!Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
         Menu::getInstance()->triggerOption(MenuOption::FullscreenMirror);
     }
 }
 
 void Application::shrinkMirrorView() {
-    if (!Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
-        Menu::getInstance()->triggerOption(MenuOption::Mirror);;
-    }
-
     if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
         Menu::getInstance()->triggerOption(MenuOption::FullscreenMirror);
     }
@@ -3050,13 +3042,16 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
         }
     }
 
     bool mirrorMode = (whichCamera.getMode() == CAMERA_MODE_MIRROR);
     {
         PerformanceTimer perfTimer("avatars");
         _avatarManager.renderAvatars(mirrorMode ? Avatar::MIRROR_RENDER_MODE : Avatar::NORMAL_RENDER_MODE,
             false, selfAvatarOnly);
     }
 
     {
         PROFILE_RANGE("DeferredLighting");
         PerformanceTimer perfTimer("lighting");
@@ -3115,7 +3110,7 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
             emit renderingInWorldInterface();
         }
     }
 
     if (Menu::getInstance()->isOptionChecked(MenuOption::Wireframe)) {
         glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
     }
@@ -4325,8 +4320,6 @@ bool Application::isVSyncOn() const {
     if (wglewGetExtension("WGL_EXT_swap_control")) {
         int swapInterval = wglGetSwapIntervalEXT();
         return (swapInterval > 0);
-    } else {
-        return true;
     }
 #elif defined(Q_OS_LINUX)
     // TODO: write the poper code for linux
@@ -4337,10 +4330,9 @@ bool Application::isVSyncOn() const {
     } else {
         return true;
     }
     */
-#else
-    return true;
 #endif
+    return true;
 }
 
 bool Application::isVSyncEditable() const {
@@ -4355,7 +4347,6 @@ bool Application::isVSyncEditable() const {
         return true;
     }
     */
-#else
 #endif
     return false;
 }
@@ -97,6 +97,9 @@ Audio::Audio(QObject* parent) :
     _muted(false),
     _reverb(false),
    _reverbOptions(&_scriptReverbOptions),
+    _gverb(NULL),
+    _iconColor(1.0f),
+    _iconPulseTimeReference(usecTimestampNow()),
     _processSpatialAudio(false),
     _spatialAudioStart(0),
     _spatialAudioFinish(0),
@@ -544,7 +547,7 @@ void Audio::addReverb(int16_t* samplesData, int numSamples, QAudioFormat& audioF
         gverb_do(_gverb, value, &lValue, &rValue);
 
         // Mix, accounting for clipping, the left and right channels. Ignore the rest.
-        for (unsigned int j = sample; j < sample + audioFormat.channelCount(); j++) {
+        for (int j = sample; j < sample + audioFormat.channelCount(); j++) {
            if (j == sample) {
                 // left channel
                 int lResult = glm::clamp((int)(samplesData[j] * dryFraction + lValue * wetFraction), -32768, 32767);
@@ -1342,8 +1345,11 @@ void Audio::handleAudioByteArray(const QByteArray& audioByteArray, const AudioIn
         QAudioOutput* localSoundOutput = new QAudioOutput(getNamedAudioDeviceForMode(QAudio::AudioOutput, _outputAudioDeviceName), localFormat, this);
 
         QIODevice* localIODevice = localSoundOutput->start();
-        qDebug() << "Writing" << audioByteArray.size() << "to" << localIODevice;
-        localIODevice->write(audioByteArray);
+        if (localIODevice) {
+            localIODevice->write(audioByteArray);
+        } else {
+            qDebug() << "Unable to handle audio byte array. Error:" << localSoundOutput->error();
+        }
     } else {
         qDebug() << "Audio::handleAudioByteArray called with an empty byte array. Sound is likely still downloading.";
     }
@@ -1390,23 +1396,37 @@ void Audio::renderToolBox(int x, int y, bool boxed) {
     _iconBounds = QRect(x, y, MUTE_ICON_SIZE, MUTE_ICON_SIZE);
     if (!_muted) {
         glBindTexture(GL_TEXTURE_2D, _micTextureId);
+        _iconColor = 1.0f;
     } else {
         glBindTexture(GL_TEXTURE_2D, _muteTextureId);
+
+        // Make muted icon pulsate
+        static const float PULSE_MIN = 0.4f;
+        static const float PULSE_MAX = 1.0f;
+        static const float PULSE_FREQUENCY = 1.0f; // in Hz
+        qint64 now = usecTimestampNow();
+        if (now - _iconPulseTimeReference > USECS_PER_SECOND) {
+            // Prevents t from getting too big, which would diminish glm::cos precision
+            _iconPulseTimeReference = now - ((now - _iconPulseTimeReference) % USECS_PER_SECOND);
+        }
+        float t = (float)(now - _iconPulseTimeReference) / (float)USECS_PER_SECOND;
+        float pulseFactor = (glm::cos(t * PULSE_FREQUENCY * 2.0f * PI) + 1.0f) / 2.0f;
+        _iconColor = PULSE_MIN + (PULSE_MAX - PULSE_MIN) * pulseFactor;
     }
 
-    glColor3f(1,1,1);
+    glColor3f(_iconColor, _iconColor, _iconColor);
     glBegin(GL_QUADS);
 
-    glTexCoord2f(1, 1);
+    glTexCoord2f(1.0f, 1.0f);
     glVertex2f(_iconBounds.left(), _iconBounds.top());
 
-    glTexCoord2f(0, 1);
+    glTexCoord2f(0.0f, 1.0f);
     glVertex2f(_iconBounds.right(), _iconBounds.top());
 
-    glTexCoord2f(0, 0);
+    glTexCoord2f(0.0f, 0.0f);
     glVertex2f(_iconBounds.right(), _iconBounds.bottom());
 
-    glTexCoord2f(1, 0);
+    glTexCoord2f(1.0f, 0.0f);
     glVertex2f(_iconBounds.left(), _iconBounds.bottom());
 
     glEnd();
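The pulsating mute icon added to Audio::renderToolBox() above is a raised cosine: t is the time in seconds since a reference timestamp that is periodically pulled forward so the argument of glm::cos stays small, pulseFactor remaps the cosine from [-1, 1] to [0, 1], and the colour is a linear blend between PULSE_MIN and PULSE_MAX. The arithmetic on its own, as a hedged sketch rather than the hifi code:

    #include <cmath>
    #include <cstdio>

    // Brightness of a pulsating icon at a given time, matching the formula added
    // to Audio::renderToolBox(): a cosine remapped from [-1, 1] to [min, max].
    float pulseBrightness(double secondsSinceReference,
                          float pulseMin = 0.4f, float pulseMax = 1.0f,
                          float frequencyHz = 1.0f) {
        const double PI = 3.14159265358979323846;
        float pulseFactor = (std::cos(secondsSinceReference * frequencyHz * 2.0 * PI) + 1.0) / 2.0;
        return pulseMin + (pulseMax - pulseMin) * pulseFactor;
    }

    int main() {
        // Over one second the brightness goes 1.0 -> 0.4 -> 1.0.
        for (double t = 0.0; t <= 1.0; t += 0.25) {
            std::printf("t=%.2f brightness=%.2f\n", t, pulseBrightness(t));
        }
        return 0;
    }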
@@ -248,11 +248,13 @@ private:
     AudioEffectOptions _scriptReverbOptions;
     AudioEffectOptions _zoneReverbOptions;
     AudioEffectOptions* _reverbOptions;
-    ty_gverb *_gverb;
+    ty_gverb* _gverb;
     GLuint _micTextureId;
     GLuint _muteTextureId;
     GLuint _boxTextureId;
     QRect _iconBounds;
+    float _iconColor;
+    qint64 _iconPulseTimeReference;
 
     /// Audio callback in class context.
     inline void performIO(int16_t* inputLeft, int16_t* outputLeft, int16_t* outputRight);
@@ -386,7 +386,7 @@ Menu::Menu() :
 
 #if defined(Q_OS_MAC)
 #else
-        QAction* vsyncAction = addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::RenderTargetFramerateVSyncOn, 0, true, this, SLOT(changeVSync()));
+        addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::RenderTargetFramerateVSyncOn, 0, true, this, SLOT(changeVSync()));
 #endif
     }
 
@@ -49,7 +49,7 @@ const float PITCH_SPEED = 100.0f; // degrees/sec
 const float COLLISION_RADIUS_SCALAR = 1.2f; // pertains to avatar-to-avatar collisions
 const float COLLISION_RADIUS_SCALE = 0.125f;
 
-const float MAX_WALKING_SPEED = 4.5f;
+const float MAX_WALKING_SPEED = 2.5f; // human walking speed
 const float MAX_BOOST_SPEED = 0.5f * MAX_WALKING_SPEED; // keyboard motor gets additive boost below this speed
 const float MIN_AVATAR_SPEED = 0.05f; // speed is set to zero below this
 
@@ -106,7 +106,7 @@ void Batch::setInputStream(Slot startChannel, const BufferStream& stream) {
     const Buffers& buffers = stream.getBuffers();
     const Offsets& offsets = stream.getOffsets();
     const Offsets& strides = stream.getStrides();
-    for (int i = 0; i < buffers.size(); i++) {
+    for (unsigned int i = 0; i < buffers.size(); i++) {
         setInputBuffer(startChannel + i, buffers[i], offsets[i], strides[i]);
     }
 }
@@ -33,15 +33,15 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
 
     (&::gpu::GLBackend::do_glEnable),
     (&::gpu::GLBackend::do_glDisable),
 
     (&::gpu::GLBackend::do_glEnableClientState),
     (&::gpu::GLBackend::do_glDisableClientState),
 
     (&::gpu::GLBackend::do_glCullFace),
     (&::gpu::GLBackend::do_glAlphaFunc),
 
     (&::gpu::GLBackend::do_glDepthFunc),
     (&::gpu::GLBackend::do_glDepthMask),
     (&::gpu::GLBackend::do_glDepthRange),
 
     (&::gpu::GLBackend::do_glBindBuffer),
@@ -59,18 +59,18 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
     (&::gpu::GLBackend::do_glPushMatrix),
     (&::gpu::GLBackend::do_glPopMatrix),
     (&::gpu::GLBackend::do_glMultMatrixf),
     (&::gpu::GLBackend::do_glLoadMatrixf),
     (&::gpu::GLBackend::do_glLoadIdentity),
     (&::gpu::GLBackend::do_glRotatef),
     (&::gpu::GLBackend::do_glScalef),
     (&::gpu::GLBackend::do_glTranslatef),
 
     (&::gpu::GLBackend::do_glDrawArrays),
     (&::gpu::GLBackend::do_glDrawRangeElements),
 
     (&::gpu::GLBackend::do_glColorPointer),
     (&::gpu::GLBackend::do_glNormalPointer),
     (&::gpu::GLBackend::do_glTexCoordPointer),
     (&::gpu::GLBackend::do_glVertexPointer),
 
     (&::gpu::GLBackend::do_glVertexAttribPointer),
@@ -79,7 +79,7 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
 
     (&::gpu::GLBackend::do_glColor4f),
 
     (&::gpu::GLBackend::do_glMaterialf),
     (&::gpu::GLBackend::do_glMaterialfv),
 };
 
@@ -113,20 +113,16 @@ static const GLenum _elementTypeToGLType[NUM_TYPES]= {
 
 
 GLBackend::GLBackend() :
-    _inputFormat(0),
-    _inputAttributeActivation(0),
     _needInputFormatUpdate(true),
+    _inputFormat(0),
     _inputBuffersState(0),
     _inputBuffers(_inputBuffersState.size(), BufferPointer(0)),
     _inputBufferOffsets(_inputBuffersState.size(), 0),
     _inputBufferStrides(_inputBuffersState.size(), 0),
 
     _indexBuffer(0),
     _indexBufferOffset(0),
+    _inputAttributeActivation(0),
     _transform()
 {
 }
@@ -142,7 +138,7 @@ void GLBackend::renderBatch(Batch& batch) {
 
     GLBackend backend;
 
-    for (int i = 0; i < numCommands; i++) {
+    for (unsigned int i = 0; i < numCommands; i++) {
         CommandCall call = _commandCalls[(*command)];
         (backend.*(call))(batch, *offset);
         command++;
@@ -209,7 +205,7 @@ void GLBackend::do_drawIndexed(Batch& batch, uint32 paramOffset) {
 
     GLenum glType = _elementTypeToGLType[_indexBufferType];
 
-    glDrawElements(mode, numIndices, glType, (GLvoid*)(startIndex + _indexBufferOffset));
+    glDrawElements(mode, numIndices, glType, reinterpret_cast<GLvoid*>(startIndex + _indexBufferOffset));
     CHECK_GL_ERROR();
 }
 
@@ -271,7 +267,7 @@ void GLBackend::updateInput() {
     }
 
     // Manage Activation what was and what is expected now
-    for (int i = 0; i < newActivation.size(); i++) {
+    for (unsigned int i = 0; i < newActivation.size(); i++) {
         bool newState = newActivation[i];
         if (newState != _inputAttributeActivation[i]) {
 #if defined(SUPPORT_LEGACY_OPENGL)
@@ -320,7 +316,7 @@ void GLBackend::updateInput() {
             CHECK_GL_ERROR();
             _inputBuffersState[bufferNum] = false;
 
-            for (int i = 0; i < channel._slots.size(); i++) {
+            for (unsigned int i = 0; i < channel._slots.size(); i++) {
                 const Stream::Attribute& attrib = attributes.at(channel._slots[i]);
                 GLuint slot = attrib._slot;
                 GLuint count = attrib._element.getDimensionCount();
@@ -331,16 +327,16 @@ void GLBackend::updateInput() {
                 if (slot < NUM_CLASSIC_ATTRIBS) {
                     switch (slot) {
                     case Stream::POSITION:
-                        glVertexPointer(count, type, stride, (GLvoid*)pointer);
+                        glVertexPointer(count, type, stride, reinterpret_cast<GLvoid*>(pointer));
                         break;
                     case Stream::NORMAL:
-                        glNormalPointer(type, stride, (GLvoid*)pointer);
+                        glNormalPointer(type, stride, reinterpret_cast<GLvoid*>(pointer));
                         break;
                     case Stream::COLOR:
-                        glColorPointer(count, type, stride, (GLvoid*)pointer);
+                        glColorPointer(count, type, stride, reinterpret_cast<GLvoid*>(pointer));
                         break;
                     case Stream::TEXCOORD:
-                        glTexCoordPointer(count, type, stride, (GLvoid*)pointer);
+                        glTexCoordPointer(count, type, stride, reinterpret_cast<GLvoid*>(pointer));
                         break;
                     };
                 } else {
@@ -348,7 +344,8 @@ void GLBackend::updateInput() {
                 {
 #endif
                     GLboolean isNormalized = attrib._element.isNormalized();
-                    glVertexAttribPointer(slot, count, type, isNormalized, stride, (GLvoid*)pointer);
+                    glVertexAttribPointer(slot, count, type, isNormalized, stride,
+                        reinterpret_cast<GLvoid*>(pointer));
                 }
                 CHECK_GL_ERROR();
             }
@@ -475,21 +472,24 @@ void GLBackend::updateTransform() {
             glMatrixMode(GL_MODELVIEW);
             _transform._lastMode = GL_MODELVIEW;
         }
+        Transform::Mat4 modelView;
         if (!_transform._view.isNull()) {
             Transform mvx;
             Transform::mult(mvx, (*_transform._view), (*_transform._model));
-            Transform::Mat4 mv = mvx.getMatrix();
-            glLoadMatrixf((const GLfloat*) &mv[0]);
+            mvx.getMatrix(modelView);
         } else {
-            glLoadMatrixf((const GLfloat*) &_transform._model->getMatrix());
+            _transform._model->getMatrix(modelView);
         }
+        glLoadMatrixf(reinterpret_cast< const GLfloat* >(&modelView));
     } else {
         if (!_transform._view.isNull()) {
            if (_transform._lastMode != GL_MODELVIEW) {
                 glMatrixMode(GL_MODELVIEW);
                 _transform._lastMode = GL_MODELVIEW;
             }
-            glLoadMatrixf((const GLfloat*) & _transform._view->getMatrix());
+            Transform::Mat4 modelView;
+            _transform._model->getMatrix(modelView);
+            glLoadMatrixf(reinterpret_cast< const GLfloat* >(&modelView));
         } else {
             // glLoadIdentity();
         }
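A pattern that repeats through the GLBackend::updateTransform() hunk above (and in Model.cpp further down) is that Transform::getMatrix() is now given an output parameter instead of having the address of its return value taken inline, so the matrix handed to glLoadMatrixf lives in a named local whose lifetime clearly covers the call; taking the address of a value returned by getMatrix() is fragile if the matrix is returned by value, because that temporary does not outlive the statement. A minimal illustration of the calling style, with Mat4 and the types below standing in for the real gpu classes rather than reproducing them:

    #include <array>
    #include <cstdio>

    using Mat4 = std::array<float, 16>;  // stand-in for Transform::Mat4

    struct Transform {
        Mat4 _matrix{};

        // Style used by the commit: the caller provides the storage, so the pointer
        // later handed to the GL call refers to a local with a well-defined lifetime.
        void getMatrix(Mat4& result) const { result = _matrix; }
    };

    // stand-in for glLoadMatrixf
    void loadMatrix(const float* m) { std::printf("first element: %f\n", m[0]); }

    int main() {
        Transform view;
        Mat4 modelView;
        view.getMatrix(modelView);      // fill a named local...
        loadMatrix(modelView.data());   // ...then pass its storage to GL
        return 0;
    }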
@@ -8,6 +8,8 @@
 // Distributed under the Apache License, Version 2.0.
 // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
 //
 
+#include "Context.h"
+
 #include "Resource.h"
 
 #include <QDebug>
@@ -479,9 +479,7 @@ bool Model::findRayIntersectionAgainstSubMeshes(const glm::vec3& origin, const g
     float bestDistance = std::numeric_limits<float>::max();
     float distanceToSubMesh;
     BoxFace subMeshFace;
-    BoxFace bestSubMeshFace;
     int subMeshIndex = 0;
-    int bestSubMeshIndex = -1;
 
     // If we hit the models box, then consider the submeshes...
     foreach(const AABox& subMeshBox, _calculatedMeshBoxes) {
@@ -489,10 +487,9 @@ bool Model::findRayIntersectionAgainstSubMeshes(const glm::vec3& origin, const g
 
         if (subMeshBox.findRayIntersection(origin, direction, distanceToSubMesh, subMeshFace)) {
             if (distanceToSubMesh < bestDistance) {
-                bestSubMeshIndex = subMeshIndex;
                 bestDistance = distanceToSubMesh;
-                bestSubMeshFace = subMeshFace;
                 intersectedSomething = true;
+                face = subMeshFace;
                 extraInfo = geometry.getModelNameOfMesh(subMeshIndex);
             }
         }
@@ -562,16 +559,7 @@ bool Model::render(float alpha, RenderMode mode, RenderArgs* args) {
     if (_transforms.empty()) {
         _transforms.push_back(gpu::TransformPointer(new gpu::Transform()));
     }
-    _transforms[0]->evalFromRawMatrix(Application::getInstance()->getUntranslatedViewMatrix());
-
-    gpu::TransformPointer currentView(Application::getInstance()->getViewTransform());
-    currentView->getMatrix();
-
-    gpu::Transform::Mat4 glview = Application::getInstance()->getUntranslatedViewMatrix();
-
-    _transforms[0]->setTranslation(currentView->getTranslation());
-    _transforms[0]->setRotation(currentView->getRotation());
-    _transforms[0]->setScale(currentView->getScale());
+    (*_transforms[0]) = gpu::Transform((*Application::getInstance()->getViewTransform()));
     _transforms[0]->postTranslate(_translation);
 
     batch.setViewTransform(_transforms[0]);
@@ -1872,14 +1860,6 @@ int Model::renderMeshes(gpu::Batch& batch, RenderMode mode, bool translucent, fl
     }
 
     GLBATCH(glPushMatrix)();
-    // Application::getInstance()->loadTranslatedViewMatrix(_translation);
-    // GLBATCH(glLoadMatrixf)((const GLfloat*)&Application::getInstance()->getUntranslatedViewMatrix());
-
-    glm::vec3 viewMatTranslation = Application::getInstance()->getViewMatrixTranslation();
-
-
-
-    // GLBATCH(glTranslatef)(_translation.x + viewMatTranslation.x, _translation.y + viewMatTranslation.y, _translation.z + viewMatTranslation.z);
-
     const MeshState& state = _meshStates.at(i);
     if (state.clusterMatrices.size() > 1) {
@@ -1889,7 +1869,6 @@ int Model::renderMeshes(gpu::Batch& batch, RenderMode mode, bool translucent, fl
         gpu::TransformPointer modelTransform(new gpu::Transform());
         batch.setModelTransform(modelTransform);
     } else {
-        // GLBATCH(glMultMatrixf)((const GLfloat*)&state.clusterMatrices[0]);
 
         gpu::TransformPointer modelTransform(new gpu::Transform(state.clusterMatrices[0]));
         batch.setModelTransform(modelTransform);
@@ -984,7 +984,9 @@ void ApplicationOverlay::renderAudioMeter() {
     const int AUDIO_METER_X = MIRROR_VIEW_LEFT_PADDING + MUTE_ICON_SIZE + AUDIO_METER_INSET + AUDIO_METER_GAP;

     int audioMeterY;
-    if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
+    bool boxed = Menu::getInstance()->isOptionChecked(MenuOption::Mirror) &&
+        !Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror);
+    if (boxed) {
         audioMeterY = MIRROR_VIEW_HEIGHT + AUDIO_METER_GAP + MUTE_ICON_PADDING;
     } else {
         audioMeterY = AUDIO_METER_GAP + MUTE_ICON_PADDING;
@@ -1022,9 +1024,7 @@ void ApplicationOverlay::renderAudioMeter() {
         renderCollisionOverlay(glWidget->width(), glWidget->height(), magnitude, 1.0f);
     }

-    audio->renderToolBox(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP,
-                         audioMeterY,
-                         Menu::getInstance()->isOptionChecked(MenuOption::Mirror));
+    audio->renderToolBox(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP, audioMeterY, boxed);

     audio->renderScope(glWidget->width(), glWidget->height());
@@ -156,6 +156,9 @@ void RearMirrorTools::displayIcon(QRect bounds, QRect iconBounds, GLuint texture
     }
     glEnd();
     glPopMatrix();
+
+    glMatrixMode(GL_MODELVIEW);
+
     glBindTexture(GL_TEXTURE_2D, 0);
     glDisable(GL_TEXTURE_2D);
 }
@@ -129,7 +129,7 @@ int TextRenderer::draw(int x, int y, const char* str) {
         leftBottom.x, rightTop.y, ls, tt, };

     const int NUM_COLOR_SCALARS_PER_GLYPH = 4;
-    unsigned int colorBuffer[NUM_COLOR_SCALARS_PER_GLYPH] = { compactColor, compactColor, compactColor, compactColor };
+    int colorBuffer[NUM_COLOR_SCALARS_PER_GLYPH] = { compactColor, compactColor, compactColor, compactColor };

     gpu::Buffer::Size offset = sizeof(vertexBuffer) * _numGlyphsBatched;
     gpu::Buffer::Size colorOffset = sizeof(colorBuffer) * _numGlyphsBatched;
@@ -181,9 +181,9 @@ TextRenderer::TextRenderer(const Properties& properties) :
     _color(properties.color),
     _glyphsBuffer(new gpu::Buffer()),
     _glyphsColorBuffer(new gpu::Buffer()),
-    _numGlyphsBatched(0),
     _glyphsStreamFormat(new gpu::Stream::Format()),
-    _glyphsStream(new gpu::BufferStream())
+    _glyphsStream(new gpu::BufferStream()),
+    _numGlyphsBatched(0)
 {
     _glyphsStreamFormat->setAttribute(gpu::Stream::POSITION, 0, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::POS_XYZ), 0);
     const int NUM_POS_COORDS = 2;
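Moving _numGlyphsBatched(0) to the end of the initializer list matches the member declaration order: C++ always initializes members in the order they are declared, not the order they appear in the initializer list, so an out-of-order list is at best a -Wreorder warning and at worst hides a member that reads another member before it is set. A minimal sketch of the rule (hypothetical GlyphBatch class, not the real TextRenderer):

#include <vector>

// Members are initialized in declaration order; keeping the initializer list in that
// same order makes the actual initialization sequence obvious and silences -Wreorder.
class GlyphBatch {
public:
    GlyphBatch() :
        _buffer(),            // declared first, initialized first
        _numGlyphsBatched(0)  // declared last, so it is listed last as well
    {}
private:
    std::vector<float> _buffer;
    int _numGlyphsBatched;
};

int main() { GlyphBatch batch; (void)batch; return 0; }
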
@@ -297,7 +297,7 @@ void Player::play() {
     _injector->setOptions(_options);
 }

-void Player::setCurrentFrame(unsigned int currentFrame) {
+void Player::setCurrentFrame(int currentFrame) {
     if (_recording && currentFrame >= _recording->getFrameNumber()) {
         stopPlaying();
         return;
@@ -314,7 +314,7 @@ void Player::setCurrentFrame(unsigned int currentFrame) {
     }
 }

-void Player::setCurrentTime(unsigned int currentTime) {
+void Player::setCurrentTime(int currentTime) {
     if (currentTime >= _recording->getLength()) {
         stopPlaying();
         return;
@@ -393,7 +393,7 @@ bool Player::computeCurrentFrame() {
         _currentFrame = 0;
     }

-    quint64 elapsed = glm::clamp(Player::elapsed() - _audioOffset, (qint64)0, (qint64)_recording->getLength());
+    qint64 elapsed = glm::clamp(Player::elapsed() - _audioOffset, (qint64)0, (qint64)_recording->getLength());
     while(_currentFrame >= 0 &&
           _recording->getFrameTimestamp(_currentFrame) > elapsed) {
         --_currentFrame;
@@ -44,8 +44,8 @@ public slots:
     void loadRecording(RecordingPointer recording);
     void play();

-    void setCurrentFrame(unsigned int currentFrame);
-    void setCurrentTime(unsigned int currentTime);
+    void setCurrentFrame(int currentFrame);
+    void setCurrentTime(int currentTime);

     void setVolume(float volume);
     void setAudioOffset(int audioOffset);
@@ -87,4 +87,4 @@ private:
     bool _useSkeletonURL;
 };

 #endif // hifi_Player_h
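The frame and time setters, and the elapsed value in computeCurrentFrame, move from unsigned to signed types so that expressions like Player::elapsed() - _audioOffset can actually go negative and be clamped instead of wrapping around. A small self-contained illustration of the difference (made-up numbers, not the real Player state):

#include <algorithm>
#include <cstdint>
#include <iostream>

int main() {
    // With unsigned arithmetic, "elapsed - offset" wraps around instead of going negative,
    // so clamping to [0, length] silently keeps a huge value.
    uint64_t elapsedU = 100;
    uint64_t offsetU  = 250;
    uint64_t length   = 1000;
    uint64_t wrapped  = std::min(elapsedU - offsetU, length); // wraps near 2^64, clamps to 1000
    std::cout << "unsigned: " << wrapped << "\n";             // prints 1000, not 0

    // With signed arithmetic the same expression clamps to 0 as intended.
    int64_t elapsedS = 100;
    int64_t offsetS  = 250;
    int64_t clamped  = std::max<int64_t>(0, std::min<int64_t>(elapsedS - offsetS, 1000));
    std::cout << "signed: " << clamped << "\n";               // prints 0
    return 0;
}
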
@@ -383,8 +383,8 @@ private:
     // NOTE: The following are pseudo client only properties. They are only used in clients which can access
     // properties of model geometry. But these properties are not serialized like other properties.
     QVector<SittingPoint> _sittingPoints;
-    glm::vec3 _naturalDimensions;
     QStringList _textureNames;
+    glm::vec3 _naturalDimensions;
 };
 Q_DECLARE_METATYPE(EntityItemProperties);
 QScriptValue EntityItemPropertiesToScriptValue(QScriptEngine* engine, const EntityItemProperties& properties);
@@ -547,11 +547,12 @@ void Octree::deleteOctalCodeFromTreeRecursion(OctreeElement* element, void* extr

 void Octree::eraseAllOctreeElements(bool createNewRoot) {
     delete _rootElement; // this will recurse and delete all children
+    _rootElement = NULL;

     if (createNewRoot) {
         _rootElement = createNewElement();
-    } else {
-        _rootElement = NULL;
     }

     _isDirty = true;
 }
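Nulling _rootElement immediately after the delete, rather than only in the else branch, means the pointer is never left dangling between the delete and the optional re-creation. A minimal sketch of the pattern (hypothetical Tree/Element types, not the real Octree):

#include <cstddef>

struct Element { /* payload omitted */ };

// Null the pointer right after delete so every later path sees either NULL or a valid element.
struct Tree {
    Element* _root = NULL;

    void eraseAll(bool createNewRoot) {
        delete _root;   // delete on NULL is a no-op, so repeated calls are safe
        _root = NULL;   // no window in which _root dangles
        if (createNewRoot) {
            _root = new Element();
        }
    }
};

int main() {
    Tree tree;
    tree.eraseAll(true);
    tree.eraseAll(false);
    return 0;
}
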
@@ -12,27 +12,11 @@
 #include "Transform.h"

-Transform::Transform() :
-    _translation(0),
-    _rotation(1.f, 0, 0, 0),
-    _scale(1.f),
-    _flags(1) // invalid cache
-{
-}
-
-Transform::Transform(const Mat4& raw) {
-    evalFromRawMatrix(raw);
-}
-
-Transform::Mat4& Transform::evalRelativeTransform( Mat4& result, const Vec3& origin) {
-    updateCache();
-    result = _matrix;
-    result[3] = Vec4(_translation - origin, 1.f);
-    return result;
-}
-
-void Transform::evalRotationScale(const Mat3& rotationScaleMatrix) {
+void Transform::evalRotationScale(Quat& rotation, Vec3& scale, const Mat3& rotationScaleMatrix) {
     const float ACCURACY_THREASHOLD = 0.00001f;

+    // Following technique taken from:
+    // http://callumhay.blogspot.com/2010/10/decomposing-affine-transforms.html
     // Extract the rotation component - this is done using polar decompostion, where
     // we successively average the matrix with its inverse transpose until there is
     // no/a very small difference between successive averages
@@ -67,8 +51,7 @@ void Transform::evalRotationScale(const Mat3& rotationScaleMatrix) {
     // extract scale of the matrix as the length of each axis
     Mat3 scaleMat = glm::inverse(rotationMat) * rotationScaleMatrix;

-    Vec3 scale2(glm::length(rotationScaleMatrix[0]), glm::length(rotationScaleMatrix[1]), glm::length(rotationScaleMatrix[2]));
-    Vec3 scale(scaleMat[0][0], scaleMat[1][1], scaleMat[2][2]);
+    scale = Vec3(scaleMat[0][0], scaleMat[1][1], scaleMat[2][2]);
     if (scale.x < ACCURACY_THREASHOLD) scale.x = ACCURACY_THREASHOLD;
     if (scale.y < ACCURACY_THREASHOLD) scale.y = ACCURACY_THREASHOLD;
     if (scale.z < ACCURACY_THREASHOLD) scale.z = ACCURACY_THREASHOLD;
@@ -85,34 +68,15 @@ void Transform::evalRotationScale(const Mat3& rotationScaleMatrix) {
     float determinant = glm::determinant(matRot);
     if (determinant < 0.f) {
         scale.x = -scale.x;
-        // matRot[0] *= -1.f;
+        matRot[0] *= -1.f;
     }

     // Beware: even though the matRot is supposed to be normalized at that point,
     // glm::quat_cast doesn't always return a normalized quaternion...
-    setRotation(glm::normalize(glm::quat_cast(matRot)));
+    rotation = glm::normalize(glm::quat_cast(matRot));

-    // and assign the scale
-    setScale(scale);
 }

-void Transform::evalFromRawMatrix(const Mat4& matrix) {
-    // for now works only in the case of TRS transformation
-    if ((matrix[0][3] == 0) && (matrix[1][3] == 0) && (matrix[2][3] == 0) && (matrix[3][3] == 1.f)) {
-        setTranslation(Vec3(matrix[3]));
-        evalRotationScale(Mat3(matrix));
-    }
-}
-
-Transform& Transform::evalInverseTranspose(Transform& result) {
-    result.setTranslation(-_translation);
-    result.setRotation(-_rotation);
-
-    if (isScaling()) {
-        result.setScale(Vec3(1.f/_scale.x, 1.f/_scale.y, 1.f/_scale.z));
-    }
-    return result;
-}
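The refactor turns evalRotationScale into a static helper that hands the rotation and scale back through out-parameters instead of writing into the instance. The underlying technique is the polar decomposition the comments reference: repeatedly average the matrix with its inverse transpose until it settles on a pure rotation, then read the scale off inverse(rotation) * M. A self-contained sketch of that idea using only glm (simplified: fixed iteration cap and tolerance, no ACCURACY_THREASHOLD clamping or negative-determinant handling as in the real function):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <cstdio>

// Decompose a rotation*scale 3x3 matrix by polar decomposition: average the matrix with
// its inverse transpose until successive iterates stop changing; what remains is the
// rotation, and inverse(rotation) * M carries the scale on its diagonal.
void decomposeRotationScale(const glm::mat3& m, glm::quat& rotation, glm::vec3& scale) {
    glm::mat3 rot = m;
    for (int i = 0; i < 100; ++i) {
        glm::mat3 next = 0.5f * (rot + glm::inverse(glm::transpose(rot)));
        glm::mat3 diff = next - rot;
        rot = next;
        float change = 0.f;
        for (int c = 0; c < 3; ++c) {
            change = glm::max(change, glm::abs(diff[c][0]) + glm::abs(diff[c][1]) + glm::abs(diff[c][2]));
        }
        if (change < 1e-6f) {
            break;
        }
    }
    glm::mat3 scaleMat = glm::inverse(rot) * m;
    scale = glm::vec3(scaleMat[0][0], scaleMat[1][1], scaleMat[2][2]);
    rotation = glm::normalize(glm::quat_cast(rot));
}

int main() {
    glm::quat q = glm::angleAxis(glm::radians(30.f), glm::normalize(glm::vec3(0.f, 1.f, 0.f)));
    glm::vec3 s(2.f, 3.f, 4.f);
    glm::mat3 m = glm::mat3_cast(q);
    m[0] *= s.x; m[1] *= s.y; m[2] *= s.z;   // columns scaled, i.e. M = R * diag(s)

    glm::quat outRotation; glm::vec3 outScale;
    decomposeRotationScale(m, outRotation, outScale);
    printf("recovered scale: %f %f %f\n", outScale.x, outScale.y, outScale.z);
    return 0;
}
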
@@ -29,7 +29,13 @@ public:
     typedef glm::vec2 Vec2;
     typedef glm::quat Quat;

-    Transform();
+    Transform() :
+        _translation(0),
+        _rotation(1.f, 0, 0, 0),
+        _scale(1.f),
+        _flags(1) // invalid cache
+    {
+    }
     Transform(const Transform& transform) :
         _translation(transform._translation),
         _rotation(transform._rotation),
@@ -38,86 +44,46 @@ public:
     {
         invalidCache();
     }
-    Transform(const Mat4& raw);
+    Transform(const Mat4& raw) {
+        evalFromRawMatrix(raw);
+    }
     ~Transform() {}

-    void setTranslation(const Vec3& translation) { invalidCache(); flagTranslation(); _translation = translation; }
-    const Vec3& getTranslation() const { return _translation; }
-
-    void preTranslate(const Vec3& translation) { invalidCache(); flagTranslation(); _translation += translation; }
-    void postTranslate(const Vec3& translation) {
-        invalidCache();
-        flagTranslation();
-        if (isRotating()) {
-            _translation += glm::rotate(_rotation, translation * _scale);
-        } else {
-            _translation += translation * _scale;
-        }
-    }
-
-    void setRotation(const Quat& rotation) { invalidCache(); flagRotation(); _rotation = rotation; }
-    const Quat& getRotation() const { return _rotation; }
-
-    void preRotate(const Quat& rotation) {
-        invalidCache();
-        if (isRotating()) {
-            _rotation = rotation * _rotation;
-        } else {
-            _rotation = rotation;
-        }
-        flagRotation();
-        _translation = glm::rotate(rotation, _translation);
-    }
-    void postRotate(const Quat& rotation) {
-        invalidCache();
-        if (isRotating()) {
-            _rotation *= rotation;
-        } else {
-            _rotation = rotation;
-        }
-        flagRotation();
-    }
-
-    void setScale(float scale) { invalidCache(); flagScaling(); _scale = Vec3(scale); }
-    void setScale(const Vec3& scale) { invalidCache(); flagScaling(); _scale = scale; }
-    const Vec3& getScale() const { return _scale; }
-
-    void postScale(const Vec3& scale) {
-        invalidCache();
-        if (isScaling()) {
-            _scale *= scale;
-        } else {
-            _scale = scale;
-        }
-        flagScaling();
-    }
-
-    const Mat4& getMatrix() const { updateCache(); return _matrix; }
-
-    Mat4& evalRelativeTransform(Mat4& result, const Vec3& origin);
-
-    Transform& evalInverseTranspose(Transform& result);
-    void evalFromRawMatrix(const Mat4& matrix);
-    void evalRotationScale(const Mat3& rotationScalematrix);
-
-    static Transform& mult( Transform& result, const Transform& left, const Transform& right) {
-        result = left;
-        if ( right.isTranslating()) result.postTranslate(right.getTranslation());
-        if ( right.isRotating()) result.postRotate(right.getRotation());
-        if (right.isScaling()) result.postScale(right.getScale());
-
-        Transform::Mat4 mv = left.getMatrix() * right.getMatrix();
-        Transform::Mat4 mv2 = result.getMatrix();
-
-        result.evalFromRawMatrix(mv);
-        Transform::Mat4 mv3 = result.getMatrix();
-
-        return result;
-    }
+    void setIdentity();
+
+    const Vec3& getTranslation() const;
+    void setTranslation(const Vec3& translation);
+    void preTranslate(const Vec3& translation);
+    void postTranslate(const Vec3& translation);
+
+    const Quat& getRotation() const;
+    void setRotation(const Quat& rotation);
+    void preRotate(const Quat& rotation);
+    void postRotate(const Quat& rotation);
+
+    const Vec3& getScale() const;
+    void setScale(float scale);
+    void setScale(const Vec3& scale);
+    void postScale(float scale);
+    void postScale(const Vec3& scale);

+    bool isIdentity() const { return (_flags & ~Flags(1)).none(); }
     bool isTranslating() const { return _flags[FLAG_TRANSLATION]; }
     bool isRotating() const { return _flags[FLAG_ROTATION]; }
     bool isScaling() const { return _flags[FLAG_SCALING]; }
+    bool isUniform() const { return !isNonUniform(); }
+    bool isNonUniform() const { return _flags[FLAG_NON_UNIFORM]; }
+
+    void evalFromRawMatrix(const Mat4& matrix);
+    void evalFromRawMatrix(const Mat3& rotationScalematrix);
+
+    Mat4& getMatrix(Mat4& result) const;
+
+    Transform& evalInverse(Transform& result) const;
+
+    static void evalRotationScale(Quat& rotation, Vec3& scale, const Mat3& rotationScaleMatrix);
+    static Transform& mult(Transform& result, const Transform& left, const Transform& right);

 protected:
@@ -127,6 +93,8 @@ protected:
         FLAG_TRANSLATION,
         FLAG_ROTATION,
         FLAG_SCALING,
+        FLAG_NON_UNIFORM,
+        FLAG_ZERO_SCALE,

         FLAG_PROJECTION,

@@ -145,40 +113,234 @@ protected:

     // Cached transform
     mutable Mat4 _matrix;

     bool isCacheInvalid() const { return _flags[FLAG_CACHE_INVALID]; }
     void validCache() const { _flags.set(FLAG_CACHE_INVALID, false); }
     void invalidCache() const { _flags.set(FLAG_CACHE_INVALID, true); }

     void flagTranslation() { _flags.set(FLAG_TRANSLATION, true); }
     void flagRotation() { _flags.set(FLAG_ROTATION, true); }

     void flagScaling() { _flags.set(FLAG_SCALING, true); }
+    void unflagScaling() { _flags.set(FLAG_SCALING, false); }

-    void updateCache() const {
-        if (isCacheInvalid()) {
-            if (isRotating()) {
-                glm::mat3x3 rot = glm::mat3_cast(_rotation);
-
-                if ((_scale.x != 1.f) || (_scale.y != 1.f) || (_scale.z != 1.f)) {
-                    rot[0] *= _scale.x;
-                    rot[1] *= _scale.y;
-                    rot[2] *= _scale.z;
-                }
-
-                _matrix[0] = Vec4(rot[0], 0.f);
-                _matrix[1] = Vec4(rot[1], 0.f);
-                _matrix[2] = Vec4(rot[2], 0.f);
-            } else {
-                _matrix[0] = Vec4(_scale.x, 0.f, 0.f, 0.f);
-                _matrix[1] = Vec4(0.f, _scale.y, 0.f, 0.f);
-                _matrix[2] = Vec4(0.f, 0.f, _scale.z, 0.f);
-            }
-
-            _matrix[3] = Vec4(_translation, 1.f);
-            validCache();
-        }
-    }
+    void flagUniform() { _flags.set(FLAG_NON_UNIFORM, false); }
+    void flagNonUniform() { _flags.set(FLAG_NON_UNIFORM, true); }
+
+    void updateCache() const;
 };

+inline void Transform::setIdentity() {
+    _translation = Vec3(0);
+    _rotation = Quat(1.f, 0, 0, 0);
+    _scale = Vec3(1.f);
+    _flags = Flags(1);
+}
+
+inline const Transform::Vec3& Transform::getTranslation() const {
+    return _translation;
+}
+
+inline void Transform::setTranslation(const Vec3& translation) {
+    invalidCache();
+    flagTranslation();
+    _translation = translation;
+}
+
+inline void Transform::preTranslate(const Vec3& translation) {
+    invalidCache();
+    flagTranslation();
+    _translation += translation;
+}
+
+inline void Transform::postTranslate(const Vec3& translation) {
+    invalidCache();
+    flagTranslation();
+
+    Vec3 scaledT = translation;
+    if (isScaling()) scaledT *= _scale;
+
+    if (isRotating()) {
+        _translation += glm::rotate(_rotation, scaledT);
+    } else {
+        _translation += scaledT;
+    }
+}
+
+inline const Transform::Quat& Transform::getRotation() const {
+    return _rotation;
+}
+
+inline void Transform::setRotation(const Quat& rotation) {
+    invalidCache();
+    flagRotation();
+    _rotation = rotation;
+}
+
+inline void Transform::preRotate(const Quat& rotation) {
+    invalidCache();
+    if (isRotating()) {
+        _rotation = rotation * _rotation;
+    } else {
+        _rotation = rotation;
+    }
+    flagRotation();
+    _translation = glm::rotate(rotation, _translation);
+}
+
+inline void Transform::postRotate(const Quat& rotation) {
+    invalidCache();
+
+    if (isNonUniform()) {
+        Quat newRot;
+        Vec3 newScale;
+        Mat3 scaleRot(glm::mat3_cast(rotation));
+        scaleRot[0] *= _scale;
+        scaleRot[1] *= _scale;
+        scaleRot[2] *= _scale;
+        evalRotationScale(newRot, newScale, scaleRot);
+
+        if (isRotating()) {
+            _rotation *= newRot;
+        } else {
+            _rotation = newRot;
+        }
+        setScale(newScale);
+    } else {
+        if (isRotating()) {
+            _rotation *= rotation;
+        } else {
+            _rotation = rotation;
+        }
+    }
+    flagRotation();
+}
+
+inline const Transform::Vec3& Transform::getScale() const {
+    return _scale;
+}
+
+inline void Transform::setScale(float scale) {
+    invalidCache();
+    flagUniform();
+    if (scale == 1.f) {
+        unflagScaling();
+    } else {
+        flagScaling();
+    }
+    _scale = Vec3(scale);
+}
+
+inline void Transform::setScale(const Vec3& scale) {
+    if ((scale.x == scale.y) && (scale.x == scale.z)) {
+        setScale(scale.x);
+    } else {
+        invalidCache();
+        flagScaling();
+        flagNonUniform();
+        _scale = scale;
+    }
+}
+
+inline void Transform::postScale(float scale) {
+    if (scale == 1.f) return;
+    if (isScaling()) {
+        // if already scaling, just invalid cache and aply uniform scale
+        invalidCache();
+        _scale *= scale;
+    } else {
+        setScale(scale);
+    }
+}
+
+inline void Transform::postScale(const Vec3& scale) {
+    invalidCache();
+    if (isScaling()) {
+        _scale *= scale;
+    } else {
+        _scale = scale;
+    }
+    flagScaling();
+}
+
+inline Transform::Mat4& Transform::getMatrix(Transform::Mat4& result) const {
+    updateCache();
+    result = _matrix;
+    return result;
+}
+
+inline void Transform::evalFromRawMatrix(const Mat4& matrix) {
+    // for now works only in the case of TRS transformation
+    if ((matrix[0][3] == 0) && (matrix[1][3] == 0) && (matrix[2][3] == 0) && (matrix[3][3] == 1.f)) {
+        setTranslation(Vec3(matrix[3]));
+        evalFromRawMatrix(Mat3(matrix));
+    }
+}
+
+inline void Transform::evalFromRawMatrix(const Mat3& rotationScaleMatrix) {
+    Quat rotation;
+    Vec3 scale;
+    evalRotationScale(rotation, scale, rotationScaleMatrix);
+    setRotation(rotation);
+    setScale(scale);
+}
+
+inline Transform& Transform::evalInverse(Transform& inverse) const {
+    inverse.setIdentity();
+    if (isScaling()) {
+        if (isNonUniform()) {
+            inverse.setScale(Vec3(1.f/_scale.x, 1.f/_scale.y, 1.f/_scale.z));
+        } else {
+            inverse.setScale(1.f/_scale.x);
+        }
+    }
+    if (isRotating()) {
+        inverse.postRotate(-_rotation);
+    }
+    if (isTranslating()) {
+        inverse.postTranslate(-_translation);
+    }
+    return inverse;
+}
+
+inline Transform& Transform::mult( Transform& result, const Transform& left, const Transform& right) {
+    result = left;
+    if ( right.isTranslating()) result.postTranslate(right.getTranslation());
+    if ( right.isRotating()) result.postRotate(right.getRotation());
+    if (right.isScaling()) result.postScale(right.getScale());
+
+    // HACK: In case of an issue in the Transform multiplication results, to make sure this code is
+    // working properly uncomment the next 2 lines and compare the results, they should be the same...
+    // Transform::Mat4 mv = left.getMatrix() * right.getMatrix();
+    // Transform::Mat4 mv2 = result.getMatrix();
+
+    return result;
+}
+
+inline void Transform::updateCache() const {
+    if (isCacheInvalid()) {
+        if (isRotating()) {
+            glm::mat3x3 rot = glm::mat3_cast(_rotation);
+
+            if (isScaling()) {
+                rot[0] *= _scale.x;
+                rot[1] *= _scale.y;
+                rot[2] *= _scale.z;
+            }
+
+            _matrix[0] = Vec4(rot[0], 0.f);
+            _matrix[1] = Vec4(rot[1], 0.f);
+            _matrix[2] = Vec4(rot[2], 0.f);
+        } else {
+            _matrix[0] = Vec4(_scale.x, 0.f, 0.f, 0.f);
+            _matrix[1] = Vec4(0.f, _scale.y, 0.f, 0.f);
+            _matrix[2] = Vec4(0.f, 0.f, _scale.z, 0.f);
+        }
+
+        _matrix[3] = Vec4(_translation, 1.f);
+        validCache();
+    }
+}
+
 #endif
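Two things the new header relies on can be checked with plain glm: the cached matrix layout (rotation columns scaled per axis, translation in the last column) and component-wise composition, which the commented-out HACK lines in mult used to compare against the raw matrix product. The sketch below assumes glm only and hypothetical helper names, covers the uniform-scale case, and uses the quaternion conjugate for the component-wise inverse; it is not the gpu::Transform API itself:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <cstdio>

// Build a TRS matrix the way the cached matrix is laid out:
// columns = rotation columns scaled per axis, last column = translation.
static glm::mat4 trsMatrix(const glm::vec3& t, const glm::quat& r, const glm::vec3& s) {
    glm::mat3 rot = glm::mat3_cast(r);
    rot[0] *= s.x; rot[1] *= s.y; rot[2] *= s.z;
    glm::mat4 m(1.f);
    m[0] = glm::vec4(rot[0], 0.f);
    m[1] = glm::vec4(rot[1], 0.f);
    m[2] = glm::vec4(rot[2], 0.f);
    m[3] = glm::vec4(t, 1.f);
    return m;
}

static float maxAbsDiff(const glm::mat4& a, const glm::mat4& b) {
    float worst = 0.f;
    for (int c = 0; c < 4; ++c) {
        for (int r = 0; r < 4; ++r) {
            worst = glm::max(worst, glm::abs(a[c][r] - b[c][r]));
        }
    }
    return worst;
}

int main() {
    glm::vec3 t1(1.f, 2.f, 3.f), t2(-4.f, 0.5f, 2.f);
    glm::quat r1 = glm::angleAxis(glm::radians(40.f), glm::normalize(glm::vec3(0.f, 1.f, 0.f)));
    glm::quat r2 = glm::angleAxis(glm::radians(25.f), glm::normalize(glm::vec3(1.f, 0.f, 0.f)));
    float s1 = 2.f, s2 = 0.5f;   // uniform scales keep the component-wise composition exact

    glm::mat4 left = trsMatrix(t1, r1, glm::vec3(s1));
    glm::mat4 right = trsMatrix(t2, r2, glm::vec3(s2));

    // Component-wise composition, in the spirit of the post* chain above:
    // translation = t1 + r1 * (s1 * t2), rotation = r1 * r2, scale = s1 * s2.
    glm::vec3 tc = t1 + r1 * (t2 * s1);
    glm::quat rc = r1 * r2;
    glm::mat4 composed = trsMatrix(tc, rc, glm::vec3(s1 * s2));
    printf("compose error: %g\n", maxAbsDiff(left * right, composed));

    // Component-wise inverse of a uniform TRS transform: scale 1/s, rotation conjugate,
    // translation -(conj(r) * t) / s; its matrix times the original should be identity.
    glm::quat rInv = glm::conjugate(r1);
    glm::vec3 tInv = -(rInv * t1) / s1;
    glm::mat4 inv = trsMatrix(tInv, rInv, glm::vec3(1.f / s1));
    printf("inverse error: %g\n", maxAbsDiff(inv * left, glm::mat4(1.f)));
    return 0;
}
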
@@ -79,8 +79,8 @@ public:

 protected:

-    uint32_t _glBufferIndex : 24, /// Client only, vbo index for this voxel if being rendered, 3 bytes
-             _voxelSystemIndex : 8; /// Client only, index to the VoxelSystem rendering this voxel, 1 bytes
+    uint32_t _glBufferIndex : 24; /// Client only, vbo index for this voxel if being rendered, 3 bytes
+    uint32_t _voxelSystemIndex : 8; /// Client only, index to the VoxelSystem rendering this voxel, 1 bytes

     // Support for _voxelSystemIndex, we use these static member variables to track the VoxelSystems that are
     // in use by various voxel nodes. We map the VoxelSystem pointers into an 1 byte key, this limits us to at
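Splitting the two bit-fields into separate declarations keeps the intended 24/8 packing while giving each member its own type and doc comment: adjacent bit-fields of the same underlying type still share one allocation unit on the common ABIs this code targets. A quick, hedged check (hypothetical struct name; exact packing is ultimately implementation-defined):

#include <cstdint>

// Two adjacent uint32_t bit-fields of 24 and 8 bits are expected to share a single
// 32-bit word whether declared in one statement or two, on the usual GCC/Clang/MSVC ABIs.
struct PackedIndices {
    uint32_t glBufferIndex : 24;
    uint32_t voxelSystemIndex : 8;
};

static_assert(sizeof(PackedIndices) == sizeof(uint32_t),
              "expected both bit-fields to share one 32-bit word on this ABI");

int main() {
    PackedIndices indices { 0xABCDEF, 0x12 };
    return indices.voxelSystemIndex == 0x12 ? 0 : 1;
}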