Mirror of https://github.com/overte-org/overte.git
Merge pull request #3843 from PhilipRosedale/master
Fix jumpy eyes when turning toward or away from someone
Commit a8d718be82
11 changed files with 35 additions and 120 deletions
Changed paths: examples, interface/src, libraries/script-engine/src
@@ -6,7 +6,7 @@
 // Modified by Brad Hefta-Gaub to use Entities on Sept. 3, 2014
 // Copyright 2014 High Fidelity, Inc.
 //
-// This sample script creates a swarm of butterfly entities that fly around the avatar.
+// This sample script creates a swarm of butterfly entities that fly around your avatar.
 //
 // Distributed under the Apache License, Version 2.0.
 // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
@@ -40,17 +40,17 @@ function vInterpolate(a, b, fraction) {

 var startTimeInSeconds = new Date().getTime() / 1000;

-var NATURAL_SIZE_OF_BUTTERFLY = { x: 9.512, y: 4.427, z: 1.169 };
+var NATURAL_SIZE_OF_BUTTERFLY = { x: 1.76, y: 0.825, z: 0.20 };
 var lifeTime = 600; // lifetime of the butterflies in seconds
-var range = 1.0; // Over what distance in meters do you want the flock to fly around
+var range = 3.0; // Over what distance in meters do you want the flock to fly around
 var frame = 0;

 var CHANCE_OF_MOVING = 0.9;
-var BUTTERFLY_GRAVITY = 0;//-0.06;
-var BUTTERFLY_FLAP_SPEED = 1.0;
+var BUTTERFLY_GRAVITY = 0;
+var BUTTERFLY_FLAP_SPEED = 0.5;
 var BUTTERFLY_VELOCITY = 0.55;
 var DISTANCE_IN_FRONT_OF_ME = 1.5;
-var DISTANCE_ABOVE_ME = 1.0;
+var DISTANCE_ABOVE_ME = 1.5;
 var flockPosition = Vec3.sum(MyAvatar.position,Vec3.sum(
     Vec3.multiply(Quat.getFront(MyAvatar.orientation), DISTANCE_ABOVE_ME),
     Vec3.multiply(Quat.getFront(MyAvatar.orientation), DISTANCE_IN_FRONT_OF_ME)));
@@ -81,11 +81,11 @@ function addButterfly() {
     var color = { red: 100, green: 100, blue: 100 };
     var size = 0;

-    var minSize = 0.06;
-    var randomSize = 0.2;
-    var maxSize = minSize + randomSize;
+    var MINSIZE = 0.06;
+    var RANGESIZE = 0.2;
+    var maxSize = MINSIZE + RANGESIZE;

-    size = 0.06 + Math.random() * 0.2;
+    size = MINSIZE + Math.random() * RANGESIZE;

     var dimensions = Vec3.multiply(NATURAL_SIZE_OF_BUTTERFLY, (size / maxSize));
@@ -103,11 +103,10 @@ function addButterfly() {
         dimensions: dimensions,
         color: color,
         rotation: rotation,
-        animationURL: "http://business.ozblog.me/objects/butterfly/newButterfly2.fbx",
+        animationURL: "https://s3-us-west-1.amazonaws.com/highfidelity-public/models/content/butterfly/butterfly.fbx",
         animationIsPlaying: true,
-        modelURL: "http://business.ozblog.me/objects/butterfly/newButterfly2.fbx"
+        modelURL: "https://s3-us-west-1.amazonaws.com/highfidelity-public/models/content/butterfly/butterfly.fbx"
     };
-    properties.position.z = properties.position.z+1;
     butterflies.push(new defineButterfly(Entities.addEntity(properties), properties.position));
 }
@@ -121,17 +120,15 @@ function updateButterflies(deltaTime) {
     // Check to see if we've been running long enough that our butterflies are dead
     var nowTimeInSeconds = new Date().getTime() / 1000;
     if ((nowTimeInSeconds - startTimeInSeconds) >= lifeTime) {
        // print("our butterflies are dying, stop our script");
        Script.stop();
        return;
     }

     frame++;
-    // Only update every third frame
+    // Only update every third frame because we don't need to do it too quickly
     if ((frame % 3) == 0) {
-        flockPosition = Vec3.sum(MyAvatar.position,Vec3.sum(
-            Vec3.multiply(Quat.getFront(MyAvatar.orientation), DISTANCE_ABOVE_ME),
-            Vec3.multiply(Quat.getFront(MyAvatar.orientation), DISTANCE_IN_FRONT_OF_ME)));
+        flockPosition = Vec3.sum(MyAvatar.position,Vec3.sum(Vec3.multiply(Quat.getFront(MyAvatar.orientation), DISTANCE_ABOVE_ME),
+            Vec3.multiply(Quat.getFront(MyAvatar.orientation), DISTANCE_IN_FRONT_OF_ME)));

         // Update all the butterflies
         for (var i = 0; i < numButterflies; i++) {
@@ -64,9 +64,7 @@ function checkSticks(deltaTime) {
             if ((palmVelocity.y > 0.0) || (speed < STOP_SPEED)) {
                 state[palm] = 0;

-                var options = {
-                    position: Controller.getSpatialControlPosition(palm * 2 + 1);
-                }
+                var options = { position: Controller.getSpatialControlPosition(palm * 2 + 1) };

                 if (strokeSpeed[palm] > 1.0) { strokeSpeed[palm] = 1.0; }
                 options.volume = strokeSpeed[palm];
@@ -26,7 +26,7 @@

 var birdsInFlock = 20;

-var birdLifetime = 60; // 2 minutes
+var birdLifetime = 300; // 1 minute
 var count=0; // iterations

 var enableFlyTowardPoints = true; // some birds have a point they want to fly to
@@ -115,7 +115,6 @@ Audio::Audio(QObject* parent) :
     _samplesPerScope(NETWORK_SAMPLES_PER_FRAME * _framesPerScope),
     _noiseSourceEnabled(false),
     _toneSourceEnabled(true),
-    _peqEnabled(false),
     _scopeInput(0),
     _scopeOutputLeft(0),
     _scopeOutputRight(0),
@@ -153,7 +152,6 @@ void Audio::init(QGLWidget *parent) {
 void Audio::reset() {
     _receivedAudioStream.reset();
     resetStats();
-    _peq.reset();
     _noiseSource.reset();
     _toneSource.reset();
     _sourceGain.reset();
@@ -457,7 +455,6 @@ void Audio::start() {
     }

     _inputFrameBuffer.initialize( _inputFormat.channelCount(), _audioInput->bufferSize() * 8 );
-    _peq.initialize( _inputFormat.sampleRate() );
     _inputGain.initialize();
     _sourceGain.initialize();
     _noiseSource.initialize();
@@ -469,7 +466,6 @@ void Audio::start() {
 void Audio::stop() {

     _inputFrameBuffer.finalize();
-    _peq.finalize();
     _inputGain.finalize();
     _sourceGain.finalize();
     _noiseSource.finalize();
@@ -664,7 +660,7 @@ void Audio::handleAudioInput() {

     QByteArray inputByteArray = _inputDevice->readAll();

-    if (!_muted && (_audioSourceInjectEnabled || _peqEnabled)) {
+    if (!_muted && _audioSourceInjectEnabled) {

         int16_t* inputFrameData = (int16_t*)inputByteArray.data();
         const uint32_t inputFrameCount = inputByteArray.size() / sizeof(int16_t);
@@ -685,10 +681,6 @@ void Audio::handleAudioInput() {
             }
             _sourceGain.render(_inputFrameBuffer); // post gain
         }
-        if (_peqEnabled) {
-            _peq.render(_inputFrameBuffer); // 3-band parametric eq
-        }

         _inputFrameBuffer.copyFrames(1, inputFrameCount, inputFrameData, true /*copy out*/);
     }
@@ -1473,31 +1465,6 @@ void Audio::renderToolBox(int x, int y, bool boxed) {
     glDisable(GL_TEXTURE_2D);
 }

-void Audio::toggleAudioFilter() {
-    _peqEnabled = !_peqEnabled;
-}
-
-void Audio::selectAudioFilterFlat() {
-    if (Menu::getInstance()->isOptionChecked(MenuOption::AudioFilterFlat)) {
-        _peq.loadProfile(0);
-    }
-}
-void Audio::selectAudioFilterTrebleCut() {
-    if (Menu::getInstance()->isOptionChecked(MenuOption::AudioFilterTrebleCut)) {
-        _peq.loadProfile(1);
-    }
-}
-void Audio::selectAudioFilterBassCut() {
-    if (Menu::getInstance()->isOptionChecked(MenuOption::AudioFilterBassCut)) {
-        _peq.loadProfile(2);
-    }
-}
-void Audio::selectAudioFilterSmiley() {
-    if (Menu::getInstance()->isOptionChecked(MenuOption::AudioFilterSmiley)) {
-        _peq.loadProfile(3);
-    }
-}
-
 void Audio::toggleScope() {
     _scopeEnabled = !_scopeEnabled;
     if (_scopeEnabled) {
@@ -26,8 +26,6 @@
 #include "AudioSourceTone.h"
 #include "AudioSourceNoise.h"
 #include "AudioGain.h"
-#include "AudioFilter.h"
-#include "AudioFilterBank.h"

 #include <QAudio>
 #include <QAudioInput>
@@ -149,11 +147,6 @@ public slots:
     void addLastFrameRepeatedWithFadeToScope(int samplesPerChannel);
     void addStereoSamplesToScope(const QByteArray& samples);
     void processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);
-    void toggleAudioFilter();
-    void selectAudioFilterFlat();
-    void selectAudioFilterTrebleCut();
-    void selectAudioFilterBassCut();
-    void selectAudioFilterSmiley();

     virtual bool outputLocalInjector(bool isStereo, qreal volume, AudioInjector* injector);
@@ -332,9 +325,6 @@ private:
     bool _toneSourceEnabled;
     AudioSourceTone _toneSource;

-    // Multi-band parametric EQ
-    bool _peqEnabled;
-    AudioFilterPEQ3m _peq;

     QMutex _guard;
     QByteArray* _scopeInput;
@@ -545,47 +545,6 @@ Menu::Menu() :
                                            appInstance->getAudio(),
                                            SLOT(toggleAudioNoiseReduction()));

-    addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioFilter,
-                                           0,
-                                           false,
-                                           appInstance->getAudio(),
-                                           SLOT(toggleAudioFilter()));
-
-    QMenu* audioFilterMenu = audioDebugMenu->addMenu("Audio Filter");
-    addDisabledActionAndSeparator(audioFilterMenu, "Filter Response");
-    {
-        QAction *flat = addCheckableActionToQMenuAndActionHash(audioFilterMenu, MenuOption::AudioFilterFlat,
-                                                               0,
-                                                               true,
-                                                               appInstance->getAudio(),
-                                                               SLOT(selectAudioFilterFlat()));
-
-        QAction *trebleCut = addCheckableActionToQMenuAndActionHash(audioFilterMenu, MenuOption::AudioFilterTrebleCut,
-                                                                    0,
-                                                                    false,
-                                                                    appInstance->getAudio(),
-                                                                    SLOT(selectAudioFilterTrebleCut()));
-
-        QAction *bassCut = addCheckableActionToQMenuAndActionHash(audioFilterMenu, MenuOption::AudioFilterBassCut,
-                                                                  0,
-                                                                  false,
-                                                                  appInstance->getAudio(),
-                                                                  SLOT(selectAudioFilterBassCut()));
-
-        QAction *smiley = addCheckableActionToQMenuAndActionHash(audioFilterMenu, MenuOption::AudioFilterSmiley,
-                                                                 0,
-                                                                 false,
-                                                                 appInstance->getAudio(),
-                                                                 SLOT(selectAudioFilterSmiley()));
-
-
-        QActionGroup* audioFilterGroup = new QActionGroup(audioFilterMenu);
-        audioFilterGroup->addAction(flat);
-        audioFilterGroup->addAction(trebleCut);
-        audioFilterGroup->addAction(bassCut);
-        audioFilterGroup->addAction(smiley);
-    }
-
     addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoServerAudio);
     addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::EchoLocalAudio);
     addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::StereoAudio, 0, false,
@@ -625,17 +584,16 @@ Menu::Menu() :
         audioSourceGroup->addAction(sine440);
     }

-    addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioScope,
+    QMenu* audioScopeMenu = audioDebugMenu->addMenu("Audio Scope");
+    addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScope,
                                            Qt::CTRL | Qt::Key_P, false,
                                            appInstance->getAudio(),
                                            SLOT(toggleScope()));
-    addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioScopePause,
+    addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopePause,
                                            Qt::CTRL | Qt::SHIFT | Qt::Key_P ,
                                            false,
                                            appInstance->getAudio(),
                                            SLOT(toggleScopePause()));

-    QMenu* audioScopeMenu = audioDebugMenu->addMenu("Audio Scope");
     addDisabledActionAndSeparator(audioScopeMenu, "Display Frames");
     {
         QAction *fiveFrames = addCheckableActionToQMenuAndActionHash(audioScopeMenu, MenuOption::AudioScopeFiveFrames,
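The hunk above moves the scope items out of the flat Audio debug menu into a dedicated "Audio Scope" submenu, using the interface's addCheckableActionToQMenuAndActionHash helper. For readers unfamiliar with that helper, here is a minimal sketch of the same pattern in plain Qt (a submenu holding checkable actions wired to slots); the function and parameter names are hypothetical, not the interface code:

#include <QMenu>
#include <QAction>

// Build a nested, checkable menu entry the way Menu::Menu() does, but with
// plain Qt calls instead of the interface's helper. "audioDebugMenu" and
// "audio" are assumed to be supplied by the caller.
void addScopeMenu(QMenu* audioDebugMenu, QObject* audio) {
    QMenu* audioScopeMenu = audioDebugMenu->addMenu("Audio Scope");

    QAction* showScope = audioScopeMenu->addAction("Show Scope");
    showScope->setCheckable(true);
    showScope->setShortcut(Qt::CTRL | Qt::Key_P);
    QObject::connect(showScope, SIGNAL(triggered()), audio, SLOT(toggleScope()));

    QAction* pauseScope = audioScopeMenu->addAction("Pause Scope");
    pauseScope->setCheckable(true);
    QObject::connect(pauseScope, SIGNAL(triggered()), audio, SLOT(toggleScopePause()));
}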
@@ -332,17 +332,12 @@ namespace MenuOption {
     const QString Animations = "Animations...";
     const QString Atmosphere = "Atmosphere";
     const QString Attachments = "Attachments...";
-    const QString AudioFilter = "Audio Filter Bank";
-    const QString AudioFilterFlat = "Flat Response";
-    const QString AudioFilterTrebleCut= "Treble Cut";
-    const QString AudioFilterBassCut = "Bass Cut";
-    const QString AudioFilterSmiley = "Smiley Curve";
     const QString AudioNoiseReduction = "Audio Noise Reduction";
-    const QString AudioScope = "Audio Scope";
+    const QString AudioScope = "Show Scope";
     const QString AudioScopeFiftyFrames = "Fifty";
     const QString AudioScopeFiveFrames = "Five";
     const QString AudioScopeFrames = "Display Frames";
-    const QString AudioScopePause = "Pause Audio Scope";
+    const QString AudioScopePause = "Pause Scope";
     const QString AudioScopeTwentyFrames = "Twenty";
     const QString AudioStats = "Audio Stats";
     const QString AudioStatsShowInjectedStreams = "Audio Stats Show Injected Streams";
@@ -87,6 +87,7 @@ public:
     void setDisplayingLookatVectors(bool displayingLookatVectors) { getHead()->setRenderLookatVectors(displayingLookatVectors); }
     void setMouseRay(const glm::vec3 &origin, const glm::vec3 &direction);
+    void setIsLookAtTarget(const bool isLookAtTarget) { _isLookAtTarget = isLookAtTarget; }
     bool getIsLookAtTarget() const { return _isLookAtTarget; }
     //getters
     bool isInitialized() const { return _initialized; }
     SkeletonModel& getSkeletonModel() { return _skeletonModel; }
@@ -929,15 +929,20 @@ void MyAvatar::updateLookAtTargetAvatar() {
     }
     glm::vec3 lookForward = faceRotation * IDENTITY_FRONT;
     glm::vec3 cameraPosition = Application::getInstance()->getCamera()->getPosition();

     float smallestAngleTo = glm::radians(Application::getInstance()->getCamera()->getFieldOfView()) / 2.0f;
+    const float KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR = 1.3f;
+    const float GREATEST_LOOKING_AT_DISTANCE = 10.0f;

     int howManyLookingAtMe = 0;
     foreach (const AvatarSharedPointer& avatarPointer, Application::getInstance()->getAvatarManager().getAvatarHash()) {
         Avatar* avatar = static_cast<Avatar*>(avatarPointer.data());
+        bool isCurrentTarget = avatar->getIsLookAtTarget();
+        float distanceTo = glm::length(avatar->getHead()->getEyePosition() - cameraPosition);
+        avatar->setIsLookAtTarget(false);
-        if (!avatar->isMyAvatar() && avatar->isInitialized()) {
+        if (!avatar->isMyAvatar() && avatar->isInitialized() && (distanceTo < GREATEST_LOOKING_AT_DISTANCE * getScale())) {
             float angleTo = glm::angle(lookForward, glm::normalize(avatar->getHead()->getEyePosition() - cameraPosition));
-            if (angleTo < smallestAngleTo) {
+            if (angleTo < (smallestAngleTo * (isCurrentTarget ? KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR : 1.0f))) {
                 _lookAtTargetAvatar = avatarPointer;
                 _targetAvatarPosition = avatarPointer->getPosition();
                 smallestAngleTo = angleTo;
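This is the hunk that addresses the jumpy eyes: the avatar that is already the look-at target keeps that status unless another candidate beats it by a clear margin (its acceptance cone is 1.3x wider), and candidates beyond roughly 10 m (scaled) are ignored, so small head turns no longer flip the target back and forth. A stripped-down sketch of that hysteresis rule, using simplified standalone types rather than the real Avatar/Camera classes:

#include <cstddef>

struct Candidate {
    float angleTo;          // angle between look direction and this avatar, radians
    float distanceTo;       // distance from the camera, meters
    bool  isCurrentTarget;  // were we already looking at this avatar last frame?
};

// Returns the index of the new look-at target, or -1 if nobody qualifies.
int pickLookAtTarget(const Candidate* candidates, int count, float halfFieldOfView) {
    const float KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR = 1.3f; // hysteresis: current target gets a wider cone
    const float GREATEST_LOOKING_AT_DISTANCE = 10.0f;        // ignore avatars farther away than this

    float smallestAngleTo = halfFieldOfView;
    int target = -1;
    for (int i = 0; i < count; i++) {
        if (candidates[i].distanceTo >= GREATEST_LOOKING_AT_DISTANCE) {
            continue;
        }
        // The avatar we already look at may exceed the current best angle by up to 30%.
        float limit = smallestAngleTo *
            (candidates[i].isCurrentTarget ? KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR : 1.0f);
        if (candidates[i].angleTo < limit) {
            target = i;
            smallestAngleTo = candidates[i].angleTo;
        }
    }
    return target;
}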
@@ -15,6 +15,9 @@

 #include "Vec3.h"

+glm::vec3 Vec3::reflect(const glm::vec3& v1, const glm::vec3& v2) {
+    return glm::reflect(v1, v2);
+}
 glm::vec3 Vec3::cross(const glm::vec3& v1, const glm::vec3& v2) {
     return glm::cross(v1,v2);
 }
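The new reflect binding simply forwards to glm::reflect, which mirrors an incident vector about a surface normal: reflect(I, N) = I - 2 * dot(N, I) * N, with N expected to be normalized. A standalone check of that identity (this assumes GLM is on the include path; it is not part of the script engine itself):

#include <glm/glm.hpp>
#include <cstdio>

int main() {
    glm::vec3 incident(1.0f, -1.0f, 0.0f);   // coming in at 45 degrees
    glm::vec3 normal(0.0f, 1.0f, 0.0f);      // up-facing surface, already normalized
    glm::vec3 r = glm::reflect(incident, normal);
    std::printf("%.1f %.1f %.1f\n", r.x, r.y, r.z);  // prints 1.0 1.0 0.0
    return 0;
}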
@@ -25,6 +25,7 @@ class Vec3 : public QObject {
     Q_OBJECT

 public slots:
+    glm::vec3 reflect(const glm::vec3& v1, const glm::vec3& v2);
     glm::vec3 cross(const glm::vec3& v1, const glm::vec3& v2);
     float dot(const glm::vec3& v1, const glm::vec3& v2);
     glm::vec3 multiply(const glm::vec3& v1, float f);