Merge branch 'master' of https://github.com/highfidelity/hifi into 20017

Conflicts:
	interface/src/scripting/WindowScriptingInterface.cpp
Thijs Wenker 2014-09-12 22:04:21 +02:00
commit dd47b7afc6
53 changed files with 1972 additions and 408 deletions

View file

@ -0,0 +1,737 @@
// botProceduralWayPoints.js
// modified by Adrian McCarlie for worklist job hifi: #19977
// 1 September 2014.
// this script enables a user to define an unlimited number of way points on the Domain;
// these points form a path that the bot will follow. Once the final way point is reached,
// the bot returns to the start and repeats until the script is stopped.
// pause times for each way point can be set individually.
// User must input the x, y, z coordinates for wayPoints[] and the times in milliseconds for pauseTimes[].
//
// original script
// bot_procedural.js
// hifi
// Created by Ben Arnold on 7/29/2013
//
// Copyright (c) 2014 HighFidelity, Inc. All rights reserved.
//
// This is an example script that demonstrates an NPC avatar.
//
//
//For procedural walk animation
Script.include("http://s3-us-west-1.amazonaws.com/highfidelity-public/scripts/proceduralAnimationAPI.js");
var procAnimAPI = new ProcAnimAPI();
function getRandomFloat(min, max) {
return Math.random() * (max - min) + min;
}
function getRandomInt (min, max) {
return Math.floor(Math.random() * (max - min + 1)) + min;
}
function printVector(string, vector) {
print(string + " " + vector.x + ", " + vector.y + ", " + vector.z);
}
//input co-ords for start position and 7 other positions
var AVATAR_PELVIS_HEIGHT = 0.84; //only change this if you have an odd-sized avatar
var wayPoints = []; //input locations for all the waypoints
wayPoints[0] = {x:8131.5, y:202.0, z:8261.5}; //input the location of the start position
wayPoints[1] = {x: 8160.5, y: 202.0, z: 8261.5}; //input the location of the first way point
wayPoints[2] = {x: 8160.5, y: 203.0, z: 8270.5};
wayPoints[3] = {x: 8142.5, y: 204.0, z: 8270.5};
wayPoints[4] = {x: 8142.5, y: 204.0, z: 8272.5};
wayPoints[5] = {x: 8160.5, y: 203.0, z: 8272.5};
wayPoints[6] = {x: 8160.5, y: 202.0, z: 8284.5};
wayPoints[7] = {x: 8111.5, y: 202.0, z: 8284.5};// continue to add locations and add or remove lines as needed.
var pauseTimes = []; // the number of pauseTimes must equal the number of wayPoints. Time is in milliseconds
pauseTimes[0] = 5000; //waiting to go to wayPoint0 (startPoint)
pauseTimes[1] = 10000; //waiting to go to wayPoint1
pauseTimes[2] = 3000;
pauseTimes[3] = 3000;
pauseTimes[4] = 8000;
pauseTimes[5] = 6000;
pauseTimes[6] = 3000;
pauseTimes[7] = 3000;// add or delete to match way points.
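// Optional sanity check (a minimal sketch): the waypoint logic below assumes wayPoints and pauseTimes have the same length.
if (wayPoints.length !== pauseTimes.length) {
    print("Warning: wayPoints has " + wayPoints.length + " entries but pauseTimes has " + pauseTimes.length + ".");
}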
// raise every way point by the avatar pelvis height
for (var i = 0; i < wayPoints.length; i++) {
    wayPoints[i].y = wayPoints[i].y + AVATAR_PELVIS_HEIGHT;
}
var CHANCE_OF_MOVING = 0.005;
var CHANCE_OF_SOUND = 0.005;
var CHANCE_OF_HEAD_TURNING = 0.01;
var CHANCE_OF_BIG_MOVE = 1.0;
var isMoving = false;
var isTurningHead = false;
var isPlayingAudio = false;
var AVATAR_PELVIS_HEIGHT = 1.84;
var MAX_PELVIS_DELTA = 2.5;
var MOVE_RANGE_SMALL = 3.0;
var MOVE_RANGE_BIG = 10.0;
var TURN_RANGE = 70.0;
var STOP_TOLERANCE = 0.05;
var MOVE_RATE = 0.05;
var TURN_RATE = 0.2;
var HEAD_TURN_RATE = 0.05;
var PITCH_RANGE = 15.0;
var YAW_RANGE = 35.0;
var firstPosition = wayPoints[0];
var targetPosition = { x: 0, y: 0, z: 0 };
var targetOrientation = { x: 0, y: 0, z: 0, w: 0 };
var currentOrientation = { x: 0, y: 0, z: 0, w: 0 };
var targetHeadPitch = 0.0;
var targetHeadYaw = 0.0;
var basePelvisHeight = 0.0;
var pelvisOscillatorPosition = 0.0;
var pelvisOscillatorVelocity = 0.0;
function clamp(val, min, max){
return Math.max(min, Math.min(max, val))
}
//Array of all valid bot numbers
var validBotNumbers = [];
// right now we only use bot 63, since many other bots have messed up skeletons and LOD issues
var botNumber = 63;//getRandomInt(0, 99);
var newFaceFilePrefix = "ron";
var newBodyFilePrefix = "bot" + botNumber;
// set the face, skeleton, and billboard models for this bot
// the face uses the "ron" model; the skeleton and billboard use the bot number
Avatar.faceModelURL = "https://s3-us-west-1.amazonaws.com/highfidelity-public/meshes/" + newFaceFilePrefix + ".fst";
Avatar.skeletonModelURL = "https://s3-us-west-1.amazonaws.com/highfidelity-public/meshes/" + newBodyFilePrefix + "_a.fst";
Avatar.billboardURL = "https://s3-us-west-1.amazonaws.com/highfidelity-public/meshes/billboards/bot" + botNumber + ".png";
Agent.isAvatar = true;
Agent.isListeningToAudioStream = true;
// change the avatar's position to wayPoints[0]
Avatar.position = firstPosition;
basePelvisHeight = firstPosition.y;
printVector("New dancer, position = ", Avatar.position);
function loadSounds() {
var sound_filenames = ["AB1.raw", "Anchorman2.raw", "B1.raw", "B1.raw", "Bale1.raw", "Bandcamp.raw",
"Big1.raw", "Big2.raw", "Brian1.raw", "Buster1.raw", "CES1.raw", "CES2.raw", "CES3.raw", "CES4.raw",
"Carrie1.raw", "Carrie3.raw", "Charlotte1.raw", "EN1.raw", "EN2.raw", "EN3.raw", "Eugene1.raw", "Francesco1.raw",
"Italian1.raw", "Japanese1.raw", "Leigh1.raw", "Lucille1.raw", "Lucille2.raw", "MeanGirls.raw", "Murray2.raw",
"Nigel1.raw", "PennyLane.raw", "Pitt1.raw", "Ricardo.raw", "SN.raw", "Sake1.raw", "Samantha1.raw", "Samantha2.raw",
"Spicoli1.raw", "Supernatural.raw", "Swearengen1.raw", "TheDude.raw", "Tony.raw", "Triumph1.raw", "Uma1.raw",
"Walken1.raw", "Walken2.raw", "Z1.raw", "Z2.raw"
];
var footstep_filenames = ["FootstepW2Left-12db.wav", "FootstepW2Right-12db.wav", "FootstepW3Left-12db.wav", "FootstepW3Right-12db.wav",
"FootstepW5Left-12db.wav", "FootstepW5Right-12db.wav"];
var SOUND_BASE_URL = "https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Cocktail+Party+Snippets/Raws/";
var FOOTSTEP_BASE_URL = "http://highfidelity-public.s3-us-west-1.amazonaws.com/sounds/Footsteps/";
for (var i = 0; i < sound_filenames.length; i++) {
sounds.push(new Sound(SOUND_BASE_URL + sound_filenames[i]));
}
for (var i = 0; i < footstep_filenames.length; i++) {
footstepSounds.push(new Sound(FOOTSTEP_BASE_URL + footstep_filenames[i]));
}
}
var sounds = [];
var footstepSounds = [];
loadSounds();
function playRandomSound() {
if (!Agent.isPlayingAvatarSound) {
var whichSound = Math.floor((Math.random() * sounds.length));
Agent.playAvatarSound(sounds[whichSound]);
}
}
function playRandomFootstepSound() {
var whichSound = Math.floor((Math.random() * footstepSounds.length));
var options = new AudioInjectionOptions();
options.position = Avatar.position;
options.volume = 1.0;
Audio.playSound(footstepSounds[whichSound], options);
}
// Facial Animation
var allBlendShapes = [];
var targetBlendCoefficient = [];
var currentBlendCoefficient = [];
//Helper: append a blendshape (index, value) entry to a pose
function addBlendshapeToPose(pose, shapeIndex, val) {
var index = pose.blendShapes.length;
pose.blendShapes[index] = {shapeIndex: shapeIndex, val: val };
}
//The mood of the avatar determines the face: 0 = happy, 1 = angry, 2 = sad.
//Randomly pick the avatar's mood: roughly 80% happy, 10% angry, 10% sad
var randMood = Math.floor(Math.random() * 11);
var avatarMood;
if (randMood == 0) {
avatarMood = 1;
} else if (randMood == 2) {
avatarMood = 2;
} else {
avatarMood = 0;
}
var currentExpression = -1;
//Face pose definitions for each mood
var happyPoses = [];
happyPoses[0] = {blendShapes: []};
addBlendshapeToPose(happyPoses[0], 28, 0.7); //MouthSmile_L
addBlendshapeToPose(happyPoses[0], 29, 0.7); //MouthSmile_R
happyPoses[1] = {blendShapes: []};
addBlendshapeToPose(happyPoses[1], 28, 1.0); //MouthSmile_L
addBlendshapeToPose(happyPoses[1], 29, 1.0); //MouthSmile_R
addBlendshapeToPose(happyPoses[1], 21, 0.2); //JawOpen
happyPoses[2] = {blendShapes: []};
addBlendshapeToPose(happyPoses[2], 28, 1.0); //MouthSmile_L
addBlendshapeToPose(happyPoses[2], 29, 1.0); //MouthSmile_R
addBlendshapeToPose(happyPoses[2], 21, 0.5); //JawOpen
addBlendshapeToPose(happyPoses[2], 46, 1.0); //CheekSquint_L
addBlendshapeToPose(happyPoses[2], 47, 1.0); //CheekSquint_R
addBlendshapeToPose(happyPoses[2], 17, 1.0); //BrowsU_L
addBlendshapeToPose(happyPoses[2], 18, 1.0); //BrowsU_R
var angryPoses = [];
angryPoses[0] = {blendShapes: []};
addBlendshapeToPose(angryPoses[0], 26, 0.6); //MouthFrown_L
addBlendshapeToPose(angryPoses[0], 27, 0.6); //MouthFrown_R
addBlendshapeToPose(angryPoses[0], 14, 0.6); //BrowsD_L
addBlendshapeToPose(angryPoses[0], 15, 0.6); //BrowsD_R
angryPoses[1] = {blendShapes: []};
addBlendshapeToPose(angryPoses[1], 26, 0.9); //MouthFrown_L
addBlendshapeToPose(angryPoses[1], 27, 0.9); //MouthFrown_R
addBlendshapeToPose(angryPoses[1], 14, 0.9); //BrowsD_L
addBlendshapeToPose(angryPoses[1], 15, 0.9); //BrowsD_R
angryPoses[2] = {blendShapes: []};
addBlendshapeToPose(angryPoses[2], 26, 1.0); //MouthFrown_L
addBlendshapeToPose(angryPoses[2], 27, 1.0); //MouthFrown_R
addBlendshapeToPose(angryPoses[2], 14, 1.0); //BrowsD_L
addBlendshapeToPose(angryPoses[2], 15, 1.0); //BrowsD_R
addBlendshapeToPose(angryPoses[2], 21, 0.5); //JawOpen
addBlendshapeToPose(angryPoses[2], 46, 1.0); //CheekSquint_L
addBlendshapeToPose(angryPoses[2], 47, 1.0); //CheekSquint_R
var sadPoses = [];
sadPoses[0] = {blendShapes: []};
addBlendshapeToPose(sadPoses[0], 26, 0.6); //MouthFrown_L
addBlendshapeToPose(sadPoses[0], 27, 0.6); //MouthFrown_R
addBlendshapeToPose(sadPoses[0], 16, 0.2); //BrowsU_C
addBlendshapeToPose(sadPoses[0], 2, 0.6); //EyeSquint_L
addBlendshapeToPose(sadPoses[0], 3, 0.6); //EyeSquint_R
sadPoses[1] = {blendShapes: []};
addBlendshapeToPose(sadPoses[1], 26, 0.9); //MouthFrown_L
addBlendshapeToPose(sadPoses[1], 27, 0.9); //MouthFrown_R
addBlendshapeToPose(sadPoses[1], 16, 0.6); //BrowsU_C
addBlendshapeToPose(sadPoses[1], 2, 0.9); //EyeSquint_L
addBlendshapeToPose(sadPoses[1], 3, 0.9); //EyeSquint_R
sadPoses[2] = {blendShapes: []};
addBlendshapeToPose(sadPoses[2], 26, 1.0); //MouthFrown_L
addBlendshapeToPose(sadPoses[2], 27, 1.0); //MouthFrown_R
addBlendshapeToPose(sadPoses[2], 16, 0.1); //BrowsU_C
addBlendshapeToPose(sadPoses[2], 2, 1.0); //EyeSquint_L
addBlendshapeToPose(sadPoses[2], 3, 1.0); //EyeSquint_R
addBlendshapeToPose(sadPoses[2], 21, 0.3); //JawOpen
var facePoses = [];
facePoses[0] = happyPoses;
facePoses[1] = angryPoses;
facePoses[2] = sadPoses;
function addBlendShape(s) {
allBlendShapes[allBlendShapes.length] = s;
}
//It is imperative that the following blendshapes are all present and are in the correct order
addBlendShape("EyeBlink_L"); //0
addBlendShape("EyeBlink_R"); //1
addBlendShape("EyeSquint_L"); //2
addBlendShape("EyeSquint_R"); //3
addBlendShape("EyeDown_L"); //4
addBlendShape("EyeDown_R"); //5
addBlendShape("EyeIn_L"); //6
addBlendShape("EyeIn_R"); //7
addBlendShape("EyeOpen_L"); //8
addBlendShape("EyeOpen_R"); //9
addBlendShape("EyeOut_L"); //10
addBlendShape("EyeOut_R"); //11
addBlendShape("EyeUp_L"); //12
addBlendShape("EyeUp_R"); //13
addBlendShape("BrowsD_L"); //14
addBlendShape("BrowsD_R"); //15
addBlendShape("BrowsU_C"); //16
addBlendShape("BrowsU_L"); //17
addBlendShape("BrowsU_R"); //18
addBlendShape("JawFwd"); //19
addBlendShape("JawLeft"); //20
addBlendShape("JawOpen"); //21
addBlendShape("JawChew"); //22
addBlendShape("JawRight"); //23
addBlendShape("MouthLeft"); //24
addBlendShape("MouthRight"); //25
addBlendShape("MouthFrown_L"); //26
addBlendShape("MouthFrown_R"); //27
addBlendShape("MouthSmile_L"); //28
addBlendShape("MouthSmile_R"); //29
addBlendShape("MouthDimple_L"); //30
addBlendShape("MouthDimple_R"); //31
addBlendShape("LipsStretch_L"); //32
addBlendShape("LipsStretch_R"); //33
addBlendShape("LipsUpperClose"); //34
addBlendShape("LipsLowerClose"); //35
addBlendShape("LipsUpperUp"); //36
addBlendShape("LipsLowerDown"); //37
addBlendShape("LipsUpperOpen"); //38
addBlendShape("LipsLowerOpen"); //39
addBlendShape("LipsFunnel"); //40
addBlendShape("LipsPucker"); //41
addBlendShape("ChinLowerRaise"); //42
addBlendShape("ChinUpperRaise"); //43
addBlendShape("Sneer"); //44
addBlendShape("Puff"); //45
addBlendShape("CheekSquint_L"); //46
addBlendShape("CheekSquint_R"); //47
for (var i = 0; i < allBlendShapes.length; i++) {
targetBlendCoefficient[i] = 0;
currentBlendCoefficient[i] = 0;
}
function setRandomExpression() {
//Clear all expression data for current expression
if (currentExpression != -1) {
var expression = facePoses[avatarMood][currentExpression];
for (var i = 0; i < expression.blendShapes.length; i++) {
targetBlendCoefficient[expression.blendShapes[i].shapeIndex] = 0.0;
}
}
//Get a new current expression
currentExpression = Math.floor(Math.random() * facePoses[avatarMood].length);
var expression = facePoses[avatarMood][currentExpression];
for (var i = 0; i < expression.blendShapes.length; i++) {
targetBlendCoefficient[expression.blendShapes[i].shapeIndex] = expression.blendShapes[i].val;
}
}
var expressionChangeSpeed = 0.1;
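// each update, every blend coefficient moves this fraction (10%) of its remaining distance toward its target - simple exponential easing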
function updateBlendShapes(deltaTime) {
for (var i = 0; i < allBlendShapes.length; i++) {
currentBlendCoefficient[i] += (targetBlendCoefficient[i] - currentBlendCoefficient[i]) * expressionChangeSpeed;
Avatar.setBlendshape(allBlendShapes[i], currentBlendCoefficient[i]);
}
}
var BLINK_SPEED = 0.15;
var CHANCE_TO_BLINK = 0.0025;
var MAX_BLINK = 0.85;
var blink = 0.0;
var isBlinking = false;
function updateBlinking(deltaTime) {
if (isBlinking == false) {
if (Math.random() < CHANCE_TO_BLINK) {
isBlinking = true;
} else {
blink -= BLINK_SPEED;
if (blink < 0.0) blink = 0.0;
}
} else {
blink += BLINK_SPEED;
if (blink > MAX_BLINK) {
blink = MAX_BLINK;
isBlinking = false;
}
}
currentBlendCoefficient[0] = blink;
currentBlendCoefficient[1] = blink;
targetBlendCoefficient[0] = blink;
targetBlendCoefficient[1] = blink;
}
//
//Procedural walk animation using two keyframes
//We use a separate array for front and back joints
//Pitch, yaw, and roll for the joints
var rightAngles = [];
var leftAngles = [];
//for non-mirrored joints such as the spine
var middleAngles = [];
//Actual joint mappings
var SHOULDER_JOINT_NUMBER = 15;
var ELBOW_JOINT_NUMBER = 16;
var JOINT_R_HIP = 1;
var JOINT_R_KNEE = 2;
var JOINT_L_HIP = 6;
var JOINT_L_KNEE = 7;
var JOINT_R_ARM = 15;
var JOINT_R_FOREARM = 16;
var JOINT_L_ARM = 39;
var JOINT_L_FOREARM = 40;
var JOINT_SPINE = 11;
var JOINT_R_FOOT = 3;
var JOINT_L_FOOT = 8;
var JOINT_R_TOE = 4;
var JOINT_L_TOE = 9;
// Animation Is Defined Below
var NUM_FRAMES = 2;
for (var i = 0; i < NUM_FRAMES; i++) {
rightAngles[i] = [];
leftAngles[i] = [];
middleAngles[i] = [];
}
//Joint order for actual joint mappings, should be interleaved R,L,R,L,...S,S,S for R = right, L = left, S = single
var JOINT_ORDER = [];
//*** right / left joints ***
var HIP = 0;
JOINT_ORDER.push(JOINT_R_HIP);
JOINT_ORDER.push(JOINT_L_HIP);
var KNEE = 1;
JOINT_ORDER.push(JOINT_R_KNEE);
JOINT_ORDER.push(JOINT_L_KNEE);
var ARM = 2;
JOINT_ORDER.push(JOINT_R_ARM);
JOINT_ORDER.push(JOINT_L_ARM);
var FOREARM = 3;
JOINT_ORDER.push(JOINT_R_FOREARM);
JOINT_ORDER.push(JOINT_L_FOREARM);
var FOOT = 4;
JOINT_ORDER.push(JOINT_R_FOOT);
JOINT_ORDER.push(JOINT_L_FOOT);
var TOE = 5;
JOINT_ORDER.push(JOINT_R_TOE);
JOINT_ORDER.push(JOINT_L_TOE);
//*** middle joints ***
var SPINE = 0;
JOINT_ORDER.push(JOINT_SPINE);
//We have to store the angles so we can invert yaw and roll when making the animation
//symmetrical
//Front refers to leg, not arm.
//Legs Extending
rightAngles[0][HIP] = [30.0, 0.0, 8.0];
rightAngles[0][KNEE] = [-15.0, 0.0, 0.0];
rightAngles[0][ARM] = [85.0, -25.0, 0.0];
rightAngles[0][FOREARM] = [0.0, 0.0, -15.0];
rightAngles[0][FOOT] = [0.0, 0.0, 0.0];
rightAngles[0][TOE] = [0.0, 0.0, 0.0];
leftAngles[0][HIP] = [-15, 0.0, 8.0];
leftAngles[0][KNEE] = [-26, 0.0, 0.0];
leftAngles[0][ARM] = [85.0, 20.0, 0.0];
leftAngles[0][FOREARM] = [10.0, 0.0, -25.0];
leftAngles[0][FOOT] = [-13.0, 0.0, 0.0];
leftAngles[0][TOE] = [34.0, 0.0, 0.0];
middleAngles[0][SPINE] = [0.0, -15.0, 5.0];
//Legs Passing
rightAngles[1][HIP] = [6.0, 0.0, 8.0];
rightAngles[1][KNEE] = [-12.0, 0.0, 0.0];
rightAngles[1][ARM] = [85.0, 0.0, 0.0];
rightAngles[1][FOREARM] = [0.0, 0.0, -15.0];
rightAngles[1][FOOT] = [6.0, -8.0, 0.0];
rightAngles[1][TOE] = [0.0, 0.0, 0.0];
leftAngles[1][HIP] = [10.0, 0.0, 8.0];
leftAngles[1][KNEE] = [-60.0, 0.0, 0.0];
leftAngles[1][ARM] = [85.0, 0.0, 0.0];
leftAngles[1][FOREARM] = [0.0, 0.0, -15.0];
leftAngles[1][FOOT] = [0.0, 0.0, 0.0];
leftAngles[1][TOE] = [0.0, 0.0, 0.0];
middleAngles[1][SPINE] = [0.0, 0.0, 0.0];
//Actual keyframes for the animation
var walkKeyFrames = procAnimAPI.generateKeyframes(rightAngles, leftAngles, middleAngles, NUM_FRAMES);
// Animation Is Defined Above
// Standing Key Frame
//We don't have to do any mirroring or anything, since this is just a single pose.
var rightQuats = [];
var leftQuats = [];
var middleQuats = [];
rightQuats[HIP] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 7.0);
rightQuats[KNEE] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 0.0);
rightQuats[ARM] = Quat.fromPitchYawRollDegrees(85.0, 0.0, 0.0);
rightQuats[FOREARM] = Quat.fromPitchYawRollDegrees(0.0, 0.0, -10.0);
rightQuats[FOOT] = Quat.fromPitchYawRollDegrees(0.0, -8.0, 0.0);
rightQuats[TOE] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 0.0);
leftQuats[HIP] = Quat.fromPitchYawRollDegrees(0, 0.0, -7.0);
leftQuats[KNEE] = Quat.fromPitchYawRollDegrees(0, 0.0, 0.0);
leftQuats[ARM] = Quat.fromPitchYawRollDegrees(85.0, 0.0, 0.0);
leftQuats[FOREARM] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 10.0);
leftQuats[FOOT] = Quat.fromPitchYawRollDegrees(0.0, 8.0, 0.0);
leftQuats[TOE] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 0.0);
middleQuats[SPINE] = Quat.fromPitchYawRollDegrees(0.0, 0.0, 0.0);
var standingKeyFrame = new procAnimAPI.KeyFrame(rightQuats, leftQuats, middleQuats);
//
var currentFrame = 0;
var walkTime = 0.0;
var walkWheelRadius = 0.5;
var walkWheelRate = 2.0 * 3.141592 * walkWheelRadius / 8.0;
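// one eighth of the wheel circumference (roughly 0.39); walkTime accumulates avatarVelocity * deltaTime (distance travelled), so the walk advances one key frame about every 0.39 meters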
var avatarAcceleration = 0.75;
var avatarVelocity = 0.0;
var avatarMaxVelocity = 1.4;
function handleAnimation(deltaTime) {
updateBlinking(deltaTime);
updateBlendShapes(deltaTime);
if (Math.random() < 0.01) {
setRandomExpression();
}
if (avatarVelocity == 0.0) {
walkTime = 0.0;
currentFrame = 0;
} else {
walkTime += avatarVelocity * deltaTime;
if (walkTime > walkWheelRate) {
walkTime = 0.0;
currentFrame++;
if (currentFrame % 2 == 1) {
playRandomFootstepSound();
}
if (currentFrame > 3) {
currentFrame = 0;
}
}
}
var frame = walkKeyFrames[currentFrame];
var walkInterp = walkTime / walkWheelRate;
var animInterp = avatarVelocity / (avatarMaxVelocity / 1.3);
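// animInterp reaches 1.0 (full walk pose, no standing pose) once speed hits avatarMaxVelocity / 1.3, about 1.08 m/s; the next line clamps it there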
if (animInterp > 1.0) animInterp = 1.0;
for (var i = 0; i < JOINT_ORDER.length; i++) {
var walkJoint = procAnimAPI.deCasteljau(frame.rotations[i], frame.nextFrame.rotations[i], frame.controlPoints[i][0], frame.controlPoints[i][1], walkInterp);
var standJoint = standingKeyFrame.rotations[i];
var finalJoint = Quat.mix(standJoint, walkJoint, animInterp);
Avatar.setJointData(JOINT_ORDER[i], finalJoint);
}
}
function jumpWithLoudness(deltaTime) {
// potentially change pelvis height depending on trailing average loudness
pelvisOscillatorVelocity += deltaTime * Agent.lastReceivedAudioLoudness * 700.0 ;
pelvisOscillatorVelocity -= pelvisOscillatorPosition * 0.75;
pelvisOscillatorVelocity *= 0.97;
pelvisOscillatorPosition += deltaTime * pelvisOscillatorVelocity;
Avatar.headPitch = pelvisOscillatorPosition * 60.0;
var pelvisPosition = Avatar.position;
pelvisPosition.y = (basePelvisHeight - 0.35) + pelvisOscillatorPosition;
if (pelvisPosition.y < basePelvisHeight) {
pelvisPosition.y = basePelvisHeight;
} else if (pelvisPosition.y > basePelvisHeight + 1.0) {
pelvisPosition.y = basePelvisHeight + 1.0;
}
Avatar.position = pelvisPosition;
}
var forcedMove = false;
var wasMovingLastFrame = false;
function handleHeadTurn() {
if (!isTurningHead && (Math.random() < CHANCE_OF_HEAD_TURNING)) {
targetHeadPitch = getRandomFloat(-PITCH_RANGE, PITCH_RANGE);
targetHeadYaw = getRandomFloat(-YAW_RANGE, YAW_RANGE);
isTurningHead = true;
} else {
Avatar.headPitch = Avatar.headPitch + (targetHeadPitch - Avatar.headPitch) * HEAD_TURN_RATE;
Avatar.headYaw = Avatar.headYaw + (targetHeadYaw - Avatar.headYaw) * HEAD_TURN_RATE;
if (Math.abs(Avatar.headPitch - targetHeadPitch) < STOP_TOLERANCE &&
Math.abs(Avatar.headYaw - targetHeadYaw) < STOP_TOLERANCE) {
isTurningHead = false;
}
}
}
function stopWalking() {
avatarVelocity = 0.0;
isMoving = false;
}
var pauseTimer;
function pause(checkPoint, rotation, delay){
pauseTimer = Script.setTimeout(function() {
targetPosition = checkPoint;
targetOrientation = rotation;
isMoving = true;
}, delay);
}
function handleWalking(deltaTime) {
if (!isMoving){
if(targetPosition.x == 0){targetPosition = wayPoints[1]; isMoving = true;} //Start by heading for wayPoint1
else{
for (var j = 0; j < wayPoints.length; j++) {
if (targetPosition == wayPoints[j]) {
if(j == wayPoints.length -1){ j= -1;}
var k = j + 1;
var toTarget = Vec3.normalize(Vec3.subtract(wayPoints[k], Avatar.position));
var localVector = Vec3.multiplyQbyV(Avatar.orientation, { x: 0, y: 0, z: -1 });
toTarget.y = 0; // flatten the direction so the computed rotation stays about the Y axis (avoids odd off-axis rotations)
var axis = Vec3.normalize(Vec3.cross(toTarget, localVector));
var angle = Math.acos(Vec3.dot(toTarget, localVector)) * 180 / Math.PI;
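// Math.acos only returns an unsigned angle (0 to 180 degrees); the cross/dot test below restores the sign of the turn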
if (Vec3.dot(Vec3.cross(axis, localVector), toTarget) < 0) {
angle = -angle;
}
var delta = 1;
var quat = Quat.angleAxis(angle, axis);
Avatar.orientation = Quat.multiply(quat, Avatar.orientation);
pause(wayPoints[k], Avatar.orientation, pauseTimes[k]);
break;
}
}
}
}
else
if (isMoving) {
var targetVector = Vec3.subtract(targetPosition, Avatar.position);
var distance = Vec3.length(targetVector);
if (distance <= avatarVelocity * deltaTime) {
Avatar.position = targetPosition;
stopWalking();
} else {
var direction = Vec3.normalize(targetVector);
//Figure out if we should be slowing down
var t = avatarVelocity / avatarAcceleration;
var d = (avatarVelocity / 2.0) * t;
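// d = v * t / 2 = v * v / (2 * a): the distance needed to brake to a stop at avatarAcceleration, so start slowing once we are closer than that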
if (distance < d) {
avatarVelocity -= avatarAcceleration * deltaTime;
if (avatarVelocity <= 0) {
stopWalking();
}
} else {
avatarVelocity += avatarAcceleration * deltaTime;
if (avatarVelocity > avatarMaxVelocity) avatarVelocity = avatarMaxVelocity;
}
Avatar.position = Vec3.sum(Avatar.position, Vec3.multiply(direction, avatarVelocity * deltaTime));
wasMovingLastFrame = true;
}
}
}
function handleTalking() {
if (Math.random() < CHANCE_OF_SOUND) {
playRandomSound();
}
}
function changePelvisHeight(newHeight) {
var newPosition = Avatar.position;
newPosition.y = newHeight;
Avatar.position = newPosition;
}
function updateBehavior(deltaTime) {
if (AvatarList.containsAvatarWithDisplayName("mrdj")) {
if (wasMovingLastFrame) {
isMoving = false;
}
// we have a DJ, shouldn't we be dancing?
jumpWithLoudness(deltaTime);
} else {
// no DJ, let's just chill on the dancefloor - randomly walking and talking
handleHeadTurn();
handleAnimation(deltaTime);
handleWalking(deltaTime);
handleTalking();
}
}
Script.update.connect(updateBehavior);

View file

@ -20,41 +20,103 @@ var HEAD_MOVE_DEAD_ZONE = 0.0;
var HEAD_STRAFE_DEAD_ZONE = 0.0;
var HEAD_ROTATE_DEAD_ZONE = 0.0;
var HEAD_THRUST_FWD_SCALE = 12000.0;
var HEAD_THRUST_STRAFE_SCALE = 1000.0;
var HEAD_YAW_RATE = 2.0;
var HEAD_THRUST_STRAFE_SCALE = 2000.0;
var HEAD_YAW_RATE = 1.0;
var HEAD_PITCH_RATE = 1.0;
var HEAD_ROLL_THRUST_SCALE = 75.0;
var HEAD_PITCH_LIFT_THRUST = 3.0;
var WALL_BOUNCE = 4000.0;
// These values limit movement: maxVelocity caps head-driven speed, noFly keeps motion horizontal, and roomLimits (when xMin >= 0) bounces the avatar back inside the room
var maxVelocity = 1.25;
var noFly = true;
//var roomLimits = { xMin: 618, xMax: 635.5, zMin: 528, zMax: 552.5 };
var roomLimits = { xMin: -1, xMax: 0, zMin: 0, zMax: 0 };
function isInRoom(position) {
var BUFFER = 2.0;
if (roomLimits.xMin < 0) {
return false;
}
if ((position.x > (roomLimits.xMin - BUFFER)) &&
(position.x < (roomLimits.xMax + BUFFER)) &&
(position.z > (roomLimits.zMin - BUFFER)) &&
(position.z < (roomLimits.zMax + BUFFER)))
{
return true;
} else {
return false;
}
}
function moveWithHead(deltaTime) {
var thrust = { x: 0, y: 0, z: 0 };
var position = MyAvatar.position;
if (movingWithHead) {
var deltaYaw = MyAvatar.getHeadFinalYaw() - headStartYaw;
var deltaPitch = MyAvatar.getHeadDeltaPitch() - headStartDeltaPitch;
var deltaRoll = MyAvatar.getHeadFinalRoll() - headStartRoll;
var velocity = MyAvatar.getVelocity();
var bodyLocalCurrentHeadVector = Vec3.subtract(MyAvatar.getHeadPosition(), MyAvatar.position);
bodyLocalCurrentHeadVector = Vec3.multiplyQbyV(Quat.angleAxis(-deltaYaw, {x:0, y: 1, z:0}), bodyLocalCurrentHeadVector);
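// un-rotate by the yaw accumulated since the gesture started, so the head offset is compared with headStartPosition in a consistent body-local frame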
var headDelta = Vec3.subtract(bodyLocalCurrentHeadVector, headStartPosition);
headDelta = Vec3.multiplyQbyV(Quat.inverse(Camera.getOrientation()), headDelta);
headDelta.y = 0.0; // Don't respond to the vertical component of head motion
var forward = Quat.getFront(Camera.getOrientation());
var right = Quat.getRight(Camera.getOrientation());
var up = Quat.getUp(Camera.getOrientation());
if (noFly) {
forward.y = 0.0;
forward = Vec3.normalize(forward);
right.y = 0.0;
right = Vec3.normalize(right);
up = { x: 0, y: 1, z: 0};
}
// Thrust based on leaning forward and side-to-side
if (Math.abs(headDelta.z) > HEAD_MOVE_DEAD_ZONE) {
MyAvatar.addThrust(Vec3.multiply(Quat.getFront(Camera.getOrientation()), -headDelta.z * HEAD_THRUST_FWD_SCALE * deltaTime));
if (Math.abs(Vec3.dot(velocity, forward)) < maxVelocity) {
thrust = Vec3.sum(thrust, Vec3.multiply(forward, -headDelta.z * HEAD_THRUST_FWD_SCALE * deltaTime));
}
}
if (Math.abs(headDelta.x) > HEAD_STRAFE_DEAD_ZONE) {
MyAvatar.addThrust(Vec3.multiply(Quat.getRight(Camera.getOrientation()), headDelta.x * HEAD_THRUST_STRAFE_SCALE * deltaTime));
if (Math.abs(Vec3.dot(velocity, right)) < maxVelocity) {
thrust = Vec3.sum(thrust, Vec3.multiply(right, headDelta.x * HEAD_THRUST_STRAFE_SCALE * deltaTime));
}
}
if (Math.abs(deltaYaw) > HEAD_ROTATE_DEAD_ZONE) {
var orientation = Quat.multiply(Quat.angleAxis(deltaYaw * HEAD_YAW_RATE * deltaTime, {x:0, y: 1, z:0}), MyAvatar.orientation);
var orientation = Quat.multiply(Quat.angleAxis((deltaYaw + deltaRoll) * HEAD_YAW_RATE * deltaTime, {x:0, y: 1, z:0}), MyAvatar.orientation);
MyAvatar.orientation = orientation;
}
// Thrust Up/Down based on head pitch
MyAvatar.addThrust(Vec3.multiply({ x:0, y:1, z:0 }, (MyAvatar.getHeadFinalPitch() - headStartFinalPitch) * HEAD_PITCH_LIFT_THRUST * deltaTime));
if (!noFly) {
if ((Math.abs(Vec3.dot(velocity, up)) < maxVelocity)) {
thrust = Vec3.sum(thrust, Vec3.multiply({ x:0, y:1, z:0 }, (MyAvatar.getHeadFinalPitch() - headStartFinalPitch) * HEAD_PITCH_LIFT_THRUST * deltaTime));
}
}
// For head trackers, adjust pitch by head pitch
MyAvatar.headPitch += deltaPitch * HEAD_PITCH_RATE * deltaTime;
// Strafe thrust based on roll angle
MyAvatar.addThrust(Vec3.multiply(Quat.getRight(Camera.getOrientation()), -(MyAvatar.getHeadFinalRoll() - headStartRoll) * HEAD_ROLL_THRUST_SCALE * deltaTime));
}
if (isInRoom(position)) {
// Impose constraints to keep you in the space
if (position.x < roomLimits.xMin) {
thrust.x += (roomLimits.xMin - position.x) * WALL_BOUNCE * deltaTime;
} else if (position.x > roomLimits.xMax) {
thrust.x += (roomLimits.xMax - position.x) * WALL_BOUNCE * deltaTime;
}
if (position.z < roomLimits.zMin) {
thrust.z += (roomLimits.zMin - position.z) * WALL_BOUNCE * deltaTime;
} else if (position.z > roomLimits.zMax) {
thrust.z += (roomLimits.zMax - position.z) * WALL_BOUNCE * deltaTime;
}
}
// Check against movement box limits
MyAvatar.addThrust(thrust);
}
Controller.keyPressEvent.connect(function(event) {

View file

@ -906,7 +906,9 @@ void Application::keyPressEvent(QKeyEvent* event) {
break;
case Qt::Key_D:
_myAvatar->setDriveKeys(ROT_RIGHT, 1.f);
if (!isMeta) {
_myAvatar->setDriveKeys(ROT_RIGHT, 1.f);
}
break;
case Qt::Key_Return:
@ -1074,7 +1076,7 @@ void Application::keyReleaseEvent(QKeyEvent* event) {
_keysPressed.remove(event->key());
_controllerScriptingInterface.emitKeyReleaseEvent(event); // send events to any registered scripts
// if one of our scripts have asked to capture this event, then stop processing it
if (_controllerScriptingInterface.isKeyCaptured(event)) {
return;
@ -1126,7 +1128,12 @@ void Application::keyReleaseEvent(QKeyEvent* event) {
_myAvatar->setDriveKeys(RIGHT, 0.f);
_myAvatar->setDriveKeys(ROT_RIGHT, 0.f);
break;
case Qt::Key_Control:
case Qt::Key_Shift:
case Qt::Key_Meta:
case Qt::Key_Alt:
_myAvatar->clearDriveKeys();
break;
default:
event->ignore();
break;
@ -2725,6 +2732,9 @@ void Application::displaySide(Camera& whichCamera, bool selfAvatarOnly) {
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings), "Application::displaySide()");
// transform by eye offset
// load the view frustum
loadViewFrustum(whichCamera, _displayViewFrustum);
// flip x if in mirror mode (also requires reversing winding order for backface culling)
if (whichCamera.getMode() == CAMERA_MODE_MIRROR) {
glScalef(-1.0f, 1.0f, 1.0f);
@ -2972,7 +2982,14 @@ void Application::getProjectionMatrix(glm::dmat4* projectionMatrix) {
void Application::computeOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) const {
// allow 3DTV/Oculus to override parameters from camera
_viewFrustum.computeOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);
if (OculusManager::isConnected()) {
OculusManager::overrideOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);
} else if (TV3DManager::isConnected()) {
TV3DManager::overrideOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);
}
}
glm::vec2 Application::getScaledScreenPoint(glm::vec2 projectedPoint) {
@ -3358,7 +3375,10 @@ void Application::updateWindowTitle(){
title += " - ₵" + creditBalanceString;
}
#ifndef WIN32
// crashes with vs2013/win32
qDebug("Application title set to: %s", title.toStdString().c_str());
#endif //!WIN32
_window->setWindowTitle(title);
}

View file

@ -192,6 +192,7 @@ public:
const AudioReflector* getAudioReflector() const { return &_audioReflector; }
Camera* getCamera() { return &_myCamera; }
ViewFrustum* getViewFrustum() { return &_viewFrustum; }
ViewFrustum* getDisplayViewFrustum() { return &_displayViewFrustum; }
ViewFrustum* getShadowViewFrustum() { return &_shadowViewFrustum; }
VoxelImporter* getVoxelImporter() { return &_voxelImporter; }
VoxelSystem* getVoxels() { return &_voxels; }
@ -486,6 +487,7 @@ private:
ViewFrustum _viewFrustum; // current state of view frustum, perspective, orientation, etc.
ViewFrustum _lastQueriedViewFrustum; /// last view frustum used to query octree servers (voxels, particles)
ViewFrustum _displayViewFrustum;
ViewFrustum _shadowViewFrustum;
quint64 _lastQueriedTime;

View file

@ -102,9 +102,9 @@ Audio::Audio(QObject* parent) :
_scopeOutputOffset(0),
_framesPerScope(DEFAULT_FRAMES_PER_SCOPE),
_samplesPerScope(NETWORK_SAMPLES_PER_FRAME * _framesPerScope),
_peqEnabled(false),
_noiseSourceEnabled(false),
_toneSourceEnabled(true),
_peqEnabled(false),
_scopeInput(0),
_scopeOutputLeft(0),
_scopeOutputRight(0),
@ -164,14 +164,24 @@ void Audio::audioMixerKilled() {
resetStats();
}
QAudioDeviceInfo getNamedAudioDeviceForMode(QAudio::Mode mode, const QString& deviceName) {
QAudioDeviceInfo result;
#ifdef WIN32
// NOTE
// this is a workaround for a Windows-only Qt bug https://bugreports.qt-project.org/browse/QTBUG-16117
// static QAudioDeviceInfo objects get deallocated when QList<QAudioDeviceInfo> objects go out of scope
result = (mode == QAudio::AudioInput) ?
QAudioDeviceInfo::defaultInputDevice() :
QAudioDeviceInfo::defaultOutputDevice();
#else
foreach(QAudioDeviceInfo audioDevice, QAudioDeviceInfo::availableDevices(mode)) {
qDebug() << audioDevice.deviceName() << " " << deviceName;
if (audioDevice.deviceName().trimmed() == deviceName.trimmed()) {
result = audioDevice;
}
}
#endif
return result;
}
@ -277,6 +287,7 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
pMMDeviceEnumerator = NULL;
CoUninitialize();
}
qDebug() << "DEBUG [" << deviceName << "] [" << getNamedAudioDeviceForMode(mode, deviceName).deviceName() << "]";
return getNamedAudioDeviceForMode(mode, deviceName);
@ -431,7 +442,7 @@ void Audio::start() {
qDebug() << "Unable to set up audio output because of a problem with output format.";
}
_inputFrameBuffer.initialize( _inputFormat.channelCount(), _audioInput->bufferSize() * 4 );
_inputFrameBuffer.initialize( _inputFormat.channelCount(), _audioInput->bufferSize() * 8 );
_peq.initialize( _inputFormat.sampleRate() );
_inputGain.initialize();
_sourceGain.initialize();
@ -633,6 +644,8 @@ void Audio::handleAudioInput() {
_dcOffset = DC_OFFSET_AVERAGING * _dcOffset + (1.0f - DC_OFFSET_AVERAGING) * measuredDcOffset;
}
_lastInputLoudness = fabs(loudness / NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
// If Noise Gate is enabled, check and turn the gate on and off
if (!_audioSourceInjectEnabled && _noiseGateEnabled) {
float averageOfAllSampleFrames = 0.0f;

View file

@ -26,6 +26,7 @@
#include "AudioSourceTone.h"
#include "AudioSourceNoise.h"
#include "AudioGain.h"
#include "AudioPan.h"
#include "AudioFilter.h"
#include "AudioFilterBank.h"

View file

@ -70,6 +70,7 @@ Menu* Menu::getInstance() {
const ViewFrustumOffset DEFAULT_FRUSTUM_OFFSET = {-135.0f, 0.0f, 0.0f, 25.0f, 0.0f};
const float DEFAULT_FACESHIFT_EYE_DEFLECTION = 0.25f;
const QString DEFAULT_FACESHIFT_HOSTNAME = "localhost";
const float DEFAULT_AVATAR_LOD_DISTANCE_MULTIPLIER = 1.0f;
const int ONE_SECOND_OF_FRAMES = 60;
const int FIVE_SECONDS_OF_FRAMES = 5 * ONE_SECOND_OF_FRAMES;
@ -87,6 +88,7 @@ Menu::Menu() :
_fieldOfView(DEFAULT_FIELD_OF_VIEW_DEGREES),
_realWorldFieldOfView(DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
_faceshiftEyeDeflection(DEFAULT_FACESHIFT_EYE_DEFLECTION),
_faceshiftHostname(DEFAULT_FACESHIFT_HOSTNAME),
_frustumDrawMode(FRUSTUM_DRAW_MODE_ALL),
_viewFrustumOffset(DEFAULT_FRUSTUM_OFFSET),
_jsConsole(NULL),
@ -713,6 +715,7 @@ void Menu::loadSettings(QSettings* settings) {
_fieldOfView = loadSetting(settings, "fieldOfView", DEFAULT_FIELD_OF_VIEW_DEGREES);
_realWorldFieldOfView = loadSetting(settings, "realWorldFieldOfView", DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES);
_faceshiftEyeDeflection = loadSetting(settings, "faceshiftEyeDeflection", DEFAULT_FACESHIFT_EYE_DEFLECTION);
_faceshiftHostname = settings->value("faceshiftHostname", DEFAULT_FACESHIFT_HOSTNAME).toString();
_maxVoxels = loadSetting(settings, "maxVoxels", DEFAULT_MAX_VOXELS_PER_SYSTEM);
_maxVoxelPacketsPerSecond = loadSetting(settings, "maxVoxelsPPS", DEFAULT_MAX_VOXEL_PPS);
_voxelSizeScale = loadSetting(settings, "voxelSizeScale", DEFAULT_OCTREE_SIZE_SCALE);
@ -777,6 +780,7 @@ void Menu::saveSettings(QSettings* settings) {
settings->setValue("fieldOfView", _fieldOfView);
settings->setValue("faceshiftEyeDeflection", _faceshiftEyeDeflection);
settings->setValue("faceshiftHostname", _faceshiftHostname);
settings->setValue("maxVoxels", _maxVoxels);
settings->setValue("maxVoxelsPPS", _maxVoxelPacketsPerSecond);
settings->setValue("voxelSizeScale", _voxelSizeScale);

View file

@ -104,6 +104,8 @@ public:
float getFaceshiftEyeDeflection() const { return _faceshiftEyeDeflection; }
void setFaceshiftEyeDeflection(float faceshiftEyeDeflection) { _faceshiftEyeDeflection = faceshiftEyeDeflection; bumpSettings(); }
const QString& getFaceshiftHostname() const { return _faceshiftHostname; }
void setFaceshiftHostname(const QString& hostname) { _faceshiftHostname = hostname; bumpSettings(); }
QString getSnapshotsLocation() const;
void setSnapshotsLocation(QString snapshotsLocation) { _snapshotsLocation = snapshotsLocation; bumpSettings(); }
@ -271,6 +273,7 @@ private:
float _fieldOfView; /// in Degrees, doesn't apply to HMD like Oculus
float _realWorldFieldOfView; // The actual FOV set by the user's monitor size and view distance
float _faceshiftEyeDeflection;
QString _faceshiftHostname;
FrustumDrawMode _frustumDrawMode;
ViewFrustumOffset _viewFrustumOffset;
QPointer<MetavoxelEditor> _MetavoxelEditor;

View file

@ -133,7 +133,7 @@ const GLenum COLOR_NORMAL_DRAW_BUFFERS[] = { GL_COLOR_ATTACHMENT0, GL_COLOR_ATTA
void MetavoxelSystem::render() {
// update the frustum
ViewFrustum* viewFrustum = Application::getInstance()->getViewFrustum();
ViewFrustum* viewFrustum = Application::getInstance()->getDisplayViewFrustum();
_frustum.set(viewFrustum->getFarTopLeft(), viewFrustum->getFarTopRight(), viewFrustum->getFarBottomLeft(),
viewFrustum->getFarBottomRight(), viewFrustum->getNearTopLeft(), viewFrustum->getNearTopRight(),
viewFrustum->getNearBottomLeft(), viewFrustum->getNearBottomRight());
@ -181,6 +181,14 @@ void MetavoxelSystem::render() {
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, Application::getInstance()->getTextureCache()->getPrimaryNormalTextureID());
// get the viewport side (left, right, both)
int viewport[4];
glGetIntegerv(GL_VIEWPORT, viewport);
const int VIEWPORT_X_INDEX = 0;
const int VIEWPORT_WIDTH_INDEX = 2;
float sMin = viewport[VIEWPORT_X_INDEX] / (float)primaryFBO->width();
float sWidth = viewport[VIEWPORT_WIDTH_INDEX] / (float)primaryFBO->width();
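// sMin/sWidth express the active viewport as a fraction of the full framebuffer, so in side-by-side stereo the deferred lighting quad and its texture coordinates cover only the eye currently being rendered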
if (Menu::getInstance()->getShadowsEnabled()) {
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, Application::getInstance()->getTextureCache()->getPrimaryDepthTextureID());
@ -210,10 +218,13 @@ void MetavoxelSystem::render() {
program->setUniformValue(locations->nearLocation, nearVal);
program->setUniformValue(locations->depthScale, (farVal - nearVal) / farVal);
float nearScale = -1.0f / nearVal;
program->setUniformValue(locations->depthTexCoordOffset, left * nearScale, bottom * nearScale);
program->setUniformValue(locations->depthTexCoordScale, (right - left) * nearScale, (top - bottom) * nearScale);
float sScale = 1.0f / sWidth;
float depthTexCoordScaleS = (right - left) * nearScale * sScale;
program->setUniformValue(locations->depthTexCoordOffset, left * nearScale - sMin * depthTexCoordScaleS,
bottom * nearScale);
program->setUniformValue(locations->depthTexCoordScale, depthTexCoordScaleS, (top - bottom) * nearScale);
renderFullscreenQuad();
renderFullscreenQuad(sMin, sMin + sWidth);
program->release();
@ -226,7 +237,7 @@ void MetavoxelSystem::render() {
} else {
_directionalLight.bind();
renderFullscreenQuad();
renderFullscreenQuad(sMin, sMin + sWidth);
_directionalLight.release();
}
@ -245,7 +256,7 @@ void MetavoxelSystem::render() {
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
renderFullscreenQuad();
renderFullscreenQuad(sMin, sMin + sWidth);
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
@ -2176,7 +2187,7 @@ private:
SpannerRenderVisitor::SpannerRenderVisitor(const MetavoxelLOD& lod) :
SpannerVisitor(QVector<AttributePointer>() << AttributeRegistry::getInstance()->getSpannersAttribute(),
QVector<AttributePointer>(), QVector<AttributePointer>(), QVector<AttributePointer>(),
lod, encodeOrder(Application::getInstance()->getViewFrustum()->getDirection())),
lod, encodeOrder(Application::getInstance()->getDisplayViewFrustum()->getDirection())),
_containmentDepth(INT_MAX) {
}
@ -2212,7 +2223,7 @@ private:
BufferRenderVisitor::BufferRenderVisitor(const AttributePointer& attribute) :
MetavoxelVisitor(QVector<AttributePointer>() << attribute),
_order(encodeOrder(Application::getInstance()->getViewFrustum()->getDirection())),
_order(encodeOrder(Application::getInstance()->getDisplayViewFrustum()->getDirection())),
_containmentDepth(INT_MAX) {
}
@ -2246,12 +2257,12 @@ void DefaultMetavoxelRendererImplementation::render(MetavoxelData& data, Metavox
float viewportWidth = viewport[VIEWPORT_WIDTH_INDEX];
float viewportHeight = viewport[VIEWPORT_HEIGHT_INDEX];
float viewportDiagonal = sqrtf(viewportWidth * viewportWidth + viewportHeight * viewportHeight);
float worldDiagonal = glm::distance(Application::getInstance()->getViewFrustum()->getNearBottomLeft(),
Application::getInstance()->getViewFrustum()->getNearTopRight());
float worldDiagonal = glm::distance(Application::getInstance()->getDisplayViewFrustum()->getNearBottomLeft(),
Application::getInstance()->getDisplayViewFrustum()->getNearTopRight());
_pointProgram.bind();
_pointProgram.setUniformValue(_pointScaleLocation, viewportDiagonal *
Application::getInstance()->getViewFrustum()->getNearClip() / worldDiagonal);
Application::getInstance()->getDisplayViewFrustum()->getNearClip() / worldDiagonal);
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_COLOR_ARRAY);

View file

@ -48,6 +48,7 @@ static const QString TRANSLATION_Y_FIELD = "ty";
static const QString TRANSLATION_Z_FIELD = "tz";
static const QString JOINT_FIELD = "joint";
static const QString FREE_JOINT_FIELD = "freeJoint";
static const QString BLENDSHAPE_FIELD = "bs";
static const QString S3_URL = "http://public.highfidelity.io";
static const QString MODEL_URL = "/api/v1/models";
@ -192,6 +193,45 @@ bool ModelUploader::zip() {
mapping.insertMulti(FREE_JOINT_FIELD, "RightForeArm");
}
// mixamo blendshapes
if (!mapping.contains(BLENDSHAPE_FIELD) && geometry.applicationName == "mixamo.com") {
QVariantHash blendshapes;
blendshapes.insert("EyeBlink_L", QVariantList() << "Blink_Left" << 1.0);
blendshapes.insert("EyeBlink_R", QVariantList() << "Blink_Right" << 1.0);
blendshapes.insert("EyeSquint_L", QVariantList() << "Squint_Left" << 1.0);
blendshapes.insert("EyeSquint_R", QVariantList() << "Squint_Right" << 1.0);
blendshapes.insert("EyeOpen_L", QVariantList() << "EyesWide_Left" << 1.0);
blendshapes.insert("EyeOpen_R", QVariantList() << "EyesWide_Right" << 1.0);
blendshapes.insert("BrowsD_L", QVariantList() << "BrowsDown_Left" << 1.0);
blendshapes.insert("BrowsD_R", QVariantList() << "BrowsDown_Right" << 1.0);
blendshapes.insert("BrowsU_L", QVariantList() << "BrowsUp_Left" << 1.0);
blendshapes.insert("BrowsU_R", QVariantList() << "BrowsUp_Right" << 1.0);
blendshapes.insert("JawFwd", QVariantList() << "JawForeward" << 1.0);
blendshapes.insert("JawOpen", QVariantList() << "Jaw_Down" << 1.0);
blendshapes.insert("JawLeft", QVariantList() << "Jaw_Left" << 1.0);
blendshapes.insert("JawRight", QVariantList() << "Jaw_Right" << 1.0);
blendshapes.insert("JawChew", QVariantList() << "Jaw_Up" << 1.0);
blendshapes.insert("MouthLeft", QVariantList() << "Midmouth_Left" << 1.0);
blendshapes.insert("MouthRight", QVariantList() << "Midmouth_Right" << 1.0);
blendshapes.insert("MouthFrown_L", QVariantList() << "Frown_Left" << 1.0);
blendshapes.insert("MouthFrown_R", QVariantList() << "Frown_Right" << 1.0);
blendshapes.insert("MouthSmile_L", QVariantList() << "Smile_Left" << 1.0);
blendshapes.insert("MouthSmile_R", QVariantList() << "Smile_Right" << 1.0);
blendshapes.insert("LipsUpperUp", QVariantList() << "UpperLipUp_Left" << 0.5);
blendshapes.insertMulti("LipsUpperUp", QVariantList() << "UpperLipUp_Right" << 0.5);
blendshapes.insert("Puff", QVariantList() << "CheekPuff_Left" << 0.5);
blendshapes.insertMulti("Puff", QVariantList() << "CheekPuff_Right" << 0.5);
blendshapes.insert("Sneer", QVariantList() << "NoseScrunch_Left" << 0.5);
blendshapes.insertMulti("Sneer", QVariantList() << "NoseScrunch_Right" << 0.5);
blendshapes.insert("CheekSquint_L", QVariantList() << "Squint_Left" << 1.0);
blendshapes.insert("CheekSquint_R", QVariantList() << "Squint_Right" << 1.0);
blendshapes.insert("LipsPucker", QVariantList() << "MouthNarrow_Left" << 0.5);
blendshapes.insertMulti("LipsPucker", QVariantList() << "MouthNarrow_Right" << 0.5);
blendshapes.insert("LipsLowerDown", QVariantList() << "LowerLipDown_Left" << 0.5);
blendshapes.insertMulti("LipsLowerDown", QVariantList() << "LowerLipDown_Right" << 0.5);
mapping.insert(BLENDSHAPE_FIELD, blendshapes);
}
// open the dialog to configure the rest
ModelPropertiesDialog properties(_modelType, mapping, basePath, geometry);
if (properties.exec() == QDialog::Rejected) {

View file

@ -181,9 +181,10 @@ const glm::vec3 randVector() {
}
static TextRenderer* textRenderer(int mono) {
static TextRenderer* monoRenderer = new TextRenderer(MONO_FONT_FAMILY);
static TextRenderer* proportionalRenderer = new TextRenderer(SANS_FONT_FAMILY, -1, -1, false, TextRenderer::SHADOW_EFFECT);
static TextRenderer* inconsolataRenderer = new TextRenderer(INCONSOLATA_FONT_FAMILY, -1, QFont::Bold, false);
static TextRenderer* monoRenderer = TextRenderer::getInstance(MONO_FONT_FAMILY);
static TextRenderer* proportionalRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY,
-1, -1, false, TextRenderer::SHADOW_EFFECT);
static TextRenderer* inconsolataRenderer = TextRenderer::getInstance(INCONSOLATA_FONT_FAMILY, -1, QFont::Bold, false);
switch (mono) {
case 1:
return monoRenderer;

View file

@ -53,8 +53,8 @@ Avatar::Avatar() :
AvatarData(),
_skeletonModel(this),
_bodyYawDelta(0.0f),
_lastPosition(_position),
_velocity(0.0f),
_positionDeltaAccumulator(0.0f),
_lastVelocity(0.0f),
_acceleration(0.0f),
_angularVelocity(0.0f),
@ -208,16 +208,30 @@ void Avatar::simulate(float deltaTime) {
}
// NOTE: we shouldn't extrapolate an Avatar instance forward in time...
// until velocity is in AvatarData update message.
// until velocity is included in AvatarData update message.
//_position += _velocity * deltaTime;
measureMotionDerivatives(deltaTime);
}
void Avatar::slamPosition(const glm::vec3& newPosition) {
AvatarData::setPosition(newPosition);
_positionDeltaAccumulator = glm::vec3(0.0f);
_velocity = glm::vec3(0.0f);
_lastVelocity = glm::vec3(0.0f);
}
void Avatar::applyPositionDelta(const glm::vec3& delta) {
_position += delta;
_positionDeltaAccumulator += delta;
}
void Avatar::measureMotionDerivatives(float deltaTime) {
// linear
float invDeltaTime = 1.0f / deltaTime;
_velocity = (_position - _lastPosition) * invDeltaTime;
_lastPosition = _position;
// Floating point error prevents us from computing velocity in a naive way
// (e.g. vel = (pos - oldPos) / dt) so instead we use _positionDeltaAccumulator.
_velocity = _positionDeltaAccumulator * invDeltaTime;
_positionDeltaAccumulator = glm::vec3(0.0f);
_acceleration = (_velocity - _lastVelocity) * invDeltaTime;
_lastVelocity = _velocity;
// angular
@ -239,8 +253,9 @@ enum TextRendererType {
};
static TextRenderer* textRenderer(TextRendererType type) {
static TextRenderer* chatRenderer = new TextRenderer(SANS_FONT_FAMILY, 24, -1, false, TextRenderer::SHADOW_EFFECT);
static TextRenderer* displayNameRenderer = new TextRenderer(SANS_FONT_FAMILY, 12, -1, false, TextRenderer::NO_EFFECT);
static TextRenderer* chatRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, 24, -1,
false, TextRenderer::SHADOW_EFFECT);
static TextRenderer* displayNameRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, 12);
switch(type) {
case CHAT:

View file

@ -161,6 +161,13 @@ public:
/// \param vector position to be scaled. Will store the result
void scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const;
void slamPosition(const glm::vec3& position);
// Call this when updating Avatar position with a delta. This will allow us to
// _accurately_ measure position changes and compute the resulting velocity
// (otherwise floating point error will cause problems at large positions).
void applyPositionDelta(const glm::vec3& delta);
public slots:
void updateCollisionGroups();
@ -173,12 +180,15 @@ protected:
QVector<Model*> _attachmentModels;
float _bodyYawDelta;
glm::vec3 _velocity;
// These position histories and derivatives are in the world-frame.
// The derivatives are the MEASURED results of all external and internal forces
// and are therefore READ-ONLY --> motion control of the Avatar is NOT obtained
// by setting these values.
glm::vec3 _lastPosition;
glm::vec3 _velocity;
// Floating point error prevents us from accurately measuring velocity using a naive approach
// (e.g. vel = (pos - lastPos)/dt) so instead we use _positionDeltaAccumulator.
glm::vec3 _positionDeltaAccumulator;
glm::vec3 _lastVelocity;
glm::vec3 _acceleration;
glm::vec3 _angularVelocity;

View file

@ -227,7 +227,7 @@ void MyAvatar::simulate(float deltaTime) {
const float MAX_RAGDOLL_DISPLACEMENT_2 = 1.0f;
float length2 = glm::length2(ragdollDisplacement);
if (length2 > EPSILON && length2 < MAX_RAGDOLL_DISPLACEMENT_2) {
setPosition(getPosition() + ragdollDisplacement);
applyPositionDelta(ragdollDisplacement);
}
} else {
_skeletonModel.moveShapesTowardJoints(1.0f);
@ -609,13 +609,6 @@ void MyAvatar::setGravity(const glm::vec3& gravity) {
// so it continues to point opposite to the previous gravity setting.
}
void MyAvatar::slamPosition(const glm::vec3& newPosition) {
AvatarData::setPosition(newPosition);
_lastPosition = _position;
_velocity = glm::vec3(0.0f);
_lastVelocity = glm::vec3(0.0f);
}
AnimationHandlePointer MyAvatar::addAnimationHandle() {
AnimationHandlePointer handle = _skeletonModel.createAnimationHandle();
_animationHandles.append(handle);
@ -1275,15 +1268,11 @@ void MyAvatar::updatePosition(float deltaTime) {
if (_motionBehaviors & AVATAR_MOTION_MOTOR_ENABLED) {
glm::vec3 targetVelocity = _motorVelocity;
if (_motionBehaviors & AVATAR_MOTION_MOTOR_USE_LOCAL_FRAME) {
// rotate _motorVelocity into world frame
// rotate targetVelocity into world frame
glm::quat rotation = getHead()->getCameraOrientation();
targetVelocity = rotation * _motorVelocity;
}
glm::vec3 targetDirection(0.0f);
if (glm::length2(targetVelocity) > EPSILON) {
targetDirection = glm::normalize(targetVelocity);
}
glm::vec3 deltaVelocity = targetVelocity - velocity;
if (_motionBehaviors & AVATAR_MOTION_MOTOR_COLLISION_SURFACE_ONLY && glm::length2(_gravity) > EPSILON) {
@ -1315,7 +1304,7 @@ void MyAvatar::updatePosition(float deltaTime) {
// update position
const float MIN_AVATAR_SPEED = 0.075f;
if (speed > MIN_AVATAR_SPEED) {
_position += velocity * deltaTime;
applyPositionDelta(deltaTime * velocity);
}
}
@ -1972,3 +1961,9 @@ glm::vec3 MyAvatar::getLaserPointerTipPosition(const PalmData* palm) {
return palm->getPosition();
}
void MyAvatar::clearDriveKeys() {
for (int i = 0; i < MAX_DRIVE_KEYS; ++i) {
_driveKeys[i] = 0.0f;
}
}

View file

@ -57,7 +57,6 @@ public:
void setLeanScale(float scale) { _leanScale = scale; }
void setLocalGravity(glm::vec3 gravity);
void setShouldRenderLocally(bool shouldRender) { _shouldRender = shouldRender; }
void slamPosition(const glm::vec3& position);
// getters
float getLeanScale() const { return _leanScale; }
@ -98,6 +97,7 @@ public:
AttachmentData loadAttachmentData(const QUrl& modelURL, const QString& jointName = QString()) const;
// Set what driving keys are being pressed to control thrust levels
void clearDriveKeys();
void setDriveKeys(int key, float val) { _driveKeys[key] = val; };
bool getDriveKeys(int key) { return _driveKeys[key] != 0.f; };
void jump() { _shouldJump = true; };

View file

@ -60,6 +60,7 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
setRotation(_owningAvatar->getOrientation() * refOrientation);
const float MODEL_SCALE = 0.0006f;
setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningAvatar->getScale() * MODEL_SCALE);
setBlendshapeCoefficients(_owningAvatar->getHead()->getBlendshapeCoefficients());
Model::simulate(deltaTime, fullUpdate);

View file

@ -151,7 +151,7 @@ void Faceshift::connectSocket() {
qDebug("Faceshift: Connecting...");
}
_tcpSocket.connectToHost("localhost", FACESHIFT_PORT);
_tcpSocket.connectToHost(Menu::getInstance()->getFaceshiftHostname(), FACESHIFT_PORT);
_tracking = false;
}
}

View file

@ -53,6 +53,7 @@ unsigned int OculusManager::_frameIndex = 0;
bool OculusManager::_frameTimingActive = false;
bool OculusManager::_programInitialized = false;
Camera* OculusManager::_camera = NULL;
int OculusManager::_activeEyeIndex = -1;
#endif
@ -330,6 +331,8 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
//Render each eye into an fbo
for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++) {
_activeEyeIndex = eyeIndex;
#if defined(__APPLE__) || defined(_WIN32)
ovrEyeType eye = _ovrHmd->EyeRenderOrder[eyeIndex];
#else
@ -363,6 +366,7 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
Application::getInstance()->displaySide(*_camera);
applicationOverlay.displayOverlayTextureOculus(*_camera);
_activeEyeIndex = -1;
}
//Wait till time-warp to reduce latency
@ -528,3 +532,16 @@ QSize OculusManager::getRenderTargetSize() {
return QSize(100, 100);
#endif
}
void OculusManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) {
#ifdef HAVE_LIBOVR
if (_activeEyeIndex != -1) {
const ovrFovPort& port = _eyeFov[_activeEyeIndex];
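// ovrFovPort stores the tangents of the half-angles of the eye's FOV, so scaling by the near distance gives the frustum extents on the near plane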
right = nearVal * port.RightTan;
left = -nearVal * port.LeftTan;
top = nearVal * port.UpTan;
bottom = -nearVal * port.DownTan;
}
#endif
}

View file

@ -43,6 +43,9 @@ public:
static glm::vec3 getRelativePosition();
static QSize getRenderTargetSize();
static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);
private:
#ifdef HAVE_LIBOVR
static void generateDistortionMesh();
@ -92,6 +95,7 @@ private:
static bool _frameTimingActive;
static bool _programInitialized;
static Camera* _camera;
static int _activeEyeIndex;
#endif
};

View file

@ -207,10 +207,11 @@ void PrioVR::renderCalibrationCountdown() {
Application::getInstance()->disconnect(this);
return;
}
static TextRenderer textRenderer(MONO_FONT_FAMILY, 18, QFont::Bold, false, TextRenderer::OUTLINE_EFFECT, 2);
static TextRenderer* textRenderer = TextRenderer::getInstance(MONO_FONT_FAMILY, 18, QFont::Bold,
false, TextRenderer::OUTLINE_EFFECT, 2);
QByteArray text = "Assume T-Pose in " + QByteArray::number(secondsRemaining) + "...";
textRenderer.draw((Application::getInstance()->getGLWidget()->width() -
textRenderer.computeWidth(text.constData())) / 2, Application::getInstance()->getGLWidget()->height() / 2,
textRenderer->draw((Application::getInstance()->getGLWidget()->width() -
textRenderer->computeWidth(text.constData())) / 2, Application::getInstance()->getGLWidget()->height() / 2,
text);
#endif
}

View file

@ -25,6 +25,7 @@ int TV3DManager::_screenHeight = 1;
double TV3DManager::_aspect = 1.0;
eyeFrustum TV3DManager::_leftEye;
eyeFrustum TV3DManager::_rightEye;
eyeFrustum* TV3DManager::_activeEye = NULL;
bool TV3DManager::isConnected() {
@ -93,8 +94,6 @@ void TV3DManager::display(Camera& whichCamera) {
int portalW = Application::getInstance()->getGLWidget()->getDeviceWidth() / 2;
int portalH = Application::getInstance()->getGLWidget()->getDeviceHeight();
const bool glowEnabled = Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect);
ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();
// We only need to render the overlays to a texture once, then we just render the texture as a quad
@ -102,9 +101,7 @@ void TV3DManager::display(Camera& whichCamera) {
applicationOverlay.renderOverlay(true);
const bool displayOverlays = Menu::getInstance()->isOptionChecked(MenuOption::UserInterface);
if (glowEnabled) {
Application::getInstance()->getGlowEffect()->prepare();
}
Application::getInstance()->getGlowEffect()->prepare();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
@ -115,7 +112,7 @@ void TV3DManager::display(Camera& whichCamera) {
glPushMatrix();
{
_activeEye = &_leftEye;
glMatrixMode(GL_PROJECTION);
glLoadIdentity(); // reset projection matrix
glFrustum(_leftEye.left, _leftEye.right, _leftEye.bottom, _leftEye.top, nearZ, farZ); // set left view frustum
@ -132,6 +129,7 @@ void TV3DManager::display(Camera& whichCamera) {
if (displayOverlays) {
applicationOverlay.displayOverlayTexture3DTV(whichCamera, _aspect, fov);
}
_activeEye = NULL;
}
glPopMatrix();
glDisable(GL_SCISSOR_TEST);
@ -144,6 +142,7 @@ void TV3DManager::display(Camera& whichCamera) {
glScissor(portalX, portalY, portalW, portalH);
glPushMatrix();
{
_activeEye = &_rightEye;
glMatrixMode(GL_PROJECTION);
glLoadIdentity(); // reset projection matrix
glFrustum(_rightEye.left, _rightEye.right, _rightEye.bottom, _rightEye.top, nearZ, farZ); // set right view frustum
@ -160,6 +159,7 @@ void TV3DManager::display(Camera& whichCamera) {
if (displayOverlays) {
applicationOverlay.displayOverlayTexture3DTV(whichCamera, _aspect, fov);
}
_activeEye = NULL;
}
glPopMatrix();
glDisable(GL_SCISSOR_TEST);
@ -168,7 +168,15 @@ void TV3DManager::display(Camera& whichCamera) {
glViewport(0, 0, Application::getInstance()->getGLWidget()->getDeviceWidth(),
Application::getInstance()->getGLWidget()->getDeviceHeight());
if (glowEnabled) {
Application::getInstance()->getGlowEffect()->render();
Application::getInstance()->getGlowEffect()->render();
}
void TV3DManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) {
if (_activeEye) {
left = _activeEye->left;
right = _activeEye->right;
bottom = _activeEye->bottom;
top = _activeEye->top;
}
}

View file

@ -14,6 +14,8 @@
#include <iostream>
#include <glm/glm.hpp>
class Camera;
struct eyeFrustum {
@ -32,6 +34,8 @@ public:
static bool isConnected();
static void configureCamera(Camera& camera, int screenWidth, int screenHeight);
static void display(Camera& whichCamera);
static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);
private:
static void setFrustum(Camera& whichCamera);
static int _screenWidth;
@ -39,6 +43,7 @@ private:
static double _aspect;
static eyeFrustum _leftEye;
static eyeFrustum _rightEye;
static eyeFrustum* _activeEye;
};
#endif // hifi_TV3DManager_h

View file

@ -37,7 +37,7 @@ void RenderableBoxEntityItem::render(RenderArgs* args) {
glm::quat rotation = getRotation();
const bool useGlutCube = false;
const bool useGlutCube = true;
if (useGlutCube) {
glColor3ub(getColor()[RED_INDEX], getColor()[GREEN_INDEX], getColor()[BLUE_INDEX]);

View file

@ -144,7 +144,7 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
float depth = unRotatedExtents.z;
Extents rotatedExtents = _model->getUnscaledMeshExtents();
calculateRotatedExtents(rotatedExtents, rotation);
rotatedExtents.rotate(rotation);
glm::vec3 rotatedSize = rotatedExtents.maximum - rotatedExtents.minimum;

View file

@ -116,9 +116,9 @@ void AmbientOcclusionEffect::render() {
glGetIntegerv(GL_VIEWPORT, viewport);
const int VIEWPORT_X_INDEX = 0;
const int VIEWPORT_WIDTH_INDEX = 2;
QSize widgetSize = Application::getInstance()->getGLWidget()->getDeviceSize();
float sMin = viewport[VIEWPORT_X_INDEX] / (float)widgetSize.width();
float sWidth = viewport[VIEWPORT_WIDTH_INDEX] / (float)widgetSize.width();
QOpenGLFramebufferObject* primaryFBO = Application::getInstance()->getTextureCache()->getPrimaryFramebufferObject();
float sMin = viewport[VIEWPORT_X_INDEX] / (float)primaryFBO->width();
float sWidth = viewport[VIEWPORT_WIDTH_INDEX] / (float)primaryFBO->width();
_occlusionProgram->bind();
_occlusionProgram->setUniformValue(_nearLocation, nearVal);
@ -126,7 +126,7 @@ void AmbientOcclusionEffect::render() {
_occlusionProgram->setUniformValue(_leftBottomLocation, left, bottom);
_occlusionProgram->setUniformValue(_rightTopLocation, right, top);
_occlusionProgram->setUniformValue(_noiseScaleLocation, viewport[VIEWPORT_WIDTH_INDEX] / (float)ROTATION_WIDTH,
widgetSize.height() / (float)ROTATION_HEIGHT);
primaryFBO->height() / (float)ROTATION_HEIGHT);
_occlusionProgram->setUniformValue(_texCoordOffsetLocation, sMin, 0.0f);
_occlusionProgram->setUniformValue(_texCoordScaleLocation, sWidth, 1.0f);
@ -148,7 +148,7 @@ void AmbientOcclusionEffect::render() {
glBindTexture(GL_TEXTURE_2D, freeFBO->texture());
_blurProgram->bind();
_blurProgram->setUniformValue(_blurScaleLocation, 1.0f / widgetSize.width(), 1.0f / widgetSize.height());
_blurProgram->setUniformValue(_blurScaleLocation, 1.0f / primaryFBO->width(), 1.0f / primaryFBO->height());
renderFullscreenQuad(sMin, sMin + sWidth);

View file

@ -497,13 +497,9 @@ void NetworkTexture::setImage(const QImage& image, bool translucent, const QColo
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, image.width(), image.height(), 0,
GL_RGB, GL_UNSIGNED_BYTE, image.constBits());
}
if (_type == SPLAT_TEXTURE) {
// generate mipmaps for splat textures
glGenerateMipmap(GL_TEXTURE_2D);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
} else {
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
}
// generate mipmaps
glGenerateMipmap(GL_TEXTURE_2D);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glBindTexture(GL_TEXTURE_2D, 0);
}
@ -541,7 +537,8 @@ QSharedPointer<Texture> DilatableNetworkTexture::getDilatedTexture(float dilatio
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, dilatedImage.width(), dilatedImage.height(), 0,
GL_RGB, GL_UNSIGNED_BYTE, dilatedImage.constBits());
}
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glGenerateMipmap(GL_TEXTURE_2D);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
glBindTexture(GL_TEXTURE_2D, 0);
}

View file

@ -27,7 +27,11 @@ WindowScriptingInterface* WindowScriptingInterface::getInstance() {
return &sharedInstance;
}
WindowScriptingInterface::WindowScriptingInterface() {
WindowScriptingInterface::WindowScriptingInterface() :
_editDialog(NULL),
_nonBlockingFormActive(false),
_formResult(QDialog::Rejected)
{
}
QScriptValue WindowScriptingInterface::alert(const QString& message) {
@ -84,6 +88,26 @@ QScriptValue WindowScriptingInterface::s3Browse(const QString& nameFilter) {
return retVal;
}
void WindowScriptingInterface::nonBlockingForm(const QString& title, QScriptValue form) {
QMetaObject::invokeMethod(this, "showNonBlockingForm", Qt::BlockingQueuedConnection,
Q_ARG(const QString&, title), Q_ARG(QScriptValue, form));
}
void WindowScriptingInterface::reloadNonBlockingForm(QScriptValue newValues) {
QMetaObject::invokeMethod(this, "doReloadNonBlockingForm", Qt::BlockingQueuedConnection,
Q_ARG(QScriptValue, newValues));
}
QScriptValue WindowScriptingInterface::getNonBlockingFormResult(QScriptValue form) {
QScriptValue retVal;
QMetaObject::invokeMethod(this, "doGetNonBlockingFormResult", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(QScriptValue, retVal),
Q_ARG(QScriptValue, form));
return retVal;
}
/// Display an alert box
/// \param const QString& message message to display
/// \return QScriptValue::UndefinedValue
@ -126,12 +150,126 @@ void WindowScriptingInterface::chooseDirectory() {
button->setText(buttonText);
}
void WindowScriptingInterface::inlineButtonClicked() {
QPushButton* button = reinterpret_cast<QPushButton*>(sender());
QString name = button->property("name").toString();
emit inlineButtonClicked(name);
}
QString WindowScriptingInterface::jsRegExp2QtRegExp(QString string) {
// Converts string representation of RegExp from JavaScript format to Qt format.
return string.mid(1, string.length() - 2) // No enclosing slashes.
.replace("\\/", "/"); // No escaping of forward slash.
}
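// Hedged example of the conversion above, assuming a script passed the JavaScript literal /^(.*)\/$/ :
//   input  (as a QString):  "/^(.*)\/$/"
//   step 1 (strip slashes): "^(.*)\/$"
//   step 2 (un-escape "/"): "^(.*)/$"
QString qtPattern = jsRegExp2QtRegExp("/^(.*)\\/$/"); // C++ literal; the runtime string is /^(.*)\/$/
QRegExp displayAs(qtPattern);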
void WindowScriptingInterface::showNonBlockingForm(const QString& title, QScriptValue form) {
if (!form.isArray() || (form.isArray() && form.property("length").toInt32() <= 0)) {
return;
}
// what should we do if someone calls us while we still think we have a dialog showing???
if (_editDialog) {
qDebug() << "Show Non-Blocking Form called when form already active.";
return;
}
_form = form;
_editDialog = createForm(title, _form);
_nonBlockingFormActive = true;
connect(_editDialog, SIGNAL(accepted()), this, SLOT(nonBlockingFormAccepted()));
connect(_editDialog, SIGNAL(rejected()), this, SLOT(nonBlockingFormRejected()));
_editDialog->setModal(true);
_editDialog->show();
}
void WindowScriptingInterface::doReloadNonBlockingForm(QScriptValue newValues) {
if (!newValues.isArray() || (newValues.isArray() && newValues.property("length").toInt32() <= 0)) {
return;
}
// what should we do if someone calls us while we still think we have a dialog showing???
if (!_editDialog) {
qDebug() << "Reload Non-Blocking Form called when no form is active.";
return;
}
for (int i = 0; i < newValues.property("length").toInt32(); ++i) {
QScriptValue item = newValues.property(i);
if (item.property("oldIndex").isValid()) {
int oldIndex = item.property("oldIndex").toInt32();
QScriptValue oldItem = _form.property(oldIndex);
if (oldItem.isValid()) {
QLineEdit* originalEdit = _edits[oldItem.property("editIndex").toInt32()];
originalEdit->setText(item.property("value").toString());
}
}
}
}
bool WindowScriptingInterface::nonBlockingFormActive() {
return _nonBlockingFormActive;
}
QScriptValue WindowScriptingInterface::doGetNonBlockingFormResult(QScriptValue array) {
QScriptValue retVal;
if (_formResult == QDialog::Accepted) {
int e = -1;
int d = -1;
for (int i = 0; i < _form.property("length").toInt32(); ++i) {
QScriptValue item = _form.property(i);
QScriptValue value = item.property("value");
if (item.property("button").toString() != "") {
// Nothing to do
} else if (item.property("type").toString() == "inlineButton") {
// Nothing to do
} else if (item.property("type").toString() == "header") {
// Nothing to do
} else if (item.property("directory").toString() != "") {
d += 1;
value = _directories.at(d)->property("path").toString();
item.setProperty("directory", value);
_form.setProperty(i, item);
} else {
e += 1;
bool ok = true;
if (value.isNumber()) {
value = _edits.at(e)->text().toDouble(&ok);
} else if (value.isString()) {
value = _edits.at(e)->text();
} else if (value.isBool()) {
if (_edits.at(e)->text() == "true") {
value = true;
} else if (_edits.at(e)->text() == "false") {
value = false;
} else {
ok = false;
}
}
if (ok) {
item.setProperty("value", value);
_form.setProperty(i, item);
}
}
}
}
delete _editDialog;
_editDialog = NULL;
_form = QScriptValue();
_edits.clear();
_directories.clear();
array = _form;
return (_formResult == QDialog::Accepted);
}
/// Display a form layout with an edit box
/// \param const QString& title title to display
/// \param const QScriptValue form to display as an array of objects:
@ -140,143 +278,152 @@ QString WindowScriptingInterface::jsRegExp2QtRegExp(QString string) {
/// - button ("Cancel")
/// \return QScriptValue `true` if 'OK' was clicked, `false` otherwise
QScriptValue WindowScriptingInterface::showForm(const QString& title, QScriptValue form) {
if (form.isArray() && form.property("length").toInt32() <= 0) {
return false;
}
QDialog* editDialog = createForm(title, form);
if (form.isArray() && form.property("length").toInt32() > 0) {
QDialog* editDialog = new QDialog(Application::getInstance()->getWindow());
editDialog->setWindowTitle(title);
bool cancelButton = false;
QVBoxLayout* layout = new QVBoxLayout();
editDialog->setLayout(layout);
QScrollArea* area = new QScrollArea();
layout->addWidget(area);
area->setWidgetResizable(true);
QWidget* container = new QWidget();
QFormLayout* formLayout = new QFormLayout();
container->setLayout(formLayout);
container->sizePolicy().setHorizontalStretch(1);
formLayout->setRowWrapPolicy(QFormLayout::DontWrapRows);
formLayout->setFieldGrowthPolicy(QFormLayout::ExpandingFieldsGrow);
formLayout->setFormAlignment(Qt::AlignHCenter | Qt::AlignTop);
formLayout->setLabelAlignment(Qt::AlignLeft);
area->setWidget(container);
QVector<QLineEdit*> edits;
QVector<QComboBox*> combos;
QVector<QPushButton*> directories;
int result = editDialog->exec();
if (result == QDialog::Accepted) {
int e = -1;
int d = -1;
for (int i = 0; i < form.property("length").toInt32(); ++i) {
QScriptValue item = form.property(i);
QScriptValue value = item.property("value");
if (item.property("button").toString() != "") {
cancelButton = cancelButton || item.property("button").toString().toLower() == "cancel";
// Nothing to do
} else if (item.property("type").toString() == "inlineButton") {
// Nothing to do
} else if (item.property("type").toString() == "header") {
// Nothing to do
} else if (item.property("directory").toString() != "") {
QString path = item.property("directory").toString();
QString title = item.property("title").toString();
if (title == "") {
title = "Choose Directory";
}
QString displayAsString = item.property("displayAs").toString();
QRegExp displayAs = QRegExp(displayAsString != "" ? jsRegExp2QtRegExp(displayAsString) : "^(.*)$");
QString validateAsString = item.property("validateAs").toString();
QRegExp validateAs = QRegExp(validateAsString != "" ? jsRegExp2QtRegExp(validateAsString) : ".*");
QString errorMessage = item.property("errorMessage").toString();
if (errorMessage == "") {
errorMessage = "Invalid directory";
}
QPushButton* directory = new QPushButton(displayAs.cap(1));
directory->setProperty("title", title);
directory->setProperty("path", path);
directory->setProperty("displayAs", displayAs);
directory->setProperty("validateAs", validateAs);
directory->setProperty("errorMessage", errorMessage);
displayAs.indexIn(path);
directory->setText(displayAs.cap(1) != "" ? displayAs.cap(1) : ".");
directory->setMinimumWidth(200);
directories.push_back(directory);
formLayout->addRow(new QLabel(item.property("label").toString()), directory);
connect(directory, SIGNAL(clicked(bool)), SLOT(chooseDirectory()));
} else if (item.property("options").isArray()) {
QComboBox* combo = new QComboBox();
combo->setMinimumWidth(200);
QStringList options = item.property("options").toVariant().toStringList();
for (QStringList::const_iterator it = options.begin(); it != options.end(); it += 1) {
combo->addItem(*it);
}
combos.push_back(combo);
formLayout->addRow(new QLabel(item.property("label").toString()), combo);
d += 1;
value = _directories.at(d)->property("path").toString();
item.setProperty("directory", value);
form.setProperty(i, item);
} else {
QLineEdit* edit = new QLineEdit(item.property("value").toString());
edit->setMinimumWidth(200);
edits.push_back(edit);
formLayout->addRow(new QLabel(item.property("label").toString()), edit);
}
}
QDialogButtonBox* buttons = new QDialogButtonBox(
QDialogButtonBox::Ok
| (cancelButton ? QDialogButtonBox::Cancel : QDialogButtonBox::NoButton)
);
connect(buttons, SIGNAL(accepted()), editDialog, SLOT(accept()));
connect(buttons, SIGNAL(rejected()), editDialog, SLOT(reject()));
layout->addWidget(buttons);
int result = editDialog->exec();
if (result == QDialog::Accepted) {
int e = -1;
int d = -1;
int c = -1;
for (int i = 0; i < form.property("length").toInt32(); ++i) {
QScriptValue item = form.property(i);
QScriptValue value = item.property("value");
if (item.property("button").toString() != "") {
// Nothing to do
} else if (item.property("directory").toString() != "") {
d += 1;
value = directories.at(d)->property("path").toString();
item.setProperty("directory", value);
form.setProperty(i, item);
} else if (item.property("options").isArray()) {
c += 1;
item.setProperty("value", combos.at(c)->currentText());
form.setProperty(i, item);
} else {
e += 1;
bool ok = true;
if (value.isNumber()) {
value = edits.at(e)->text().toDouble(&ok);
} else if (value.isString()) {
value = edits.at(e)->text();
} else if (value.isBool()) {
if (edits.at(e)->text() == "true") {
value = true;
} else if (edits.at(e)->text() == "false") {
value = false;
} else {
ok = false;
}
}
if (ok) {
item.setProperty("value", value);
form.setProperty(i, item);
e += 1;
bool ok = true;
if (value.isNumber()) {
value = _edits.at(e)->text().toDouble(&ok);
} else if (value.isString()) {
value = _edits.at(e)->text();
} else if (value.isBool()) {
if (_edits.at(e)->text() == "true") {
value = true;
} else if (_edits.at(e)->text() == "false") {
value = false;
} else {
ok = false;
}
}
if (ok) {
item.setProperty("value", value);
form.setProperty(i, item);
}
}
}
delete editDialog;
return (result == QDialog::Accepted);
}
return false;
delete editDialog;
_edits.clear();
_directories.clear();
return (result == QDialog::Accepted);
}
QDialog* WindowScriptingInterface::createForm(const QString& title, QScriptValue form) {
QDialog* editDialog = new QDialog(Application::getInstance()->getWindow());
editDialog->setWindowTitle(title);
bool cancelButton = false;
QVBoxLayout* layout = new QVBoxLayout();
editDialog->setLayout(layout);
QScrollArea* area = new QScrollArea();
layout->addWidget(area);
area->setWidgetResizable(true);
QWidget* container = new QWidget();
QFormLayout* formLayout = new QFormLayout();
container->setLayout(formLayout);
container->sizePolicy().setHorizontalStretch(1);
formLayout->setRowWrapPolicy(QFormLayout::DontWrapRows);
formLayout->setFieldGrowthPolicy(QFormLayout::ExpandingFieldsGrow);
formLayout->setFormAlignment(Qt::AlignHCenter | Qt::AlignTop);
formLayout->setLabelAlignment(Qt::AlignLeft);
area->setWidget(container);
for (int i = 0; i < form.property("length").toInt32(); ++i) {
QScriptValue item = form.property(i);
if (item.property("button").toString() != "") {
cancelButton = cancelButton || item.property("button").toString().toLower() == "cancel";
} else if (item.property("directory").toString() != "") {
QString path = item.property("directory").toString();
QString title = item.property("title").toString();
if (title == "") {
title = "Choose Directory";
}
QString displayAsString = item.property("displayAs").toString();
QRegExp displayAs = QRegExp(displayAsString != "" ? jsRegExp2QtRegExp(displayAsString) : "^(.*)$");
QString validateAsString = item.property("validateAs").toString();
QRegExp validateAs = QRegExp(validateAsString != "" ? jsRegExp2QtRegExp(validateAsString) : ".*");
QString errorMessage = item.property("errorMessage").toString();
if (errorMessage == "") {
errorMessage = "Invalid directory";
}
QPushButton* directory = new QPushButton(displayAs.cap(1));
directory->setProperty("title", title);
directory->setProperty("path", path);
directory->setProperty("displayAs", displayAs);
directory->setProperty("validateAs", validateAs);
directory->setProperty("errorMessage", errorMessage);
displayAs.indexIn(path);
directory->setText(displayAs.cap(1) != "" ? displayAs.cap(1) : ".");
directory->setMinimumWidth(200);
_directories.push_back(directory);
formLayout->addRow(new QLabel(item.property("label").toString()), directory);
connect(directory, SIGNAL(clicked(bool)), SLOT(chooseDirectory()));
} else if (item.property("type").toString() == "inlineButton") {
QString buttonLabel = item.property("buttonLabel").toString();
QPushButton* inlineButton = new QPushButton(buttonLabel);
inlineButton->setMinimumWidth(200);
inlineButton->setProperty("name", item.property("name").toString());
formLayout->addRow(new QLabel(item.property("label").toString()), inlineButton);
connect(inlineButton, SIGNAL(clicked(bool)), SLOT(inlineButtonClicked()));
} else if (item.property("type").toString() == "header") {
formLayout->addRow(new QLabel(item.property("label").toString()));
} else {
QLineEdit* edit = new QLineEdit(item.property("value").toString());
edit->setMinimumWidth(200);
int editIndex = _edits.size();
_edits.push_back(edit);
item.setProperty("editIndex", editIndex);
formLayout->addRow(new QLabel(item.property("label").toString()), edit);
}
}
QDialogButtonBox* buttons = new QDialogButtonBox(
QDialogButtonBox::Ok
| (cancelButton ? QDialogButtonBox::Cancel : QDialogButtonBox::NoButton)
);
connect(buttons, SIGNAL(accepted()), editDialog, SLOT(accept()));
connect(buttons, SIGNAL(rejected()), editDialog, SLOT(reject()));
layout->addWidget(buttons);
return editDialog;
}
/// Display a prompt with a text box

View file

@ -34,6 +34,14 @@ public slots:
QScriptValue save(const QString& title = "", const QString& directory = "", const QString& nameFilter = "");
QScriptValue s3Browse(const QString& nameFilter = "");
void nonBlockingForm(const QString& title, QScriptValue array);
void reloadNonBlockingForm(QScriptValue array);
QScriptValue getNonBlockingFormResult(QScriptValue array);
signals:
void inlineButtonClicked(const QString& name);
void nonBlockingFormClosed();
private slots:
QScriptValue showAlert(const QString& message);
QScriptValue showConfirm(const QString& message);
@ -42,12 +50,29 @@ private slots:
QScriptValue showBrowse(const QString& title, const QString& directory, const QString& nameFilter,
QFileDialog::AcceptMode acceptMode = QFileDialog::AcceptOpen);
QScriptValue showS3Browse(const QString& nameFilter);
void showNonBlockingForm(const QString& title, QScriptValue array);
void doReloadNonBlockingForm(QScriptValue array);
bool nonBlockingFormActive();
QScriptValue doGetNonBlockingFormResult(QScriptValue array);
void chooseDirectory();
void inlineButtonClicked();
void nonBlockingFormAccepted() { _nonBlockingFormActive = false; _formResult = QDialog::Accepted; emit nonBlockingFormClosed(); }
void nonBlockingFormRejected() { _nonBlockingFormActive = false; _formResult = QDialog::Rejected; emit nonBlockingFormClosed(); }
private:
WindowScriptingInterface();
QString jsRegExp2QtRegExp(QString string);
QDialog* createForm(const QString& title, QScriptValue form);
QDialog* _editDialog;
QScriptValue _form;
bool _nonBlockingFormActive;
int _formResult;
QVector<QLineEdit*> _edits;
QVector<QPushButton*> _directories;
};
#endif // hifi_WindowScriptingInterface_h

View file

@ -414,7 +414,7 @@ void ApplicationOverlay::displayOverlayTextureOculus(Camera& whichCamera) {
renderTexturedHemisphere();
renderPointersOculus(whichCamera.getPosition());
renderPointersOculus(myAvatar->getHead()->getEyePosition());
glDepthMask(GL_TRUE);
glBindTexture(GL_TEXTURE_2D, 0);

View file

@ -50,7 +50,7 @@ BandwidthMeter::ChannelInfo BandwidthMeter::_CHANNELS[] = {
};
BandwidthMeter::BandwidthMeter() :
_textRenderer(INCONSOLATA_FONT_FAMILY, -1, QFont::Bold, false),
_textRenderer(TextRenderer::getInstance(INCONSOLATA_FONT_FAMILY, -1, QFont::Bold, false)),
_scaleMaxIndex(INITIAL_SCALE_MAXIMUM_INDEX) {
_channels = static_cast<ChannelInfo*>( malloc(sizeof(_CHANNELS)) );
@ -140,7 +140,7 @@ void BandwidthMeter::render(int screenWidth, int screenHeight) {
float totalMax = glm::max(totalIn, totalOut);
// Get font / caption metrics
QFontMetrics const& fontMetrics = _textRenderer.metrics();
QFontMetrics const& fontMetrics = _textRenderer->metrics();
int fontDescent = fontMetrics.descent();
int labelWidthIn = fontMetrics.width(CAPTION_IN);
int labelWidthOut = fontMetrics.width(CAPTION_OUT);
@ -163,9 +163,9 @@ void BandwidthMeter::render(int screenWidth, int screenHeight) {
// Render captions
setColorRGBA(COLOR_TEXT);
_textRenderer.draw(barWidth + SPACING_LEFT_CAPTION_UNIT, textYcenteredLine, CAPTION_UNIT);
_textRenderer.draw(-labelWidthIn - SPACING_RIGHT_CAPTION_IN_OUT, textYupperLine, CAPTION_IN);
_textRenderer.draw(-labelWidthOut - SPACING_RIGHT_CAPTION_IN_OUT, textYlowerLine, CAPTION_OUT);
_textRenderer->draw(barWidth + SPACING_LEFT_CAPTION_UNIT, textYcenteredLine, CAPTION_UNIT);
_textRenderer->draw(-labelWidthIn - SPACING_RIGHT_CAPTION_IN_OUT, textYupperLine, CAPTION_IN);
_textRenderer->draw(-labelWidthOut - SPACING_RIGHT_CAPTION_IN_OUT, textYlowerLine, CAPTION_OUT);
// Render vertical lines for the frame
setColorRGBA(COLOR_FRAME);
@ -229,11 +229,11 @@ void BandwidthMeter::render(int screenWidth, int screenHeight) {
char fmtBuf[8];
setColorRGBA(COLOR_TEXT);
sprintf(fmtBuf, "%0.1f", totalIn);
_textRenderer.draw(glm::max(xIn - fontMetrics.width(fmtBuf) - PADDING_HORIZ_VALUE,
_textRenderer->draw(glm::max(xIn - fontMetrics.width(fmtBuf) - PADDING_HORIZ_VALUE,
PADDING_HORIZ_VALUE),
textYupperLine, fmtBuf);
sprintf(fmtBuf, "%0.1f", totalOut);
_textRenderer.draw(glm::max(xOut - fontMetrics.width(fmtBuf) - PADDING_HORIZ_VALUE,
_textRenderer->draw(glm::max(xOut - fontMetrics.width(fmtBuf) - PADDING_HORIZ_VALUE,
PADDING_HORIZ_VALUE),
textYlowerLine, fmtBuf);

View file

@ -78,7 +78,7 @@ private:
static ChannelInfo _CHANNELS[];
TextRenderer _textRenderer;
TextRenderer* _textRenderer;
ChannelInfo* _channels;
Stream _streams[N_STREAMS];
int _scaleMaxIndex;

View file

@ -121,6 +121,8 @@ void PreferencesDialog::loadPreferences() {
ui.faceshiftEyeDeflectionSider->setValue(menuInstance->getFaceshiftEyeDeflection() *
ui.faceshiftEyeDeflectionSider->maximum());
ui.faceshiftHostnameEdit->setText(menuInstance->getFaceshiftHostname());
const InboundAudioStream::Settings& streamSettings = menuInstance->getReceivedAudioStreamSettings();
ui.dynamicJitterBuffersCheckBox->setChecked(streamSettings._dynamicJitterBuffers);
@ -165,19 +167,29 @@ void PreferencesDialog::savePreferences() {
}
QUrl faceModelURL(ui.faceURLEdit->text());
if (faceModelURL.toString() != _faceURLString) {
// change the faceModelURL in the profile, it will also update this user's BlendFace
myAvatar->setFaceModelURL(faceModelURL);
UserActivityLogger::getInstance().changedModel("head", faceModelURL.toString());
shouldDispatchIdentityPacket = true;
QString faceModelURLString = faceModelURL.toString();
if (faceModelURLString != _faceURLString) {
if (faceModelURLString.isEmpty() || faceModelURLString.toLower().endsWith(".fst")) {
// change the faceModelURL in the profile, it will also update this user's BlendFace
myAvatar->setFaceModelURL(faceModelURL);
UserActivityLogger::getInstance().changedModel("head", faceModelURLString);
shouldDispatchIdentityPacket = true;
} else {
qDebug() << "ERROR: Head model not FST or blank - " << faceModelURLString;
}
}
QUrl skeletonModelURL(ui.skeletonURLEdit->text());
if (skeletonModelURL.toString() != _skeletonURLString) {
// change the skeletonModelURL in the profile, it will also update this user's Body
myAvatar->setSkeletonModelURL(skeletonModelURL);
UserActivityLogger::getInstance().changedModel("skeleton", skeletonModelURL.toString());
shouldDispatchIdentityPacket = true;
QString skeletonModelURLString = skeletonModelURL.toString();
if (skeletonModelURLString != _skeletonURLString) {
if (skeletonModelURLString.isEmpty() || skeletonModelURLString.toLower().endsWith(".fst")) {
// change the skeletonModelURL in the profile, it will also update this user's Body
myAvatar->setSkeletonModelURL(skeletonModelURL);
UserActivityLogger::getInstance().changedModel("skeleton", skeletonModelURLString);
shouldDispatchIdentityPacket = true;
} else {
qDebug() << "ERROR: Skeleton model not FST or blank - " << skeletonModelURLString;
}
}
if (shouldDispatchIdentityPacket) {
@ -211,6 +223,9 @@ void PreferencesDialog::savePreferences() {
Menu::getInstance()->setFaceshiftEyeDeflection(ui.faceshiftEyeDeflectionSider->value() /
(float)ui.faceshiftEyeDeflectionSider->maximum());
Menu::getInstance()->setFaceshiftHostname(ui.faceshiftHostnameEdit->text());
Menu::getInstance()->setMaxVoxelPacketsPerSecond(ui.maxVoxelsPPSSpin->value());
Menu::getInstance()->setOculusUIAngularSize(ui.oculusUIAngularSizeSpin->value());

View file

@ -24,22 +24,23 @@
// the width/height of the cached glyph textures
const int IMAGE_SIZE = 256;
Glyph::Glyph(int textureID, const QPoint& location, const QRect& bounds, int width) :
_textureID(textureID), _location(location), _bounds(bounds), _width(width) {
static uint qHash(const TextRenderer::Properties& key, uint seed = 0) {
// can be switched to qHash(key.font, seed) when we require Qt 5.3+
return qHash(key.font.family(), qHash(key.font.pointSize(), seed));
}
TextRenderer::TextRenderer(const char* family, int pointSize, int weight, bool italic,
EffectType effectType, int effectThickness, QColor color) :
_font(family, pointSize, weight, italic),
_metrics(_font),
_effectType(effectType),
_effectThickness(effectThickness),
_x(IMAGE_SIZE),
_y(IMAGE_SIZE),
_rowHeight(0),
_color(color) {
_font.setKerning(false);
static bool operator==(const TextRenderer::Properties& p1, const TextRenderer::Properties& p2) {
return p1.font == p2.font && p1.effect == p2.effect && p1.effectThickness == p2.effectThickness && p1.color == p2.color;
}
TextRenderer* TextRenderer::getInstance(const char* family, int pointSize, int weight, bool italic,
EffectType effect, int effectThickness, const QColor& color) {
Properties properties = { QFont(family, pointSize, weight, italic), effect, effectThickness, color };
TextRenderer*& instance = _instances[properties];
if (!instance) {
instance = new TextRenderer(properties);
}
return instance;
}
TextRenderer::~TextRenderer() {
@ -122,6 +123,19 @@ int TextRenderer::computeWidth(const char* str)
return width;
}
TextRenderer::TextRenderer(const Properties& properties) :
_font(properties.font),
_metrics(_font),
_effectType(properties.effect),
_effectThickness(properties.effectThickness),
_x(IMAGE_SIZE),
_y(IMAGE_SIZE),
_rowHeight(0),
_color(properties.color) {
_font.setKerning(false);
}
const Glyph& TextRenderer::getGlyph(char c) {
Glyph& glyph = _glyphs[c];
if (glyph.isValid()) {
@ -213,3 +227,10 @@ const Glyph& TextRenderer::getGlyph(char c) {
glBindTexture(GL_TEXTURE_2D, 0);
return glyph;
}
QHash<TextRenderer::Properties, TextRenderer*> TextRenderer::_instances;
Glyph::Glyph(int textureID, const QPoint& location, const QRect& bounds, int width) :
_textureID(textureID), _location(location), _bounds(bounds), _width(width) {
}
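// Hedged call-site sketch of the shared-instance API introduced above: identical Properties
// (font, effect, thickness, color) hash to the same cached TextRenderer, so callers no longer
// construct a throw-away renderer every frame. x and y are assumed screen coordinates.
TextRenderer* renderer = TextRenderer::getInstance(SANS_FONT_FAMILY, 18, QFont::Bold,
    false, TextRenderer::OUTLINE_EFFECT, 2);
TextRenderer* again = TextRenderer::getInstance(SANS_FONT_FAMILY, 18, QFont::Bold,
    false, TextRenderer::OUTLINE_EFFECT, 2);
// renderer == again: both lookups resolve to the same entry in _instances.
renderer->draw(x, y, "hello");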

View file

@ -33,7 +33,6 @@ const char SOLID_BLOCK_CHAR = 127;
// the Inconsolata font family
#define INCONSOLATA_FONT_FAMILY "Inconsolata"
class Glyph;
class TextRenderer {
@ -41,9 +40,17 @@ public:
enum EffectType { NO_EFFECT, SHADOW_EFFECT, OUTLINE_EFFECT };
TextRenderer(const char* family, int pointSize = -1, int weight = -1, bool italic = false,
EffectType effect = NO_EFFECT, int effectThickness = 1,
QColor color = QColor(255, 255, 255));
class Properties {
public:
QFont font;
EffectType effect;
int effectThickness;
QColor color;
};
static TextRenderer* getInstance(const char* family, int pointSize = -1, int weight = -1, bool italic = false,
EffectType effect = NO_EFFECT, int effectThickness = 1, const QColor& color = QColor(255, 255, 255));
~TextRenderer();
const QFontMetrics& metrics() const { return _metrics; }
@ -59,7 +66,9 @@ public:
private:
const Glyph& getGlyph (char c);
TextRenderer(const Properties& properties);
const Glyph& getGlyph(char c);
// the font to render
QFont _font;
@ -90,6 +99,8 @@ private:
// text color
QColor _color;
static QHash<Properties, TextRenderer*> _instances;
};
class Glyph {

View file

@ -58,7 +58,7 @@ void ModelOverlay::render() {
float depth = unRotatedExtents.z;
Extents rotatedExtents = _model.getUnscaledMeshExtents();
calculateRotatedExtents(rotatedExtents, _rotation);
rotatedExtents.rotate(_rotation);
glm::vec3 rotatedSize = rotatedExtents.maximum - rotatedExtents.minimum;

View file

@ -45,22 +45,21 @@ void TextOverlay::render() {
//TextRenderer(const char* family, int pointSize = -1, int weight = -1, bool italic = false,
// EffectType effect = NO_EFFECT, int effectThickness = 1);
TextRenderer textRenderer(SANS_FONT_FAMILY, _fontSize, 50, false, TextRenderer::NO_EFFECT, 1,
QColor(_color.red, _color.green, _color.blue));
TextRenderer* textRenderer = TextRenderer::getInstance(SANS_FONT_FAMILY, _fontSize, 50);
const int leftAdjust = -1; // required to make text render relative to left edge of bounds
const int topAdjust = -2; // required to make text render relative to top edge of bounds
int x = _bounds.left() + _leftMargin + leftAdjust;
int y = _bounds.top() + _topMargin + topAdjust;
glColor3f(1.0f, 1.0f, 1.0f);
glColor3f(_color.red / MAX_COLOR, _color.green / MAX_COLOR, _color.blue / MAX_COLOR);
QStringList lines = _text.split("\n");
int lineOffset = 0;
foreach(QString thisLine, lines) {
if (lineOffset == 0) {
lineOffset = textRenderer.calculateHeight(qPrintable(thisLine));
lineOffset = textRenderer->calculateHeight(qPrintable(thisLine));
}
lineOffset += textRenderer.draw(x, y + lineOffset, qPrintable(thisLine));
lineOffset += textRenderer->draw(x, y + lineOffset, qPrintable(thisLine));
const int lineGap = 2;
lineOffset += lineGap;

View file

@ -1726,6 +1726,70 @@
</item>
</layout>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_999">
<property name="spacing">
<number>0</number>
</property>
<property name="topMargin">
<number>7</number>
</property>
<property name="rightMargin">
<number>0</number>
</property>
<property name="bottomMargin">
<number>7</number>
</property>
<item>
<widget class="QLabel" name="label_999">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="text">
<string>Faceshift hostname</string>
</property>
<property name="indent">
<number>0</number>
</property>
<property name="buddy">
<cstring>faceshiftHostnameEdit</cstring>
</property>
</widget>
</item>
<item>
<spacer name="horizontalSpacer_999">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
</spacer>
</item>
<item>
<widget class="QLineEdit" name="faceshiftHostnameEdit">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="layoutDirection">
<enum>Qt::LeftToRight</enum>
</property>
<property name="styleSheet">
<string notr="true"/>
</property>
<property name="placeholderText">
<string>localhost</string>
</property>
</widget>
</item>
</layout>
</item>
<item>
<widget class="QLabel" name="voxelsTitleLabel">
<property name="sizePolicy">

View file

@ -110,12 +110,44 @@ public:
if ( !_frameBuffer || !frames) {
return;
}
assert(channelCount <= _channelCountMax);
assert(frameCount <= _frameCountMax);
_frameCount = frameCount; // we allow copying fewer frames than we've allocated
_channelCount = channelCount; // we allow copying fewer channels than we've allocated
if (channelCount <=_channelCountMax && frameCount <=_frameCountMax) {
// We always allow copying fewer frames than we have allocated
_frameCount = frameCount;
_channelCount = channelCount;
}
else {
//
// However we do not attempt to copy more frames than we've allocated ;-) This is a framing error caused by either
// a/ the platform audio driver not correctly queuing and regularly smoothing device IO capture frames -or-
// b/ our IO processing thread (currently running on a Qt GUI thread) has been delayed/scheduled too late.
//
// The fix is not to make the problem worse by allocating additional frames on this thread; rather, it is to handle
// dynamic re-sizing off the IO processing thread. While a/ is not in our control, we will address the off-thread
// re-sizing, as well as b/, in later releases.
//
// For now, we log this condition, and do our best to recover by copying as many frames as we have allocated.
// Unfortunately, this will result (temporarily), in an audible discontinuity.
//
// If you repeatedly receive this error, contact craig@highfidelity.io and send me what audio device you are using,
// what audio-stack you are using (pulse/alsa, core audio, ...), what OS, and what the reported frame/channel
// counts are. In addition, any information about what you were doing at the time of the discontinuity would be
// useful (e.g., accessing any client features/menus).
//
qDebug() << "Audio framing error: _channelCount="
<< _channelCount
<< "channelCountMax="
<< _channelCountMax
<< "_frameCount="
<< _frameCount
<< "frameCountMax="
<< _frameCountMax;
_channelCount = std::min(_channelCount,_channelCountMax);
_frameCount = std::min(_frameCount,_frameCountMax);
}
if (copyOut) {
S* dst = frames;

View file

@ -55,40 +55,78 @@ public:
}
float32_t** samples = frameBuffer.getFrameData();
for (uint16_t j = 0; j < frameBuffer.getChannelCount(); ++j) {
for (uint16_t i = 0; i < frameBuffer.getFrameCount(); i += 32) {
samples[j][i + 0] *= _gain;
samples[j][i + 1] *= _gain;
samples[j][i + 2] *= _gain;
samples[j][i + 3] *= _gain;
samples[j][i + 4] *= _gain;
samples[j][i + 5] *= _gain;
samples[j][i + 6] *= _gain;
samples[j][i + 7] *= _gain;
samples[j][i + 8] *= _gain;
samples[j][i + 9] *= _gain;
samples[j][i + 10] *= _gain;
samples[j][i + 11] *= _gain;
samples[j][i + 12] *= _gain;
samples[j][i + 13] *= _gain;
samples[j][i + 14] *= _gain;
samples[j][i + 15] *= _gain;
samples[j][i + 16] *= _gain;
samples[j][i + 17] *= _gain;
samples[j][i + 18] *= _gain;
samples[j][i + 19] *= _gain;
samples[j][i + 20] *= _gain;
samples[j][i + 21] *= _gain;
samples[j][i + 22] *= _gain;
samples[j][i + 23] *= _gain;
samples[j][i + 24] *= _gain;
samples[j][i + 25] *= _gain;
samples[j][i + 26] *= _gain;
samples[j][i + 27] *= _gain;
samples[j][i + 28] *= _gain;
samples[j][i + 29] *= _gain;
samples[j][i + 30] *= _gain;
samples[j][i + 31] *= _gain;
bool frameAlignment16 = (frameBuffer.getFrameCount() & 0x0F) == 0;
if (frameAlignment16) {
if (frameBuffer.getChannelCount() == 1) {
for (uint16_t i = 0; i < frameBuffer.getFrameCount(); i += 16) {
samples[0][i + 0] *= _gain;
samples[0][i + 1] *= _gain;
samples[0][i + 2] *= _gain;
samples[0][i + 3] *= _gain;
samples[0][i + 4] *= _gain;
samples[0][i + 5] *= _gain;
samples[0][i + 6] *= _gain;
samples[0][i + 7] *= _gain;
samples[0][i + 8] *= _gain;
samples[0][i + 9] *= _gain;
samples[0][i + 10] *= _gain;
samples[0][i + 11] *= _gain;
samples[0][i + 12] *= _gain;
samples[0][i + 13] *= _gain;
samples[0][i + 14] *= _gain;
samples[0][i + 15] *= _gain;
}
}
else if (frameBuffer.getChannelCount() == 2) {
for (uint16_t i = 0; i < frameBuffer.getFrameCount(); i += 16) {
samples[0][i + 0] *= _gain;
samples[0][i + 1] *= _gain;
samples[0][i + 2] *= _gain;
samples[0][i + 3] *= _gain;
samples[0][i + 4] *= _gain;
samples[0][i + 5] *= _gain;
samples[0][i + 6] *= _gain;
samples[0][i + 7] *= _gain;
samples[0][i + 8] *= _gain;
samples[0][i + 9] *= _gain;
samples[0][i + 10] *= _gain;
samples[0][i + 11] *= _gain;
samples[0][i + 12] *= _gain;
samples[0][i + 13] *= _gain;
samples[0][i + 14] *= _gain;
samples[0][i + 15] *= _gain;
samples[1][i + 0] *= _gain;
samples[1][i + 1] *= _gain;
samples[1][i + 2] *= _gain;
samples[1][i + 3] *= _gain;
samples[1][i + 4] *= _gain;
samples[1][i + 5] *= _gain;
samples[1][i + 6] *= _gain;
samples[1][i + 7] *= _gain;
samples[1][i + 8] *= _gain;
samples[1][i + 9] *= _gain;
samples[1][i + 10] *= _gain;
samples[1][i + 11] *= _gain;
samples[1][i + 12] *= _gain;
samples[1][i + 13] *= _gain;
samples[1][i + 14] *= _gain;
samples[1][i + 15] *= _gain;
}
}
else {
assert(0 && "unsupported channel format"); // a bare string literal is always truthy and would never fire
}
}
else {
for (uint16_t j = 0; j < frameBuffer.getChannelCount(); ++j) {
for (uint16_t i = 0; i < frameBuffer.getFrameCount(); i += 1) {
samples[j][i] *= _gain;
}
}
}
}

View file

@ -0,0 +1,23 @@
//
// AudioPan.cpp
// hifi
//
// Created by Craig Hansen-Sturm on 8/10/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <assert.h>
#include <math.h>
#include <SharedUtil.h>
#include "AudioRingBuffer.h"
#include "AudioFormat.h"
#include "AudioBuffer.h"
#include "AudioPan.h"
float32_t AudioPan::ONE_MINUS_EPSILON = 1.0f - EPSILON;
float32_t AudioPan::ZERO_PLUS_EPSILON = 0.0f + EPSILON;
float32_t AudioPan::ONE_HALF_MINUS_EPSILON = 0.5f - EPSILON;
float32_t AudioPan::ONE_HALF_PLUS_EPSILON = 0.5f + EPSILON;

View file

@ -0,0 +1,141 @@
//
// AudioPan.h
// hifi
//
// Created by Craig Hansen-Sturm on 9/1/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AudioPan_h
#define hifi_AudioPan_h
class AudioPan
{
float32_t _pan;
float32_t _gainLeft;
float32_t _gainRight;
static float32_t ONE_MINUS_EPSILON;
static float32_t ZERO_PLUS_EPSILON;
static float32_t ONE_HALF_MINUS_EPSILON;
static float32_t ONE_HALF_PLUS_EPSILON;
void updateCoefficients() {
// implement constant power sin^2 + cos^2 = 1 panning law
if (_pan >= ONE_MINUS_EPSILON) { // full right
_gainLeft = 0.0f;
_gainRight = 1.0f;
}
else if (_pan <= ZERO_PLUS_EPSILON) { // full left
_gainLeft = 1.0f;
_gainRight = 0.0f;
}
else if ((_pan >= ONE_HALF_MINUS_EPSILON) && (_pan <= ONE_HALF_PLUS_EPSILON)) { // center
_gainLeft = 1.0f / SQUARE_ROOT_OF_2;
_gainRight = 1.0f / SQUARE_ROOT_OF_2;
}
else { // intermediate cases: quarter-circle sweep, theta = pan * PI/2
_gainLeft = cosf( TWO_PI * 0.25f * _pan );
_gainRight = sinf( TWO_PI * 0.25f * _pan );
}
}
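// Worked check of the constant-power law under the quarter-circle mapping used above (a sketch,
// assuming theta = _pan * PI/2, i.e. TWO_PI * 0.25f * _pan):
//   _pan = 0.0 -> gainLeft = cos(0)    = 1.0,    gainRight = sin(0)    = 0.0
//   _pan = 0.5 -> gainLeft = cos(PI/4) = 0.7071, gainRight = sin(PI/4) = 0.7071 (the 1/sqrt(2) centre case)
//   _pan = 1.0 -> gainLeft = cos(PI/2) = 0.0,    gainRight = sin(PI/2) = 1.0
// and gainLeft^2 + gainRight^2 == 1 for every value of _pan.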
public:
AudioPan() {
initialize();
}
~AudioPan() {
finalize();
}
void initialize() {
setParameters(0.5f);
}
void finalize() {
}
void reset() {
initialize();
}
void setParameters(const float32_t pan) {
// pan ranges between 0.0 and 1.0f inclusive. 0.5f is midpoint between full left and full right
_pan = std::min(std::max(pan, 0.0f), 1.0f);
updateCoefficients();
}
void getParameters(float32_t& pan) {
pan = _pan;
}
void render(AudioBufferFloat32& frameBuffer) {
if (frameBuffer.getChannelCount() != 2) {
return;
}
float32_t** samples = frameBuffer.getFrameData();
bool frameAlignment16 = (frameBuffer.getFrameCount() & 0x0F) == 0;
if (frameAlignment16) {
if (frameBuffer.getChannelCount() == 2) {
for (uint16_t i = 0; i < frameBuffer.getFrameCount(); i += 16) {
samples[0][i + 0] *= _gainLeft;
samples[0][i + 1] *= _gainLeft;
samples[0][i + 2] *= _gainLeft;
samples[0][i + 3] *= _gainLeft;
samples[0][i + 4] *= _gainLeft;
samples[0][i + 5] *= _gainLeft;
samples[0][i + 6] *= _gainLeft;
samples[0][i + 7] *= _gainLeft;
samples[0][i + 8] *= _gainLeft;
samples[0][i + 9] *= _gainLeft;
samples[0][i + 10] *= _gainLeft;
samples[0][i + 11] *= _gainLeft;
samples[0][i + 12] *= _gainLeft;
samples[0][i + 13] *= _gainLeft;
samples[0][i + 14] *= _gainLeft;
samples[0][i + 15] *= _gainLeft;
samples[1][i + 0] *= _gainRight;
samples[1][i + 1] *= _gainRight;
samples[1][i + 2] *= _gainRight;
samples[1][i + 3] *= _gainRight;
samples[1][i + 4] *= _gainRight;
samples[1][i + 5] *= _gainRight;
samples[1][i + 6] *= _gainRight;
samples[1][i + 7] *= _gainRight;
samples[1][i + 8] *= _gainRight;
samples[1][i + 9] *= _gainRight;
samples[1][i + 10] *= _gainRight;
samples[1][i + 11] *= _gainRight;
samples[1][i + 12] *= _gainRight;
samples[1][i + 13] *= _gainRight;
samples[1][i + 14] *= _gainRight;
samples[1][i + 15] *= _gainRight;
}
}
else {
assert(0 && "unsupported channel format"); // a bare string literal is always truthy and would never fire
}
}
else {
for (uint16_t i = 0; i < frameBuffer.getFrameCount(); i += 1) {
samples[0][i] *= _gainLeft;
samples[1][i] *= _gainRight;
}
}
}
};
#endif // AudioPan_h
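// Hedged usage sketch; stereoBuffer is an assumed AudioBufferFloat32 that already holds two
// channels of frame data.
AudioPan pan;
pan.setParameters(0.25f);   // 0.0 = full left, 1.0 = full right, 0.5 = centre
pan.render(stereoBuffer);   // scales channel 0 by _gainLeft and channel 1 by _gainRight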

View file

@ -509,7 +509,7 @@ bool EntityTreeElement::findDetailedRayIntersection(const glm::vec3& origin, con
Extents rotatedExtents = extents;
calculateRotatedExtents(rotatedExtents, entity->getRotation());
rotatedExtents.rotate(entity->getRotation());
rotatedExtents.minimum += entity->getPosition();
rotatedExtents.maximum += entity->getPosition();

View file

@ -33,27 +33,6 @@
using namespace std;
void Extents::reset() {
minimum = glm::vec3(FLT_MAX);
maximum = glm::vec3(-FLT_MAX);
}
bool Extents::containsPoint(const glm::vec3& point) const {
return (point.x >= minimum.x && point.x <= maximum.x
&& point.y >= minimum.y && point.y <= maximum.y
&& point.z >= minimum.z && point.z <= maximum.z);
}
void Extents::addExtents(const Extents& extents) {
minimum = glm::min(minimum, extents.minimum);
maximum = glm::max(maximum, extents.maximum);
}
void Extents::addPoint(const glm::vec3& point) {
minimum = glm::min(minimum, point);
maximum = glm::max(maximum, point);
}
bool FBXMesh::hasSpecularTexture() const {
foreach (const FBXMeshPart& part, parts) {
if (!part.specularTexture.filename.isEmpty()) {
@ -80,6 +59,19 @@ bool FBXGeometry::hasBlendedMeshes() const {
return false;
}
Extents FBXGeometry::getUnscaledMeshExtents() const {
const Extents& extents = meshExtents;
// even though our caller asked for "unscaled" we need to include any fst scaling, translation, and rotation, which
// is captured in the offset matrix
glm::vec3 minimum = glm::vec3(offset * glm::vec4(extents.minimum, 1.0f));
glm::vec3 maximum = glm::vec3(offset * glm::vec4(extents.maximum, 1.0f));
Extents scaledExtents = { minimum, maximum };
return scaledExtents;
}
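// Hedged illustration of the comment above: because the offset matrix folds in any .fst scale,
// translation and rotation, the "unscaled" extents already reflect the mapping file, not just
// the raw mesh. geometry is an assumed FBXGeometry loaded through the usual readFBX() path.
Extents meshExtents = geometry.getUnscaledMeshExtents();
glm::vec3 naturalDimensions = meshExtents.maximum - meshExtents.minimum;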
static int fbxGeometryMetaTypeId = qRegisterMetaType<FBXGeometry>();
static int fbxAnimationFrameMetaTypeId = qRegisterMetaType<FBXAnimationFrame>();
static int fbxAnimationFrameVectorMetaTypeId = qRegisterMetaType<QVector<FBXAnimationFrame> >();
@ -2119,40 +2111,3 @@ FBXGeometry readSVO(const QByteArray& model) {
return geometry;
}
void calculateRotatedExtents(Extents& extents, const glm::quat& rotation) {
glm::vec3 bottomLeftNear(extents.minimum.x, extents.minimum.y, extents.minimum.z);
glm::vec3 bottomRightNear(extents.maximum.x, extents.minimum.y, extents.minimum.z);
glm::vec3 bottomLeftFar(extents.minimum.x, extents.minimum.y, extents.maximum.z);
glm::vec3 bottomRightFar(extents.maximum.x, extents.minimum.y, extents.maximum.z);
glm::vec3 topLeftNear(extents.minimum.x, extents.maximum.y, extents.minimum.z);
glm::vec3 topRightNear(extents.maximum.x, extents.maximum.y, extents.minimum.z);
glm::vec3 topLeftFar(extents.minimum.x, extents.maximum.y, extents.maximum.z);
glm::vec3 topRightFar(extents.maximum.x, extents.maximum.y, extents.maximum.z);
glm::vec3 bottomLeftNearRotated = rotation * bottomLeftNear;
glm::vec3 bottomRightNearRotated = rotation * bottomRightNear;
glm::vec3 bottomLeftFarRotated = rotation * bottomLeftFar;
glm::vec3 bottomRightFarRotated = rotation * bottomRightFar;
glm::vec3 topLeftNearRotated = rotation * topLeftNear;
glm::vec3 topRightNearRotated = rotation * topRightNear;
glm::vec3 topLeftFarRotated = rotation * topLeftFar;
glm::vec3 topRightFarRotated = rotation * topRightFar;
extents.minimum = glm::min(bottomLeftNearRotated,
glm::min(bottomRightNearRotated,
glm::min(bottomLeftFarRotated,
glm::min(bottomRightFarRotated,
glm::min(topLeftNearRotated,
glm::min(topRightNearRotated,
glm::min(topLeftFarRotated,topRightFarRotated)))))));
extents.maximum = glm::max(bottomLeftNearRotated,
glm::max(bottomRightNearRotated,
glm::max(bottomLeftFarRotated,
glm::max(bottomRightFarRotated,
glm::max(topLeftNearRotated,
glm::max(topRightNearRotated,
glm::max(topLeftFarRotated,topRightFarRotated)))))));
}

View file

@ -18,8 +18,10 @@
#include <QVariant>
#include <QVector>
#include <Extents.h>
#include <Shape.h>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
@ -35,31 +37,6 @@ extern const int NUM_FACESHIFT_BLENDSHAPES;
/// The names of the joints in the Maya HumanIK rig, terminated with an empty string.
extern const char* HUMANIK_JOINTS[];
class Extents {
public:
/// set minimum and maximum to FLT_MAX and -FLT_MAX respectively
void reset();
/// \param extents another instance of extents
/// expand current limits to contain other extents
void addExtents(const Extents& extents);
/// \param point new point to compare against existing limits
/// compare point to current limits and expand them if necessary to contain point
void addPoint(const glm::vec3& point);
/// \param point
/// \return true if point is within current limits
bool containsPoint(const glm::vec3& point) const;
/// \return whether or not the extents are empty
bool isEmpty() const { return minimum == maximum; }
bool isValid() const { return !((minimum == glm::vec3(FLT_MAX)) && (maximum == glm::vec3(-FLT_MAX))); }
glm::vec3 minimum;
glm::vec3 maximum;
};
/// A node within an FBX document.
class FBXNode {
public:
@ -234,6 +211,9 @@ public:
QStringList getJointNames() const;
bool hasBlendedMeshes() const;
/// Returns the unscaled extents of the model's mesh
Extents getUnscaledMeshExtents() const;
};
Q_DECLARE_METATYPE(FBXGeometry)
@ -251,6 +231,4 @@ FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping);
/// Reads SVO geometry from the supplied model data.
FBXGeometry readSVO(const QByteArray& model);
void calculateRotatedExtents(Extents& extents, const glm::quat& rotation);
#endif // hifi_FBXReader_h

View file

@ -215,7 +215,11 @@ void Resource::allReferencesCleared() {
_cache->addUnusedResource(self);
} else {
#ifndef WIN32
// Note to Andrzej: this causes a consistent crash on Windows/VS2013
// patching here as a very temporary workaround. --craig
delete this;
#endif // !WIN32
}
}

View file

@ -238,19 +238,25 @@ int Octree::readElementData(OctreeElement* destinationElement, const unsigned ch
int bytesLeftToRead = bytesAvailable;
int bytesRead = 0;
bool wantDebug = false;
// give this destination element the child mask from the packet
const unsigned char ALL_CHILDREN_ASSUMED_TO_EXIST = 0xFF;
if ((size_t)bytesLeftToRead < sizeof(unsigned char)) {
qDebug() << "UNEXPECTED: readElementData() only had " << bytesLeftToRead << " bytes. Not enough for meaningful data.";
if (wantDebug) {
qDebug() << "UNEXPECTED: readElementData() only had " << bytesLeftToRead << " bytes. "
"Not enough for meaningful data.";
}
return bytesAvailable; // assume we read the entire buffer...
}
if (destinationElement->getScale() < SCALE_AT_DANGEROUSLY_DEEP_RECURSION) {
qDebug() << "UNEXPECTED: readElementData() destination element is unreasonably small ["
<< destinationElement->getScale() * (float)TREE_SCALE << " meters] "
<< " Discarding " << bytesAvailable << " remaining bytes.";
if (wantDebug) {
qDebug() << "UNEXPECTED: readElementData() destination element is unreasonably small ["
<< destinationElement->getScale() * (float)TREE_SCALE << " meters] "
<< " Discarding " << bytesAvailable << " remaining bytes.";
}
return bytesAvailable; // assume we read the entire buffer...
}
@ -299,8 +305,10 @@ int Octree::readElementData(OctreeElement* destinationElement, const unsigned ch
: sizeof(childInBufferMask);
if (bytesLeftToRead < bytesForMasks) {
qDebug() << "UNEXPECTED: readElementDataFromBuffer() only had " << bytesLeftToRead << " bytes before masks. "
"Not enough for meaningful data.";
if (wantDebug) {
qDebug() << "UNEXPECTED: readElementDataFromBuffer() only had " << bytesLeftToRead << " bytes before masks. "
"Not enough for meaningful data.";
}
return bytesAvailable; // assume we read the entire buffer...
}
@ -360,6 +368,7 @@ void Octree::readBitstreamToTree(const unsigned char * bitstream, unsigned long
ReadBitstreamToTreeParams& args) {
int bytesRead = 0;
const unsigned char* bitstreamAt = bitstream;
bool wantDebug = false;
// If destination element is not included, set it to root
if (!args.destinationElement) {
@ -376,7 +385,10 @@ void Octree::readBitstreamToTree(const unsigned char * bitstream, unsigned long
int numberOfThreeBitSectionsInStream = numberOfThreeBitSectionsInCode(bitstreamAt, bufferSizeBytes);
if (numberOfThreeBitSectionsInStream == OVERFLOWED_OCTCODE_BUFFER) {
qDebug() << "UNEXPECTED: parsing of the octal code would overflow the buffer. This buffer is corrupt. Returning.";
if (wantDebug) {
qDebug() << "UNEXPECTED: parsing of the octal code would overflow the buffer. "
"This buffer is corrupt. Returning.";
}
return;
}

View file

@ -11,6 +11,7 @@
#include "AABox.h"
#include "AACube.h"
#include "Extents.h"
#include "GeometryUtil.h"
#include "SharedUtil.h"
@ -19,6 +20,11 @@ AABox::AABox(const AACube& other) :
_corner(other.getCorner()), _scale(other.getScale(), other.getScale(), other.getScale()) {
}
AABox::AABox(const Extents& other) :
_corner(other.minimum),
_scale(other.maximum - other.minimum) {
}
AABox::AABox(const glm::vec3& corner, float size) :
_corner(corner), _scale(size, size, size) {
};

View file

@ -23,11 +23,13 @@
#include "StreamUtils.h"
class AACube;
class Extents;
class AABox {
public:
AABox(const AACube& other);
AABox(const Extents& other);
AABox(const glm::vec3& corner, float size);
AABox(const glm::vec3& corner, const glm::vec3& dimensions);
AABox();

View file

@ -9,11 +9,25 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <glm/gtx/extented_min_max.hpp>
#include "AABox.h"
#include "AACube.h"
#include "Extents.h"
#include "GeometryUtil.h"
#include "SharedUtil.h"
AACube::AACube(const AABox& other) :
_corner(other.getCorner()), _scale(other.getLargestDimension()) {
}
AACube::AACube(const Extents& other) :
_corner(other.minimum)
{
glm::vec3 dimensions = other.maximum - other.minimum;
_scale = glm::max(dimensions.x, dimensions.y, dimensions.z);
}
AACube::AACube(const glm::vec3& corner, float size) :
_corner(corner), _scale(size) {
};
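// Hedged sketch of the new conversion constructors; the literal extents values are assumptions.
Extents extents = { glm::vec3(0.0f), glm::vec3(1.0f, 2.0f, 3.0f) };
AABox box(extents);    // corner (0,0,0), scale (1,2,3)
AACube cube(extents);  // corner (0,0,0), scale 3.0f -- the largest dimension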

View file

@ -1,6 +1,6 @@
//
// AACube.h
// libraries/octree/src
// libraries/shared/src
//
// Created by Brad Hefta-Gaub on 04/11/13.
// Copyright 2013 High Fidelity, Inc.
@ -22,10 +22,13 @@
#include "BoxBase.h"
class AABox;
class Extents;
class AACube {
public:
AACube(const AABox& other);
AACube(const Extents& other);
AACube(const glm::vec3& corner, float size);
AACube();
~AACube() {};

View file

@ -0,0 +1,74 @@
//
// Extents.cpp
// libraries/shared/src
//
// Created by Andrzej Kapolka on 9/18/13.
// Moved to shared by Brad Hefta-Gaub on 9/11/14
// Copyright 2013-2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/quaternion.hpp>
#include <glm/gtx/transform.hpp>
#include "Extents.h"
void Extents::reset() {
minimum = glm::vec3(FLT_MAX);
maximum = glm::vec3(-FLT_MAX);
}
bool Extents::containsPoint(const glm::vec3& point) const {
return (point.x >= minimum.x && point.x <= maximum.x
&& point.y >= minimum.y && point.y <= maximum.y
&& point.z >= minimum.z && point.z <= maximum.z);
}
void Extents::addExtents(const Extents& extents) {
minimum = glm::min(minimum, extents.minimum);
maximum = glm::max(maximum, extents.maximum);
}
void Extents::addPoint(const glm::vec3& point) {
minimum = glm::min(minimum, point);
maximum = glm::max(maximum, point);
}
void Extents::rotate(const glm::quat& rotation) {
glm::vec3 bottomLeftNear(minimum.x, minimum.y, minimum.z);
glm::vec3 bottomRightNear(maximum.x, minimum.y, minimum.z);
glm::vec3 bottomLeftFar(minimum.x, minimum.y, maximum.z);
glm::vec3 bottomRightFar(maximum.x, minimum.y, maximum.z);
glm::vec3 topLeftNear(minimum.x, maximum.y, minimum.z);
glm::vec3 topRightNear(maximum.x, maximum.y, minimum.z);
glm::vec3 topLeftFar(minimum.x, maximum.y, maximum.z);
glm::vec3 topRightFar(maximum.x, maximum.y, maximum.z);
glm::vec3 bottomLeftNearRotated = rotation * bottomLeftNear;
glm::vec3 bottomRightNearRotated = rotation * bottomRightNear;
glm::vec3 bottomLeftFarRotated = rotation * bottomLeftFar;
glm::vec3 bottomRightFarRotated = rotation * bottomRightFar;
glm::vec3 topLeftNearRotated = rotation * topLeftNear;
glm::vec3 topRightNearRotated = rotation * topRightNear;
glm::vec3 topLeftFarRotated = rotation * topLeftFar;
glm::vec3 topRightFarRotated = rotation * topRightFar;
minimum = glm::min(bottomLeftNearRotated,
glm::min(bottomRightNearRotated,
glm::min(bottomLeftFarRotated,
glm::min(bottomRightFarRotated,
glm::min(topLeftNearRotated,
glm::min(topRightNearRotated,
glm::min(topLeftFarRotated,topRightFarRotated)))))));
maximum = glm::max(bottomLeftNearRotated,
glm::max(bottomRightNearRotated,
glm::max(bottomLeftFarRotated,
glm::max(bottomRightFarRotated,
glm::max(topLeftNearRotated,
glm::max(topRightNearRotated,
glm::max(topLeftFarRotated,topRightFarRotated)))))));
}

View file

@ -0,0 +1,53 @@
//
// Extents.h
// libraries/shared/src
//
// Created by Andrzej Kapolka on 9/18/13.
// Moved to shared by Brad Hefta-Gaub on 9/11/14
// Copyright 2013-2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_Extents_h
#define hifi_Extents_h
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
class Extents {
public:
/// set minimum and maximum to FLT_MAX and -FLT_MAX respectively
void reset();
/// \param extents another instance of extents
/// expand current limits to contain other extents
void addExtents(const Extents& extents);
/// \param point new point to compare against existing limits
/// compare point to current limits and expand them if necessary to contain point
void addPoint(const glm::vec3& point);
/// \param point
/// \return true if point is within current limits
bool containsPoint(const glm::vec3& point) const;
/// \return whether or not the extents are empty
bool isEmpty() const { return minimum == maximum; }
bool isValid() const { return !((minimum == glm::vec3(FLT_MAX)) && (maximum == glm::vec3(-FLT_MAX))); }
/// rotate the extents around the origin by rotation
void rotate(const glm::quat& rotation);
/// \return a new Extents which is the original rotated around the origin by rotation
Extents getRotated(const glm::quat& rotation) const {
Extents temp = { minimum, maximum };
temp.rotate(rotation);
return temp;
}
glm::vec3 minimum;
glm::vec3 maximum;
};
#endif // hifi_Extents_h
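// Hedged sketch of the two rotation entry points declared above; the extents and rotation
// sources are assumptions for illustration.
Extents extents = _model->getUnscaledMeshExtents();   // assumed source of the extents
glm::quat rotation = _model->getRotation();           // assumed source of the rotation
Extents rotated = extents.getRotated(rotation);       // non-mutating copy
extents.rotate(rotation);                             // in-place variant used by the render call sites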

View file

@ -49,7 +49,7 @@ public:
Enum lastFlag() const { return (Enum)_maxFlag; }
void setHasProperty(Enum flag, bool value = true);
bool getHasProperty(Enum flag);
bool getHasProperty(Enum flag) const;
QByteArray encode();
void decode(const QByteArray& fromEncoded);
@ -61,42 +61,42 @@ public:
PropertyFlags& operator=(const PropertyFlags& other);
PropertyFlags& operator|=(PropertyFlags other);
PropertyFlags& operator|=(const PropertyFlags& other);
PropertyFlags& operator|=(Enum flag);
PropertyFlags& operator&=(PropertyFlags other);
PropertyFlags& operator&=(const PropertyFlags& other);
PropertyFlags& operator&=(Enum flag);
PropertyFlags& operator+=(PropertyFlags other);
PropertyFlags& operator+=(const PropertyFlags& other);
PropertyFlags& operator+=(Enum flag);
PropertyFlags& operator-=(PropertyFlags other);
PropertyFlags& operator-=(const PropertyFlags& other);
PropertyFlags& operator-=(Enum flag);
PropertyFlags& operator<<=(PropertyFlags other);
PropertyFlags& operator<<=(const PropertyFlags& other);
PropertyFlags& operator<<=(Enum flag);
PropertyFlags operator|(PropertyFlags other) const;
PropertyFlags operator|(const PropertyFlags& other) const;
PropertyFlags operator|(Enum flag) const;
PropertyFlags operator&(PropertyFlags other) const;
PropertyFlags operator&(const PropertyFlags& other) const;
PropertyFlags operator&(Enum flag) const;
PropertyFlags operator+(PropertyFlags other) const;
PropertyFlags operator+(const PropertyFlags& other) const;
PropertyFlags operator+(Enum flag) const;
PropertyFlags operator-(PropertyFlags other) const;
PropertyFlags operator-(const PropertyFlags& other) const;
PropertyFlags operator-(Enum flag) const;
PropertyFlags operator<<(PropertyFlags other) const;
PropertyFlags operator<<(const PropertyFlags& other) const;
PropertyFlags operator<<(Enum flag) const;
// NOTE: due to the nature of the compact storage of these property flags, and the fact that the upper bound of the
// enum is not known, these operators will only perform their bitwise operations on the set of properties that have
// been previously set
PropertyFlags& operator^=(PropertyFlags other);
PropertyFlags& operator^=(const PropertyFlags& other);
PropertyFlags& operator^=(Enum flag);
PropertyFlags operator^(PropertyFlags other) const;
PropertyFlags operator^(const PropertyFlags& other) const;
PropertyFlags operator^(Enum flag) const;
PropertyFlags operator~() const;
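
To make the NOTE above concrete, here is a hedged sketch (the ExampleProperty enum and the default construction are assumptions for illustration, not part of the commit) showing that the bitwise operators only consider flags each operand has previously had set:

enum ExampleProperty {          // hypothetical enum, for illustration only
    EXAMPLE_PROP_POSITION,
    EXAMPLE_PROP_COLOR,
    EXAMPLE_PROP_RADIUS,
};

void propertyFlagsXorExample() {
    PropertyFlags<ExampleProperty> a;
    a.setHasProperty(EXAMPLE_PROP_POSITION);    // 'a' has only tracked flags up to POSITION
    PropertyFlags<ExampleProperty> b;
    b.setHasProperty(EXAMPLE_PROP_RADIUS);      // 'b' has tracked flags up to RADIUS

    PropertyFlags<ExampleProperty> c = a ^ b;   // XOR over the flags that have been set
    bool hasPosition = c.getHasProperty(EXAMPLE_PROP_POSITION);  // true: set in exactly one operand
    bool hasColor    = c.getHasProperty(EXAMPLE_PROP_COLOR);     // false: never set in either operand
    // operator~ carries the same caveat: it can only flip flags up to the highest flag previously set
}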
@@ -146,7 +146,7 @@ template<typename Enum> inline void PropertyFlags<Enum>::setHasProperty(Enum fla
}
}
template<typename Enum> inline bool PropertyFlags<Enum>::getHasProperty(Enum flag) {
template<typename Enum> inline bool PropertyFlags<Enum>::getHasProperty(Enum flag) const {
if (flag > _maxFlag) {
return _trailingFlipped; // usually false
}
@@ -253,7 +253,7 @@ template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operato
return *this;
}
template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operator|=(PropertyFlags other) {
template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operator|=(const PropertyFlags& other) {
_flags |= other._flags;
_maxFlag = std::max(_maxFlag, other._maxFlag);
_minFlag = std::min(_minFlag, other._minFlag);
@@ -268,7 +268,7 @@ template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operato
return *this;
}
template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operator&=(PropertyFlags other) {
template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operator&=(const PropertyFlags& other) {
_flags &= other._flags;
shinkIfNeeded();
return *this;
@@ -281,7 +281,7 @@ template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operato
return *this;
}
template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operator^=(PropertyFlags other) {
template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operator^=(const PropertyFlags& other) {
_flags ^= other._flags;
shinkIfNeeded();
return *this;
@@ -294,7 +294,7 @@ template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operato
return *this;
}
template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operator+=(PropertyFlags other) {
template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operator+=(const PropertyFlags& other) {
for(int flag = (int)other.firstFlag(); flag <= (int)other.lastFlag(); flag++) {
if (other.getHasProperty((Enum)flag)) {
setHasProperty((Enum)flag, true);
@@ -308,7 +308,7 @@ template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operato
return *this;
}
template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operator-=(PropertyFlags other) {
template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operator-=(const PropertyFlags& other) {
for(int flag = (int)other.firstFlag(); flag <= (int)other.lastFlag(); flag++) {
if (other.getHasProperty((Enum)flag)) {
setHasProperty((Enum)flag, false);
@@ -322,7 +322,7 @@ template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operato
return *this;
}
template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operator<<=(PropertyFlags other) {
template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operator<<=(const PropertyFlags& other) {
for(int flag = (int)other.firstFlag(); flag <= (int)other.lastFlag(); flag++) {
if (other.getHasProperty((Enum)flag)) {
setHasProperty((Enum)flag, true);
@@ -336,7 +336,7 @@ template<typename Enum> inline PropertyFlags<Enum>& PropertyFlags<Enum>::operato
return *this;
}
template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator|(PropertyFlags other) const {
template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator|(const PropertyFlags& other) const {
PropertyFlags result(*this);
result |= other;
return result;
@@ -349,7 +349,7 @@ template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator
return result;
}
template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator&(PropertyFlags other) const {
template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator&(const PropertyFlags& other) const {
PropertyFlags result(*this);
result &= other;
return result;
@@ -362,7 +362,7 @@ template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator
return result;
}
template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator^(PropertyFlags other) const {
template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator^(const PropertyFlags& other) const {
PropertyFlags result(*this);
result ^= other;
return result;
@@ -375,7 +375,7 @@ template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator
return result;
}
template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator+(PropertyFlags other) const {
template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator+(const PropertyFlags& other) const {
PropertyFlags result(*this);
result += other;
return result;
@@ -387,7 +387,7 @@ template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator
return result;
}
template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator-(PropertyFlags other) const {
template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator-(const PropertyFlags& other) const {
PropertyFlags result(*this);
result -= other;
return result;
@@ -399,7 +399,7 @@ template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator
return result;
}
template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator<<(PropertyFlags other) const {
template<typename Enum> inline PropertyFlags<Enum> PropertyFlags<Enum>::operator<<(const PropertyFlags& other) const {
PropertyFlags result(*this);
result <<= other;
return result;
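
Taken together, the hunks above switch these operators from pass-by-value to const references and add a const overload of getHasProperty, so flag sets can now be combined and queried through const references without extra copies. A hedged sketch, reusing the hypothetical ExampleProperty enum from the earlier example:

// getHasProperty() can now be called through a const reference
bool needsPosition(const PropertyFlags<ExampleProperty>& flags) {
    return flags.getHasProperty(EXAMPLE_PROP_POSITION);
}

// operator| now accepts its right-hand side by const reference instead of by value
PropertyFlags<ExampleProperty> combineRequirements(const PropertyFlags<ExampleProperty>& a,
                                                   const PropertyFlags<ExampleProperty>& b) {
    return a | b;
}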