resolve conflicts on merge with upstream master

Commit b7b371ba61 by Stephen Birarda, 2014-11-11 20:53:24 -08:00
45 changed files with 6859 additions and 3926 deletions


@ -61,7 +61,7 @@
const float LOUDNESS_TO_DISTANCE_RATIO = 0.00001f;
const float DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE = 0.18;
- const float DEFAULT_NOISE_MUTING_THRESHOLD = 0.001f;
+ const float DEFAULT_NOISE_MUTING_THRESHOLD = 0.003f;
const QString AUDIO_MIXER_LOGGING_TARGET_NAME = "audio-mixer";
const QString AUDIO_ENV_GROUP_KEY = "audio_env";
const QString AUDIO_BUFFER_GROUP_KEY = "audio_buffer";
@ -465,6 +465,63 @@ int AudioMixer::prepareMixForListeningNode(Node* node) {
return streamsMixed;
}
void AudioMixer::sendAudioEnvironmentPacket(SharedNodePointer node) {
static char clientEnvBuffer[MAX_PACKET_SIZE];
// Send stream properties
bool hasReverb = false;
float reverbTime, wetLevel;
// find reverb properties
for (int i = 0; i < _zoneReverbSettings.size(); ++i) {
AudioMixerClientData* data = static_cast<AudioMixerClientData*>(node->getLinkedData());
glm::vec3 streamPosition = data->getAvatarAudioStream()->getPosition();
if (_audioZones[_zoneReverbSettings[i].zone].contains(streamPosition)) {
hasReverb = true;
reverbTime = _zoneReverbSettings[i].reverbTime;
wetLevel = _zoneReverbSettings[i].wetLevel;
break;
}
}
AudioMixerClientData* nodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
AvatarAudioStream* stream = nodeData->getAvatarAudioStream();
bool dataChanged = (stream->hasReverb() != hasReverb) ||
(stream->hasReverb() && (stream->getRevebTime() != reverbTime ||
stream->getWetLevel() != wetLevel));
if (dataChanged) {
// Update stream
if (hasReverb) {
stream->setReverb(reverbTime, wetLevel);
} else {
stream->clearReverb();
}
}
// Send at change or every so often
float CHANCE_OF_SEND = 0.01f;
bool sendData = dataChanged || (randFloat() < CHANCE_OF_SEND);
if (sendData) {
int numBytesEnvPacketHeader = populatePacketHeader(clientEnvBuffer, PacketTypeAudioEnvironment);
char* envDataAt = clientEnvBuffer + numBytesEnvPacketHeader;
unsigned char bitset = 0;
if (hasReverb) {
setAtBit(bitset, HAS_REVERB_BIT);
}
memcpy(envDataAt, &bitset, sizeof(unsigned char));
envDataAt += sizeof(unsigned char);
if (hasReverb) {
memcpy(envDataAt, &reverbTime, sizeof(float));
envDataAt += sizeof(float);
memcpy(envDataAt, &wetLevel, sizeof(float));
envDataAt += sizeof(float);
}
NodeList::getInstance()->writeDatagram(clientEnvBuffer, envDataAt - clientEnvBuffer, node);
}
}
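For reference, the environment payload assembled above is one flags byte followed, when reverb is active, by two 32-bit floats (reverb time, then wet level). A rough unpacking sketch, not part of this commit: the helper name is hypothetical, and it assumes the packet header has already been stripped, little-endian host float order, and the HAS_REVERB_BIT index defined elsewhere in the audio library.
function parseAudioEnvironmentPayload(buffer, hasReverbBit) {
    var view = new DataView(buffer);
    var bitset = view.getUint8(0);
    var env = { hasReverb: (bitset & (1 << hasReverbBit)) !== 0 };
    if (env.hasReverb) {
        env.reverbTime = view.getFloat32(1, true);  // reverb time, as packed by the mixer
        env.wetLevel = view.getFloat32(5, true);    // wet mix level
    }
    return env;
}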
void AudioMixer::readPendingDatagram(const QByteArray& receivedPacket, const HifiSockAddr& senderSockAddr) {
NodeList* nodeList = NodeList::getInstance();
@ -642,7 +699,6 @@ void AudioMixer::run() {
timer.start();
char clientMixBuffer[MAX_PACKET_SIZE];
char clientEnvBuffer[MAX_PACKET_SIZE];
int usecToSleep = BUFFER_SEND_INTERVAL_USECS;
@ -759,58 +815,6 @@ void AudioMixer::run() {
// pack mixed audio samples
memcpy(mixDataAt, _mixSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
mixDataAt += NETWORK_BUFFER_LENGTH_BYTES_STEREO;
// Send stream properties
bool hasReverb = false;
float reverbTime, wetLevel;
// find reverb properties
for (int i = 0; i < _zoneReverbSettings.size(); ++i) {
AudioMixerClientData* data = static_cast<AudioMixerClientData*>(node->getLinkedData());
glm::vec3 streamPosition = data->getAvatarAudioStream()->getPosition();
if (_audioZones[_zoneReverbSettings[i].zone].contains(streamPosition)) {
hasReverb = true;
reverbTime = _zoneReverbSettings[i].reverbTime;
wetLevel = _zoneReverbSettings[i].wetLevel;
break;
}
}
AvatarAudioStream* stream = nodeData->getAvatarAudioStream();
bool dataChanged = (stream->hasReverb() != hasReverb) ||
(stream->hasReverb() && (stream->getRevebTime() != reverbTime ||
stream->getWetLevel() != wetLevel));
if (dataChanged) {
// Update stream
if (hasReverb) {
stream->setReverb(reverbTime, wetLevel);
} else {
stream->clearReverb();
}
}
// Send at change or every so often
float CHANCE_OF_SEND = 0.01f;
bool sendData = dataChanged || (randFloat() < CHANCE_OF_SEND);
if (sendData) {
int numBytesEnvPacketHeader = populatePacketHeader(clientEnvBuffer, PacketTypeAudioEnvironment);
char* envDataAt = clientEnvBuffer + numBytesEnvPacketHeader;
unsigned char bitset = 0;
if (hasReverb) {
setAtBit(bitset, HAS_REVERB_BIT);
}
memcpy(envDataAt, &bitset, sizeof(unsigned char));
envDataAt += sizeof(unsigned char);
if (hasReverb) {
memcpy(envDataAt, &reverbTime, sizeof(float));
envDataAt += sizeof(float);
memcpy(envDataAt, &wetLevel, sizeof(float));
envDataAt += sizeof(float);
}
nodeList->writeDatagram(clientEnvBuffer, envDataAt - clientEnvBuffer, node);
}
} else {
// pack header
int numBytesPacketHeader = populatePacketHeader(clientMixBuffer, PacketTypeSilentAudioFrame);
@ -826,6 +830,9 @@ void AudioMixer::run() {
memcpy(mixDataAt, &numSilentSamples, sizeof(quint16));
mixDataAt += sizeof(quint16);
}
// Send audio environment
sendAudioEnvironmentPacket(node);
// send mixed audio packet
nodeList->writeDatagram(clientMixBuffer, mixDataAt - clientMixBuffer, node);


@ -49,6 +49,9 @@ private:
/// prepares and sends a mix to one Node
int prepareMixForListeningNode(Node* node);
/// Send Audio Environment packet for a single node
void sendAudioEnvironmentPacket(SharedNodePointer node);
// used on a per stream basis to run the filter on before mixing, large enough to handle the historical
// data from a phase delay as well as an entire network buffer


@ -93,8 +93,8 @@
"name": "noise_muting_threshold",
"label": "Noise Muting Threshold",
"help": "Loudness value for noise background between 0 and 1.0 (0: mute everyone, 1.0: never mute)",
"placeholder": "0.001",
"default": "0.001",
"placeholder": "0.003",
"default": "0.003",
"advanced": false
},
{


@ -22,13 +22,21 @@
this.oldColorKnown = true;
print("storing old color... this.oldColor=" + this.oldColor.red + "," + this.oldColor.green + "," + this.oldColor.blue);
};
this.preload = function(entityID) {
print("preload");
this.storeOldColor(entityID);
};
this.hoverEnterEntity = function(entityID, mouseEvent) {
print("hoverEnterEntity");
if (!this.oldColorKnown) {
this.storeOldColor(entityID);
}
Entities.editEntity(entityID, { color: { red: 0, green: 255, blue: 255} });
};
this.hoverLeaveEntity = function(entityID, mouseEvent) {
print("hoverLeaveEntity");
if (this.oldColorKnown) {
print("leave restoring old color... this.oldColor="
+ this.oldColor.red + "," + this.oldColor.green + "," + this.oldColor.blue);


@ -12,12 +12,18 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
(function(){
- var bird = new Sound("http://s3.amazonaws.com/hifi-public/sounds/Animals/bushtit_1.raw");
+ var bird;
+ this.preload = function(entityID) {
+ print("preload("+entityID.id+")");
+ bird = new Sound("http://s3.amazonaws.com/hifi-public/sounds/Animals/bushtit_1.raw");
+ };
this.clickDownOnEntity = function(entityID, mouseEvent) {
print("clickDownOnEntity()...");
- Audio.playSound(bird, {
- position: MyAvatar.position,
- volume: 0.5
+ Audio.playSound(bird, {
+ position: MyAvatar.position,
+ volume: 0.5
});
};
})


@ -12,13 +12,18 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
(function(){
- var bird = new Sound("http://s3.amazonaws.com/hifi-public/sounds/Animals/bushtit_1.raw");
+ var bird;
function playSound(entityID) {
Audio.playSound(bird, {
position: MyAvatar.position,
volume: 0.5
});
}
+ this.preload = function(entityID) {
+ print("preload("+entityID.id+")");
+ bird = new Sound("http://s3.amazonaws.com/hifi-public/sounds/Animals/bushtit_1.raw");
+ };
this.enterEntity = function(entityID) {


@ -0,0 +1,193 @@
<html>
<head>
<script>
function loaded() {
var gridColor = { red: 0, green: 0, blue: 0 };
var gridColors = [
{ red: 0, green: 0, blue: 0 },
{ red: 128, green: 128, blue: 128 },
{ red: 255, green: 0, blue: 0 },
{ red: 0, green: 255, blue: 0},
{ red: 0, green: 0, blue: 255 },
];
posY = document.getElementById("horiz-y");
minorSpacing = document.getElementById("minor-spacing");
majorSpacing = document.getElementById("major-spacing");
gridOn = document.getElementById("grid-on");
snapToGrid = document.getElementById("snap-to-grid");
hGridVisible = document.getElementById("horiz-grid-visible");
if (window.EventBridge !== undefined) {
EventBridge.scriptEventReceived.connect(function(data) {
data = JSON.parse(data);
if (data.origin) {
var origin = data.origin;
posY.value = origin.y;
}
if (data.minorGridSpacing) {
minorSpacing.value = data.minorGridSpacing;
}
if (data.majorGridEvery) {
majorSpacing.value = data.majorGridEvery;
}
if (data.gridColor) {
gridColor = data.gridColor;
}
if (data.snapToGrid !== undefined) {
snapToGrid.checked = data.snapToGrid == true;
}
if (data.visible !== undefined) {
hGridVisible.checked = data.visible == true;
}
});
function emitUpdate() {
EventBridge.emitWebEvent(JSON.stringify({
type: "update",
origin: {
y: posY.value,
},
minorGridSpacing: minorSpacing.value,
majorGridEvery: majorSpacing.value,
gridColor: gridColor,
snapToGrid: snapToGrid.checked,
visible: hGridVisible.checked,
}));
}
}
document.addEventListener("input", emitUpdate);
hGridVisible.addEventListener("change", emitUpdate);
snapToGrid.addEventListener("change", emitUpdate);
var gridColorBox = document.getElementById('grid-color');
for (var i = 0; i < gridColors.length; i++) {
var colors = gridColors[i];
var box = document.createElement('div');
box.setAttribute('class', 'color-box');
box.style.background = 'rgb(' + colors.red + ', ' + colors.green + ', ' + colors.blue + ')';
document.getElementById("grid-colors").appendChild(box);
box.addEventListener("click", function(color) {
return function() {
gridColor = color;
emitUpdate();
}
}({ red: colors.red, green: colors.green, blue: colors.blue }));
}
EventBridge.emitWebEvent(JSON.stringify({ type: 'init' }));
}
</script>
<style>
* {
}
body {
margin: 0;
padding: 0;
background: #DDD;
font-family: Sans-Serif;
font-size: 12px;
-webkit-touch-callout: none;
-webkit-user-select: none;
-khtml-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
}
input {
line-height: 2;
}
.input-left {
display: inline-block;
width: 20px;
}
.color-box {
display: inline-block;
width: 20px;
height: 20px;
border: 1px solid black;
background: blue;
margin: 2px;
}
.color-box.highlight {
width: 18px;
height: 18px;
border: 2px solid black;
}
.section-header {
background: #AAA;
border-bottom: 1px solid #CCC;
}
.section-header label {
font-weight: bold;
}
.grid-section {
border-top: 1px solid #DDD;
padding: 4px 0px 4px 20px;
background: #DDD;
}
</style>
</head>
<body onload='loaded();'>
<div class="section-header">
<input type='checkbox' id="horiz-grid-visible">
<label>Horizontal Grid</label>
</div>
<div class="grid-section">
<label>Snap to grid</label>
<div>
<div class="input-left">
</div>
<input type='checkbox' id="snap-to-grid">
</div>
<label>Position (Y Axis)</label>
<div id="horizontal-position">
<div class="input-left">
</div>
<input type='number' id="horiz-y" class="number" value="0.0" step="0.1"></input>
</div>
<label>Minor Grid Size</label>
<div>
<div class="input-left">
</div>
<input type='number' id="minor-spacing" min="0" step="0.01", ></input>
</div>
<label>Major Grid Every</label>
<div>
<div class="input-left">
</div>
<input type='number' id="major-spacing" min="2" step="1", ></input>
</div>
<label>Grid Color</label>
<div id="grid-colors">
<div class="input-left">
</div>
</div>
</div>
</body>
</html>
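For reference, a sketch of the two message shapes this page exchanges over EventBridge, based on the handlers above; the field values are only examples, matching the grid defaults used elsewhere in this commit.
var initMessage = { type: "init" };     // asks gridTool.js to send the current grid settings
var updateMessage = {                   // sent whenever one of the controls changes
    type: "update",
    origin: { y: 0.0 },
    minorGridSpacing: 0.2,
    majorGridEvery: 5,
    gridColor: { red: 0, green: 0, blue: 0 },
    snapToGrid: true,
    visible: true
};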


@ -394,12 +394,12 @@ SelectionDisplay = (function () {
var baseOverlayAngles = { x: 0, y: 0, z: 0 };
var baseOverlayRotation = Quat.fromVec3Degrees(baseOverlayAngles);
var baseOfEntityProjectionOverlay = Overlays.addOverlay("rectangle3d", {
- position: { x:0, y: 0, z: 0},
- size: 1,
+ position: { x: 1, y: 0, z: 0},
color: { red: 51, green: 152, blue: 203 },
alpha: 0.5,
solid: true,
visible: false,
width: 300, height: 200,
rotation: baseOverlayRotation,
ignoreRayIntersection: true, // always ignore this
});
@ -570,6 +570,7 @@ SelectionDisplay = (function () {
xRailOverlay,
yRailOverlay,
zRailOverlay,
baseOfEntityProjectionOverlay,
].concat(stretchHandles);
overlayNames[highlightBox] = "highlightBox";
@ -878,20 +879,24 @@ SelectionDisplay = (function () {
translateHandlesVisible = false;
}
- var rotation = SelectionManager.worldRotation;
- var dimensions = SelectionManager.worldDimensions;
- var position = SelectionManager.worldPosition;
+ var rotation = selectionManager.worldRotation;
+ var dimensions = selectionManager.worldDimensions;
+ var position = selectionManager.worldPosition;
Overlays.editOverlay(baseOfEntityProjectionOverlay,
{
visible: true,
solid:true,
lineWidth: 2.0,
position: { x: position.x,
y: 0,
z: position.z },
dimensions: { x: dimensions.x, y: 0, z: dimensions.z },
visible: mode != "ROTATE_YAW" && mode != "ROTATE_PITCH" && mode != "ROTATE_ROLL",
solid: true,
// lineWidth: 2.0,
position: {
x: position.x,
y: grid.getOrigin().y,
z: position.z
},
dimensions: {
x: dimensions.x,
y: dimensions.z
},
rotation: rotation,
});
@ -1098,6 +1103,7 @@ SelectionDisplay = (function () {
var initialXZPick = null;
var isConstrained = false;
var constrainMajorOnly = false;
var startPosition = null;
var duplicatedEntityIDs = null;
var translateXZTool = {
@ -1162,15 +1168,23 @@ SelectionDisplay = (function () {
if (isConstrained) {
Overlays.editOverlay(xRailOverlay, { visible: false });
Overlays.editOverlay(zRailOverlay, { visible: false });
isConstrained = false;
}
}
constrainMajorOnly = event.isControl;
var cornerPosition = Vec3.sum(startPosition, Vec3.multiply(-0.5, selectionManager.worldDimensions));
vector = Vec3.subtract(
grid.snapToGrid(Vec3.sum(cornerPosition, vector), constrainMajorOnly),
cornerPosition);
var wantDebug = false;
for (var i = 0; i < SelectionManager.selections.length; i++) {
var properties = SelectionManager.savedProperties[SelectionManager.selections[i].id];
+ var newPosition = Vec3.sum(properties.position, { x: vector.x, y: 0, z: vector.z });
Entities.editEntity(SelectionManager.selections[i], {
- position: Vec3.sum(properties.position, vector),
+ position: newPosition,
});
if (wantDebug) {


@ -0,0 +1,189 @@
Grid = function(opts) {
var that = {};
var color = { red: 100, green: 152, blue: 203 };
var gridColor = { red: 100, green: 152, blue: 203 };
var gridAlpha = 1.0;
var origin = { x: 0, y: 0, z: 0 };
var majorGridEvery = 5;
var minorGridSpacing = 0.2;
var halfSize = 40;
var yOffset = 0.001;
var worldSize = 16384;
var minorGridWidth = 0.5;
var majorGridWidth = 1.5;
var snapToGrid = true;
var gridOverlay = Overlays.addOverlay("grid", {
position: { x: 0 , y: 0, z: 0 },
visible: true,
color: { red: 0, green: 0, blue: 128 },
alpha: 1.0,
rotation: Quat.fromPitchYawRollDegrees(90, 0, 0),
minorGridWidth: 0.1,
majorGridEvery: 2,
});
that.getMinorIncrement = function() { return minorGridSpacing; };
that.getMajorIncrement = function() { return minorGridSpacing * majorGridEvery; };
that.visible = false;
that.getOrigin = function() {
return origin;
}
that.getSnapToGrid = function() { return snapToGrid; };
that.setVisible = function(visible, noUpdate) {
that.visible = visible;
updateGrid();
if (!noUpdate) {
that.emitUpdate();
}
}
that.snapToGrid = function(position, majorOnly) {
if (!snapToGrid) {
return position;
}
var spacing = majorOnly ? (minorGridSpacing * majorGridEvery) : minorGridSpacing;
position = Vec3.subtract(position, origin);
position.x = Math.round(position.x / spacing) * spacing;
position.y = Math.round(position.y / spacing) * spacing;
position.z = Math.round(position.z / spacing) * spacing;
return Vec3.sum(position, origin);
}
that.setPosition = function(newPosition, noUpdate) {
origin = Vec3.subtract(newPosition, { x: 0, y: yOffset, z: 0 });
origin.x = 0;
origin.z = 0;
updateGrid();
if (!noUpdate) {
that.emitUpdate();
}
};
that.emitUpdate = function() {
if (that.onUpdate) {
that.onUpdate({
origin: origin,
minorGridSpacing: minorGridSpacing,
majorGridEvery: majorGridEvery,
gridSize: halfSize,
visible: that.visible,
snapToGrid: snapToGrid,
gridColor: gridColor,
});
}
};
that.update = function(data) {
if (data.snapToGrid !== undefined) {
snapToGrid = data.snapToGrid;
}
if (data.origin) {
var pos = data.origin;
pos.x = pos.x === undefined ? origin.x : pos.x;
pos.y = pos.y === undefined ? origin.y : pos.y;
pos.z = pos.z === undefined ? origin.z : pos.z;
that.setPosition(pos, true);
}
if (data.minorGridSpacing) {
minorGridSpacing = data.minorGridSpacing;
}
if (data.majorGridEvery) {
majorGridEvery = data.majorGridEvery;
}
if (data.gridColor) {
gridColor = data.gridColor;
}
if (data.gridSize) {
halfSize = data.gridSize;
}
if (data.visible !== undefined) {
that.setVisible(data.visible, true);
}
updateGrid();
}
function updateGrid() {
Overlays.editOverlay(gridOverlay, {
position: { x: origin.y, y: origin.y, z: -origin.y },
visible: that.visible,
minorGridWidth: minorGridSpacing,
majorGridEvery: majorGridEvery,
color: gridColor,
alpha: gridAlpha,
});
}
function cleanup() {
Overlays.deleteOverlay(gridOverlay);
}
that.addListener = function(callback) {
that.onUpdate = callback;
}
Script.scriptEnding.connect(cleanup);
updateGrid();
that.onUpdate = null;
return that;
};
GridTool = function(opts) {
var that = {};
var horizontalGrid = opts.horizontalGrid;
var verticalGrid = opts.verticalGrid;
var listeners = [];
var url = Script.resolvePath('html/gridControls.html');
var webView = new WebWindow(url, 200, 280);
horizontalGrid.addListener(function(data) {
webView.eventBridge.emitScriptEvent(JSON.stringify(data));
});
webView.eventBridge.webEventReceived.connect(function(data) {
data = JSON.parse(data);
if (data.type == "init") {
horizontalGrid.emitUpdate();
} else if (data.type == "update") {
horizontalGrid.update(data);
for (var i = 0; i < listeners.length; i++) {
listeners[i](data);
}
}
});
that.addListener = function(callback) {
listeners.push(callback);
}
that.setVisible = function(visible) {
webView.setVisible(visible);
}
return that;
};
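A minimal usage sketch for the objects above, assuming it runs inside an Interface script where Overlays, Vec3, Script and WebWindow are available (as newEditEntities.js does later in this commit).
Script.include("libraries/gridTool.js");
var grid = Grid();
var gridTool = GridTool({ horizontalGrid: grid });
gridTool.setVisible(true);
// snap an arbitrary point to the nearest minor grid line (pass true to snap to major lines only)
var snapped = grid.snapToGrid({ x: 1.07, y: 0.43, z: -2.61 }, false);
print(JSON.stringify(snapped));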


@ -0,0 +1,412 @@
//
// walkObjects.js
//
// version 1.001
//
// Created by David Wooldridge, Autumn 2014
//
// Motion, state and Transition objects for use by the walk.js script v1.1
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// constructor for the Motion object
Motion = function() {
this.setGender = function(gender) {
this.avatarGender = gender;
switch(this.avatarGender) {
case MALE:
this.selWalk = walkAssets.maleStandardWalk;
this.selStand = walkAssets.maleStandOne;
this.selFlyUp = walkAssets.maleFlyingUp;
this.selFly = walkAssets.maleFlying;
this.selFlyDown = walkAssets.maleFlyingDown;
this.selSideStepLeft = walkAssets.maleSideStepLeft;
this.selSideStepRight = walkAssets.maleSideStepRight;
this.curAnim = this.selStand;
return;
case FEMALE:
this.selWalk = walkAssets.femaleStandardWalk;
this.selStand = walkAssets.femaleStandOne;
this.selFlyUp = walkAssets.femaleFlyingUp;
this.selFly = walkAssets.femaleFlying;
this.selFlyDown = walkAssets.femaleFlyingDown;
this.selSideStepLeft = walkAssets.femaleSideStepLeft;
this.selSideStepRight = walkAssets.femaleSideStepRight;
this.curAnim = this.selStand;
return;
}
}
this.hydraCheck = function() {
// function courtesy of Thijs Wenker, frisbee.js
var numberOfButtons = Controller.getNumberOfButtons();
var numberOfTriggers = Controller.getNumberOfTriggers();
var numberOfSpatialControls = Controller.getNumberOfSpatialControls();
var controllersPerTrigger = numberOfSpatialControls / numberOfTriggers;
hydrasConnected = (numberOfButtons == 12 && numberOfTriggers == 2 && controllersPerTrigger == 2);
return hydrasConnected;
}
// settings
this.armsFree = this.hydraCheck(); // automatically sets true for Hydra support - temporary fix
this.makesFootStepSounds = true;
this.avatarGender = MALE;
this.motionPitchMax = 60;
this.motionRollMax = 40;
// timing
this.frameStartTime = 0; // used for measuring frame execution times
this.frameExecutionTimeMax = 0; // keep track of the longest frame execution time
this.cumulativeTime = 0.0;
this.lastWalkStartTime = 0;
// selected animations
this.selWalk = walkAssets.maleStandardWalk;
this.selStand = walkAssets.maleStandOne;
this.selFlyUp = walkAssets.maleFlyingUp;
this.selFly = walkAssets.maleFlying;
this.selFlyDown = walkAssets.maleFlyingDown;
this.selSideStepLeft = walkAssets.maleSideStepLeft;
this.selSideStepRight = walkAssets.maleSideStepRight;
// the currently selected animation, joint and transition
this.curAnim = this.selStand;
this.curJointIndex = 0;
this.curTransition = null;
// zero out avi's joints, curl the fingers nicely then take some measurements
this.avatarJointNames = MyAvatar.getJointNames();
if (!this.armsFree) {
for (var i = 0; i < this.avatarJointNames.length; i++) {
if (i > 17 && i < 34) {
// left hand fingers
MyAvatar.setJointData(this.avatarJointNames[i], Quat.fromPitchYawRollDegrees(16, 0, 0));
} else if (i > 33 && i < 38) {
// left hand thumb
MyAvatar.setJointData(this.avatarJointNames[i], Quat.fromPitchYawRollDegrees(4, 0, 0));
} else if (i > 41 && i < 58) {
// right hand fingers
MyAvatar.setJointData(this.avatarJointNames[i], Quat.fromPitchYawRollDegrees(16, 0, 0));
} else if (i > 57 && i < 62) {
// right hand thumb
MyAvatar.setJointData(this.avatarJointNames[i], Quat.fromPitchYawRollDegrees(4, 0, 0));
} else {
// zero out the remaining joints
MyAvatar.clearJointData(this.avatarJointNames[i]);
}
}
}
this.footRPos = MyAvatar.getJointPosition("RightFoot");
this.hipsToFeet = MyAvatar.getJointPosition("Hips").y - this.footRPos.y;
// walkwheel (foot / ground speed matching)
this.direction = FORWARDS;
this.nextStep = RIGHT;
this.nFrames = 0;
this.strideLength = this.selWalk.calibration.strideLengthForwards;
this.walkWheelPos = 0;
this.advanceWalkWheel = function(angle){
this.walkWheelPos += angle;
if (motion.walkWheelPos >= 360) {
this.walkWheelPos = this.walkWheelPos % 360;
}
}
// last frame history
this.lastDirection = 0;
this.lastVelocity = 0;
this.lastStrideLength = 0; // kept for use during transitions
}; // end Motion constructor
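How the walkwheel is advanced each frame is not shown in this diff; purely as an illustration, assuming the global motion object created by walk.js and one full wheel revolution per walk cycle, the per-frame update could look like this.
function advanceWheelForFrame(deltaTime, groundSpeed) {
    var distance = groundSpeed * deltaTime;    // metres covered this frame
    var degreesPerCycle = 360;                 // assumed: one wheel revolution per walk cycle
    motion.advanceWalkWheel((distance / motion.strideLength) * degreesPerCycle);
}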
// finite state machine
state = (function () {
return {
// the finite list of states
STANDING: 1,
WALKING: 2,
SIDE_STEP: 3,
FLYING: 4,
EDIT_WALK_STYLES: 5,
EDIT_WALK_TWEAKS: 6,
EDIT_WALK_JOINTS: 7,
EDIT_STANDING: 8,
EDIT_FLYING: 9,
EDIT_FLYING_UP: 10,
EDIT_FLYING_DOWN: 11,
EDIT_SIDESTEP_LEFT: 12,
EDIT_SIDESTEP_RIGHT: 14,
currentState: this.STANDING,
// status vars
powerOn: true,
minimised: true,
editing: false,
editingTranslation: false,
setInternalState: function(newInternalState) {
switch (newInternalState) {
case this.WALKING:
this.currentState = this.WALKING;
this.editing = false;
motion.lastWalkStartTime = new Date().getTime();
walkInterface.updateMenu();
return;
case this.FLYING:
this.currentState = this.FLYING;
this.editing = false;
motion.lastWalkStartTime = 0;
walkInterface.updateMenu();
return;
case this.SIDE_STEP:
this.currentState = this.SIDE_STEP;
this.editing = false;
motion.lastWalkStartTime = new Date().getTime();
walkInterface.updateMenu();
return;
case this.EDIT_WALK_STYLES:
this.currentState = this.EDIT_WALK_STYLES;
this.editing = true;
motion.lastWalkStartTime = new Date().getTime();
motion.curAnim = motion.selWalk;
walkInterface.updateMenu();
return;
case this.EDIT_WALK_TWEAKS:
this.currentState = this.EDIT_WALK_TWEAKS;
this.editing = true;
motion.lastWalkStartTime = new Date().getTime();
motion.curAnim = motion.selWalk;
walkInterface.updateMenu();
return;
case this.EDIT_WALK_JOINTS:
this.currentState = this.EDIT_WALK_JOINTS;
this.editing = true;
motion.lastWalkStartTime = new Date().getTime();
motion.curAnim = motion.selWalk;
walkInterface.updateMenu();
return;
case this.EDIT_STANDING:
this.currentState = this.EDIT_STANDING;
this.editing = true;
motion.lastWalkStartTime = 0;
motion.curAnim = motion.selStand;
walkInterface.updateMenu();
return;
case this.EDIT_SIDESTEP_LEFT:
this.currentState = this.EDIT_SIDESTEP_LEFT;
this.editing = true;
motion.lastWalkStartTime = new Date().getTime();
motion.curAnim = motion.selSideStepLeft;
walkInterface.updateMenu();
return;
case this.EDIT_SIDESTEP_RIGHT:
this.currentState = this.EDIT_SIDESTEP_RIGHT;
this.editing = true;
motion.lastWalkStartTime = new Date().getTime();
motion.curAnim = motion.selSideStepRight;
walkInterface.updateMenu();
return;
case this.EDIT_FLYING:
this.currentState = this.EDIT_FLYING;
this.editing = true;
motion.lastWalkStartTime = 0;
motion.curAnim = motion.selFly;
walkInterface.updateMenu();
return;
case this.EDIT_FLYING_UP:
this.currentState = this.EDIT_FLYING_UP;
this.editing = true;
motion.lastWalkStartTime = 0;
motion.curAnim = motion.selFlyUp;
walkInterface.updateMenu();
return;
case this.EDIT_FLYING_DOWN:
this.currentState = this.EDIT_FLYING_DOWN;
this.editing = true;
motion.lastWalkStartTime = 0;
motion.curAnim = motion.selFlyDown;
walkInterface.updateMenu();
return;
case this.STANDING:
default:
this.currentState = this.STANDING;
this.editing = false;
motion.lastWalkStartTime = 0;
motion.curAnim = motion.selStand;
walkInterface.updateMenu();
// initialisation - runs at script startup only
if (motion.strideLength === 0) {
motion.setGender(MALE);
if (motion.direction === BACKWARDS) {
motion.strideLength = motion.selWalk.calibration.strideLengthBackwards;
} else {
motion.strideLength = motion.selWalk.calibration.strideLengthForwards;
}
}
return;
}
}
}
})(); // end state object literal
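A small usage sketch for the state machine above; it assumes walk.js has already created the global motion and walkInterface objects that setInternalState touches.
var speed = 1.2;   // placeholder for the avatar's measured speed in m/s
if (speed > 0.1) {
    state.setInternalState(state.WALKING);
} else {
    state.setInternalState(state.STANDING);
}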
// constructor for animation Transition
Transition = function(lastAnimation, nextAnimation, reachPoses, transitionDuration, easingLower, easingUpper) {
this.lastAnim = lastAnimation; // name of last animation
this.nextAnimation = nextAnimation; // name of next animation
if (lastAnimation === motion.selWalk ||
nextAnimation === motion.selSideStepLeft ||
nextAnimation === motion.selSideStepRight) {
// boolean - is the last animation a walking animation?
this.walkingAtStart = true;
} else {
this.walkingAtStart = false;
}
if (nextAnimation === motion.selWalk ||
nextAnimation === motion.selSideStepLeft ||
nextAnimation === motion.selSideStepRight) {
// boolean - is the next animation a walking animation?
this.walkingAtEnd = true;
} else {
this.walkingAtEnd = false;
}
this.reachPoses = reachPoses; // placeholder / stub: array of reach poses for squash and stretch techniques
this.transitionDuration = transitionDuration; // length of transition (seconds)
this.easingLower = easingLower; // Bezier curve handle (normalised)
this.easingUpper = easingUpper; // Bezier curve handle (normalised)
this.startTime = new Date().getTime(); // Starting timestamp (seconds)
this.progress = 0; // how far are we through the transition?
this.walkWheelIncrement = 3; // how much to turn the walkwheel each frame when transitioning to / from walking
this.walkWheelAdvance = 0; // how many degrees the walk wheel has been advanced during the transition
this.walkStopAngle = 0; // what angle should we stop the walk cycle?
}; // end Transition constructor
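How walk.js consumes a Transition is not part of this diff; purely as an illustration, progress could be derived from startTime and eased with the stored Bezier handles via filter.bezier from walkFilters.js (which returns C1 at percent 1 and C4 at percent 0).
function transitionProgress(transition) {
    var elapsedSeconds = (new Date().getTime() - transition.startTime) / 1000;
    var linear = Math.min(elapsedSeconds / transition.transitionDuration, 1);
    // end point first (C1), then the upper and lower handles, then the start point (C4)
    return filter.bezier(linear, { x: 1, y: 1 }, transition.easingUpper,
                         transition.easingLower, { x: 0, y: 0 }).y;
}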
walkAssets = (function () {
// path to the sounds used for the footsteps
var _pathToSounds = 'https://s3.amazonaws.com/hifi-public/sounds/Footsteps/';
// read in the sounds
var _footsteps = [];
_footsteps.push(new Sound(_pathToSounds+"FootstepW2Left-12db.wav"));
_footsteps.push(new Sound(_pathToSounds+"FootstepW2Right-12db.wav"));
_footsteps.push(new Sound(_pathToSounds+"FootstepW3Left-12db.wav"));
_footsteps.push(new Sound(_pathToSounds+"FootstepW3Right-12db.wav"));
_footsteps.push(new Sound(_pathToSounds+"FootstepW5Left-12db.wav"));
_footsteps.push(new Sound(_pathToSounds+"FootstepW5Right-12db.wav"));
// load the animation datafiles
Script.include(pathToAssets+"animations/dd-female-standard-walk-animation.js");
Script.include(pathToAssets+"animations/dd-female-flying-up-animation.js");
Script.include(pathToAssets+"animations/dd-female-flying-animation.js");
Script.include(pathToAssets+"animations/dd-female-flying-down-animation.js");
Script.include(pathToAssets+"animations/dd-female-standing-one-animation.js");
Script.include(pathToAssets+"animations/dd-female-sidestep-left-animation.js");
Script.include(pathToAssets+"animations/dd-female-sidestep-right-animation.js");
Script.include(pathToAssets+"animations/dd-male-standard-walk-animation.js");
Script.include(pathToAssets+"animations/dd-male-flying-up-animation.js");
Script.include(pathToAssets+"animations/dd-male-flying-animation.js");
Script.include(pathToAssets+"animations/dd-male-flying-down-animation.js");
Script.include(pathToAssets+"animations/dd-male-standing-one-animation.js");
Script.include(pathToAssets+"animations/dd-male-sidestep-left-animation.js");
Script.include(pathToAssets+"animations/dd-male-sidestep-right-animation.js");
// read in the animation files
var _FemaleStandardWalkFile = new FemaleStandardWalk();
var _femaleStandardWalk = _FemaleStandardWalkFile.loadAnimation();
var _FemaleFlyingUpFile = new FemaleFlyingUp();
var _femaleFlyingUp = _FemaleFlyingUpFile.loadAnimation();
var _FemaleFlyingFile = new FemaleFlying();
var _femaleFlying = _FemaleFlyingFile.loadAnimation();
var _FemaleFlyingDownFile = new FemaleFlyingDown();
var _femaleFlyingDown = _FemaleFlyingDownFile.loadAnimation();
var _FemaleStandOneFile = new FemaleStandingOne();
var _femaleStandOne = _FemaleStandOneFile.loadAnimation();
var _FemaleSideStepLeftFile = new FemaleSideStepLeft();
var _femaleSideStepLeft = _FemaleSideStepLeftFile.loadAnimation();
var _FemaleSideStepRightFile = new FemaleSideStepRight();
var _femaleSideStepRight = _FemaleSideStepRightFile.loadAnimation();
var _MaleStandardWalkFile = new MaleStandardWalk(filter);
var _maleStandardWalk = _MaleStandardWalkFile.loadAnimation();
var _MaleFlyingUpFile = new MaleFlyingUp();
var _maleFlyingUp = _MaleFlyingUpFile.loadAnimation();
var _MaleFlyingFile = new MaleFlying();
var _maleFlying = _MaleFlyingFile.loadAnimation();
var _MaleFlyingDownFile = new MaleFlyingDown();
var _maleFlyingDown = _MaleFlyingDownFile.loadAnimation();
var _MaleStandOneFile = new MaleStandingOne();
var _maleStandOne = _MaleStandOneFile.loadAnimation();
var _MaleSideStepLeftFile = new MaleSideStepLeft();
var _maleSideStepLeft = _MaleSideStepLeftFile.loadAnimation();
var _MaleSideStepRightFile = new MaleSideStepRight();
var _maleSideStepRight = _MaleSideStepRightFile.loadAnimation();
return {
// expose the sound assets
footsteps: _footsteps,
// expose the animation assets
femaleStandardWalk: _femaleStandardWalk,
femaleFlyingUp: _femaleFlyingUp,
femaleFlying: _femaleFlying,
femaleFlyingDown: _femaleFlyingDown,
femaleStandOne: _femaleStandOne,
femaleSideStepLeft: _femaleSideStepLeft,
femaleSideStepRight: _femaleSideStepRight,
maleStandardWalk: _maleStandardWalk,
maleFlyingUp: _maleFlyingUp,
maleFlying: _maleFlying,
maleFlyingDown: _maleFlyingDown,
maleStandOne: _maleStandOne,
maleSideStepLeft: _maleSideStepLeft,
maleSideStepRight: _maleSideStepRight,
}
})();


@ -0,0 +1,225 @@
//
// walkFilters.js
//
// version 1.001
//
// Created by David Wooldridge, Autumn 2014
//
// Provides a variety of filters for use by the walk.js script v1.1
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
AveragingFilter = function(length) {
//this.name = name;
this.pastValues = [];
for(var i = 0; i < length; i++) {
this.pastValues.push(0);
}
// single arg is the nextInputValue
this.process = function() {
if (this.pastValues.length === 0 && arguments[0]) {
return arguments[0];
} else if (arguments[0]) {
// apply quick and simple LP filtering
this.pastValues.push(arguments[0]);
this.pastValues.shift();
var nextOutputValue = 0;
for (var ea in this.pastValues) nextOutputValue += this.pastValues[ea];
return nextOutputValue / this.pastValues.length;
} else {
return 0;
}
};
};
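A quick usage sketch: a four-sample moving average smoothing a noisy series. The buffer is pre-filled with zeros, so the first few outputs are pulled toward zero.
var smoother = new AveragingFilter(4);
var noisy = [0.0, 1.0, 0.5, 1.2, 0.9, 1.1];
for (var i = 0; i < noisy.length; i++) {
    print(smoother.process(noisy[i]));
}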
// 2nd order Butterworth LP filter - calculate coeffs here: http://www-users.cs.york.ac.uk/~fisher/mkfilter/trad.html
// provides LP filtering with a more stable frequency / phase response
ButterworthFilter = function(cutOff) {
// cut off frequency = 5Hz
this.gain = 20.20612010;
this.coeffOne = -0.4775922501;
this.coeffTwo = 1.2796324250;
// initialise the arrays
this.xv = [];
this.yv = [];
for(var i = 0; i < 3; i++) {
this.xv.push(0);
this.yv.push(0);
}
// process values
this.process = function(nextInputValue) {
this.xv[0] = this.xv[1];
this.xv[1] = this.xv[2];
this.xv[2] = nextInputValue / this.gain;
this.yv[0] = this.yv[1];
this.yv[1] = this.yv[2];
this.yv[2] = (this.xv[0] + this.xv[2]) +
2 * this.xv[1] +
(this.coeffOne * this.yv[0]) +
(this.coeffTwo * this.yv[1]);
return this.yv[2];
};
}; // end Butterworth filter constructor
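The process() above implements the usual second-order recurrence y[n] = (x[n] + 2*x[n-1] + x[n-2]) / gain + coeffOne * y[n-2] + coeffTwo * y[n-1], with coefficients pre-computed for roughly a 5 Hz cut-off (the cutOff argument is not used yet). A minimal usage sketch:
var lowPass = new ButterworthFilter(5);   // argument kept for future use; coefficients are fixed
var smoothed = lowPass.process(0.8);      // feed one sample per frame, read back the filtered value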
// Add harmonics to a given sine wave to form square, sawtooth or triangle waves
// Geometric wave synthesis fundamentals taken from: http://hyperphysics.phy-astr.gsu.edu/hbase/audio/geowv.html
WaveSynth = function(waveShape, numHarmonics, smoothing) {
this.numHarmonics = numHarmonics;
this.waveShape = waveShape;
this.averagingFilter = new AveragingFilter(smoothing);
// NB: frequency in radians
this.shapeWave = function(frequency) {
// make some shapes
var harmonics = 0;
var multiplier = 0;
var iterations = this.numHarmonics * 2 + 2;
if (this.waveShape === TRIANGLE) {
iterations++;
}
for(var n = 2; n < iterations; n++) {
switch(this.waveShape) {
case SAWTOOTH: {
multiplier = 1 / n;
harmonics += multiplier * Math.sin(n * frequency);
break;
}
case TRIANGLE: {
if (n % 2 === 1) {
multiplier = 1 / (n * n);
// multiply (4n-1)th harmonics by -1
if (n === 3 || n === 7 || n === 11 || n === 15) {
multiplier *= -1;
}
harmonics += multiplier * Math.sin(n * frequency);
}
break;
}
case SQUARE: {
if (n % 2 === 1) {
multiplier = 1 / n;
harmonics += multiplier * Math.sin(n * frequency);
}
break;
}
}
}
// smooth the result and return
return this.averagingFilter.process(harmonics);
};
};
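Usage sketch for the synthesiser above. The SAWTOOTH, SQUARE and TRIANGLE wave-shape constants are defined by the including script rather than this file, so placeholder values are used here.
var SAWTOOTH = 0, SQUARE = 1, TRIANGLE = 2;     // placeholders; the real constants live in walk.js
var squareSynth = new WaveSynth(SQUARE, 5, 2);  // five harmonics, light output smoothing
for (var phase = 0; phase < 2 * Math.PI; phase += Math.PI / 8) {
    print(squareSynth.shapeWave(phase));        // argument is the current phase in radians
}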
// Create a wave shape by summing pre-calculated sinusoidal harmonics
HarmonicsFilter = function(magnitudes, phaseAngles) {
this.magnitudes = magnitudes;
this.phaseAngles = phaseAngles;
this.calculate = function(twoPiFT) {
var harmonics = 0;
var numHarmonics = magnitudes.length;
for(var n = 0; n < numHarmonics; n++) {
harmonics += this.magnitudes[n] * Math.cos(n * twoPiFT - this.phaseAngles[n]);
}
return harmonics;
};
};
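Usage sketch: the magnitude and phase arrays describe one pre-analysed waveform, and calculate() re-synthesises it at a point in the cycle (twoPiFT in radians); the numbers below are illustrative only.
var hipSway = new HarmonicsFilter([0.0, 1.0, 0.25], [0.0, 0.0, Math.PI / 2]);
print(hipSway.calculate(Math.PI / 3));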
// the main filter object literal
filter = (function() {
// Bezier private functions
function _B1(t) { return t * t * t };
function _B2(t) { return 3 * t * t * (1 - t) };
function _B3(t) { return 3 * t * (1 - t) * (1 - t) };
function _B4(t) { return (1 - t) * (1 - t) * (1 - t) };
return {
// helper methods
degToRad: function(degrees) {
var convertedValue = degrees * Math.PI / 180;
return convertedValue;
},
radToDeg: function(radians) {
var convertedValue = radians * 180 / Math.PI;
return convertedValue;
},
// these filters need instantiating, as they hold arrays of previous values
createAveragingFilter: function(length) {
var newAveragingFilter = new AveragingFilter(length);
return newAveragingFilter;
},
createButterworthFilter: function(cutoff) {
var newButterworthFilter = new ButterworthFilter(cutoff);
return newButterworthFilter;
},
createWaveSynth: function(waveShape, numHarmonics, smoothing) {
var newWaveSynth = new WaveSynth(waveShape, numHarmonics, smoothing);
return newWaveSynth;
},
createHarmonicsFilter: function(magnitudes, phaseAngles) {
var newHarmonicsFilter = new HarmonicsFilter(magnitudes, phaseAngles);
return newHarmonicsFilter;
},
// the following filters do not need separate instances, as they hold no previous values
bezier: function(percent, C1, C2, C3, C4) {
// Bezier functions for more natural transitions
// based on script by Dan Pupius (www.pupius.net) http://13thparallel.com/archive/bezier-curves/
var pos = {x: 0, y: 0};
pos.x = C1.x * _B1(percent) + C2.x * _B2(percent) + C3.x * _B3(percent) + C4.x * _B4(percent);
pos.y = C1.y * _B1(percent) + C2.y * _B2(percent) + C3.y * _B3(percent) + C4.y * _B4(percent);
return pos;
},
// simple clipping filter (clips bottom of wave only, special case for hips y-axis skeleton offset)
clipTrough: function(inputValue, peak, strength) {
var outputValue = inputValue * strength;
if (outputValue < -peak) {
outputValue = -peak;
}
return outputValue;
}
}
})();
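Usage sketch for the stateless helpers: an eased value a quarter of the way through a transition, and trough clipping as used for the hips' vertical offset. Note that this bezier() returns the first point at percent 1 and the last point at percent 0.
var eased = filter.bezier(0.25,
    { x: 1, y: 1 },       // end point
    { x: 0.9, y: 1 },     // handle near the end
    { x: 0.1, y: 0 },     // handle near the start
    { x: 0, y: 0 });      // start point
print(eased.y);
print(filter.clipTrough(-0.9, 0.5, 1.0));   // -0.9 scaled by 1.0, then clipped at -0.5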

File diff suppressed because it is too large.


@ -35,6 +35,10 @@ var entityPropertyDialogBox = EntityPropertyDialogBox;
Script.include("libraries/entityCameraTool.js");
var cameraManager = new CameraManager();
Script.include("libraries/gridTool.js");
var grid = Grid();
gridTool = GridTool({ horizontalGrid: grid });
selectionManager.setEventListener(selectionDisplay.updateHandles);
var windowDimensions = Controller.getViewportDimensions();
@ -51,11 +55,13 @@ var wantEntityGlow = false;
var SPAWN_DISTANCE = 1;
var DEFAULT_DIMENSION = 0.20;
var MENU_GRID_TOOL_ENABLED = 'Grid Tool';
var MENU_INSPECT_TOOL_ENABLED = "Inspect Tool";
var MENU_EASE_ON_FOCUS = "Ease Orientation on Focus";
var SETTING_INSPECT_TOOL_ENABLED = "inspectToolEnabled";
var SETTING_EASE_ON_FOCUS = "cameraEaseOnFocus";
var SETTING_GRID_TOOL_ENABLED = 'GridToolEnabled';
var modelURLs = [
HIFI_PUBLIC_BUCKET + "models/entities/2-Terrain:%20Alder.fbx",
@ -262,10 +268,12 @@ var toolBar = (function () {
if (activeButton === toolBar.clicked(clickedOverlay)) {
isActive = !isActive;
if (!isActive) {
gridTool.setVisible(false);
selectionManager.clearSelections();
cameraManager.disable();
} else {
cameraManager.enable();
gridTool.setVisible(Menu.isOptionChecked(MENU_GRID_TOOL_ENABLED));
}
return true;
}
@ -597,7 +605,9 @@ function setupModelMenus() {
Menu.addMenuItem({ menuName: "File", menuItemName: "Import Models", shortcutKey: "CTRL+META+I", afterItem: "Export Models" });
Menu.addMenuItem({ menuName: "Developer", menuItemName: "Debug Ryans Rotation Problems", isCheckable: true });
Menu.addMenuItem({ menuName: "View", menuItemName: MENU_INSPECT_TOOL_ENABLED, afterItem: "Edit Entities Help...",
Menu.addMenuItem({ menuName: "View", menuItemName: MENU_GRID_TOOL_ENABLED, afterItem: "Edit Entities Help...", isCheckable: true,
isChecked: Settings.getValue(SETTING_GRID_TOOL_ENABLED) == 'true'});
Menu.addMenuItem({ menuName: "View", menuItemName: MENU_INSPECT_TOOL_ENABLED, afterItem: MENU_GRID_TOOL_ENABLED,
isCheckable: true, isChecked: Settings.getValue(SETTING_INSPECT_TOOL_ENABLED) == "true" });
Menu.addMenuItem({ menuName: "View", menuItemName: MENU_EASE_ON_FOCUS, afterItem: MENU_INSPECT_TOOL_ENABLED,
isCheckable: true, isChecked: Settings.getValue(SETTING_EASE_ON_FOCUS) == "true" });
@ -623,6 +633,8 @@ function cleanupModelMenus() {
Menu.removeMenuItem("File", "Import Models");
Menu.removeMenuItem("Developer", "Debug Ryans Rotation Problems");
Settings.setValue(SETTING_GRID_TOOL_ENABLED, Menu.isOptionChecked(MENU_GRID_TOOL_ENABLED));
Menu.removeMenuItem("View", MENU_GRID_TOOL_ENABLED);
Menu.removeMenuItem("View", MENU_INSPECT_TOOL_ENABLED);
Menu.removeMenuItem("View", MENU_EASE_ON_FOCUS);
}
@ -734,6 +746,10 @@ function handeMenuEvent(menuItem) {
}
} else if (menuItem == "Import Models") {
modelImporter.doImport();
} else if (menuItem == MENU_GRID_TOOL_ENABLED) {
if (isActive) {
gridTool.setVisible(Menu.isOptionChecked(MENU_GRID_TOOL_ENABLED));
}
}
tooltip.show(false);
}
@ -759,25 +775,32 @@ Controller.keyReleaseEvent.connect(function (event) {
if (isActive) {
cameraManager.enable();
}
} else if (event.text == 'g') {
if (isActive && selectionManager.hasSelection()) {
var newPosition = selectionManager.worldPosition;
newPosition = Vec3.subtract(newPosition, { x: 0, y: selectionManager.worldDimensions.y * 0.5, z: 0 });
grid.setPosition(newPosition);
}
} else if (isActive) {
var delta = null;
var increment = event.isShifted ? grid.getMajorIncrement() : grid.getMinorIncrement();
if (event.text == 'UP') {
if (event.isControl || event.isAlt) {
delta = { x: 0, y: 1, z: 0 };
delta = { x: 0, y: increment, z: 0 };
} else {
delta = { x: 0, y: 0, z: -1 };
delta = { x: 0, y: 0, z: -increment };
}
} else if (event.text == 'DOWN') {
if (event.isControl || event.isAlt) {
delta = { x: 0, y: -1, z: 0 };
delta = { x: 0, y: -increment, z: 0 };
} else {
delta = { x: 0, y: 0, z: 1 };
delta = { x: 0, y: 0, z: increment };
}
} else if (event.text == 'LEFT') {
delta = { x: -1, y: 0, z: 0 };
delta = { x: -increment, y: 0, z: 0 };
} else if (event.text == 'RIGHT') {
delta = { x: 1, y: 0, z: 0 };
delta = { x: increment, y: 0, z: 0 };
}
if (delta != null) {

File diff suppressed because it is too large.


@ -92,6 +92,7 @@
#include "scripting/MenuScriptingInterface.h"
#include "scripting/SettingsScriptingInterface.h"
#include "scripting/WindowScriptingInterface.h"
#include "scripting/WebWindowClass.h"
#include "ui/DataWebDialog.h"
#include "ui/InfoView.h"
@ -723,11 +724,11 @@ void Application::paintGL() {
displaySide(*whichCamera);
glPopMatrix();
- if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
- renderRearViewMirror(_mirrorViewRect);
- } else if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
+ if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
_rearMirrorTools->render(true);
+ } else if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
+ renderRearViewMirror(_mirrorViewRect);
}
_glowEffect.render();
@ -785,7 +786,7 @@ void Application::updateProjectionMatrix(Camera& camera, bool updateViewFrustum)
// Tell our viewFrustum about this change, using the application camera
if (updateViewFrustum) {
loadViewFrustum(camera, _viewFrustum);
- computeOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);
+ _viewFrustum.computeOffAxisFrustum(left, right, bottom, top, nearVal, farVal, nearClipPlane, farClipPlane);
// If we're in Display Frustum mode, then we want to use the slightly adjust near/far clip values of the
// _viewFrustumOffsetCamera, so that we can see more of the application content in the application's frustum
@ -2008,25 +2009,17 @@ void Application::init() {
void Application::closeMirrorView() {
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
- Menu::getInstance()->triggerOption(MenuOption::Mirror);;
+ Menu::getInstance()->triggerOption(MenuOption::Mirror);
}
}
void Application::restoreMirrorView() {
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
Menu::getInstance()->triggerOption(MenuOption::Mirror);;
}
if (!Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
Menu::getInstance()->triggerOption(MenuOption::FullscreenMirror);
}
}
void Application::shrinkMirrorView() {
if (!Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
Menu::getInstance()->triggerOption(MenuOption::Mirror);;
}
if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
Menu::getInstance()->triggerOption(MenuOption::FullscreenMirror);
}
@ -3904,6 +3897,8 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
// register `location` on the global object.
scriptEngine->registerGetterSetter("location", LocationScriptingInterface::locationGetter,
LocationScriptingInterface::locationSetter);
scriptEngine->registerFunction("WebWindow", WebWindowClass::constructor, 1);
scriptEngine->registerGlobalObject("Menu", MenuScriptingInterface::getInstance());
scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
@ -4313,8 +4308,6 @@ bool Application::isVSyncOn() const {
if (wglewGetExtension("WGL_EXT_swap_control")) {
int swapInterval = wglGetSwapIntervalEXT();
return (swapInterval > 0);
} else {
return true;
}
#elif defined(Q_OS_LINUX)
// TODO: write the proper code for linux
@ -4325,10 +4318,9 @@ bool Application::isVSyncOn() const {
} else {
return true;
}
*/
#else
return true;
*/
#endif
return true;
}
bool Application::isVSyncEditable() const {
@ -4343,7 +4335,6 @@ bool Application::isVSyncEditable() const {
return true;
}
*/
#else
#endif
return false;
}


@ -518,6 +518,33 @@ void Audio::initGverb() {
gverb_set_taillevel(_gverb, DB_CO(_reverbOptions->getTailLevel()));
}
void Audio::updateGverbOptions() {
bool reverbChanged = false;
if (_receivedAudioStream.hasReverb()) {
if (_zoneReverbOptions.getReverbTime() != _receivedAudioStream.getRevebTime()) {
_zoneReverbOptions.setReverbTime(_receivedAudioStream.getRevebTime());
reverbChanged = true;
}
if (_zoneReverbOptions.getWetLevel() != _receivedAudioStream.getWetLevel()) {
_zoneReverbOptions.setWetLevel(_receivedAudioStream.getWetLevel());
reverbChanged = true;
}
if (_reverbOptions != &_zoneReverbOptions) {
_reverbOptions = &_zoneReverbOptions;
reverbChanged = true;
}
} else if (_reverbOptions != &_scriptReverbOptions) {
_reverbOptions = &_scriptReverbOptions;
reverbChanged = true;
}
if (reverbChanged) {
initGverb();
}
}
void Audio::setReverbOptions(const AudioEffectOptions* options) {
// Save the new options
_scriptReverbOptions.setMaxRoomSize(options->getMaxRoomSize());
@ -549,14 +576,14 @@ void Audio::addReverb(int16_t* samplesData, int numSamples, QAudioFormat& audioF
gverb_do(_gverb, value, &lValue, &rValue);
// Mix, accounting for clipping, the left and right channels. Ignore the rest.
- for (unsigned int j = sample; j < sample + audioFormat.channelCount(); j++) {
+ for (int j = sample; j < sample + audioFormat.channelCount(); j++) {
if (j == sample) {
// left channel
- int lResult = glm::clamp((int)(samplesData[j] * dryFraction + lValue * wetFraction), -32768, 32767);
+ int lResult = glm::clamp((int)(samplesData[j] * dryFraction + lValue * wetFraction), MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
samplesData[j] = (int16_t)lResult;
} else if (j == (sample + 1)) {
// right channel
- int rResult = glm::clamp((int)(samplesData[j] * dryFraction + rValue * wetFraction), -32768, 32767);
+ int rResult = glm::clamp((int)(samplesData[j] * dryFraction + rValue * wetFraction), MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
samplesData[j] = (int16_t)rResult;
} else {
// ignore channels above 2
@ -565,6 +592,60 @@ void Audio::addReverb(int16_t* samplesData, int numSamples, QAudioFormat& audioF
}
}
void Audio::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
bool hasEcho = Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio);
// If there is server echo, reverb will be applied to the received audio stream so no need to have it here.
bool hasLocalReverb = (_reverb || _receivedAudioStream.hasReverb()) &&
!Menu::getInstance()->isOptionChecked(MenuOption::EchoServerAudio);
if (_muted || !_audioOutput || (!hasEcho && !hasLocalReverb)) {
return;
}
// if this person wants local loopback add that to the locally injected audio
// if there is reverb apply it to local audio and subtract the original samples
if (!_loopbackOutputDevice && _loopbackAudioOutput) {
// we didn't have the loopback output device going so set that up now
_loopbackOutputDevice = _loopbackAudioOutput->start();
}
QByteArray loopBackByteArray(inputByteArray);
if (_inputFormat != _outputFormat) {
float loopbackOutputToInputRatio = (_outputFormat.sampleRate() / (float) _inputFormat.sampleRate()) *
(_outputFormat.channelCount() / _inputFormat.channelCount());
loopBackByteArray.resize(inputByteArray.size() * loopbackOutputToInputRatio);
loopBackByteArray.fill(0);
linearResampling(reinterpret_cast<int16_t*>(inputByteArray.data()),
reinterpret_cast<int16_t*>(loopBackByteArray.data()),
inputByteArray.size() / sizeof(int16_t), loopBackByteArray.size() / sizeof(int16_t),
_inputFormat, _outputFormat);
}
if (hasLocalReverb) {
QByteArray loopbackCopy;
if (!hasEcho) {
loopbackCopy = loopBackByteArray;
}
int16_t* loopbackSamples = reinterpret_cast<int16_t*>(loopBackByteArray.data());
int numLoopbackSamples = loopBackByteArray.size() / sizeof(int16_t);
updateGverbOptions();
addReverb(loopbackSamples, numLoopbackSamples, _outputFormat);
if (!hasEcho) {
int16_t* loopbackCopySamples = reinterpret_cast<int16_t*>(loopbackCopy.data());
for (int i = 0; i < numLoopbackSamples; ++i) {
loopbackSamples[i] = glm::clamp((int)loopbackSamples[i] - loopbackCopySamples[i],
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
}
}
}
if (_loopbackOutputDevice) {
_loopbackOutputDevice->write(loopBackByteArray);
}
}
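The no-echo branch above is essentially "wet minus dry": reverberate a copy of the input, subtract the original samples, and clamp, so the user only hears the reverb tail of their own voice locally. A conceptual JavaScript sketch of that idea (not the C++ implementation):
function wetOnly(drySamples, applyReverb) {
    var processed = applyReverb(drySamples.slice());    // reverberated copy of the input
    return processed.map(function(sample, i) {
        var wet = sample - drySamples[i];               // remove the dry component
        return Math.max(-32768, Math.min(32767, wet));  // clamp to the 16-bit sample range
    });
}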
void Audio::handleAudioInput() {
static char audioDataPacket[MAX_PACKET_SIZE];
@ -609,34 +690,8 @@ void Audio::handleAudioInput() {
_inputFrameBuffer.copyFrames(1, inputFrameCount, inputFrameData, true /*copy out*/);
}
if (Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio) && !_muted && _audioOutput) {
// if this person wants local loopback add that to the locally injected audio
if (!_loopbackOutputDevice && _loopbackAudioOutput) {
// we didn't have the loopback output device going so set that up now
_loopbackOutputDevice = _loopbackAudioOutput->start();
}
if (_inputFormat == _outputFormat) {
if (_loopbackOutputDevice) {
_loopbackOutputDevice->write(inputByteArray);
}
} else {
float loopbackOutputToInputRatio = (_outputFormat.sampleRate() / (float) _inputFormat.sampleRate())
* (_outputFormat.channelCount() / _inputFormat.channelCount());
QByteArray loopBackByteArray(inputByteArray.size() * loopbackOutputToInputRatio, 0);
linearResampling((int16_t*) inputByteArray.data(), (int16_t*) loopBackByteArray.data(),
inputByteArray.size() / sizeof(int16_t),
loopBackByteArray.size() / sizeof(int16_t), _inputFormat, _outputFormat);
if (_loopbackOutputDevice) {
_loopbackOutputDevice->write(loopBackByteArray);
}
}
}
handleLocalEchoAndReverb(inputByteArray);
_inputRingBuffer.writeData(inputByteArray.data(), inputByteArray.size());
@ -973,30 +1028,7 @@ void Audio::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& ou
_desiredOutputFormat, _outputFormat);
if(_reverb || _receivedAudioStream.hasReverb()) {
bool reverbChanged = false;
if (_receivedAudioStream.hasReverb()) {
if (_zoneReverbOptions.getReverbTime() != _receivedAudioStream.getRevebTime()) {
_zoneReverbOptions.setReverbTime(_receivedAudioStream.getRevebTime());
reverbChanged = true;
}
if (_zoneReverbOptions.getWetLevel() != _receivedAudioStream.getWetLevel()) {
_zoneReverbOptions.setWetLevel(_receivedAudioStream.getWetLevel());
reverbChanged = true;
}
if (_reverbOptions != &_zoneReverbOptions) {
_reverbOptions = &_zoneReverbOptions;
reverbChanged = true;
}
} else if (_reverbOptions != &_scriptReverbOptions) {
_reverbOptions = &_scriptReverbOptions;
reverbChanged = true;
}
if (reverbChanged) {
initGverb();
}
updateGverbOptions();
addReverb((int16_t*)outputBuffer.data(), numDeviceOutputSamples, _outputFormat);
}
}


@ -268,8 +268,11 @@ private:
// Adds Reverb
void initGverb();
void updateGverbOptions();
void addReverb(int16_t* samples, int numSamples, QAudioFormat& format);
void handleLocalEchoAndReverb(QByteArray& inputByteArray);
// Add sounds that we want the user to not hear themselves, by adding on top of mic input signal
void addProceduralSounds(int16_t* monoInput, int numSamples);


@ -386,7 +386,7 @@ Menu::Menu() :
#if defined(Q_OS_MAC)
#else
- QAction* vsyncAction = addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::RenderTargetFramerateVSyncOn, 0, true, this, SLOT(changeVSync()));
+ addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::RenderTargetFramerateVSyncOn, 0, true, this, SLOT(changeVSync()));
#endif
}


@ -49,7 +49,7 @@ const float PITCH_SPEED = 100.0f; // degrees/sec
const float COLLISION_RADIUS_SCALAR = 1.2f; // pertains to avatar-to-avatar collisions
const float COLLISION_RADIUS_SCALE = 0.125f;
- const float MAX_WALKING_SPEED = 4.5f;
+ const float MAX_WALKING_SPEED = 2.5f; // human walking speed
const float MAX_BOOST_SPEED = 0.5f * MAX_WALKING_SPEED; // keyboard motor gets additive boost below this speed
const float MIN_AVATAR_SPEED = 0.05f; // speed is set to zero below this


@ -81,6 +81,9 @@ void EntityTreeRenderer::init() {
_lastAvatarPosition = avatarPosition + glm::vec3(1.f, 1.f, 1.f);
connect(entityTree, &EntityTree::deletingEntity, this, &EntityTreeRenderer::deletingEntity);
connect(entityTree, &EntityTree::addingEntity, this, &EntityTreeRenderer::checkAndCallPreload);
connect(entityTree, &EntityTree::entityScriptChanging, this, &EntityTreeRenderer::checkAndCallPreload);
connect(entityTree, &EntityTree::changingEntityID, this, &EntityTreeRenderer::changingEntityID);
}
QScriptValue EntityTreeRenderer::loadEntityScript(const EntityItemID& entityItemID) {
@ -770,3 +773,20 @@ void EntityTreeRenderer::deletingEntity(const EntityItemID& entityID) {
_entityScripts.remove(entityID);
}
void EntityTreeRenderer::checkAndCallPreload(const EntityItemID& entityID) {
// load the entity script if needed...
QScriptValue entityScript = loadEntityScript(entityID);
if (entityScript.property("preload").isValid()) {
QScriptValueList entityArgs = createEntityArgs(entityID);
entityScript.property("preload").call(entityScript, entityArgs);
}
}
void EntityTreeRenderer::changingEntityID(const EntityItemID& oldEntityID, const EntityItemID& newEntityID) {
if (_entityScripts.contains(oldEntityID)) {
EntityScriptDetails details = _entityScripts[oldEntityID];
_entityScripts.remove(oldEntityID);
_entityScripts[newEntityID] = details;
}
}


@ -104,6 +104,8 @@ signals:
public slots:
void deletingEntity(const EntityItemID& entityID);
void changingEntityID(const EntityItemID& oldEntityID, const EntityItemID& newEntityID);
void checkAndCallPreload(const EntityItemID& entityID);
private:
QList<Model*> _releasedModels;


@ -106,7 +106,7 @@ void Batch::setInputStream(Slot startChannel, const BufferStream& stream) {
const Buffers& buffers = stream.getBuffers();
const Offsets& offsets = stream.getOffsets();
const Offsets& strides = stream.getStrides();
- for (int i = 0; i < buffers.size(); i++) {
+ for (unsigned int i = 0; i < buffers.size(); i++) {
setInputBuffer(startChannel + i, buffers[i], offsets[i], strides[i]);
}
}


@ -31,15 +31,15 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::GLBackend::do_glEnable),
(&::gpu::GLBackend::do_glDisable),
(&::gpu::GLBackend::do_glEnableClientState),
(&::gpu::GLBackend::do_glDisableClientState),
(&::gpu::GLBackend::do_glCullFace),
(&::gpu::GLBackend::do_glAlphaFunc),
(&::gpu::GLBackend::do_glDepthFunc),
(&::gpu::GLBackend::do_glDepthMask),
(&::gpu::GLBackend::do_glDepthFunc),
(&::gpu::GLBackend::do_glDepthMask),
(&::gpu::GLBackend::do_glDepthRange),
(&::gpu::GLBackend::do_glBindBuffer),
@ -57,18 +57,18 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::GLBackend::do_glPushMatrix),
(&::gpu::GLBackend::do_glPopMatrix),
(&::gpu::GLBackend::do_glMultMatrixf),
(&::gpu::GLBackend::do_glLoadMatrixf),
(&::gpu::GLBackend::do_glLoadIdentity),
(&::gpu::GLBackend::do_glRotatef),
(&::gpu::GLBackend::do_glScalef),
(&::gpu::GLBackend::do_glTranslatef),
(&::gpu::GLBackend::do_glLoadMatrixf),
(&::gpu::GLBackend::do_glLoadIdentity),
(&::gpu::GLBackend::do_glRotatef),
(&::gpu::GLBackend::do_glScalef),
(&::gpu::GLBackend::do_glTranslatef),
(&::gpu::GLBackend::do_glDrawArrays),
(&::gpu::GLBackend::do_glDrawArrays),
(&::gpu::GLBackend::do_glDrawRangeElements),
(&::gpu::GLBackend::do_glColorPointer),
(&::gpu::GLBackend::do_glNormalPointer),
(&::gpu::GLBackend::do_glTexCoordPointer),
(&::gpu::GLBackend::do_glColorPointer),
(&::gpu::GLBackend::do_glNormalPointer),
(&::gpu::GLBackend::do_glTexCoordPointer),
(&::gpu::GLBackend::do_glVertexPointer),
(&::gpu::GLBackend::do_glVertexAttribPointer),
@ -77,7 +77,7 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::GLBackend::do_glColor4f),
(&::gpu::GLBackend::do_glMaterialf),
(&::gpu::GLBackend::do_glMaterialf),
(&::gpu::GLBackend::do_glMaterialfv),
};
@ -112,9 +112,8 @@ static const GLenum _elementTypeToGLType[NUM_TYPES]= {
GLBackend::GLBackend() :
_inputFormat(0),
_inputAttributeActivation(0),
_needInputFormatUpdate(true),
_inputFormat(0),
_inputBuffersState(0),
_inputBuffers(_inputBuffersState.size(), BufferPointer(0)),
@ -122,7 +121,8 @@ GLBackend::GLBackend() :
_inputBufferStrides(_inputBuffersState.size(), 0),
_indexBuffer(0),
_indexBufferOffset(0)
_indexBufferOffset(0),
_inputAttributeActivation(0)
{
}
@ -138,7 +138,7 @@ void GLBackend::renderBatch(Batch& batch) {
GLBackend backend;
- for (int i = 0; i < numCommands; i++) {
+ for (unsigned int i = 0; i < numCommands; i++) {
CommandCall call = _commandCalls[(*command)];
(backend.*(call))(batch, *offset);
command++;
@ -203,7 +203,7 @@ void GLBackend::do_drawIndexed(Batch& batch, uint32 paramOffset) {
GLenum glType = _elementTypeToGLType[_indexBufferType];
- glDrawElements(mode, numIndices, glType, (GLvoid*)(startIndex + _indexBufferOffset));
+ glDrawElements(mode, numIndices, glType, reinterpret_cast<GLvoid*>(startIndex + _indexBufferOffset));
CHECK_GL_ERROR();
}
@ -265,7 +265,7 @@ void GLBackend::updateInput() {
}
// Manage Activation what was and what is expected now
- for (int i = 0; i < newActivation.size(); i++) {
+ for (unsigned int i = 0; i < newActivation.size(); i++) {
bool newState = newActivation[i];
if (newState != _inputAttributeActivation[i]) {
#if defined(SUPPORT_LEGACY_OPENGL)
@ -314,7 +314,7 @@ void GLBackend::updateInput() {
CHECK_GL_ERROR();
_inputBuffersState[bufferNum] = false;
for (int i = 0; i < channel._slots.size(); i++) {
for (unsigned int i = 0; i < channel._slots.size(); i++) {
const Stream::Attribute& attrib = attributes.at(channel._slots[i]);
GLuint slot = attrib._slot;
GLuint count = attrib._element.getDimensionCount();
@@ -325,16 +325,16 @@ void GLBackend::updateInput() {
if (slot < NUM_CLASSIC_ATTRIBS) {
switch (slot) {
case Stream::POSITION:
glVertexPointer(count, type, stride, (GLvoid*)pointer);
glVertexPointer(count, type, stride, reinterpret_cast<GLvoid*>(pointer));
break;
case Stream::NORMAL:
glNormalPointer(type, stride, (GLvoid*)pointer);
glNormalPointer(type, stride, reinterpret_cast<GLvoid*>(pointer));
break;
case Stream::COLOR:
glColorPointer(count, type, stride, (GLvoid*)pointer);
glColorPointer(count, type, stride, reinterpret_cast<GLvoid*>(pointer));
break;
case Stream::TEXCOORD:
glTexCoordPointer(count, type, stride, (GLvoid*)pointer);
glTexCoordPointer(count, type, stride, reinterpret_cast<GLvoid*>(pointer));
break;
};
} else {
@@ -342,7 +342,8 @@ void GLBackend::updateInput() {
{
#endif
GLboolean isNormalized = attrib._element.isNormalized();
glVertexAttribPointer(slot, count, type, isNormalized, stride, (GLvoid*)pointer);
glVertexAttribPointer(slot, count, type, isNormalized, stride,
reinterpret_cast<GLvoid*>(pointer));
}
CHECK_GL_ERROR();
}
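
A note on the casts in the hunks above: when a vertex buffer object is bound, the legacy gl*Pointer calls and glVertexAttribPointer take a byte offset smuggled through their pointer parameter, which is what the reinterpret_cast spells out. A minimal standalone sketch of that pattern; the bufferOffset helper is illustrative and not a function in this tree:

#include <cstdint>

// Turn a byte offset into the pointer-typed argument the GL entry points
// expect when a buffer object is bound. Illustrative helper only.
static inline const void* bufferOffset(std::uintptr_t offsetInBytes) {
    return reinterpret_cast<const void*>(offsetInBytes);
}

// Usage, assuming an array buffer is currently bound:
//     glVertexPointer(count, type, stride, bufferOffset(pointer));
//     glVertexAttribPointer(slot, count, type, isNormalized, stride, bufferOffset(pointer));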

View file

@@ -8,6 +8,8 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Context.h"
#include "Resource.h"
#include <QDebug>

View file

@@ -479,9 +479,7 @@ bool Model::findRayIntersectionAgainstSubMeshes(const glm::vec3& origin, const g
float bestDistance = std::numeric_limits<float>::max();
float distanceToSubMesh;
BoxFace subMeshFace;
BoxFace bestSubMeshFace;
int subMeshIndex = 0;
int bestSubMeshIndex = -1;
// If we hit the model's box, then consider the submeshes...
foreach(const AABox& subMeshBox, _calculatedMeshBoxes) {
@@ -489,10 +487,9 @@ bool Model::findRayIntersectionAgainstSubMeshes(const glm::vec3& origin, const g
if (subMeshBox.findRayIntersection(origin, direction, distanceToSubMesh, subMeshFace)) {
if (distanceToSubMesh < bestDistance) {
bestSubMeshIndex = subMeshIndex;
bestDistance = distanceToSubMesh;
bestSubMeshFace = subMeshFace;
intersectedSomething = true;
face = subMeshFace;
extraInfo = geometry.getModelNameOfMesh(subMeshIndex);
}
}

View file

@@ -52,9 +52,12 @@ JoystickScriptingInterface::JoystickScriptingInterface() :
for (int i = 0; i < joystickCount; i++) {
SDL_GameController* controller = SDL_GameControllerOpen(i);
SDL_JoystickID id = getInstanceId(controller);
Joystick* joystick = new Joystick(id, SDL_GameControllerName(controller), controller);
_openJoysticks[id] = joystick;
if (controller) {
SDL_JoystickID id = getInstanceId(controller);
Joystick* joystick = new Joystick(id, SDL_GameControllerName(controller), controller);
_openJoysticks[id] = joystick;
}
}
_isInitialized = true;

View file

@@ -0,0 +1,68 @@
//
// WebWindowClass.cpp
// interface/src/scripting
//
// Created by Ryan Huffman on 11/06/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QVBoxLayout>
#include <QApplication>
#include <QWebFrame>
#include <QWebView>
#include "WindowScriptingInterface.h"
#include "WebWindowClass.h"
ScriptEventBridge::ScriptEventBridge(QObject* parent) : QObject(parent) {
}
void ScriptEventBridge::emitWebEvent(const QString& data) {
emit webEventReceived(data);
}
void ScriptEventBridge::emitScriptEvent(const QString& data) {
emit scriptEventReceived(data);
}
WebWindowClass::WebWindowClass(const QString& url, int width, int height)
: QObject(NULL),
_window(new QWidget(NULL, Qt::Tool)),
_eventBridge(new ScriptEventBridge(this)) {
QWebView* webView = new QWebView(_window);
webView->page()->mainFrame()->addToJavaScriptWindowObject("EventBridge", _eventBridge);
webView->setUrl(url);
QVBoxLayout* layout = new QVBoxLayout(_window);
_window->setLayout(layout);
layout->addWidget(webView);
layout->setSpacing(0);
layout->setContentsMargins(0, 0, 0, 0);
_window->setGeometry(0, 0, width, height);
connect(this, &WebWindowClass::destroyed, _window, &QWidget::deleteLater);
}
WebWindowClass::~WebWindowClass() {
}
void WebWindowClass::setVisible(bool visible) {
QMetaObject::invokeMethod(_window, "setVisible", Qt::BlockingQueuedConnection, Q_ARG(bool, visible));
}
QScriptValue WebWindowClass::constructor(QScriptContext* context, QScriptEngine* engine) {
WebWindowClass* retVal;
QString file = context->argument(0).toString();
QMetaObject::invokeMethod(WindowScriptingInterface::getInstance(), "doCreateWebWindow", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(WebWindowClass*, retVal),
Q_ARG(const QString&, file),
Q_ARG(int, context->argument(1).toInteger()),
Q_ARG(int, context->argument(2).toInteger()));
connect(engine, &QScriptEngine::destroyed, retVal, &WebWindowClass::deleteLater);
return engine->newQObject(retVal);
}
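
This hunk does not show how WebWindowClass::constructor is exposed to running scripts; one plausible registration using the stock QtScript API is sketched below, where the registerWebWindow function and the global name "WebWindow" are assumptions rather than code from this change:

#include <QScriptEngine>
#include <QScriptValue>

#include "WebWindowClass.h"

// Hypothetical wiring: expose the constructor to scripts as a global
// WebWindow(url, width, height) function.
void registerWebWindow(QScriptEngine* engine) {
    QScriptValue constructorValue = engine->newFunction(WebWindowClass::constructor);
    engine->globalObject().setProperty("WebWindow", constructorValue);
}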

View file

@@ -0,0 +1,51 @@
//
// WebWindowClass.h
// interface/src/scripting
//
// Created by Ryan Huffman on 11/06/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_WebWindowClass_h
#define hifi_WebWindowClass_h
#include <QScriptContext>
#include <QScriptEngine>
class ScriptEventBridge : public QObject {
Q_OBJECT
public:
ScriptEventBridge(QObject* parent = NULL);
public slots:
void emitWebEvent(const QString& data);
void emitScriptEvent(const QString& data);
signals:
void webEventReceived(const QString& data);
void scriptEventReceived(const QString& data);
};
class WebWindowClass : public QObject {
Q_OBJECT
Q_PROPERTY(QObject* eventBridge READ getEventBridge)
public:
WebWindowClass(const QString& url, int width, int height);
~WebWindowClass();
static QScriptValue constructor(QScriptContext* context, QScriptEngine* engine);
public slots:
void setVisible(bool visible);
ScriptEventBridge* getEventBridge() const { return _eventBridge; }
private:
QWidget* _window;
ScriptEventBridge* _eventBridge;
};
#endif

View file

@@ -34,6 +34,10 @@ WindowScriptingInterface::WindowScriptingInterface() :
{
}
WebWindowClass* WindowScriptingInterface::doCreateWebWindow(const QString& url, int width, int height) {
return new WebWindowClass(url, width, height);
}
QScriptValue WindowScriptingInterface::hasFocus() {
return Application::getInstance()->getGLWidget()->hasFocus();
}

View file

@@ -15,6 +15,11 @@
#include <QObject>
#include <QScriptValue>
#include <QString>
#include <QFileDialog>
#include <QComboBox>
#include <QLineEdit>
#include "WebWindowClass.h"
class WindowScriptingInterface : public QObject {
Q_OBJECT
@@ -72,6 +77,8 @@ private slots:
void nonBlockingFormAccepted() { _nonBlockingFormActive = false; _formResult = QDialog::Accepted; emit nonBlockingFormClosed(); }
void nonBlockingFormRejected() { _nonBlockingFormActive = false; _formResult = QDialog::Rejected; emit nonBlockingFormClosed(); }
WebWindowClass* doCreateWebWindow(const QString& url, int width, int height);
private:
WindowScriptingInterface();

View file

@@ -984,7 +984,9 @@ void ApplicationOverlay::renderAudioMeter() {
const int AUDIO_METER_X = MIRROR_VIEW_LEFT_PADDING + MUTE_ICON_SIZE + AUDIO_METER_INSET + AUDIO_METER_GAP;
int audioMeterY;
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
bool boxed = Menu::getInstance()->isOptionChecked(MenuOption::Mirror) &&
!Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror);
if (boxed) {
audioMeterY = MIRROR_VIEW_HEIGHT + AUDIO_METER_GAP + MUTE_ICON_PADDING;
} else {
audioMeterY = AUDIO_METER_GAP + MUTE_ICON_PADDING;
@@ -1022,9 +1024,7 @@ void ApplicationOverlay::renderAudioMeter() {
renderCollisionOverlay(glWidget->width(), glWidget->height(), magnitude, 1.0f);
}
audio->renderToolBox(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP,
audioMeterY,
Menu::getInstance()->isOptionChecked(MenuOption::Mirror));
audio->renderToolBox(MIRROR_VIEW_LEFT_PADDING + AUDIO_METER_GAP, audioMeterY, boxed);
audio->renderScope(glWidget->width(), glWidget->height());

View file

@@ -156,6 +156,9 @@ void RearMirrorTools::displayIcon(QRect bounds, QRect iconBounds, GLuint texture
}
glEnd();
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
}

View file

@@ -129,7 +129,7 @@ int TextRenderer::draw(int x, int y, const char* str) {
leftBottom.x, rightTop.y, ls, tt, };
const int NUM_COLOR_SCALARS_PER_GLYPH = 4;
unsigned int colorBuffer[NUM_COLOR_SCALARS_PER_GLYPH] = { compactColor, compactColor, compactColor, compactColor };
int colorBuffer[NUM_COLOR_SCALARS_PER_GLYPH] = { compactColor, compactColor, compactColor, compactColor };
gpu::Buffer::Size offset = sizeof(vertexBuffer) * _numGlyphsBatched;
gpu::Buffer::Size colorOffset = sizeof(colorBuffer) * _numGlyphsBatched;
@@ -181,9 +181,9 @@ TextRenderer::TextRenderer(const Properties& properties) :
_color(properties.color),
_glyphsBuffer(new gpu::Buffer()),
_glyphsColorBuffer(new gpu::Buffer()),
_numGlyphsBatched(0),
_glyphsStreamFormat(new gpu::Stream::Format()),
_glyphsStream(new gpu::BufferStream())
_glyphsStream(new gpu::BufferStream()),
_numGlyphsBatched(0)
{
_glyphsStreamFormat->setAttribute(gpu::Stream::POSITION, 0, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::POS_XYZ), 0);
const int NUM_POS_COORDS = 2;
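
The reshuffled initializer list above only changes warning hygiene: members are constructed in declaration order regardless of how the initializer list is written, so listing them out of order earns a -Wreorder warning without changing behavior. A standalone illustration, not code from the tree:

#include <iostream>

struct Example {
    int first;
    int second;
    // second is listed first, which draws -Wreorder, but first is still
    // initialized before second because declaration order wins.
    Example() : second(2), first(1) {}
};

int main() {
    Example example;
    std::cout << example.first << " " << example.second << std::endl; // prints "1 2"
    return 0;
}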

View file

@@ -0,0 +1,118 @@
//
// Grid3DOverlay.cpp
// interface/src/ui/overlays
//
// Created by Ryan Huffman on 11/06/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Grid3DOverlay.h"
#include "Application.h"
ProgramObject Grid3DOverlay::_gridProgram;
Grid3DOverlay::Grid3DOverlay() : Base3DOverlay(),
_minorGridWidth(1.0),
_majorGridEvery(5) {
}
Grid3DOverlay::~Grid3DOverlay() {
}
void Grid3DOverlay::render(RenderArgs* args) {
if (!_visible) {
return; // do nothing if we're not visible
}
if (!_gridProgram.isLinked()) {
if (!_gridProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() + "shaders/grid.frag")) {
qDebug() << "Failed to compile: " + _gridProgram.log();
return;
}
if (!_gridProgram.link()) {
qDebug() << "Failed to link: " + _gridProgram.log();
return;
}
}
// Render code largely taken from MetavoxelEditor::render()
glDisable(GL_LIGHTING);
glDepthMask(GL_FALSE);
glPushMatrix();
glm::quat rotation = getRotation();
glm::vec3 axis = glm::axis(rotation);
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
glLineWidth(1.5f);
// center the grid around the camera position on the plane
glm::vec3 rotated = glm::inverse(rotation) * Application::getInstance()->getCamera()->getPosition();
float spacing = _minorGridWidth;
float alpha = getAlpha();
xColor color = getColor();
glm::vec3 position = getPosition();
const int GRID_DIVISIONS = 300;
const float MAX_COLOR = 255.0f;
float scale = GRID_DIVISIONS * spacing;
glColor4f(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
_gridProgram.bind();
// Minor grid
glPushMatrix();
{
glTranslatef(_minorGridWidth * (floorf(rotated.x / spacing) - GRID_DIVISIONS / 2),
spacing * (floorf(rotated.y / spacing) - GRID_DIVISIONS / 2), position.z);
glScalef(scale, scale, scale);
Application::getInstance()->getGeometryCache()->renderGrid(GRID_DIVISIONS, GRID_DIVISIONS);
}
glPopMatrix();
// Major grid
glPushMatrix();
{
glLineWidth(4.0f);
spacing *= _majorGridEvery;
glTranslatef(spacing * (floorf(rotated.x / spacing) - GRID_DIVISIONS / 2),
spacing * (floorf(rotated.y / spacing) - GRID_DIVISIONS / 2), position.z);
scale *= _majorGridEvery;
glScalef(scale, scale, scale);
Application::getInstance()->getGeometryCache()->renderGrid(GRID_DIVISIONS, GRID_DIVISIONS);
}
glPopMatrix();
_gridProgram.release();
glPopMatrix();
glEnable(GL_LIGHTING);
glDepthMask(GL_TRUE);
}
void Grid3DOverlay::setProperties(const QScriptValue& properties) {
Base3DOverlay::setProperties(properties);
if (properties.property("minorGridWidth").isValid()) {
_minorGridWidth = properties.property("minorGridWidth").toVariant().toFloat();
}
if (properties.property("majorGridEvery").isValid()) {
_majorGridEvery = properties.property("majorGridEvery").toVariant().toInt();
}
}
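
The glTranslatef calls in render() anchor the grid under the camera by snapping the camera position down to a grid line before offsetting by half the division count. The snapping arithmetic on its own looks like the small sketch below; snapToGrid is illustrative and not part of the codebase:

#include <cmath>
#include <cstdio>

// Snap a coordinate down to the nearest multiple of the grid spacing,
// mirroring the floorf(position / spacing) pattern used in render().
static float snapToGrid(float position, float spacing) {
    return std::floor(position / spacing) * spacing;
}

int main() {
    std::printf("%.2f\n", snapToGrid(7.3f, 5.0f));   // 5.00
    std::printf("%.2f\n", snapToGrid(-7.3f, 5.0f));  // -10.00
    return 0;
}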

View file

@@ -0,0 +1,44 @@
//
// Grid3DOverlay.h
// interface/src/ui/overlays
//
// Created by Ryan Huffman on 11/06/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_Grid3DOverlay_h
#define hifi_Grid3DOverlay_h
// include this before QGLWidget, which includes an earlier version of OpenGL
#include "InterfaceConfig.h"
#include <glm/glm.hpp>
#include <QGLWidget>
#include <SharedUtil.h>
#include "Base3DOverlay.h"
#include "renderer/ProgramObject.h"
class Grid3DOverlay : public Base3DOverlay {
Q_OBJECT
public:
Grid3DOverlay();
~Grid3DOverlay();
virtual void render(RenderArgs* args);
virtual void setProperties(const QScriptValue& properties);
private:
float _minorGridWidth;
int _majorGridEvery;
static ProgramObject _gridProgram;
};
#endif // hifi_Grid3DOverlay_h

View file

@@ -23,6 +23,7 @@
#include "Overlays.h"
#include "Rectangle3DOverlay.h"
#include "Sphere3DOverlay.h"
#include "Grid3DOverlay.h"
#include "TextOverlay.h"
#include "Text3DOverlay.h"
@@ -155,6 +156,8 @@ unsigned int Overlays::addOverlay(const QString& type, const QScriptValue& prope
thisOverlay = new Rectangle3DOverlay();
} else if (type == "line3d") {
thisOverlay = new Line3DOverlay();
} else if (type == "grid") {
thisOverlay = new Grid3DOverlay();
} else if (type == "localvoxels") {
thisOverlay = new LocalVoxelsOverlay();
} else if (type == "localmodels") {

View file

@@ -297,7 +297,7 @@ void Player::play() {
_injector->setOptions(_options);
}
void Player::setCurrentFrame(unsigned int currentFrame) {
void Player::setCurrentFrame(int currentFrame) {
if (_recording && currentFrame >= _recording->getFrameNumber()) {
stopPlaying();
return;
@@ -314,7 +314,7 @@ void Player::setCurrentFrame(unsigned int currentFrame) {
}
}
void Player::setCurrentTime(unsigned int currentTime) {
void Player::setCurrentTime(int currentTime) {
if (currentTime >= _recording->getLength()) {
stopPlaying();
return;
@@ -393,7 +393,7 @@ bool Player::computeCurrentFrame() {
_currentFrame = 0;
}
quint64 elapsed = glm::clamp(Player::elapsed() - _audioOffset, (qint64)0, (qint64)_recording->getLength());
qint64 elapsed = glm::clamp(Player::elapsed() - _audioOffset, (qint64)0, (qint64)_recording->getLength());
while(_currentFrame >= 0 &&
_recording->getFrameTimestamp(_currentFrame) > elapsed) {
--_currentFrame;
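
The switch from unsigned to signed types in this file goes hand in hand with arithmetic that can dip below zero, such as the clamp against (qint64)0 and the _currentFrame >= 0 loop above; with unsigned types those comparisons silently wrap instead. A minimal standalone illustration:

#include <cstdio>

int main() {
    unsigned int unsignedFrame = 0;
    int signedFrame = 0;

    // Unsigned subtraction wraps around instead of going negative, so a
    // guard such as "frame - 1 < 0" can never fire.
    std::printf("%u\n", unsignedFrame - 1); // 4294967295 with 32-bit unsigned int
    std::printf("%d\n", signedFrame - 1);   // -1, which compares and clamps sanely
    return 0;
}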

View file

@@ -44,8 +44,8 @@ public slots:
void loadRecording(RecordingPointer recording);
void play();
void setCurrentFrame(unsigned int currentFrame);
void setCurrentTime(unsigned int currentTime);
void setCurrentFrame(int currentFrame);
void setCurrentTime(int currentTime);
void setVolume(float volume);
void setAudioOffset(int audioOffset);
@@ -87,4 +87,4 @@ private:
bool _useSkeletonURL;
};
#endif // hifi_Player_h
#endif // hifi_Player_h

View file

@@ -145,14 +145,14 @@ public:
float getLargestDimension() const { return glm::length(_dimensions); } /// get the largest possible dimension
/// set dimensions in domain scale units (0.0 - 1.0) this will also reset radius appropriately
void setDimensions(const glm::vec3& value) { _dimensions = value; ; recalculateCollisionShape(); }
void setDimensions(const glm::vec3& value) { _dimensions = value; recalculateCollisionShape(); }
/// set dimensions in meter units (0.0 - TREE_SCALE) this will also reset radius appropriately
void setDimensionsInMeters(const glm::vec3& value) { setDimensions(value / (float) TREE_SCALE); }
static const glm::quat DEFAULT_ROTATION;
const glm::quat& getRotation() const { return _rotation; }
void setRotation(const glm::quat& rotation) { _rotation = rotation; ; recalculateCollisionShape(); }
void setRotation(const glm::quat& rotation) { _rotation = rotation; recalculateCollisionShape(); }
static const float DEFAULT_GLOW_LEVEL;
float getGlowLevel() const { return _glowLevel; }
@@ -169,7 +169,7 @@ public:
static const glm::vec3 DEFAULT_VELOCITY;
static const glm::vec3 NO_VELOCITY;
static const float EPSILON_VELOCITY_LENGTH;
const glm::vec3 getVelocity() const { return _velocity; } /// velocity in domain scale units (0.0-1.0) per second
const glm::vec3& getVelocity() const { return _velocity; } /// velocity in domain scale units (0.0-1.0) per second
glm::vec3 getVelocityInMeters() const { return _velocity * (float) TREE_SCALE; } /// get velocity in meters
void setVelocity(const glm::vec3& value) { _velocity = value; } /// velocity in domain scale units (0.0-1.0) per second
void setVelocityInMeters(const glm::vec3& value) { _velocity = value / (float) TREE_SCALE; } /// velocity in meters

View file

@@ -124,6 +124,7 @@ bool EntityTree::updateEntity(const EntityItemID& entityID, const EntityItemProp
} else {
// check to see if we need to simulate this entity...
EntityItem::SimulationState oldState = existingEntity->getSimulationState();
QString entityScriptBefore = existingEntity->getScript();
UpdateEntityOperator theOperator(this, containingElement, existingEntity, properties);
recurseTreeWithOperator(&theOperator);
@@ -131,6 +132,12 @@ bool EntityTree::updateEntity(const EntityItemID& entityID, const EntityItemProp
EntityItem::SimulationState newState = existingEntity->getSimulationState();
changeEntityState(existingEntity, oldState, newState);
QString entityScriptAfter = existingEntity->getScript();
if (entityScriptBefore != entityScriptAfter) {
emitEntityScriptChanging(entityID); // the entity script has changed
}
}
containingElement = getContainingElement(entityID);
@@ -168,6 +175,7 @@ EntityItem* EntityTree::addEntity(const EntityItemID& entityID, const EntityItem
if (result) {
// this does the actual adding of the entity
addEntityItem(result);
emitAddingEntity(entityID);
}
return result;
}
@@ -184,6 +192,14 @@ void EntityTree::trackDeletedEntity(const EntityItemID& entityID) {
}
}
void EntityTree::emitAddingEntity(const EntityItemID& entityItemID) {
emit addingEntity(entityItemID);
}
void EntityTree::emitEntityScriptChanging(const EntityItemID& entityItemID) {
emit entityScriptChanging(entityItemID);
}
void EntityTree::deleteEntity(const EntityItemID& entityID) {
emit deletingEntity(entityID);
@@ -290,6 +306,7 @@ void EntityTree::handleAddEntityResponse(const QByteArray& packet) {
EntityItemID creatorTokenVersion = searchEntityID.convertToCreatorTokenVersion();
EntityItemID knownIDVersion = searchEntityID.convertToKnownIDVersion();
// First look for and find the "viewed version" of this entity... it's possible we got
// the known ID version sent to us between us creating our local version, and getting this
// remapping message. If this happened, we actually want to find and delete that version of
@@ -310,6 +327,10 @@ void EntityTree::handleAddEntityResponse(const QByteArray& packet) {
creatorTokenContainingElement->updateEntityItemID(creatorTokenVersion, knownIDVersion);
setContainingElement(creatorTokenVersion, NULL);
setContainingElement(knownIDVersion, creatorTokenContainingElement);
// because the ID of the entity is switching, we need to emit these signals for any
// listeners who care about the changing of IDs
emit changingEntityID(creatorTokenVersion, knownIDVersion);
}
}
unlock();
@@ -981,7 +1002,6 @@ int EntityTree::processEraseMessageDetails(const QByteArray& dataByteArray, cons
return processedBytes;
}
EntityTreeElement* EntityTree::getContainingElement(const EntityItemID& entityItemID) /*const*/ {
// TODO: do we need to make this thread safe? Or is it acceptable as is
if (_entityToElementMap.contains(entityItemID)) {

View file

@@ -140,10 +140,16 @@ public:
void trackDeletedEntity(const EntityItemID& entityID);
void emitAddingEntity(const EntityItemID& entityItemID);
void emitEntityScriptChanging(const EntityItemID& entityItemID);
QList<EntityItem*>& getMovingEntities() { return _movingEntities; }
signals:
void deletingEntity(const EntityItemID& entityID);
void addingEntity(const EntityItemID& entityID);
void entityScriptChanging(const EntityItemID& entityItemID);
void changingEntityID(const EntityItemID& oldEntityID, const EntityItemID& newEntityID);
private:
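
A hedged sketch of how a listener might consume the new addingEntity and entityScriptChanging signals; the watchEntityChanges function and its log messages are assumptions, not code from this change:

#include <QDebug>
#include <QObject>

#include "EntityTree.h"

void watchEntityChanges(EntityTree* tree) {
    QObject::connect(tree, &EntityTree::addingEntity, [](const EntityItemID&) {
        qDebug() << "entity added";
    });
    QObject::connect(tree, &EntityTree::entityScriptChanging, [](const EntityItemID&) {
        qDebug() << "entity script changed; reload it";
    });
}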

View file

@@ -726,7 +726,7 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
entityItemID = EntityItemID::readEntityItemIDFromBuffer(dataAt, bytesLeftToRead);
entityItem = _myTree->findEntityByEntityItemID(entityItemID);
}
// If the item already exists in our tree, we want to do the following...
// 1) allow the existing item to read from the databuffer
// 2) check to see if after reading the item, the containing element is still correct, fix it if needed
@@ -734,10 +734,13 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
// TODO: Do we need to also do this?
// 3) remember the old cube for the entity so we can mark it as dirty
if (entityItem) {
QString entityScriptBefore = entityItem->getScript();
bool bestFitBefore = bestFitEntityBounds(entityItem);
EntityTreeElement* currentContainingElement = _myTree->getContainingElement(entityItemID);
EntityItem::SimulationState oldState = entityItem->getSimulationState();
bytesForThisEntity = entityItem->readEntityDataFromBuffer(dataAt, bytesLeftToRead, args);
EntityItem::SimulationState newState = entityItem->getSimulationState();
if (oldState != newState) {
_myTree->changeEntityState(entityItem, oldState, newState);
@@ -755,6 +758,12 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
}
}
}
QString entityScriptAfter = entityItem->getScript();
if (entityScriptBefore != entityScriptAfter) {
_myTree->emitEntityScriptChanging(entityItemID); // the entity script has changed
}
} else {
entityItem = EntityTypes::constructEntityItem(dataAt, bytesLeftToRead, args);
if (entityItem) {
@@ -762,6 +771,7 @@ int EntityTreeElement::readElementDataFromBuffer(const unsigned char* data, int
addEntityItem(entityItem); // add this new entity to this element's entities
entityItemID = entityItem->getEntityItemID();
_myTree->setContainingElement(entityItemID, this);
_myTree->emitAddingEntity(entityItemID); // we just added an entity
EntityItem::SimulationState newState = entityItem->getSimulationState();
_myTree->changeEntityState(entityItem, EntityItem::Static, newState);
}

View file

@@ -209,8 +209,15 @@ bool LimitedNodeList::packetVersionAndHashMatch(const QByteArray& packet) {
if (hashFromPacketHeader(packet) == hashForPacketAndConnectionUUID(packet, sendingNode->getConnectionSecret())) {
return true;
} else {
qDebug() << "Packet hash mismatch on" << checkType << "- Sender"
static QMultiMap<QUuid, PacketType> hashDebugSuppressMap;
QUuid senderUUID = uuidFromPacketHeader(packet);
if (!hashDebugSuppressMap.contains(senderUUID, checkType)) {
qDebug() << "Packet hash mismatch on" << checkType << "- Sender"
<< uuidFromPacketHeader(packet);
hashDebugSuppressMap.insert(senderUUID, checkType);
}
}
} else {
static QString repeatedMessage
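
The QMultiMap added above amounts to a log-once-per-(sender, packet type) policy. The same idea in a standalone sketch, where logMismatchOnce and PacketKind are illustrative names:

#include <QDebug>
#include <QMultiMap>
#include <QUuid>

typedef int PacketKind; // stand-in for the real packet type enum

// Emit the warning only the first time a given (sender, kind) pair is seen,
// so one misbehaving peer cannot flood the log with identical lines.
static void logMismatchOnce(const QUuid& sender, PacketKind kind) {
    static QMultiMap<QUuid, PacketKind> suppressed;
    if (!suppressed.contains(sender, kind)) {
        qDebug() << "Packet hash mismatch from sender" << sender;
        suppressed.insert(sender, kind);
    }
}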

View file

@@ -311,6 +311,11 @@ QScriptValue ScriptEngine::registerGlobalObject(const QString& name, QObject* ob
return QScriptValue::NullValue;
}
void ScriptEngine::registerFunction(const QString& name, QScriptEngine::FunctionSignature fun, int numArguments) {
QScriptValue scriptFun = newFunction(fun, numArguments);
globalObject().setProperty(name, scriptFun);
}
void ScriptEngine::registerGetterSetter(const QString& name, QScriptEngine::FunctionSignature getter,
QScriptEngine::FunctionSignature setter, QScriptValue object) {
QScriptValue setterFunction = newFunction(setter, 1);
@@ -625,7 +630,7 @@ void ScriptEngine::stopTimer(QTimer *timer) {
}
}
QUrl ScriptEngine::resolveInclude(const QString& include) const {
QUrl ScriptEngine::resolvePath(const QString& include) const {
// first lets check to see if it's already a full URL
QUrl url(include);
if (!url.scheme().isEmpty()) {
@@ -651,7 +656,7 @@ void ScriptEngine::print(const QString& message) {
}
void ScriptEngine::include(const QString& includeFile) {
QUrl url = resolveInclude(includeFile);
QUrl url = resolvePath(includeFile);
QString includeContents;
if (url.scheme() == "http" || url.scheme() == "https" || url.scheme() == "ftp") {
@@ -689,7 +694,7 @@ void ScriptEngine::include(const QString& includeFile) {
}
void ScriptEngine::load(const QString& loadFile) {
QUrl url = resolveInclude(loadFile);
QUrl url = resolvePath(loadFile);
emit loadScript(url.toString(), false);
}
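
The new registerFunction() helper takes a plain QtScript native function plus an optional argument count; a sketch of the kind of callee it expects, where versionFunction and the "version" name are illustrative rather than code from the tree:

#include <QScriptContext>
#include <QScriptEngine>
#include <QScriptValue>
#include <QString>

// Illustrative native function: scripts that call version() get a string back.
static QScriptValue versionFunction(QScriptContext* context, QScriptEngine* engine) {
    Q_UNUSED(context);
    Q_UNUSED(engine);
    return QScriptValue(QString("1.0"));
}

// During engine setup (scriptEngine is assumed to be a ScriptEngine*):
//     scriptEngine->registerFunction("version", versionFunction, 0);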

View file

@@ -65,6 +65,7 @@ public:
QScriptValue registerGlobalObject(const QString& name, QObject* object); /// registers a global object by name
void registerGetterSetter(const QString& name, QScriptEngine::FunctionSignature getter,
QScriptEngine::FunctionSignature setter, QScriptValue object = QScriptValue::NullValue);
void registerFunction(const QString& name, QScriptEngine::FunctionSignature fun, int numArguments = -1);
Q_INVOKABLE void setIsAvatar(bool isAvatar);
bool isAvatar() const { return _isAvatar; }
@@ -103,6 +104,7 @@ public slots:
void include(const QString& includeFile);
void load(const QString& loadfile);
void print(const QString& message);
QUrl resolvePath(const QString& path) const;
void nodeKilled(SharedNodePointer node);
@@ -131,7 +133,6 @@ protected:
int _numAvatarSoundSentBytes;
private:
QUrl resolveInclude(const QString& include) const;
void sendAvatarIdentityPacket();
void sendAvatarBillboardPacket();