Merge branch 'master' of https://github.com/highfidelity/hifi into moreLightPropertiesAttempt2

ZappoMan 2014-10-28 10:50:02 -07:00
commit 7d2ef7923b
109 changed files with 4596 additions and 1200 deletions
assignment-client/src
cmake
domain-server/resources
examples
interface
libraries


@ -428,8 +428,8 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
}
int AudioMixer::prepareMixForListeningNode(Node* node) {
AvatarAudioStream* nodeAudioStream = ((AudioMixerClientData*) node->getLinkedData())->getAvatarAudioStream();
AudioMixerClientData* listenerNodeData = (AudioMixerClientData*)node->getLinkedData();
AvatarAudioStream* nodeAudioStream = static_cast<AudioMixerClientData*>(node->getLinkedData())->getAvatarAudioStream();
AudioMixerClientData* listenerNodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
// zero out the client mix for this node
memset(_preMixSamples, 0, sizeof(_preMixSamples));
@ -730,6 +730,33 @@ void AudioMixer::run() {
memcpy(dataAt, &sequence, sizeof(quint16));
dataAt += sizeof(quint16);
// Pack stream properties
bool inAZone = false;
for (int i = 0; i < _zoneReverbSettings.size(); ++i) {
AudioMixerClientData* data = static_cast<AudioMixerClientData*>(node->getLinkedData());
glm::vec3 streamPosition = data->getAvatarAudioStream()->getPosition();
if (_audioZones[_zoneReverbSettings[i].zone].contains(streamPosition)) {
bool hasReverb = true;
float reverbTime = _zoneReverbSettings[i].reverbTime;
float wetLevel = _zoneReverbSettings[i].wetLevel;
memcpy(dataAt, &hasReverb, sizeof(bool));
dataAt += sizeof(bool);
memcpy(dataAt, &reverbTime, sizeof(float));
dataAt += sizeof(float);
memcpy(dataAt, &wetLevel, sizeof(float));
dataAt += sizeof(float);
inAZone = true;
break;
}
}
if (!inAZone) {
bool hasReverb = false;
memcpy(dataAt, &hasReverb, sizeof(bool));
dataAt += sizeof(bool);
}
// pack mixed audio samples
memcpy(dataAt, _mixSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
dataAt += NETWORK_BUFFER_LENGTH_BYTES_STEREO;
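The hunk above prepends an optional reverb header to each mixed-audio packet: a single bool flag, followed by the zone's reverb time and wet level as floats, but only when the listener's stream position falls inside a configured audio zone. The receiving side is not part of this diff; a minimal sketch of the matching unpack, assuming the same byte layout and an illustrative helper name, could look like this:

    // Sketch only (not in this commit): mirrors the packing order above.
    // 'data' is assumed to point just past the quint16 sequence number.
    #include <string.h>

    const char* readReverbHeader(const char* data, bool& hasReverb, float& reverbTime, float& wetLevel) {
        memcpy(&hasReverb, data, sizeof(bool));
        data += sizeof(bool);
        if (hasReverb) {
            memcpy(&reverbTime, data, sizeof(float));   // reverb decay time, seconds
            data += sizeof(float);
            memcpy(&wetLevel, data, sizeof(float));     // wet level, dB
            data += sizeof(float);
        }
        return data; // the mixed stereo samples start here
    }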
@ -1033,6 +1060,38 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
}
}
}
const QString REVERB = "reverb";
if (audioEnvGroupObject[REVERB].isArray()) {
const QJsonArray& reverb = audioEnvGroupObject[REVERB].toArray();
const QString ZONE = "zone";
const QString REVERB_TIME = "reverb_time";
const QString WET_LEVEL = "wet_level";
for (int i = 0; i < reverb.count(); ++i) {
QJsonObject reverbObject = reverb[i].toObject();
if (reverbObject.contains(ZONE) &&
reverbObject.contains(REVERB_TIME) &&
reverbObject.contains(WET_LEVEL)) {
bool okReverbTime, okWetLevel;
QString zone = reverbObject.value(ZONE).toString();
float reverbTime = reverbObject.value(REVERB_TIME).toString().toFloat(&okReverbTime);
float wetLevel = reverbObject.value(WET_LEVEL).toString().toFloat(&okWetLevel);
if (okReverbTime && okWetLevel && _audioZones.contains(zone)) {
ReverbSettings settings;
settings.zone = zone;
settings.reverbTime = reverbTime;
settings.wetLevel = wetLevel;
_zoneReverbSettings.push_back(settings);
qDebug() << "Added Reverb:" << zone << reverbTime << wetLevel;
}
}
}
}
}
}
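The parser above looks for a "reverb" array inside the audio environment settings group (the group key itself is outside this hunk); each entry must provide "zone", "reverb_time" and "wet_level", with the two numbers stored as strings since they are read via toString().toFloat(). A minimal sketch of building such an object with Qt's JSON classes, e.g. for exercising the parser in a test (not part of this commit; the zone name and values are illustrative):

    #include <QJsonArray>
    #include <QJsonObject>

    QJsonObject makeReverbSettings() {
        QJsonObject entry;
        entry["zone"] = "Audio_Zone";    // must match a zone already defined in _audioZones
        entry["reverb_time"] = "4.0";    // seconds
        entry["wet_level"] = "-6.0";     // dB
        QJsonArray reverb;
        reverb.append(entry);
        QJsonObject audioEnvGroup;
        audioEnvGroup["reverb"] = reverb;
        return audioEnvGroup;            // corresponds to audioEnvGroupObject in the code above
    }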


@ -82,7 +82,13 @@ private:
float coefficient;
};
QVector<ZonesSettings> _zonesSettings;
struct ReverbSettings {
QString zone;
float reverbTime;
float wetLevel;
};
QVector<ReverbSettings> _zoneReverbSettings;
static InboundAudioStream::Settings _streamSettings;
static bool _printStreamStats;


@ -237,8 +237,7 @@ void MetavoxelSession::update() {
// go back to the beginning with the current packet and note that there's a delta pending
_sequencer.getOutputStream().getUnderlying().device()->seek(start);
MetavoxelDeltaPendingMessage msg = { ++_reliableDeltaID, sendRecord->getPacketNumber(),
_sequencer.getIncomingPacketNumber() };
MetavoxelDeltaPendingMessage msg = { ++_reliableDeltaID, sendRecord->getPacketNumber(), _lodPacketNumber };
out << (_reliableDeltaMessage = QVariant::fromValue(msg));
_sequencer.endPacket();
@ -265,7 +264,8 @@ void MetavoxelSession::handleMessage(const QVariant& message) {
if (userType == ClientStateMessage::Type) {
ClientStateMessage state = message.value<ClientStateMessage>();
_lod = state.lod;
_lodPacketNumber = _sequencer.getIncomingPacketNumber();
} else if (userType == MetavoxelEditMessage::Type) {
QMetaObject::invokeMethod(_sender->getServer(), "applyEdit", Q_ARG(const MetavoxelEditMessage&,
message.value<MetavoxelEditMessage>()));


@ -127,6 +127,7 @@ private:
MetavoxelSender* _sender;
MetavoxelLOD _lod;
int _lodPacketNumber;
ReliableChannel* _reliableDeltaChannel;
int _reliableDeltaReceivedOffset;


@ -12,7 +12,7 @@ macro(SETUP_HIFI_LIBRARY)
project(${TARGET_NAME})
# grab the implementation and header files
file(GLOB LIB_SRCS src/*.h src/*.cpp)
file(GLOB_RECURSE LIB_SRCS "src/*.h" "src/*.cpp")
set(LIB_SRCS ${LIB_SRCS})
# create a library and set the property so it can be referenced later


@ -16,8 +16,11 @@
#
if (WIN32)
find_library(ATL_LIBRARY_RELEASE atls PATH_SUFFIXES "7600.16385.1/lib/ATL/i386" HINTS "C:\\WinDDK")
find_library(ATL_LIBRARY_DEBUG atlsd PATH_SUFFIXES "7600.16385.1/lib/ATL/i386" HINTS "C:\\WinDDK")
set(ATL_SEARCH_DIRS "C:\\WinDDK")
find_path(ATL_INCLUDE_DIRS atlbase.h PATH_SUFFIXES "7600.16385.1/inc/atl71" HINTS ${ATL_SEARCH_DIRS})
find_library(ATL_LIBRARY_RELEASE atls PATH_SUFFIXES "7600.16385.1/lib/ATL/i386" HINTS ${ATL_SEARCH_DIRS})
find_library(ATL_LIBRARY_DEBUG atlsd PATH_SUFFIXES "7600.16385.1/lib/ATL/i386" HINTS ${ATL_SEARCH_DIRS})
include(SelectLibraryConfigurations)
select_library_configurations(ATL)
@ -26,4 +29,4 @@ endif ()
set(ATL_LIBRARIES "${ATL_LIBRARY}")
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(ATL DEFAULT_MSG ATL_LIBRARIES)
find_package_handle_standard_args(ATL DEFAULT_MSG ATL_INCLUDE_DIRS ATL_LIBRARIES)


@ -0,0 +1,39 @@
# FindGVerb.cmake
#
# Try to find the Gverb library.
#
# You must provide a GVERB_ROOT_DIR which contains src and include directories
#
# Once done this will define
#
# GVERB_FOUND - system found Gverb
# GVERB_INCLUDE_DIRS - the Gverb include directory
#
# Copyright 2014 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
if (GVERB_INCLUDE_DIRS)
# in cache already
set(GVERB_FOUND TRUE)
else (GVERB_INCLUDE_DIRS)
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("gverb")
find_path(GVERB_INCLUDE_DIRS gverb.h PATH_SUFFIXES include HINTS ${GVERB_SEARCH_DIRS})
find_path(GVERB_SRC_DIRS gverb.c PATH_SUFFIXES src HINTS ${GVERB_SEARCH_DIRS})
if (GVERB_INCLUDE_DIRS)
set(GVERB_FOUND TRUE)
endif (GVERB_INCLUDE_DIRS)
if (GVERB_FOUND)
message(STATUS "Found Gverb: ${GVERB_INCLUDE_DIRS}")
else (GVERB_FOUND)
message(FATAL_ERROR "Could NOT find Gverb. Read ./interface/external/gverb/readme.txt")
endif (GVERB_FOUND)
endif(GVERB_INCLUDE_DIRS)


@ -0,0 +1,49 @@
#
# FindNSIGHT.cmake
#
# Try to find NSIGHT NvToolsExt library and include path.
# Once done this will define
#
# NSIGHT_FOUND
# NSIGHT_INCLUDE_DIRS
# NSIGHT_LIBRARIES
#
# Created on 10/27/2014 by Sam Gateau
# Copyright 2014 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
if (WIN32)
find_path(NSIGHT_INCLUDE_DIRS
NAMES
nvToolsExt.h
PATH_SUFFIXES
include
PATHS
"C:/Program Files/NVIDIA Corporation/NvToolsExt")
find_library(NSIGHT_LIBRARY_RELEASE nvToolsExt32_1
PATH_SUFFIXES
"lib/Win32" "lib"
PATHS
"C:/Program Files/NVIDIA Corporation/NvToolsExt")
find_library(NSIGHT_LIBRARY_DEBUG nvToolsExt32_1
PATH_SUFFIXES
"lib/Win32" "lib"
PATHS
"C:/Program Files/NVIDIA Corporation/NvToolsExt")
include(SelectLibraryConfigurations)
select_library_configurations(NSIGHT)
endif ()
set(NSIGHT_LIBRARIES "${NSIGHT_LIBRARY}")
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(NSIGHT DEFAULT_MSG NSIGHT_INCLUDE_DIRS NSIGHT_LIBRARIES)
mark_as_advanced(NSIGHT_INCLUDE_DIRS NSIGHT_LIBRARIES NSIGHT_SEARCH_DIRS)


@ -154,6 +154,33 @@
"placeholder": "0.18"
}
]
},
{
"name": "reverb",
"type": "table",
"label": "Reverb Settings",
"help": "In this table you can set custom reverb values for each audio zone",
"numbered": true,
"columns": [
{
"name": "zone",
"label": "Zone",
"can_set": true,
"placeholder": "Audio_Zone"
},
{
"name": "reverb_time",
"label": "Reverb Decay Time",
"can_set": true,
"placeholder": "(in sec)"
},
{
"name": "wet_level",
"label": "Wet Level",
"can_set": true,
"placeholder": "(in db)"
}
]
}
]
},


@ -529,7 +529,7 @@ function deleteTableRow(delete_glyphicon) {
row.html("<input type='hidden' class='form-control' name='"
+ row.attr('name') + "' data-changed='true' value=''>");
} else {
if (table.find('.' + Settings.DATA_ROW_CLASS).length) {
if (table.find('.' + Settings.DATA_ROW_CLASS).length > 1) {
updateDataChangedForSiblingRows(row)
// this isn't the last row - we can just remove it

examples/audioReverbOn.js Normal file

@ -0,0 +1,39 @@
//
// audioReverbOn.js
// examples
//
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
// http://wiki.audacityteam.org/wiki/GVerb#Instant_reverb_settings
var audioOptions = new AudioEffectOptions({
// Square Meters
maxRoomSize: 50,
roomSize: 50,
// Seconds
reverbTime: 4,
// Between 0 - 1
damping: 0.50,
inputBandwidth: 0.75,
// dB
earlyLevel: -22,
tailLevel: -28,
dryLevel: 0,
wetLevel: 6
});
AudioDevice.setReverbOptions(audioOptions);
AudioDevice.setReverb(true);
print("Reverb is now on with the updated options.");
function scriptEnding() {
AudioDevice.setReverb(false);
print("Reverb is now off.");
}
Script.scriptEnding.connect(scriptEnding);

examples/guidedTour.js Normal file

@ -0,0 +1,47 @@
//
// TourGuide.js
//
// This script will follow another person, if their display name is "Tour Guide"
//
var leaderName = "Tour Guide";
var guide;
var isGuide = false;
var lastGuidePosition = { x:0, y:0, z:0 };
var MIN_CHANGE = 2.0;
var LANDING_DISTANCE = 2.0;
var LANDING_RANDOM = 0.2;
function update(deltaTime) {
if (Math.random() < deltaTime) {
guide = AvatarList.avatarWithDisplayName(leaderName);
if (guide && !isGuide) {
print("Found a tour guide!");
isGuide = true;
} else if (!guide && isGuide) {
print("Lost My Guide");
isGuide = false;
}
}
if (guide) {
// Check whether guide has moved, update if so
if (Vec3.length(lastGuidePosition) == 0.0) {
lastGuidePosition = guide.position;
} else {
if (Vec3.length(Vec3.subtract(lastGuidePosition, guide.position)) > MIN_CHANGE) {
var meToGuide = Vec3.multiply(Vec3.normalize(Vec3.subtract(guide.position, MyAvatar.position)), LANDING_DISTANCE);
var newPosition = Vec3.subtract(guide.position, meToGuide);
newPosition = Vec3.sum(newPosition, { x: Math.random() * LANDING_RANDOM - LANDING_RANDOM / 2.0,
y: 0,
z: Math.random() * LANDING_RANDOM - LANDING_RANDOM / 2.0 });
MyAvatar.position = newPosition;
lastGuidePosition = guide.position;
MyAvatar.orientation = guide.orientation;
}
}
}
}
Script.update.connect(update);


@ -17,6 +17,8 @@ var willMove = false;
var warpActive = false;
var warpPosition = { x: 0, y: 0, z: 0 };
var hipsToEyes;
// Overlays to show target location
var WARP_SPHERE_SIZE = 0.15;
@ -43,8 +45,11 @@ var movingWithHead = false;
var headStartPosition, headStartDeltaPitch, headStartFinalPitch, headStartRoll, headStartYaw;
var deltaYaw = 0.0;
var keyDownTime = 0.0;
var timeSinceLastUp = 0.0;
var watchAvatar = false;
var oldMode;
var lastYawTurned = 0.0;
var startPullbackPosition;
function saveCameraState() {
oldMode = Camera.getMode();
@ -64,46 +69,55 @@ function activateWarp() {
var TRIGGER_PULLBACK_DISTANCE = 0.04;
var WATCH_AVATAR_DISTANCE = 1.5;
var MAX_WARP_YAW = 40.0;
var MAX_PULLBACK_YAW = 5.0;
var MAX_PULLBACK_PITCH = 5.0;
var sound = new Sound("http://public.highfidelity.io/sounds/Footsteps/FootstepW2Right-12db.wav");
function playSound() {
var options = new AudioInjectionOptions();
var position = MyAvatar.position;
options.position = position;
options.volume = 0.5;
options.volume = 1.0;
Audio.playSound(sound, options);
}
var WARP_SMOOTHING = 0.90;
var WARP_START_TIME = 0.50;
var WARP_START_DISTANCE = 1.0;
var WARP_START_DISTANCE = 1.5;
var WARP_SENSITIVITY = 0.15;
var fixedHeight = true;
function updateWarp() {
if (!warpActive) return;
var look = Quat.getFront(Camera.getOrientation());
var viewEulers = Quat.safeEulerAngles(Camera.getOrientation());
var deltaPosition = Vec3.subtract(MyAvatar.getTrackedHeadPosition(), headStartPosition);
var deltaPitch = MyAvatar.getHeadFinalPitch() - headStartFinalPitch;
deltaYaw = MyAvatar.getHeadFinalYaw() - headStartYaw;
viewEulers.x -= deltaPitch;
var look = Quat.getFront(Quat.fromVec3Degrees(viewEulers));
willMove = (!watchAvatar && (Math.abs(deltaYaw) < MAX_WARP_YAW) && (keyDownTime > WARP_START_TIME));
willMove = (keyDownTime > WARP_START_TIME);
if (willMove) {
//var distance = Math.pow((deltaPitch - WARP_PITCH_DEAD_ZONE) * WARP_SENSITIVITY, 2.0);
var distance = Math.exp(deltaPitch * WARP_SENSITIVITY) * WARP_START_DISTANCE;
var warpDirection = Vec3.normalize({ x: look.x, y: 0, z: look.z });
warpPosition = Vec3.mix(Vec3.sum(MyAvatar.position, Vec3.multiply(warpDirection, distance)), warpPosition, WARP_SMOOTHING);
var warpDirection = Vec3.normalize({ x: look.x, y: (fixedHeight ? 0 : look.y), z: look.z });
var startPosition = (watchAvatar ? Camera.getPosition(): MyAvatar.getEyePosition());
warpPosition = Vec3.mix(Vec3.sum(startPosition, Vec3.multiply(warpDirection, distance)), warpPosition, WARP_SMOOTHING);
}
var height = MyAvatar.getEyePosition().y - MyAvatar.position.y;
var cameraPosition;
if (!watchAvatar && (Math.abs(deltaYaw) < MAX_PULLBACK_YAW) && (deltaPosition.z > TRIGGER_PULLBACK_DISTANCE)) {
if (!watchAvatar &&
(Math.abs(deltaYaw) < MAX_PULLBACK_YAW) &&
(Math.abs(deltaPitch) < MAX_PULLBACK_PITCH) &&
(Vec3.length(deltaPosition) > TRIGGER_PULLBACK_DISTANCE)) {
saveCameraState();
var cameraPosition = Vec3.subtract(MyAvatar.position, Vec3.multiplyQbyV(Camera.getOrientation(), { x: 0, y: -height, z: -height * WATCH_AVATAR_DISTANCE }));
cameraPosition = Vec3.subtract(MyAvatar.position, Vec3.multiplyQbyV(Camera.getOrientation(), { x: 0, y: -height, z: -height * WATCH_AVATAR_DISTANCE }));
Camera.setPosition(cameraPosition);
cameraPosition = Camera.getPosition();
startPullbackPosition = cameraPosition;
watchAvatar = true;
}
@ -129,13 +143,14 @@ function finishWarp() {
visible: false,
});
if (willMove) {
warpPosition.y -= hipsToEyes;
MyAvatar.position = warpPosition;
playSound();
}
}
function update(deltaTime) {
timeSinceLastUp += deltaTime;
if (movingWithHead) {
keyDownTime += deltaTime;
updateWarp();
@ -143,9 +158,10 @@ function update(deltaTime) {
}
Controller.keyPressEvent.connect(function(event) {
if (event.text == "SPACE" && !movingWithHead) {
if (event.text == "SPACE" && !event.isAutoRepeat && !movingWithHead) {
keyDownTime = 0.0;
movingWithHead = true;
hipsToEyes = MyAvatar.getEyePosition().y - MyAvatar.position.y;
headStartPosition = MyAvatar.getTrackedHeadPosition();
headStartDeltaPitch = MyAvatar.getHeadDeltaPitch();
headStartFinalPitch = MyAvatar.getHeadFinalPitch();
@ -153,26 +169,31 @@ Controller.keyPressEvent.connect(function(event) {
headStartYaw = MyAvatar.getHeadFinalYaw();
deltaYaw = 0.0;
warpPosition = MyAvatar.position;
warpPosition.y += hipsToEyes;
activateWarp();
}
});
var TIME_FOR_TURN_AROUND = 0.50;
var TIME_FOR_TURN = 0.25;
var DOUBLE_CLICK_TIME = 0.50;
var TURN_AROUND = 180.0;
Controller.keyReleaseEvent.connect(function(event) {
if (event.text == "SPACE") {
if (event.text == "SPACE" && !event.isAutoRepeat) {
movingWithHead = false;
if (keyDownTime < TIME_FOR_TURN_AROUND) {
if (keyDownTime < TIME_FOR_TURN) {
var currentYaw = MyAvatar.getHeadFinalYaw();
MyAvatar.orientation = Quat.multiply(Quat.fromPitchYawRollDegrees(0, currentYaw, 0), MyAvatar.orientation);
} else {
MyAvatar.orientation = Quat.multiply(Quat.fromPitchYawRollDegrees(0, TURN_AROUND, 0), MyAvatar.orientation);
}
if (timeSinceLastUp < DOUBLE_CLICK_TIME) {
// Turn all the way around
var turnRemaining = TURN_AROUND - lastYawTurned;
lastYawTurned = 0.0;
MyAvatar.orientation = Quat.multiply(Quat.fromPitchYawRollDegrees(0, TURN_AROUND, 0), MyAvatar.orientation);
playSound();
} else if (keyDownTime < TIME_FOR_TURN) {
var currentYaw = MyAvatar.getHeadFinalYaw();
lastYawTurned = currentYaw;
MyAvatar.orientation = Quat.multiply(Quat.fromPitchYawRollDegrees(0, currentYaw, 0), MyAvatar.orientation);
playSound();
}
timeSinceLastUp = 0.0;
finishWarp();
if (watchAvatar) {
restoreCameraState();


@ -34,7 +34,7 @@ var EASING_MULTIPLIER = 8;
var INITIAL_ZOOM_DISTANCE = 2;
var INITIAL_ZOOM_DISTANCE_FIRST_PERSON = 3;
EntityCameraTool = function() {
CameraManager = function() {
var that = {};
that.enabled = false;
@ -85,24 +85,28 @@ EntityCameraTool = function() {
Camera.setMode("independent");
that.updateCamera();
cameraTool.setVisible(true);
}
that.disable = function() {
that.disable = function(ignoreCamera) {
if (!that.enabled) return;
that.enabled = false;
that.mode = MODE_INACTIVE;
Camera.setMode(that.previousCameraMode);
if (!ignoreCamera) {
Camera.setMode(that.previousCameraMode);
}
cameraTool.setVisible(false);
}
that.focus = function(entityProperties) {
var dim = entityProperties.dimensions;
dim = SelectionManager.worldDimensions;
var dim = SelectionManager.worldDimensions;
var size = Math.max(dim.x, Math.max(dim.y, dim.z));
that.targetZoomDistance = Math.max(size * FOCUS_ZOOM_SCALE, FOCUS_MIN_ZOOM);
that.setFocalPoint(SelectionManager.worldPosition);//entityProperties.position);
that.setFocalPoint(SelectionManager.worldPosition);
that.updateCamera();
}
@ -116,6 +120,42 @@ EntityCameraTool = function() {
that.updateCamera();
}
that.addYaw = function(yaw) {
that.targetYaw += yaw;
that.updateCamera();
}
that.addPitch = function(pitch) {
that.targetPitch += pitch;
that.updateCamera();
}
that.addZoom = function(zoom) {
zoom *= that.targetZoomDistance * ZOOM_SCALING;
that.targetZoomDistance = Math.min(Math.max(that.targetZoomDistance + zoom, MIN_ZOOM_DISTANCE), MAX_ZOOM_DISTANCE);
that.updateCamera();
}
that.getZoomPercentage = function() {
return (that.zoomDistance - MIN_ZOOM_DISTANCE) / MAX_ZOOM_DISTANCE;
}
that.setZoomPercentage = function(pct) {
that.targetZoomDistance = pct * (MAX_ZOOM_DISTANCE - MIN_ZOOM_DISTANCE);
}
that.pan = function(offset) {
var up = Quat.getUp(Camera.getOrientation());
var right = Quat.getRight(Camera.getOrientation());
up = Vec3.multiply(up, offset.y * 0.01 * PAN_ZOOM_SCALE_RATIO * that.zoomDistance);
right = Vec3.multiply(right, offset.x * 0.01 * PAN_ZOOM_SCALE_RATIO * that.zoomDistance);
var dPosition = Vec3.sum(up, right);
that.moveFocalPoint(dPosition);
}
that.mouseMoveEvent = function(event) {
if (that.enabled && that.mode != MODE_INACTIVE) {
if (that.mode == MODE_ORBIT) {
@ -168,7 +208,7 @@ EntityCameraTool = function() {
return true;
}
return false;
return cameraTool.mousePressEvent(event);
}
that.mouseReleaseEvent = function(event) {
@ -185,13 +225,13 @@ EntityCameraTool = function() {
// Scale based on current zoom level
dZoom *= that.targetZoomDistance * ZOOM_SCALING;
that.targetZoomDistance = Math.max(that.targetZoomDistance + dZoom, MIN_ZOOM_DISTANCE);
that.targetZoomDistance = Math.min(Math.max(that.targetZoomDistance + dZoom, MIN_ZOOM_DISTANCE), MAX_ZOOM_DISTANCE);
that.updateCamera();
}
that.updateCamera = function() {
if (!that.enabled) return;
if (!that.enabled || Camera.getMode() != "independent") return;
var yRot = Quat.angleAxis(that.yaw, { x: 0, y: 1, z: 0 });
var xRot = Quat.angleAxis(that.pitch, { x: 1, y: 0, z: 0 });
@ -215,6 +255,10 @@ EntityCameraTool = function() {
// Ease the position and orbit of the camera
that.update = function(dt) {
if (Camera.getMode() != "independent") {
return;
}
var scale = Math.min(dt * EASING_MULTIPLIER, 1.0);
var dYaw = that.targetYaw - that.yaw;
@ -239,9 +283,336 @@ EntityCameraTool = function() {
that.updateCamera();
}
// Last mode that was first or third person
var lastAvatarCameraMode = "first person";
Camera.modeUpdated.connect(function(newMode) {
print("Camera mode has been updated: " + newMode);
if (newMode == "first person" || newMode == "third person") {
lastAvatarCameraMode = newMode;
that.disable(true);
} else {
that.enable();
}
});
Controller.keyReleaseEvent.connect(function (event) {
if (event.text == "ESC" && that.enabled) {
Camera.setMode(lastAvatarCameraMode);
cameraManager.disable(true);
}
});
Script.update.connect(that.update);
Controller.wheelEvent.connect(that.wheelEvent);
var cameraTool = new CameraTool(that);
return that;
}
var ZoomTool = function(opts) {
var that = {};
var position = opts.position || { x: 0, y: 0 };
var height = opts.height || 200;
var color = opts.color || { red: 255, green: 0, blue: 0 };
var arrowButtonSize = opts.buttonSize || 20;
var arrowButtonBackground = opts.arrowBackground || { red: 255, green: 255, blue: 255 };
var zoomBackground = { red: 128, green: 0, blue: 0 };
var zoomHeight = height - (arrowButtonSize * 2);
var zoomBarY = position.y + arrowButtonSize;
var onIncreasePressed = opts.onIncreasePressed;
var onDecreasePressed = opts.onDecreasePressed;
var onPercentageSet = opts.onPercentageSet;
var increaseButton = Overlays.addOverlay("text", {
x: position.x,
y: position.y,
width: arrowButtonSize,
height: arrowButtonSize,
color: color,
backgroundColor: arrowButtonBackground,
topMargin: 4,
leftMargin: 4,
text: "+",
alpha: 1.0,
visible: true,
});
var decreaseButton = Overlays.addOverlay("text", {
x: position.x,
y: position.y + arrowButtonSize + zoomHeight,
width: arrowButtonSize,
height: arrowButtonSize,
color: color,
backgroundColor: arrowButtonBackground,
topMargin: 4,
leftMargin: 4,
text: "-",
alpha: 1.0,
visible: true,
});
var zoomBar = Overlays.addOverlay("text", {
x: position.x + 5,
y: zoomBarY,
width: 10,
height: zoomHeight,
color: { red: 0, green: 255, blue: 0 },
backgroundColor: zoomBackground,
topMargin: 4,
leftMargin: 4,
text: "",
alpha: 1.0,
visible: true,
});
var zoomHandle = Overlays.addOverlay("text", {
x: position.x,
y: position.y + arrowButtonSize,
width: arrowButtonSize,
height: 10,
backgroundColor: { red: 0, green: 255, blue: 0 },
topMargin: 4,
leftMargin: 4,
text: "",
alpha: 1.0,
visible: true,
});
var allOverlays = [
increaseButton,
decreaseButton,
zoomBar,
zoomHandle,
];
that.destroy = function() {
for (var i = 0; i < allOverlays.length; i++) {
Overlays.deleteOverlay(allOverlays[i]);
}
};
that.setVisible = function(visible) {
for (var i = 0; i < allOverlays.length; i++) {
Overlays.editOverlay(allOverlays[i], { visible: visible });
}
}
that.setZoomPercentage = function(pct) {
var yOffset = (zoomHeight - 10) * pct;
Overlays.editOverlay(zoomHandle, {
y: position.y + arrowButtonSize + yOffset,
});
}
that.mouseReleaseEvent = function(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
var clicked = false;
if (clickedOverlay == increaseButton) {
if (onIncreasePressed) onIncreasePressed();
clicked = true;
} else if (clickedOverlay == decreaseButton) {
if (onDecreasePressed) onDecreasePressed();
clicked = true;
} else if (clickedOverlay == zoomBar) {
if (onPercentageSet) onPercentageSet((event.y - zoomBarY) / zoomHeight);
clicked = true;
}
return clicked;
}
return that;
};
var ArrowTool = function(opts) {
var that = {};
var position = opts.position || { x: 0, y: 0 };
var arrowButtonSize = opts.buttonSize || 20;
var color = opts.color || { red: 255, green: 0, blue: 0 };
var arrowButtonBackground = opts.arrowBackground || { red: 255, green: 255, blue: 255 };
var centerButtonBackground = opts.centerBackground || { red: 255, green: 255, blue: 255 };
var onUpPressed = opts.onUpPressed;
var onDownPressed = opts.onDownPressed;
var onLeftPressed = opts.onLeftPressed;
var onRightPressed = opts.onRightPressed;
var onCenterPressed = opts.onCenterPressed;
var upButton = Overlays.addOverlay("text", {
x: position.x + arrowButtonSize,
y: position.y,
width: arrowButtonSize,
height: arrowButtonSize,
color: color,
backgroundColor: arrowButtonBackground,
topMargin: 4,
leftMargin: 4,
text: "^",
alpha: 1.0,
visible: true,
});
var leftButton = Overlays.addOverlay("text", {
x: position.x,
y: position.y + arrowButtonSize,
width: arrowButtonSize,
height: arrowButtonSize,
color: color,
backgroundColor: arrowButtonBackground,
topMargin: 4,
leftMargin: 4,
text: "<",
alpha: 1.0,
visible: true,
});
var rightButton = Overlays.addOverlay("text", {
x: position.x + (arrowButtonSize * 2),
y: position.y + arrowButtonSize,
width: arrowButtonSize,
height: arrowButtonSize,
color: color,
backgroundColor: arrowButtonBackground,
topMargin: 4,
leftMargin: 4,
text: ">",
alpha: 1.0,
visible: true,
});
var downButton = Overlays.addOverlay("text", {
x: position.x + arrowButtonSize,
y: position.y + (arrowButtonSize * 2),
width: arrowButtonSize,
height: arrowButtonSize,
color: color,
backgroundColor: arrowButtonBackground,
topMargin: 4,
leftMargin: 4,
text: "v",
alpha: 1.0,
visible: true,
});
var centerButton = Overlays.addOverlay("text", {
x: position.x + arrowButtonSize,
y: position.y + arrowButtonSize,
width: arrowButtonSize,
height: arrowButtonSize,
color: color,
backgroundColor: centerButtonBackground,
topMargin: 4,
leftMargin: 4,
text: "",
alpha: 1.0,
visible: true,
});
var allOverlays = [
upButton,
downButton,
leftButton,
rightButton,
centerButton,
];
that.destroy = function() {
for (var i = 0; i < allOverlays.length; i++) {
Overlays.deleteOverlay(allOverlays[i]);
}
};
that.setVisible = function(visible) {
for (var i = 0; i < allOverlays.length; i++) {
Overlays.editOverlay(allOverlays[i], { visible: visible });
}
}
that.mouseReleaseEvent = function(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
var clicked = false;
if (clickedOverlay == leftButton) {
if (onLeftPressed) onLeftPressed();
clicked = true;
} else if (clickedOverlay == rightButton) {
if (onRightPressed) onRightPressed();
clicked = true;
} else if (clickedOverlay == upButton) {
if (onUpPressed) onUpPressed();
clicked = true;
} else if (clickedOverlay == downButton) {
if (onDownPressed) onDownPressed();
clicked = true;
} else if (clickedOverlay == centerButton) {
if (onCenterPressed) onCenterPressed();
clicked = true;
}
return clicked;
}
return that;
}
CameraTool = function(cameraManager) {
var that = {};
var toolsPosition = { x: 20, y: 280 };
var orbitToolPosition = toolsPosition;
var panToolPosition = { x: toolsPosition.x + 80, y: toolsPosition.y };
var zoomToolPosition = { x: toolsPosition.x + 20, y: toolsPosition.y + 80 };
var orbitIncrement = 15;
orbitTool = ArrowTool({
position: orbitToolPosition,
arrowBackground: { red: 192, green: 192, blue: 192 },
centerBackground: { red: 128, green: 128, blue: 255 },
color: { red: 0, green: 0, blue: 0 },
onUpPressed: function() { cameraManager.addPitch(orbitIncrement); },
onDownPressed: function() { cameraManager.addPitch(-orbitIncrement); },
onLeftPressed: function() { cameraManager.addYaw(-orbitIncrement); },
onRightPressed: function() { cameraManager.addYaw(orbitIncrement); },
onCenterPressed: function() { cameraManager.focus(); },
});
panTool = ArrowTool({
position: panToolPosition,
arrowBackground: { red: 192, green: 192, blue: 192 },
centerBackground: { red: 128, green: 128, blue: 255 },
color: { red: 0, green: 0, blue: 0 },
onUpPressed: function() { cameraManager.pan({ x: 0, y: 15 }); },
onDownPressed: function() { cameraManager.pan({ x: 0, y: -15 }); },
onLeftPressed: function() { cameraManager.pan({ x: -15, y: 0 }); },
onRightPressed: function() { cameraManager.pan({ x: 15, y: 0 }); },
});
zoomTool = ZoomTool({
position: zoomToolPosition,
arrowBackground: { red: 192, green: 192, blue: 192 },
color: { red: 0, green: 0, blue: 0 },
onIncreasePressed: function() { cameraManager.addZoom(-10); },
onDecreasePressed: function() { cameraManager.addZoom(10); },
onPercentageSet: function(pct) { cameraManager.setZoomPercentage(pct); }
});
Script.scriptEnding.connect(function() {
orbitTool.destroy();
panTool.destroy();
zoomTool.destroy();
});
that.mousePressEvent = function(event) {
return orbitTool.mouseReleaseEvent(event)
|| panTool.mouseReleaseEvent(event)
|| zoomTool.mouseReleaseEvent(event);
};
that.setVisible = function(visible) {
orbitTool.setVisible(visible);
panTool.setVisible(visible);
zoomTool.setVisible(visible);
};
Script.update.connect(function() {
cameraManager.getZoomPercentage();
zoomTool.setZoomPercentage(cameraManager.getZoomPercentage());
});
that.setVisible(false);
return that;
};


@ -19,7 +19,6 @@ SPACE_WORLD = "world";
SelectionManager = (function() {
var that = {};
that.savedProperties = {};
that.eventListener = null;
@ -78,6 +77,8 @@ SelectionManager = (function() {
that.worldDimensions = null;
that.worldPosition = null;
} else if (that.selections.length == 1) {
SelectionDisplay.setSpaceMode(SPACE_LOCAL);
var properties = Entities.getEntityProperties(that.selections[0]);
that.localDimensions = properties.dimensions;
that.localPosition = properties.position;
@ -622,8 +623,8 @@ SelectionDisplay = (function () {
}
var diagonal = (Vec3.length(properties.dimensions) / 2) * 1.1;
var halfDimensions = Vec3.multiply(properties.dimensions, 0.5);
var diagonal = (Vec3.length(selectionManager.worldDimensions) / 2) * 1.1;
var halfDimensions = Vec3.multiply(selectionManager.worldDimensions, 0.5);
innerRadius = diagonal;
outerRadius = diagonal * 1.15;
var innerActive = false;
@ -843,7 +844,7 @@ SelectionDisplay = (function () {
Overlays.editOverlay(grabberMoveUp, { visible: translateHandlesVisible, position: { x: boundsCenter.x, y: top + grabberMoveUpOffset, z: boundsCenter.z } });
that.updateHandles(entityID);
that.updateHandles();
Overlays.editOverlay(baseOfEntityProjectionOverlay,
@ -924,18 +925,16 @@ SelectionDisplay = (function () {
entitySelected = false;
};
that.updateHandles = function(entityID) {
if (!entitySelected) {
that.updateHandles = function() {
if (SelectionManager.selections.length == 0) {
that.setOverlaysVisible(false);
return;
}
var properties = Entities.getEntityProperties(entityID);
var rotation, dimensions, position;
if (spaceMode == SPACE_LOCAL) {
rotation = properties.rotation;
rotation = SelectionManager.localRotation;
dimensions = SelectionManager.localDimensions;
position = SelectionManager.localPosition;
} else {
@ -1095,17 +1094,68 @@ SelectionDisplay = (function () {
entitySelected = false;
};
var lastXZPick = null;
function applyEntityProperties(data) {
for (var i = 0; i < data.length; i++) {
var entityID = data[i].entityID;
var properties = data[i].properties;
Entities.editEntity(entityID, properties);
}
selectionManager._update();
};
// For currently selected entities, push a command to the UndoStack that uses the current entity properties for the
// redo command, and the saved properties for the undo command.
function pushCommandForSelections() {
var undoData = [];
var redoData = [];
for (var i = 0; i < SelectionManager.selections.length; i++) {
var entityID = SelectionManager.selections[i];
var initialProperties = SelectionManager.savedProperties[entityID.id];
var currentProperties = Entities.getEntityProperties(entityID);
undoData.push({
entityID: entityID,
properties: {
position: initialProperties.position,
rotation: initialProperties.rotation,
dimensions: initialProperties.dimensions,
},
});
redoData.push({
entityID: entityID,
properties: {
position: currentProperties.position,
rotation: currentProperties.rotation,
dimensions: currentProperties.dimensions,
},
});
}
UndoStack.pushCommand(applyEntityProperties, undoData, applyEntityProperties, redoData);
}
var initialXZPick = null;
var isConstrained = false;
var startPosition = null;
var translateXZTool = {
mode: 'TRANSLATE_XZ',
onBegin: function(event) {
SelectionManager.saveProperties();
var position = SelectionManager.worldPosition;
startPosition = SelectionManager.worldPosition;
var dimensions = SelectionManager.worldDimensions;
var bottom = position.y - (dimensions.y / 2)
var pickRay = Camera.computePickRay(event.x, event.y);
lastXZPick = rayPlaneIntersection(pickRay, position, { x: 0, y: 1, z: 0 });
initialXZPick = rayPlaneIntersection(pickRay, startPosition, { x: 0, y: 1, z: 0 });
// Duplicate entities if alt is pressed. This will make a
// copy of the selected entities and move the _original_ entities, not
// the new ones.
if (event.isAlt) {
for (var otherEntityID in SelectionManager.savedProperties) {
var properties = SelectionManager.savedProperties[otherEntityID];
var entityID = Entities.addEntity(properties);
}
}
isConstrained = false;
},
onEnd: function(event, reason) {
if (reason == 'cancel') {
@ -1114,7 +1164,11 @@ SelectionDisplay = (function () {
var initialProperties = SelectionManager.savedProperties[entityID.id];
Entities.editEntity(entityID, initialProperties);
}
} else {
pushCommandForSelections();
}
Overlays.editOverlay(xRailOverlay, { visible: false });
Overlays.editOverlay(zRailOverlay, { visible: false });
},
onMove: function(event) {
if (!entitySelected || mode !== "TRANSLATE_XZ") {
@ -1129,26 +1183,47 @@ SelectionDisplay = (function () {
Quat.getFront(lastCameraOrientation));
var vector = Vec3.subtract(newIntersection, lastPlaneIntersection);
var pickRay = Camera.computePickRay(event.x, event.y);
var pick = rayPlaneIntersection(pickRay, SelectionManager.worldPosition, { x: 0, y: 1, z: 0 });
vector = Vec3.subtract(pick, lastXZPick);
lastXZPick = pick;
var vector = Vec3.subtract(pick, initialXZPick);
// initialXZPick = pick;
// If shifted, constrain to one axis
if (event.isShifted) {
if (Math.abs(vector.x) > Math.abs(vector.z)) {
vector.z = 0;
} else {
vector.x = 0;
}
if (!isConstrained) {
Overlays.editOverlay(xRailOverlay, { visible: true });
var xStart = Vec3.sum(startPosition, { x: -10000, y: 0, z: 0 });
var xEnd = Vec3.sum(startPosition, { x: 10000, y: 0, z: 0 });
var zStart = Vec3.sum(startPosition, { x: 0, y: 0, z: -10000 });
var zEnd = Vec3.sum(startPosition, { x: 0, y: 0, z: 10000 });
Overlays.editOverlay(xRailOverlay, { start: xStart, end: xEnd, visible: true });
Overlays.editOverlay(zRailOverlay, { start: zStart, end: zEnd, visible: true });
isConstrained = true;
}
} else {
if (isConstrained) {
Overlays.editOverlay(xRailOverlay, { visible: false });
Overlays.editOverlay(zRailOverlay, { visible: false });
}
}
var wantDebug = false;
for (var i = 0; i < SelectionManager.selections.length; i++) {
var properties = Entities.getEntityProperties(SelectionManager.selections[i]);
var original = properties.position;
properties.position = Vec3.sum(properties.position, vector);
Entities.editEntity(SelectionManager.selections[i], properties);
var properties = SelectionManager.savedProperties[SelectionManager.selections[i].id];
Entities.editEntity(SelectionManager.selections[i], {
position: Vec3.sum(properties.position, vector),
});
if (wantDebug) {
print("translateXZ... ");
Vec3.print(" lastPlaneIntersection:", lastPlaneIntersection);
Vec3.print(" newIntersection:", newIntersection);
Vec3.print(" vector:", vector);
Vec3.print(" originalPosition:", original);
Vec3.print(" newPosition:", properties.position);
Vec3.print(" newPosition:", newPosition);
}
@ -1172,6 +1247,8 @@ SelectionDisplay = (function () {
var initialProperties = SelectionManager.savedProperties[entityID.id];
Entities.editEntity(entityID, initialProperties);
}
} else {
pushCommandForSelections();
}
},
onMove: function(event) {
@ -1245,7 +1322,6 @@ SelectionDisplay = (function () {
var rotation = null;
var onBegin = function(event) {
print("STARTING: " + stretchMode);
var properties = Entities.getEntityProperties(currentSelection);
initialProperties = properties;
rotation = spaceMode == SPACE_LOCAL ? properties.rotation : Quat.fromPitchYawRollDegrees(0, 0, 0);
@ -1323,7 +1399,6 @@ SelectionDisplay = (function () {
};
var onEnd = function(event, reason) {
print("ENDING: " + stretchMode);
Overlays.editOverlay(xRailOverlay, { visible: false });
Overlays.editOverlay(yRailOverlay, { visible: false });
Overlays.editOverlay(zRailOverlay, { visible: false });
@ -1334,6 +1409,8 @@ SelectionDisplay = (function () {
var initialProperties = SelectionManager.savedProperties[entityID.id];
Entities.editEntity(entityID, initialProperties);
}
} else {
pushCommandForSelections();
}
};
@ -1370,7 +1447,6 @@ SelectionDisplay = (function () {
var absX = Math.abs(changeInDimensions.x);
var absY = Math.abs(changeInDimensions.y);
var absZ = Math.abs(changeInDimensions.z);
print('abs: ' + absX + ', ' + absY + ', ' + absZ);
var pctChange = 0;
if (absX > absY && absX > absZ) {
pctChange = changeInDimensions.x / initialProperties.dimensions.x;
@ -1382,7 +1458,6 @@ SelectionDisplay = (function () {
pctChange = changeInDimensions.z / initialProperties.dimensions.z;
pctChange = changeInDimensions.z / initialDimensions.z;
}
print('change: ' + pctChange);
pctChange += 1.0;
newDimensions = Vec3.multiply(pctChange, initialDimensions);
} else {
@ -1496,6 +1571,8 @@ SelectionDisplay = (function () {
var initialProperties = SelectionManager.savedProperties[entityID.id];
Entities.editEntity(entityID, initialProperties);
}
} else {
pushCommandForSelections();
}
},
onMove: function(event) {
@ -1602,6 +1679,8 @@ SelectionDisplay = (function () {
var initialProperties = SelectionManager.savedProperties[entityID.id];
Entities.editEntity(entityID, initialProperties);
}
} else {
pushCommandForSelections();
}
},
onMove: function(event) {
@ -1706,6 +1785,8 @@ SelectionDisplay = (function () {
var initialProperties = SelectionManager.savedProperties[entityID.id];
Entities.editEntity(entityID, initialProperties);
}
} else {
pushCommandForSelections();
}
},
onMove: function(event) {
@ -1830,7 +1911,6 @@ SelectionDisplay = (function () {
var tool = grabberTools[result.overlayID];
if (tool) {
print("FOUND TOOL! " + tool.mode);
activeTool = tool;
mode = tool.mode;
somethingClicked = true;
@ -1930,7 +2010,6 @@ SelectionDisplay = (function () {
if (result.intersects) {
var tool = grabberTools[result.overlayID];
if (tool) {
print("FOUND TOOL! " + tool.mode);
activeTool = tool;
mode = tool.mode;
somethingClicked = true;


@ -8,4 +8,4 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
HIFI_PUBLIC_BUCKET = "https://s3.amazonaws.com/hifi-public/";
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";


@ -33,9 +33,9 @@ Script.include("libraries/entityPropertyDialogBox.js");
var entityPropertyDialogBox = EntityPropertyDialogBox;
Script.include("libraries/entityCameraTool.js");
var entityCameraTool = new EntityCameraTool();
var cameraManager = new CameraManager();
selectionManager.setEventListener(selectionDisplay.updateHandles());
selectionManager.setEventListener(selectionDisplay.updateHandles);
var windowDimensions = Controller.getViewportDimensions();
var toolIconUrl = HIFI_PUBLIC_BUCKET + "images/tools/";
@ -188,7 +188,7 @@ var toolBar = (function () {
position = Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE));
if (position.x > 0 && position.y > 0 && position.z > 0) {
Entities.addEntity({
var entityId = Entities.addEntity({
type: "Model",
position: position,
dimensions: { x: DEFAULT_DIMENSION, y: DEFAULT_DIMENSION, z: DEFAULT_DIMENSION },
@ -257,9 +257,9 @@ var toolBar = (function () {
isActive = !isActive;
if (!isActive) {
selectionDisplay.unselectAll();
entityCameraTool.disable();
cameraManager.disable();
} else {
entityCameraTool.enable();
cameraManager.enable();
}
return true;
}
@ -400,7 +400,7 @@ function mousePressEvent(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
if (toolBar.mousePressEvent(event) || progressDialog.mousePressEvent(event)
|| entityCameraTool.mousePressEvent(event) || selectionDisplay.mousePressEvent(event)) {
|| cameraManager.mousePressEvent(event) || selectionDisplay.mousePressEvent(event)) {
// Event handled; do nothing.
return;
} else {
@ -508,8 +508,8 @@ function mouseMoveEvent(event) {
return;
}
// allow the selectionDisplay and entityCameraTool to handle the event first, if it doesn't handle it, then do our own thing
if (selectionDisplay.mouseMoveEvent(event) || entityCameraTool.mouseMoveEvent(event)) {
// allow the selectionDisplay and cameraManager to handle the event first, if it doesn't handle it, then do our own thing
if (selectionDisplay.mouseMoveEvent(event) || cameraManager.mouseMoveEvent(event)) {
return;
}
@ -548,7 +548,7 @@ function mouseReleaseEvent(event) {
if (entitySelected) {
tooltip.show(false);
}
entityCameraTool.mouseReleaseEvent(event);
cameraManager.mouseReleaseEvent(event);
}
Controller.mousePressEvent.connect(mousePressEvent);
@ -634,9 +634,12 @@ function handeMenuEvent(menuItem) {
} else if (menuItem == "Delete") {
if (entitySelected) {
print(" Delete Entity.... selectedEntityID="+ selectedEntityID);
Entities.deleteEntity(selectedEntityID);
for (var i = 0; i < selectionManager.selections.length; i++) {
Entities.deleteEntity(selectionManager.selections[i]);
}
selectionDisplay.unselect(selectedEntityID);
entitySelected = false;
selectionManager.clearSelections();
} else {
print(" Delete Entity.... not holding...");
}
@ -644,7 +647,7 @@ function handeMenuEvent(menuItem) {
// good place to put the properties dialog
editModelID = -1;
if (entitySelected) {
if (selectionManager.selections.length == 1) {
print(" Edit Properties.... selectedEntityID="+ selectedEntityID);
editModelID = selectedEntityID;
} else {
@ -675,11 +678,10 @@ Menu.menuItemEvent.connect(handeMenuEvent);
Controller.keyReleaseEvent.connect(function (event) {
// since sometimes our menu shortcut keys don't work, trap our menu items here also and fire the appropriate menu items
print(event.text);
if (event.text == "`") {
handeMenuEvent("Edit Properties...");
}
if (event.text == "BACKSPACE") {
if (event.text == "BACKSPACE" || event.text == "DELETE") {
handeMenuEvent("Delete");
} else if (event.text == "TAB") {
selectionDisplay.toggleSpaceMode();
@ -689,7 +691,11 @@ Controller.keyReleaseEvent.connect(function (event) {
if (entitySelected) {
// Get latest properties
var properties = Entities.getEntityProperties(selectedEntityID);
entityCameraTool.focus(properties);
cameraManager.focus(properties);
}
} else if (event.text == '[') {
if (isActive) {
cameraManager.enable();
}
}
});


@ -2,7 +2,7 @@ set(TARGET_NAME interface)
project(${TARGET_NAME})
# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "Faceshift" "LibOVR" "PrioVR" "Sixense" "Visage" "LeapMotion" "RtMidi" "Qxmpp" "SDL2")
set(OPTIONAL_EXTERNALS "Faceshift" "LibOVR" "PrioVR" "Sixense" "Visage" "LeapMotion" "RtMidi" "Qxmpp" "SDL2" "Gverb")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)
@ -14,6 +14,10 @@ endforeach()
find_package(Qt5LinguistTools REQUIRED)
find_package(Qt5LinguistToolsMacros)
# As Gverb is currently the only reverb library, it's required.
find_package(Gverb REQUIRED)
if (DEFINED ENV{JOB_ID})
set(BUILD_SEQ $ENV{JOB_ID})
else ()
@ -29,7 +33,7 @@ elseif (WIN32)
add_definitions(-D_USE_MATH_DEFINES) # apparently needed to get M_PI and other defines from cmath/math.h
add_definitions(-DWINDOWS_LEAN_AND_MEAN) # needed to make sure Windows doesn't go too crazy with its defines
set(GL_HEADERS "#include <windowshacks.h>\n#include <GL/glew.h>\n#include <GL/glut.h>")
set(GL_HEADERS "#include <windowshacks.h>\n#include <GL/glew.h>\n#include <GL/glut.h>\n#include <GL/wglew.h>")
endif ()
# set up the external glm library
@ -40,11 +44,7 @@ configure_file(InterfaceConfig.h.in "${PROJECT_BINARY_DIR}/includes/InterfaceCon
configure_file(InterfaceVersion.h.in "${PROJECT_BINARY_DIR}/includes/InterfaceVersion.h")
# grab the implementation and header files from src dirs
file(GLOB INTERFACE_SRCS src/*.cpp src/*.h)
foreach(SUBDIR avatar devices renderer ui starfield location scripting voxels particles entities gpu)
file(GLOB_RECURSE SUBDIR_SRCS src/${SUBDIR}/*.cpp src/${SUBDIR}/*.h)
set(INTERFACE_SRCS ${INTERFACE_SRCS} "${SUBDIR_SRCS}")
endforeach(SUBDIR)
file(GLOB_RECURSE INTERFACE_SRCS "src/*.cpp" "src/*.h")
# Add SpeechRecognizer if on OS X, otherwise remove
if (APPLE)
@ -166,6 +166,13 @@ if (QXMPP_FOUND AND NOT DISABLE_QXMPP AND WIN32)
add_definitions(-DQXMPP_STATIC)
endif ()
if (GVERB_FOUND)
file(GLOB GVERB_SRCS ${GVERB_SRC_DIRS}/*.c)
include_directories(${GVERB_INCLUDE_DIRS})
add_library(gverb STATIC ${GVERB_SRCS})
target_link_libraries(${TARGET_NAME} gverb)
endif (GVERB_FOUND)
# include headers for interface and InterfaceConfig.
include_directories("${PROJECT_SOURCE_DIR}/src" "${PROJECT_BINARY_DIR}/includes")
@ -215,11 +222,20 @@ else (APPLE)
if (WIN32)
find_package(GLEW REQUIRED)
include_directories(${GLEW_INCLUDE_DIRS})
# we're using static GLEW, so define GLEW_STATIC
add_definitions(-DGLEW_STATIC)
target_link_libraries(${TARGET_NAME} "${GLEW_LIBRARIES}" wsock32.lib opengl32.lib)
target_link_libraries(${TARGET_NAME} "${GLEW_LIBRARIES}" "${NSIGHT_LIBRARIES}" wsock32.lib opengl32.lib)
# try to find the Nsight package and add it to the build if we find it
find_package(NSIGHT)
if (NSIGHT_FOUND)
include_directories(${NSIGHT_INCLUDE_DIRS})
add_definitions(-DNSIGHT_FOUND)
target_link_libraries(${TARGET_NAME} "${NSIGHT_LIBRARIES}")
endif ()
endif()
endif (APPLE)

interface/external/gverb/readme.txt vendored Normal file

@ -0,0 +1,15 @@
Instructions for adding the Gverb library to Interface
(This is a required library)
Clément Brisset, October 22nd, 2014
1. Go to https://github.com/highfidelity/gverb
Or download the sources directly via this link:
https://github.com/highfidelity/gverb/archive/master.zip
2. Extract the archive
3. Place the directories “include” and “src” in interface/external/gverb
(Normally next to this readme)
4. Clear your build directory, run cmake, build and you should be all set.


@ -26,11 +26,14 @@ varying vec4 normal;
void main(void) {
// transform and store the normal for interpolation
vec2 heightCoord = gl_MultiTexCoord0.st;
float deltaX = texture2D(heightMap, heightCoord - vec2(heightScale, 0.0)).r -
texture2D(heightMap, heightCoord + vec2(heightScale, 0.0)).r;
float deltaZ = texture2D(heightMap, heightCoord - vec2(0.0, heightScale)).r -
texture2D(heightMap, heightCoord + vec2(0.0, heightScale)).r;
normal = normalize(gl_ModelViewMatrix * vec4(deltaX, heightScale, deltaZ, 0.0));
vec4 neighborHeights = vec4(texture2D(heightMap, heightCoord - vec2(heightScale, 0.0)).r,
texture2D(heightMap, heightCoord + vec2(heightScale, 0.0)).r,
texture2D(heightMap, heightCoord - vec2(0.0, heightScale)).r,
texture2D(heightMap, heightCoord + vec2(0.0, heightScale)).r);
vec4 neighborsZero = step(1.0 / 255.0, neighborHeights);
normal = normalize(gl_ModelViewMatrix * vec4(
(neighborHeights.x - neighborHeights.y) * neighborsZero.x * neighborsZero.y, heightScale,
(neighborHeights.z - neighborHeights.w) * neighborsZero.z * neighborsZero.w, 0.0));
// add the height to the position
float height = texture2D(heightMap, heightCoord).r;


@ -55,6 +55,8 @@
#include <AccountManager.h>
#include <AudioInjector.h>
#include <EntityScriptingInterface.h>
#include <HFActionEvent.h>
#include <HFBackEvent.h>
#include <LocalVoxelsList.h>
#include <Logging.h>
#include <NetworkAccessManager.h>
@ -67,14 +69,16 @@
#include <UUID.h>
#include "Application.h"
#include "ui/DataWebDialog.h"
#include "InterfaceVersion.h"
#include "Menu.h"
#include "ModelUploader.h"
#include "Util.h"
#include "devices/Leapmotion.h"
#include "devices/MIDIManager.h"
#include "devices/OculusManager.h"
#include "devices/TV3DManager.h"
#include "renderer/ProgramObject.h"
#include "scripting/AccountScriptingInterface.h"
@ -87,12 +91,12 @@
#include "scripting/SettingsScriptingInterface.h"
#include "scripting/WindowScriptingInterface.h"
#include "ui/DataWebDialog.h"
#include "ui/InfoView.h"
#include "ui/Snapshot.h"
#include "ui/Stats.h"
#include "ui/TextRenderer.h"
#include "devices/Leapmotion.h"
using namespace std;
@ -136,6 +140,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_glWidget(new GLCanvas()),
_nodeThread(new QThread(this)),
_datagramProcessor(),
_undoStack(),
_undoStackScriptingInterface(&_undoStack),
_frameCount(0),
_fps(60.0f),
_justStarted(true),
@ -177,8 +183,11 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_trayIcon(new QSystemTrayIcon(_window)),
_lastNackTime(usecTimestampNow()),
_lastSendDownstreamAudioStats(usecTimestampNow()),
_renderTargetFramerate(0),
_isVSyncOn(true),
_renderResolutionScale(1.0f)
{
// read the ApplicationInfo.ini file for Name/Version/Domain information
QSettings applicationInfo(Application::resourcesPath() + "info/ApplicationInfo.ini", QSettings::IniFormat);
@ -409,6 +418,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
MIDIManager& midiManagerInstance = MIDIManager::getInstance();
midiManagerInstance.openDefaultPort();
#endif
this->installEventFilter(this);
}
Application::~Application() {
@ -518,8 +529,20 @@ void Application::initializeGL() {
qDebug("Error: %s\n", glewGetErrorString(err));
}
qDebug("Status: Using GLEW %s\n", glewGetString(GLEW_VERSION));
if (wglewGetExtension("WGL_EXT_swap_control")) {
int swapInterval = wglGetSwapIntervalEXT();
qDebug("V-Sync is %s\n", (swapInterval > 0 ? "ON" : "OFF"));
}
#endif
#if defined(Q_OS_LINUX)
// TODO: Write the correct code for Linux...
/* if (wglewGetExtension("WGL_EXT_swap_control")) {
int swapInterval = wglGetSwapIntervalEXT();
qDebug("V-Sync is %s\n", (swapInterval > 0 ? "ON" : "OFF"));
}*/
#endif
// Before we render anything, let's set up our viewFrustumOffsetCamera with a sufficiently large
// field of view and near and far clip to make it interesting.
@ -816,9 +839,26 @@ bool Application::event(QEvent* event) {
return false;
}
if (HFActionEvent::types().contains(event->type())) {
_controllerScriptingInterface.handleMetaEvent(static_cast<HFMetaEvent*>(event));
}
return QApplication::event(event);
}
bool Application::eventFilter(QObject* object, QEvent* event) {
if (event->type() == QEvent::ShortcutOverride) {
// Filter out captured keys before they're used for shortcut actions.
if (_controllerScriptingInterface.isKeyCaptured(static_cast<QKeyEvent*>(event))) {
event->accept();
return true;
}
}
return false;
}
void Application::keyPressEvent(QKeyEvent* event) {
_keysPressed.insert(event->key());
@ -1077,9 +1117,23 @@ void Application::keyPressEvent(QKeyEvent* event) {
case Qt::Key_Equal:
_myAvatar->resetSize();
break;
case Qt::Key_Escape:
OculusManager::abandonCalibration();
case Qt::Key_Space: {
// this starts an HFActionEvent
HFActionEvent startActionEvent(HFActionEvent::startType(), getViewportCenter());
sendEvent(this, &startActionEvent);
break;
}
case Qt::Key_Escape: {
OculusManager::abandonCalibration();
// this starts the HFCancelEvent
HFBackEvent startBackEvent(HFBackEvent::startType());
sendEvent(this, &startBackEvent);
break;
}
default:
event->ignore();
break;
@ -1150,6 +1204,20 @@ void Application::keyReleaseEvent(QKeyEvent* event) {
case Qt::Key_Alt:
_myAvatar->clearDriveKeys();
break;
case Qt::Key_Space: {
// this ends the HFActionEvent
HFActionEvent endActionEvent(HFActionEvent::endType(), getViewportCenter());
sendEvent(this, &endActionEvent);
break;
}
case Qt::Key_Escape: {
// this ends the HFCancelEvent
HFBackEvent endBackEvent(HFBackEvent::endType());
sendEvent(this, &endBackEvent);
break;
}
default:
event->ignore();
break;
@ -1222,6 +1290,10 @@ void Application::mousePressEvent(QMouseEvent* event, unsigned int deviceID) {
// stop propagation
return;
}
// nobody handled this - make it an action event on the _window object
HFActionEvent actionEvent(HFActionEvent::startType(), event->localPos());
sendEvent(this, &actionEvent);
} else if (event->button() == Qt::RightButton) {
// right click items here
@ -1242,12 +1314,17 @@ void Application::mouseReleaseEvent(QMouseEvent* event, unsigned int deviceID) {
_mouseX = event->x();
_mouseY = event->y();
_mousePressed = false;
checkBandwidthMeterClick();
if (Menu::getInstance()->isOptionChecked(MenuOption::Stats)) {
// let's set horizontal offset to give stats some margin to mirror
int horizontalOffset = MIRROR_VIEW_WIDTH;
Stats::getInstance()->checkClick(_mouseX, _mouseY, _mouseDragStartedX, _mouseDragStartedY, horizontalOffset);
}
// fire an action end event
HFActionEvent actionEvent(HFActionEvent::endType(), event->localPos());
sendEvent(this, &actionEvent);
}
}
}
@ -1391,9 +1468,14 @@ void Application::idle() {
PerformanceWarning warn(showWarnings, "idle()");
// Only run simulation code if more than IDLE_SIMULATE_MSECS have passed since last time we ran
double targetFramePeriod = 0.0;
if (_renderTargetFramerate > 0) {
targetFramePeriod = 1000.0 / _renderTargetFramerate;
} else if (_renderTargetFramerate < 0) {
targetFramePeriod = IDLE_SIMULATE_MSECS;
}
double timeSinceLastUpdate = (double)_lastTimeUpdated.nsecsElapsed() / 1000000.0;
if (timeSinceLastUpdate > IDLE_SIMULATE_MSECS) {
if (timeSinceLastUpdate > targetFramePeriod) {
_lastTimeUpdated.start();
{
PerformanceTimer perfTimer("update");
@ -1472,9 +1554,12 @@ void Application::setEnableVRMode(bool enableVRMode) {
OculusManager::disconnect();
OculusManager::connect();
}
int oculusMaxFPS = Menu::getInstance()->getOculusUIMaxFPS();
setRenderTargetFramerate(oculusMaxFPS);
OculusManager::recalibrate();
} else {
OculusManager::abandonCalibration();
setRenderTargetFramerate(0);
}
resizeGL(_glWidget->getDeviceWidth(), _glWidget->getDeviceHeight());
@ -3083,7 +3168,6 @@ void Application::renderRearViewMirror(const QRect& region, bool billboard) {
// if not rendering the billboard, the region is in device independent coordinates; must convert to device
QSize size = getTextureCache()->getFrameBufferSize();
float ratio = QApplication::desktop()->windowHandle()->devicePixelRatio();
ratio = size.height() / (float)_glWidget->getDeviceHeight();
int x = region.x() * ratio, y = region.y() * ratio, width = region.width() * ratio, height = region.height() * ratio;
glViewport(x, size.height() - y - height, width, height);
glScissor(x, size.height() - y - height, width, height);
@ -3766,8 +3850,9 @@ ScriptEngine* Application::loadScript(const QString& scriptFilename, bool isUser
// AvatarManager has some custom types
AvatarManager::registerMetaTypes(scriptEngine);
// hook our avatar object into this script engine
// hook our avatar and avatar hash map object into this script engine
scriptEngine->setAvatarData(_myAvatar, "MyAvatar"); // leave it as a MyAvatar class to expose thrust features
scriptEngine->setAvatarHashMap(&_avatarManager, "AvatarList");
CameraScriptableObject* cameraScriptable = new CameraScriptableObject(&_myCamera, &_viewFrustum);
scriptEngine->registerGlobalObject("Camera", cameraScriptable);
@ -3810,6 +3895,8 @@ ScriptEngine* Application::loadScript(const QString& scriptFilename, bool isUser
scriptEngine->registerGlobalObject("Joysticks", &JoystickScriptingInterface::getInstance());
qScriptRegisterMetaType(scriptEngine, joystickToScriptValue, joystickFromScriptValue);
scriptEngine->registerGlobalObject("UndoStack", &_undoStackScriptingInterface);
#ifdef HAVE_RTMIDI
scriptEngine->registerGlobalObject("MIDI", &MIDIManager::getInstance());
#endif
@ -4138,6 +4225,53 @@ void Application::takeSnapshot() {
_snapshotShareDialog->show();
}
void Application::setRenderTargetFramerate(unsigned int framerate, bool vsyncOn) {
if (vsyncOn != _isVSyncOn) {
#if defined(Q_OS_WIN)
if (wglewGetExtension("WGL_EXT_swap_control")) {
wglSwapIntervalEXT(vsyncOn);
int swapInterval = wglGetSwapIntervalEXT();
_isVSyncOn = swapInterval;
qDebug("V-Sync is %s\n", (swapInterval > 0 ? "ON" : "OFF"));
} else {
qDebug("V-Sync is FORCED ON on this system\n");
}
#elif defined(Q_OS_LINUX)
// TODO: write the proper code for Linux
/*
if (glQueryExtension.... ("GLX_EXT_swap_control")) {
glxSwapIntervalEXT(vsyncOn);
int swapInterval = xglGetSwapIntervalEXT();
_isVSyncOn = swapInterval;
qDebug("V-Sync is %s\n", (swapInterval > 0 ? "ON" : "OFF"));
} else {
qDebug("V-Sync is FORCED ON on this system\n");
}
*/
#else
qDebug("V-Sync is FORCED ON on this system\n");
#endif
}
_renderTargetFramerate = framerate;
}
bool Application::isVSyncEditable() {
#if defined(Q_OS_WIN)
if (wglewGetExtension("WGL_EXT_swap_control")) {
return true;
}
#elif defined(Q_OS_LINUX)
// TODO: write the proper code for Linux
/*
if (glQueryExtension.... ("GLX_EXT_swap_control")) {
return true;
}
*/
#else
#endif
return false;
}
void Application::setRenderResolutionScale(float scale) {
_renderResolutionScale = scale;
}


@ -91,6 +91,9 @@
#include "voxels/VoxelSystem.h"
#include "UndoStackScriptingInterface.h"
class QAction;
class QActionGroup;
class QGLWidget;
@ -169,6 +172,7 @@ public:
void dropEvent(QDropEvent *event);
bool event(QEvent* event);
bool eventFilter(QObject* object, QEvent* event);
void makeVoxel(glm::vec3 position,
float scale,
@ -280,6 +284,8 @@ public:
PointShader& getPointShader() { return _pointShader; }
FileLogger* getLogger() { return _logger; }
QPointF getViewportCenter() const
{ return QPointF(_glWidget->getDeviceWidth() / 2.0f, _glWidget->getDeviceHeight() / 2.0f); }
glm::vec2 getViewportDimensions() const { return glm::vec2(_glWidget->getDeviceWidth(), _glWidget->getDeviceHeight()); }
NodeToJurisdictionMap& getVoxelServerJurisdictions() { return _voxelServerJurisdictions; }
NodeToJurisdictionMap& getEntityServerJurisdictions() { return _entityServerJurisdictions; }
@ -358,6 +364,11 @@ public slots:
void domainSettingsReceived(const QJsonObject& domainSettingsObject);
void setRenderTargetFramerate(unsigned int framerate, bool vsyncOn = true);
bool isVSyncOn() { return _isVSyncOn; }
bool isVSyncEditable();
unsigned int getRenderTargetFramerate() const { return _renderTargetFramerate; }
void setRenderResolutionScale(float scale);
void resetSensors();
@ -450,6 +461,7 @@ private:
int _numChangedSettings;
QUndoStack _undoStack;
UndoStackScriptingInterface _undoStackScriptingInterface;
glm::vec3 _gravity;
@ -609,6 +621,8 @@ private:
quint64 _lastNackTime;
quint64 _lastSendDownstreamAudioStats;
int _renderTargetFramerate;
bool _isVSyncOn;
float _renderResolutionScale;
};

View file

@ -92,6 +92,8 @@ Audio::Audio(QObject* parent) :
_collisionSoundDuration(0.0f),
_proceduralEffectSample(0),
_muted(false),
_reverb(false),
_reverbOptions(&_scriptReverbOptions),
_processSpatialAudio(false),
_spatialAudioStart(0),
_spatialAudioFinish(0),
@ -123,11 +125,14 @@ Audio::Audio(QObject* parent) :
memset(_localProceduralSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
// Create the noise sample array
_noiseSampleFrames = new float[NUMBER_OF_NOISE_SAMPLE_FRAMES];
connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedSilence, this, &Audio::addStereoSilenceToScope, Qt::DirectConnection);
connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedLastFrameRepeatedWithFade, this, &Audio::addLastFrameRepeatedWithFadeToScope, Qt::DirectConnection);
connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedStereoSamples, this, &Audio::addStereoSamplesToScope, Qt::DirectConnection);
connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples, this, &Audio::processReceivedSamples, Qt::DirectConnection);
// Initialize GVerb
initGverb();
}
void Audio::init(QGLWidget *parent) {
@ -489,6 +494,69 @@ bool Audio::switchOutputToAudioDevice(const QString& outputDeviceName) {
return switchOutputToAudioDevice(getNamedAudioDeviceForMode(QAudio::AudioOutput, outputDeviceName));
}
void Audio::initGverb() {
// Initialize a new gverb instance
_gverb = gverb_new(_outputFormat.sampleRate(), _reverbOptions->getMaxRoomSize(), _reverbOptions->getRoomSize(),
_reverbOptions->getReverbTime(), _reverbOptions->getDamping(), _reverbOptions->getSpread(),
_reverbOptions->getInputBandwidth(), _reverbOptions->getEarlyLevel(),
_reverbOptions->getTailLevel());
// Configure the instance (these functions are not super well named - they actually set several internal variables)
gverb_set_roomsize(_gverb, _reverbOptions->getRoomSize());
gverb_set_revtime(_gverb, _reverbOptions->getReverbTime());
gverb_set_damping(_gverb, _reverbOptions->getDamping());
gverb_set_inputbandwidth(_gverb, _reverbOptions->getInputBandwidth());
gverb_set_earlylevel(_gverb, DB_CO(_reverbOptions->getEarlyLevel()));
gverb_set_taillevel(_gverb, DB_CO(_reverbOptions->getTailLevel()));
}
void Audio::setReverbOptions(const AudioEffectOptions* options) {
// Save the new options
_scriptReverbOptions.setMaxRoomSize(options->getMaxRoomSize());
_scriptReverbOptions.setRoomSize(options->getRoomSize());
_scriptReverbOptions.setReverbTime(options->getReverbTime());
_scriptReverbOptions.setDamping(options->getDamping());
_scriptReverbOptions.setSpread(options->getSpread());
_scriptReverbOptions.setInputBandwidth(options->getInputBandwidth());
_scriptReverbOptions.setEarlyLevel(options->getEarlyLevel());
_scriptReverbOptions.setTailLevel(options->getTailLevel());
_scriptReverbOptions.setDryLevel(options->getDryLevel());
_scriptReverbOptions.setWetLevel(options->getWetLevel());
if (_reverbOptions == &_scriptReverbOptions) {
// Apply them to the reverb instance(s)
initGverb();
}
}
void Audio::addReverb(int16_t* samplesData, int numSamples, QAudioFormat& audioFormat) {
float dryFraction = DB_CO(_reverbOptions->getDryLevel());
float wetFraction = DB_CO(_reverbOptions->getWetLevel());
float lValue, rValue;
for (int sample = 0; sample < numSamples; sample += audioFormat.channelCount()) {
// Run GVerb
float value = (float)samplesData[sample];
gverb_do(_gverb, value, &lValue, &rValue);
// Mix, accounting for clipping, the left and right channels. Ignore the rest.
for (unsigned int j = sample; j < sample + audioFormat.channelCount(); j++) {
if (j == sample) {
// left channel
int lResult = glm::clamp((int)(samplesData[j] * dryFraction + lValue * wetFraction), -32768, 32767);
samplesData[j] = (int16_t)lResult;
} else if (j == (sample + 1)) {
// right channel
int rResult = glm::clamp((int)(samplesData[j] * dryFraction + rValue * wetFraction), -32768, 32767);
samplesData[j] = (int16_t)rResult;
} else {
// ignore channels above 2
}
}
}
}
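For context on the gain math above: DB_CO converts a level in decibels into a linear gain, about 10^(dB/20), so a 0 dB dry level passes the original signal through unchanged while a -6 dB wet level folds the reverb in at roughly half amplitude. A self-contained sketch of the same per-frame mix, with made-up levels and made-up GVerb outputs:

#include <cmath>
#include <cstdint>
#include <glm/glm.hpp>

// Sketch only: the dry/wet sum from addReverb() applied to one hypothetical stereo frame.
static float dbToLinear(float db) { return std::pow(10.0f, db / 20.0f); }

static void mixOneFrame(int16_t& left, int16_t& right, float lReverb, float rReverb) {
    float dryFraction = dbToLinear(0.0f);    // 0 dB dry level -> gain of about 1.0
    float wetFraction = dbToLinear(-6.0f);   // -6 dB wet level -> gain of about 0.5
    left = (int16_t) glm::clamp((int)(left * dryFraction + lReverb * wetFraction), -32768, 32767);
    right = (int16_t) glm::clamp((int)(right * dryFraction + rReverb * wetFraction), -32768, 32767);
}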
void Audio::handleAudioInput() {
static char audioDataPacket[MAX_PACKET_SIZE];
@ -720,7 +788,6 @@ void Audio::handleAudioInput() {
NodeList* nodeList = NodeList::getInstance();
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
if (_recorder && _recorder.data()->isRecording()) {
_recorder.data()->record(reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes);
}
@ -840,12 +907,10 @@ void Audio::addLastFrameRepeatedWithFadeToScope(int samplesPerChannel) {
}
void Audio::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer) {
const int numNetworkOutputSamples = inputBuffer.size() / sizeof(int16_t);
const int numDeviceOutputSamples = numNetworkOutputSamples * (_outputFormat.sampleRate() * _outputFormat.channelCount())
/ (_desiredOutputFormat.sampleRate() * _desiredOutputFormat.channelCount());
outputBuffer.resize(numDeviceOutputSamples * sizeof(int16_t));
const int16_t* receivedSamples;
@ -884,10 +949,37 @@ void Audio::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& ou
numNetworkOutputSamples,
numDeviceOutputSamples,
_desiredOutputFormat, _outputFormat);
if (_reverb || _receivedAudioStream.hasReverb()) {
bool reverbChanged = false;
if (_receivedAudioStream.hasReverb()) {
if (_zoneReverbOptions.getReverbTime() != _receivedAudioStream.getRevebTime()) {
_zoneReverbOptions.setReverbTime(_receivedAudioStream.getRevebTime());
reverbChanged = true;
}
if (_zoneReverbOptions.getWetLevel() != _receivedAudioStream.getWetLevel()) {
_zoneReverbOptions.setWetLevel(_receivedAudioStream.getWetLevel());
reverbChanged = true;
}
if (_reverbOptions != &_zoneReverbOptions) {
_reverbOptions = &_zoneReverbOptions;
reverbChanged = true;
}
} else if (_reverbOptions != &_scriptReverbOptions) {
_reverbOptions = &_scriptReverbOptions;
reverbChanged = true;
}
if (reverbChanged) {
initGverb();
}
addReverb((int16_t*)outputBuffer.data(), numDeviceOutputSamples, _outputFormat);
}
}
void Audio::addReceivedAudioToStream(const QByteArray& audioByteArray) {
if (_audioOutput) {
// Audio output must exist and be correctly set up if we're going to process received audio
_receivedAudioStream.parseData(audioByteArray);

View file

@ -43,6 +43,14 @@
#include <StdDev.h>
#include "MixedProcessedAudioStream.h"
#include "AudioEffectOptions.h"
#include <AudioRingBuffer.h>
#include <StdDev.h>
extern "C" {
#include <gverb.h>
#include <gverbdsp.h>
}
static const int NUM_AUDIO_CHANNELS = 2;
@ -159,6 +167,8 @@ public slots:
float getInputVolume() const { return (_audioInput) ? _audioInput->volume() : 0.0f; }
void setInputVolume(float volume) { if (_audioInput) _audioInput->setVolume(volume); }
void setReverb(bool reverb) { _reverb = reverb; }
void setReverbOptions(const AudioEffectOptions* options);
const AudioStreamStats& getAudioMixerAvatarStreamAudioStats() const { return _audioMixerAvatarStreamAudioStats; }
const QHash<QUuid, AudioStreamStats>& getAudioMixerInjectedStreamAudioStatsMap() const { return _audioMixerInjectedStreamAudioStatsMap; }
@ -230,6 +240,11 @@ private:
int _proceduralEffectSample;
bool _muted;
bool _localEcho;
bool _reverb;
AudioEffectOptions _scriptReverbOptions;
AudioEffectOptions _zoneReverbOptions;
AudioEffectOptions* _reverbOptions;
ty_gverb *_gverb;
GLuint _micTextureId;
GLuint _muteTextureId;
GLuint _boxTextureId;
@ -249,6 +264,10 @@ private:
// 2. Mix with the audio input
void processProceduralAudio(int16_t* monoInput, int numSamples);
// Adds Reverb
void initGverb();
void addReverb(int16_t* samples, int numSamples, QAudioFormat& format);
// Add sounds that we want the user to not hear themselves, by adding on top of mic input signal
void addProceduralSounds(int16_t* monoInput, int numSamples);

View file

@ -21,6 +21,32 @@
#include "devices/OculusManager.h"
CameraMode stringToMode(const QString& mode) {
if (mode == "third person") {
return CAMERA_MODE_THIRD_PERSON;
} else if (mode == "first person") {
return CAMERA_MODE_FIRST_PERSON;
} else if (mode == "mirror") {
return CAMERA_MODE_MIRROR;
} else if (mode == "independent") {
return CAMERA_MODE_INDEPENDENT;
}
return CAMERA_MODE_NULL;
}
QString modeToString(CameraMode mode) {
if (mode == CAMERA_MODE_THIRD_PERSON) {
return "third person";
} else if (mode == CAMERA_MODE_FIRST_PERSON) {
return "first person";
} else if (mode == CAMERA_MODE_MIRROR) {
return "mirror";
} else if (mode == CAMERA_MODE_INDEPENDENT) {
return "independent";
}
return "unknown";
}
Camera::Camera() :
_mode(CAMERA_MODE_THIRD_PERSON),
_position(0.0f, 0.0f, 0.0f),
@ -48,6 +74,7 @@ float Camera::getFarClip() const {
void Camera::setMode(CameraMode m) {
_mode = m;
emit modeUpdated(m);
}
@ -70,6 +97,7 @@ void Camera::setFarClip(float f) {
CameraScriptableObject::CameraScriptableObject(Camera* camera, ViewFrustum* viewFrustum) :
_camera(camera), _viewFrustum(viewFrustum)
{
connect(_camera, &Camera::modeUpdated, this, &CameraScriptableObject::onModeUpdated);
}
PickRay CameraScriptableObject::computePickRay(float x, float y) {
@ -86,24 +114,7 @@ PickRay CameraScriptableObject::computePickRay(float x, float y) {
}
QString CameraScriptableObject::getMode() const {
QString mode("unknown");
switch(_camera->getMode()) {
case CAMERA_MODE_THIRD_PERSON:
mode = "third person";
break;
case CAMERA_MODE_FIRST_PERSON:
mode = "first person";
break;
case CAMERA_MODE_MIRROR:
mode = "mirror";
break;
case CAMERA_MODE_INDEPENDENT:
mode = "independent";
break;
default:
break;
}
return mode;
return modeToString(_camera->getMode());
}
void CameraScriptableObject::setMode(const QString& mode) {
@ -131,5 +142,9 @@ void CameraScriptableObject::setMode(const QString& mode) {
}
}
void CameraScriptableObject::onModeUpdated(CameraMode m) {
emit modeUpdated(modeToString(m));
}
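With Camera now a QObject and the name/enum conversion factored into stringToMode and modeToString, mode changes can be observed directly from C++ as well as from scripts. A hypothetical listener, shown only as a sketch (it assumes a Camera* named camera, a QDebug include, and that modeToString is visible at the call site):

// Illustration only: log every camera mode change via the new modeUpdated signal.
QObject::connect(camera, &Camera::modeUpdated, [](CameraMode newMode) {
    qDebug() << "Camera mode is now" << modeToString(newMode);
});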

View file

@ -27,8 +27,11 @@ enum CameraMode
NUM_CAMERA_MODES
};
class Camera {
Q_DECLARE_METATYPE(CameraMode);
static int cameraModeId = qRegisterMetaType<CameraMode>();
class Camera : public QObject {
Q_OBJECT
public:
Camera();
@ -63,6 +66,9 @@ public:
const glm::vec3& getEyeOffsetPosition() const { return _eyeOffsetPosition; }
const glm::quat& getEyeOffsetOrientation() const { return _eyeOffsetOrientation; }
float getScale() const { return _scale; }
signals:
void modeUpdated(CameraMode newMode);
private:
@ -100,6 +106,12 @@ public slots:
PickRay computePickRay(float x, float y);
signals:
void modeUpdated(const QString& newMode);
private slots:
void onModeUpdated(CameraMode m);
private:
Camera* _camera;
ViewFrustum* _viewFrustum;

View file

@ -20,7 +20,7 @@
const int MSECS_PER_FRAME_WHEN_THROTTLED = 66;
GLCanvas::GLCanvas() : QGLWidget(QGLFormat(QGL::NoDepthBuffer)),
GLCanvas::GLCanvas() : QGLWidget(QGL::NoDepthBuffer | QGL::NoStencilBuffer),
_throttleRendering(false),
_idleRenderInterval(MSECS_PER_FRAME_WHEN_THROTTLED)
{

View file

@ -38,7 +38,7 @@ Hair::Hair(int strands,
_acceleration(0.0f),
_angularVelocity(0.0f),
_angularAcceleration(0.0f),
_gravity(0.0f),
_gravity(DEFAULT_GRAVITY),
_loudness(0.0f)
{
_hairPosition = new glm::vec3[_strands * _links];
@ -53,7 +53,7 @@ Hair::Hair(int strands,
for (int strand = 0; strand < _strands; strand++) {
float strandAngle = randFloat() * PI;
float azimuth;
float elevation = PI_OVER_TWO - (randFloat() * 0.10f * PI);
float elevation = - (randFloat() * PI);
azimuth = PI_OVER_TWO;
if (randFloat() < 0.5f) {
azimuth *= -1.0f;
@ -92,7 +92,7 @@ Hair::Hair(int strands,
}
}
const float SOUND_THRESHOLD = 50.0f;
const float SOUND_THRESHOLD = 40.0f;
void Hair::simulate(float deltaTime) {
deltaTime = glm::clamp(deltaTime, 0.0f, 1.0f / 30.0f);
@ -121,13 +121,13 @@ void Hair::simulate(float deltaTime) {
(_radius - glm::length(_hairPosition[vertexIndex]));
}
// Add random thing driven by loudness
float loudnessFactor = (_loudness > SOUND_THRESHOLD) ? logf(_loudness - SOUND_THRESHOLD) / 8000.0f : 0.0f;
float loudnessFactor = (_loudness > SOUND_THRESHOLD) ? logf(_loudness - SOUND_THRESHOLD) / 2000.0f : 0.0f;
const float QUIESCENT_LOUDNESS = 0.0f;
_hairPosition[vertexIndex] += randVector() * (QUIESCENT_LOUDNESS + loudnessFactor) * ((float)link / (float)_links);
// Add gravity
const float SCALE_GRAVITY = 0.10f;
const float SCALE_GRAVITY = 0.001f;
_hairPosition[vertexIndex] += _gravity * deltaTime * SCALE_GRAVITY;
// Add linear acceleration

View file

@ -25,9 +25,10 @@ const int HAIR_CONSTRAINTS = 2;
const int DEFAULT_HAIR_STRANDS = 20;
const int DEFAULT_HAIR_LINKS = 10;
const float DEFAULT_HAIR_RADIUS = 0.15f;
const float DEFAULT_HAIR_LINK_LENGTH = 0.07f;
const float DEFAULT_HAIR_RADIUS = 0.075f;
const float DEFAULT_HAIR_LINK_LENGTH = 0.06f;
const float DEFAULT_HAIR_THICKNESS = 0.025f;
const glm::vec3 DEFAULT_GRAVITY(0.0f, -9.8f, 0.0f);
class Hair {
public:
@ -41,7 +42,6 @@ public:
void setAcceleration(const glm::vec3& acceleration) { _acceleration = acceleration; }
void setAngularVelocity(const glm::vec3& angularVelocity) { _angularVelocity = angularVelocity; }
void setAngularAcceleration(const glm::vec3& angularAcceleration) { _angularAcceleration = angularAcceleration; }
void setGravity(const glm::vec3& gravity) { _gravity = gravity; }
void setLoudness(const float loudness) { _loudness = loudness; }
private:

View file

@ -105,6 +105,7 @@ Menu::Menu() :
_maxVoxels(DEFAULT_MAX_VOXELS_PER_SYSTEM),
_voxelSizeScale(DEFAULT_OCTREE_SIZE_SCALE),
_oculusUIAngularSize(DEFAULT_OCULUS_UI_ANGULAR_SIZE),
_oculusUIMaxFPS(DEFAULT_OCULUS_UI_MAX_FPS),
_sixenseReticleMoveSpeed(DEFAULT_SIXENSE_RETICLE_MOVE_SPEED),
_invertSixenseButtons(DEFAULT_INVERT_SIXENSE_MOUSE_BUTTONS),
_automaticAvatarLOD(true),
@ -373,6 +374,24 @@ Menu::Menu() :
shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, MenuOption::SimpleShadows, 0, false));
shadowGroup->addAction(addCheckableActionToQMenuAndActionHash(shadowMenu, MenuOption::CascadedShadows, 0, false));
{
QMenu* framerateMenu = renderOptionsMenu->addMenu(MenuOption::RenderTargetFramerate);
QActionGroup* framerateGroup = new QActionGroup(framerateMenu);
framerateGroup->addAction(addCheckableActionToQMenuAndActionHash(framerateMenu, MenuOption::RenderTargetFramerateUnlimited, 0, true));
framerateGroup->addAction(addCheckableActionToQMenuAndActionHash(framerateMenu, MenuOption::RenderTargetFramerate60, 0, false));
framerateGroup->addAction(addCheckableActionToQMenuAndActionHash(framerateMenu, MenuOption::RenderTargetFramerate50, 0, false));
framerateGroup->addAction(addCheckableActionToQMenuAndActionHash(framerateMenu, MenuOption::RenderTargetFramerate40, 0, false));
framerateGroup->addAction(addCheckableActionToQMenuAndActionHash(framerateMenu, MenuOption::RenderTargetFramerate30, 0, false));
connect(framerateMenu, SIGNAL(triggered(QAction*)), this, SLOT(changeRenderTargetFramerate(QAction*)));
#if defined(Q_OS_MAC)
#else
QAction* vsyncAction = addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::RenderTargetFramerateVSyncOn, 0, true, this, SLOT(changeVSync()));
#endif
}
QMenu* resolutionMenu = renderOptionsMenu->addMenu(MenuOption::RenderResolution);
QActionGroup* resolutionGroup = new QActionGroup(resolutionMenu);
resolutionGroup->addAction(addCheckableActionToQMenuAndActionHash(resolutionMenu, MenuOption::RenderResolutionOne, 0, false));
@ -432,6 +451,8 @@ Menu::Menu() :
QMenu* metavoxelOptionsMenu = developerMenu->addMenu("Metavoxels");
addCheckableActionToQMenuAndActionHash(metavoxelOptionsMenu, MenuOption::DisplayHermiteData, 0, false,
Application::getInstance()->getMetavoxels(), SLOT(refreshVoxelData()));
addCheckableActionToQMenuAndActionHash(metavoxelOptionsMenu, MenuOption::RenderHeightfields, 0, true);
addCheckableActionToQMenuAndActionHash(metavoxelOptionsMenu, MenuOption::RenderDualContourSurfaces, 0, true);
addActionToQMenuAndActionHash(metavoxelOptionsMenu, MenuOption::NetworkSimulator, 0, this,
SLOT(showMetavoxelNetworkSimulator()));
@ -762,6 +783,8 @@ void Menu::loadSettings(QSettings* settings) {
settings->endGroup();
_walletPrivateKey = settings->value("privateKey").toByteArray();
_oculusUIMaxFPS = loadSetting(settings, "oculusUIMaxFPS", 0.0f);
scanMenuBar(&loadAction, settings);
Application::getInstance()->getAvatar()->loadData(settings);
@ -823,6 +846,9 @@ void Menu::saveSettings(QSettings* settings) {
settings->setValue("viewFrustumOffsetUp", _viewFrustumOffset.up);
settings->endGroup();
settings->setValue("privateKey", _walletPrivateKey);
// Oculus Rift settings
settings->setValue("oculusUIMaxFPS", _oculusUIMaxFPS);
scanMenuBar(&saveAction, settings);
Application::getInstance()->getAvatar()->saveData(settings);
@ -1233,6 +1259,33 @@ void Menu::muteEnvironment() {
free(packet);
}
void Menu::changeVSync() {
Application::getInstance()->setRenderTargetFramerate(
Application::getInstance()->getRenderTargetFramerate(),
isOptionChecked(MenuOption::RenderTargetFramerateVSyncOn));
}
void Menu::changeRenderTargetFramerate(QAction* action) {
bool vsyncOn = Application::getInstance()->isVSyncOn();
unsigned int framerate = Application::getInstance()->getRenderTargetFramerate();
QString text = action->text();
if (text == MenuOption::RenderTargetFramerateUnlimited) {
Application::getInstance()->setRenderTargetFramerate(0, vsyncOn);
} else if (text == MenuOption::RenderTargetFramerate60) {
Application::getInstance()->setRenderTargetFramerate(60, vsyncOn);
} else if (text == MenuOption::RenderTargetFramerate50) {
Application::getInstance()->setRenderTargetFramerate(50, vsyncOn);
} else if (text == MenuOption::RenderTargetFramerate40) {
Application::getInstance()->setRenderTargetFramerate(40, vsyncOn);
} else if (text == MenuOption::RenderTargetFramerate30) {
Application::getInstance()->setRenderTargetFramerate(30, vsyncOn);
}
}
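The chain of string comparisons above maps the triggered menu item to a framerate cap, with 0 meaning unlimited. Purely as an illustration of the same mapping (not what this change does), a table-driven version might look like the following, assuming C++11 and the same text and vsyncOn locals:

// Sketch only: the same menu-text-to-framerate mapping expressed as a lookup table.
struct FramerateOption { QString text; unsigned int framerate; };
static const FramerateOption FRAMERATE_OPTIONS[] = {
    { MenuOption::RenderTargetFramerateUnlimited, 0 },
    { MenuOption::RenderTargetFramerate60, 60 },
    { MenuOption::RenderTargetFramerate50, 50 },
    { MenuOption::RenderTargetFramerate40, 40 },
    { MenuOption::RenderTargetFramerate30, 30 }
};
for (const FramerateOption& option : FRAMERATE_OPTIONS) {
    if (option.text == text) {
        Application::getInstance()->setRenderTargetFramerate(option.framerate, vsyncOn);
        break;
    }
}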
void Menu::changeRenderResolution(QAction* action) {
QString text = action->text();
if (text == MenuOption::RenderResolutionOne) {

View file

@ -100,6 +100,8 @@ public:
void setRealWorldFieldOfView(float realWorldFieldOfView) { _realWorldFieldOfView = realWorldFieldOfView; bumpSettings(); }
float getOculusUIAngularSize() const { return _oculusUIAngularSize; }
void setOculusUIAngularSize(float oculusUIAngularSize) { _oculusUIAngularSize = oculusUIAngularSize; bumpSettings(); }
int getOculusUIMaxFPS() const { return _oculusUIMaxFPS; }
void setOculusUIMaxFPS(int oculusUIMaxFPS) { _oculusUIMaxFPS = oculusUIMaxFPS; bumpSettings(); }
float getSixenseReticleMoveSpeed() const { return _sixenseReticleMoveSpeed; }
void setSixenseReticleMoveSpeed(float sixenseReticleMoveSpeed) { _sixenseReticleMoveSpeed = sixenseReticleMoveSpeed; bumpSettings(); }
bool getInvertSixenseButtons() const { return _invertSixenseButtons; }
@ -229,6 +231,8 @@ private slots:
void displayAddressOfflineMessage();
void displayAddressNotFoundMessage();
void muteEnvironment();
void changeRenderTargetFramerate(QAction* action);
void changeVSync();
void changeRenderResolution(QAction* action);
private:
@ -290,6 +294,7 @@ private:
int _maxVoxels;
float _voxelSizeScale;
float _oculusUIAngularSize;
int _oculusUIMaxFPS;
float _sixenseReticleMoveSpeed;
bool _invertSixenseButtons;
bool _automaticAvatarLOD;
@ -446,10 +451,20 @@ namespace MenuOption {
const QString Quit = "Quit";
const QString ReloadAllScripts = "Reload All Scripts";
const QString RenderBoundingCollisionShapes = "Show Bounding Collision Shapes";
const QString RenderDualContourSurfaces = "Render Dual Contour Surfaces";
const QString RenderFocusIndicator = "Show Eye Focus";
const QString RenderHeadCollisionShapes = "Show Head Collision Shapes";
const QString RenderHeightfields = "Render Heightfields";
const QString RenderLookAtVectors = "Show Look-at Vectors";
const QString RenderSkeletonCollisionShapes = "Show Skeleton Collision Shapes";
const QString RenderTargetFramerate = "Framerate";
const QString RenderTargetFramerateUnlimited = "Unlimited";
const QString RenderTargetFramerate60 = "60";
const QString RenderTargetFramerate50 = "50";
const QString RenderTargetFramerate40 = "40";
const QString RenderTargetFramerate30 = "30";
const QString RenderTargetFramerateVSyncOn = "V-Sync On";
const QString RenderResolution = "Scale Resolution";
const QString RenderResolutionOne = "1";
const QString RenderResolutionTwoThird = "2/3";

View file

@ -48,6 +48,13 @@ MetavoxelSystem::NetworkSimulation::NetworkSimulation(float dropRate, float repe
bandwidthLimit(bandwidthLimit) {
}
MetavoxelSystem::~MetavoxelSystem() {
// kill the updater before we delete our network simulation objects
_updater->thread()->quit();
_updater->thread()->wait();
_updater = NULL;
}
void MetavoxelSystem::init() {
MetavoxelClientManager::init();
DefaultMetavoxelRendererImplementation::init();
@ -110,11 +117,9 @@ int SimulateVisitor::visit(MetavoxelInfo& info) {
void MetavoxelSystem::simulate(float deltaTime) {
// update the lod
{
// the LOD threshold is temporarily tied to the avatar LOD parameter
QWriteLocker locker(&_lodLock);
const float BASE_LOD_THRESHOLD = 0.01f;
_lod = MetavoxelLOD(Application::getInstance()->getCamera()->getPosition(),
BASE_LOD_THRESHOLD * Menu::getInstance()->getAvatarLODDistanceMultiplier());
const float DEFAULT_LOD_THRESHOLD = 0.01f;
_lod = MetavoxelLOD(Application::getInstance()->getCamera()->getPosition(), DEFAULT_LOD_THRESHOLD);
}
SimulateVisitor simulateVisitor(deltaTime, getLOD());
@ -2772,40 +2777,39 @@ void DefaultMetavoxelRendererImplementation::render(MetavoxelData& data, Metavox
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
_baseHeightfieldProgram.bind();
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
BufferRenderVisitor heightfieldRenderVisitor(Application::getInstance()->getMetavoxels()->getHeightfieldBufferAttribute());
data.guide(heightfieldRenderVisitor);
_baseHeightfieldProgram.release();
glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, 0);
glActiveTexture(GL_TEXTURE0);
if (Menu::getInstance()->isOptionChecked(MenuOption::RenderHeightfields)) {
_baseHeightfieldProgram.bind();
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
BufferRenderVisitor heightfieldRenderVisitor(Application::getInstance()->getMetavoxels()->getHeightfieldBufferAttribute());
data.guide(heightfieldRenderVisitor);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
_baseHeightfieldProgram.release();
}
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_COLOR_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
_baseVoxelProgram.bind();
BufferRenderVisitor voxelRenderVisitor(Application::getInstance()->getMetavoxels()->getVoxelBufferAttribute());
data.guide(voxelRenderVisitor);
_baseVoxelProgram.release();
if (Menu::getInstance()->isOptionChecked(MenuOption::RenderDualContourSurfaces)) {
glEnableClientState(GL_COLOR_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
_baseVoxelProgram.bind();
BufferRenderVisitor voxelRenderVisitor(Application::getInstance()->getMetavoxels()->getVoxelBufferAttribute());
data.guide(voxelRenderVisitor);
_baseVoxelProgram.release();
glDisableClientState(GL_COLOR_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
}
glDisable(GL_ALPHA_TEST);
glDisable(GL_CULL_FACE);
glEnable(GL_BLEND);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_COLOR_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
Application::getInstance()->getTextureCache()->setPrimaryDrawBuffers(true, false);
}

View file

@ -43,6 +43,8 @@ public:
int maximumDelay = 0, int bandwidthLimit = 0);
};
virtual ~MetavoxelSystem();
virtual void init();
virtual MetavoxelLOD getLOD();

View file

@ -21,7 +21,7 @@
#include "ScriptsModel.h"
static const QString S3_URL = "https://s3.amazonaws.com/hifi-public";
static const QString S3_URL = "http://s3.amazonaws.com/hifi-public";
static const QString PUBLIC_URL = "http://public.highfidelity.io";
static const QString MODELS_LOCATION = "scripts/";

View file

@ -52,6 +52,7 @@ const float DISPLAYNAME_BACKGROUND_ALPHA = 0.4f;
Avatar::Avatar() :
AvatarData(),
_skeletonModel(this),
_skeletonOffset(0.0f),
_bodyYawDelta(0.0f),
_velocity(0.0f),
_positionDeltaAccumulator(0.0f),
@ -191,7 +192,6 @@ void Avatar::simulate(float deltaTime) {
_hair.setAcceleration(getAcceleration() * getHead()->getFinalOrientationInWorldFrame());
_hair.setAngularVelocity((getAngularVelocity() + getHead()->getAngularVelocity()) * getHead()->getFinalOrientationInWorldFrame());
_hair.setAngularAcceleration(getAngularAcceleration() * getHead()->getFinalOrientationInWorldFrame());
_hair.setGravity(Application::getInstance()->getEnvironment()->getGravity(getPosition()) * getHead()->getFinalOrientationInWorldFrame());
_hair.setLoudness((float) getHeadData()->getAudioLoudness());
_hair.simulate(deltaTime);
}
@ -764,6 +764,20 @@ bool Avatar::findCollisions(const QVector<const Shape*>& shapes, CollisionList&
return collided;
}
void Avatar::setSkeletonOffset(const glm::vec3& offset) {
const float MAX_OFFSET_LENGTH = _scale * 0.5f;
float offsetLength = glm::length(offset);
if (offsetLength > MAX_OFFSET_LENGTH) {
_skeletonOffset = (MAX_OFFSET_LENGTH / offsetLength) * offset;
} else {
_skeletonOffset = offset;
}
}
glm::vec3 Avatar::getSkeletonPosition() const {
return _position + _skeletonOffset;
}
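To make the clamp in setSkeletonOffset concrete: the offset is limited to half the avatar's scale while its direction is preserved, so at a scale of 1.0 a requested offset of (0, 0, 1.2) is stored as (0, 0, 0.5). A tiny standalone sketch of the same arithmetic (the scale and offset values are hypothetical):

#include <glm/glm.hpp>

// Illustration only: the clamp applied in setSkeletonOffset above.
glm::vec3 clampSkeletonOffset(const glm::vec3& offset, float scale) {
    const float maxLength = scale * 0.5f;
    float length = glm::length(offset);
    return (length > maxLength) ? (maxLength / length) * offset : offset;
}
// clampSkeletonOffset(glm::vec3(0.0f, 0.0f, 1.2f), 1.0f) == (0, 0, 0.5): direction kept, length capped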
QVector<glm::quat> Avatar::getJointRotations() const {
if (QThread::currentThread() != thread()) {
return AvatarData::getJointRotations();

View file

@ -69,6 +69,7 @@ class Texture;
class Avatar : public AvatarData {
Q_OBJECT
Q_PROPERTY(quint32 collisionGroups READ getCollisionGroups WRITE setCollisionGroups)
Q_PROPERTY(glm::vec3 skeletonOffset READ getSkeletonOffset WRITE setSkeletonOffset)
public:
Avatar();
@ -146,6 +147,10 @@ public:
quint32 getCollisionGroups() const { return _collisionGroups; }
virtual void setCollisionGroups(quint32 collisionGroups) { _collisionGroups = (collisionGroups & VALID_COLLISION_GROUPS); }
Q_INVOKABLE void setSkeletonOffset(const glm::vec3& offset);
Q_INVOKABLE glm::vec3 getSkeletonOffset() { return _skeletonOffset; }
virtual glm::vec3 getSkeletonPosition() const;
Q_INVOKABLE glm::vec3 getJointPosition(int index) const;
Q_INVOKABLE glm::vec3 getJointPosition(const QString& name) const;
@ -184,6 +189,7 @@ signals:
protected:
Hair _hair;
SkeletonModel _skeletonModel;
glm::vec3 _skeletonOffset;
QVector<Model*> _attachmentModels;
float _bodyYawDelta;

View file

@ -35,6 +35,9 @@ Head::Head(Avatar* owningAvatar) :
_longTermAverageLoudness(-1.0f),
_audioAttack(0.0f),
_audioJawOpen(0.0f),
_mouth2(0.0f),
_mouth3(0.0f),
_mouth4(0.0f),
_angularVelocity(0,0,0),
_renderLookatVectors(false),
_saccade(0.0f, 0.0f, 0.0f),
@ -166,6 +169,7 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
}
// use data to update fake Faceshift blendshape coefficients
const float JAW_OPEN_SCALE = 0.015f;
const float JAW_OPEN_RATE = 0.9f;
const float JAW_CLOSE_RATE = 0.90f;
@ -177,10 +181,28 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
}
_audioJawOpen = glm::clamp(_audioJawOpen, 0.0f, 1.0f);
// _mouth2 = "mmmm" shape
// _mouth3 = "funnel" shape
// _mouth4 = "smile" shape
const float FUNNEL_PERIOD = 0.985f;
const float FUNNEL_RANDOM_PERIOD = 0.01f;
const float MMMM_POWER = 0.25f;
const float MMMM_PERIOD = 0.91f;
const float MMMM_RANDOM_PERIOD = 0.15f;
const float SMILE_PERIOD = 0.925f;
const float SMILE_RANDOM_PERIOD = 0.05f;
_mouth3 = glm::mix(_audioJawOpen, _mouth3, FUNNEL_PERIOD + randFloat() * FUNNEL_RANDOM_PERIOD);
_mouth2 = glm::mix(_audioJawOpen * MMMM_POWER, _mouth2, MMMM_PERIOD + randFloat() * MMMM_RANDOM_PERIOD);
_mouth4 = glm::mix(_audioJawOpen, _mouth4, SMILE_PERIOD + randFloat() * SMILE_RANDOM_PERIOD);
Application::getInstance()->getFaceshift()->updateFakeCoefficients(_leftEyeBlink,
_rightEyeBlink,
_browAudioLift,
_audioJawOpen,
_mouth2,
_mouth3,
_mouth4,
_blendshapeCoefficients);
}

View file

@ -129,6 +129,9 @@ private:
float _longTermAverageLoudness;
float _audioAttack;
float _audioJawOpen;
float _mouth2;
float _mouth3;
float _mouth4;
glm::vec3 _angularVelocity;
bool _renderLookatVectors;
glm::vec3 _saccade;

View file

@ -108,6 +108,20 @@ MyAvatar::~MyAvatar() {
_lookAtTargetAvatar.clear();
}
QByteArray MyAvatar::toByteArray() {
CameraMode mode = Application::getInstance()->getCamera()->getMode();
if (mode == CAMERA_MODE_THIRD_PERSON || mode == CAMERA_MODE_INDEPENDENT) {
// fake the avatar position that is sent up to the AvatarMixer
glm::vec3 oldPosition = _position;
_position += _skeletonOffset;
QByteArray array = AvatarData::toByteArray();
// copy the correct position back
_position = oldPosition;
return array;
}
return AvatarData::toByteArray();
}
void MyAvatar::reset() {
_skeletonModel.reset();
getHead()->reset();
@ -217,7 +231,6 @@ void MyAvatar::simulate(float deltaTime) {
_hair.setAcceleration(getAcceleration() * getHead()->getFinalOrientationInWorldFrame());
_hair.setAngularVelocity((getAngularVelocity() + getHead()->getAngularVelocity()) * getHead()->getFinalOrientationInWorldFrame());
_hair.setAngularAcceleration(getAngularAcceleration() * getHead()->getFinalOrientationInWorldFrame());
_hair.setGravity(Application::getInstance()->getEnvironment()->getGravity(getPosition()) * getHead()->getFinalOrientationInWorldFrame());
_hair.setLoudness((float)getHeadData()->getAudioLoudness());
_hair.simulate(deltaTime);
}
@ -1055,6 +1068,14 @@ void MyAvatar::setAttachmentData(const QVector<AttachmentData>& attachmentData)
_billboardValid = false;
}
glm::vec3 MyAvatar::getSkeletonPosition() const {
CameraMode mode = Application::getInstance()->getCamera()->getMode();
if (mode == CAMERA_MODE_THIRD_PERSON || mode == CAMERA_MODE_INDEPENDENT) {
return Avatar::getSkeletonPosition();
}
return Avatar::getPosition();
}
QString MyAvatar::getScriptedMotorFrame() const {
QString frame = "avatar";
if (_scriptedMotorFrame == SCRIPTED_MOTOR_CAMERA_FRAME) {

View file

@ -41,7 +41,8 @@ class MyAvatar : public Avatar {
public:
MyAvatar();
~MyAvatar();
QByteArray toByteArray();
void reset();
void update(float deltaTime);
void simulate(float deltaTime);
@ -134,6 +135,8 @@ public:
virtual void setFaceModelURL(const QUrl& faceModelURL);
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL);
virtual void setAttachmentData(const QVector<AttachmentData>& attachmentData);
virtual glm::vec3 getSkeletonPosition() const;
void clearJointAnimationPriorities();

View file

@ -77,7 +77,7 @@ const float PALM_PRIORITY = DEFAULT_PRIORITY;
const float LEAN_PRIORITY = DEFAULT_PRIORITY;
void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
setTranslation(_owningAvatar->getPosition());
setTranslation(_owningAvatar->getSkeletonPosition());
static const glm::quat refOrientation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
setRotation(_owningAvatar->getOrientation() * refOrientation);
setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningAvatar->getScale() * MODEL_SCALE);

View file

@ -125,8 +125,14 @@ void Faceshift::reset() {
}
void Faceshift::updateFakeCoefficients(float leftBlink, float rightBlink, float browUp,
float jawOpen, QVector<float>& coefficients) const {
coefficients.resize(max((int)coefficients.size(), _jawOpenIndex + 1));
float jawOpen, float mouth2, float mouth3, float mouth4, QVector<float>& coefficients) const {
const int MMMM_BLENDSHAPE = 34;
const int FUNNEL_BLENDSHAPE = 40;
const int SMILE_LEFT_BLENDSHAPE = 28;
const int SMILE_RIGHT_BLENDSHAPE = 29;
const int MAX_FAKE_BLENDSHAPE = 40; // Largest modified blendshape from above and below
coefficients.resize(max((int)coefficients.size(), MAX_FAKE_BLENDSHAPE + 1));
qFill(coefficients.begin(), coefficients.end(), 0.0f);
coefficients[_leftBlinkIndex] = leftBlink;
coefficients[_rightBlinkIndex] = rightBlink;
@ -134,6 +140,9 @@ void Faceshift::updateFakeCoefficients(float leftBlink, float rightBlink, float
coefficients[_browUpLeftIndex] = browUp;
coefficients[_browUpRightIndex] = browUp;
coefficients[_jawOpenIndex] = jawOpen;
coefficients[SMILE_LEFT_BLENDSHAPE] = coefficients[SMILE_RIGHT_BLENDSHAPE] = mouth4;
coefficients[MMMM_BLENDSHAPE] = mouth2;
coefficients[FUNNEL_BLENDSHAPE] = mouth3;
}
void Faceshift::setTCPEnabled(bool enabled) {

View file

@ -61,8 +61,14 @@ public:
void update();
void reset();
void updateFakeCoefficients(float leftBlink, float rightBlink, float browUp,
float jawOpen, QVector<float>& coefficients) const;
void updateFakeCoefficients(float leftBlink,
float rightBlink,
float browUp,
float jawOpen,
float mouth2,
float mouth3,
float mouth4,
QVector<float>& coefficients) const;
signals:

View file

@ -45,9 +45,14 @@ void Joystick::closeJoystick() {
#ifdef HAVE_SDL2
void Joystick::handleAxisEvent(const SDL_ControllerAxisEvent& event) {
if (_axes.size() <= event.axis) {
_axes.resize(event.axis + 1);
}
float oldValue = _axes[event.axis];
float newValue = event.value / MAX_AXIS;
_axes[event.axis] = newValue;
emit axisValueChanged(event.axis, newValue, oldValue);
}

View file

@ -66,8 +66,6 @@ glm::vec3 OculusManager::_calibrationPosition;
glm::quat OculusManager::_calibrationOrientation;
quint64 OculusManager::_calibrationStartTime;
int OculusManager::_calibrationMessage = NULL;
QString OculusManager::CALIBRATION_BILLBOARD_URL = "http://hifi-public.s3.amazonaws.com/images/hold-to-calibrate.svg";
float OculusManager::CALIBRATION_BILLBOARD_SCALE = 2.f;
#endif
@ -191,7 +189,7 @@ void OculusManager::disconnect() {
}
#ifdef HAVE_LIBOVR
void OculusManager::positionCalibrationBillboard(BillboardOverlay* billboard) {
void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) {
glm::quat headOrientation = Application::getInstance()->getAvatar()->getHeadOrientation();
headOrientation.x = 0;
headOrientation.z = 0;
@ -204,8 +202,9 @@ void OculusManager::positionCalibrationBillboard(BillboardOverlay* billboard) {
#ifdef HAVE_LIBOVR
void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
static QString instructionMessage = "Hold still to calibrate";
static QString progressMessage;
static BillboardOverlay* billboard;
static Text3DOverlay* billboard;
switch (_calibrationState) {
@ -235,9 +234,13 @@ void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
if (!_calibrationMessage) {
qDebug() << "Hold still to calibrate HMD";
billboard = new BillboardOverlay();
billboard->setURL(CALIBRATION_BILLBOARD_URL);
billboard->setScale(CALIBRATION_BILLBOARD_SCALE);
billboard = new Text3DOverlay();
billboard->setDimensions(glm::vec2(2.0f, 1.25f));
billboard->setTopMargin(0.35f);
billboard->setLeftMargin(0.28f);
billboard->setText(instructionMessage);
billboard->setAlpha(0.5f);
billboard->setLineHeight(0.1f);
billboard->setIsFacingAvatar(false);
positionCalibrationBillboard(billboard);
@ -275,7 +278,7 @@ void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
} else {
progressMessage += ".";
}
//qDebug() << progressMessage; // Progress message ready for 3D text overlays.
billboard->setText(instructionMessage + "\n\n" + progressMessage);
}
}
} else {

View file

@ -18,9 +18,10 @@
#endif
#include "renderer/ProgramObject.h"
#include "ui/overlays/BillboardOverlay.h"
#include "ui/overlays/Text3DOverlay.h"
const float DEFAULT_OCULUS_UI_ANGULAR_SIZE = 72.0f;
const int DEFAULT_OCULUS_UI_MAX_FPS = 75;
class Camera;
class PalmData;
@ -111,7 +112,7 @@ private:
WAITING_FOR_ZERO_HELD,
CALIBRATED
};
static void positionCalibrationBillboard(BillboardOverlay* billboard);
static void positionCalibrationBillboard(Text3DOverlay* message);
static float CALIBRATION_DELTA_MINIMUM_LENGTH;
static float CALIBRATION_DELTA_MINIMUM_ANGLE;
static float CALIBRATION_ZERO_MAXIMUM_LENGTH;
@ -123,8 +124,6 @@ private:
static glm::quat _calibrationOrientation;
static quint64 _calibrationStartTime;
static int _calibrationMessage;
static QString CALIBRATION_BILLBOARD_URL;
static float CALIBRATION_BILLBOARD_SCALE;
#endif

754
interface/src/gpu/Batch.cpp Normal file
View file

@ -0,0 +1,754 @@
//
// Batch.cpp
// interface/src/gpu
//
// Created by Sam Gateau on 10/14/2014.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Batch.h"
#include <QDebug>
#define ADD_COMMAND(call) _commands.push_back(COMMAND_##call); _commandCalls.push_back(&gpu::Batch::do_##call); _commandOffsets.push_back(_params.size());
//#define DO_IT_NOW(call, offset) runLastCommand();
#define DO_IT_NOW(call, offset)
//#define CHECK_GL_ERROR() ::gpu::backend::checkGLError()
#define CHECK_GL_ERROR()
using namespace gpu;
Batch::Batch() :
_commands(),
_commandCalls(),
_commandOffsets(),
_params(),
_resources(),
_data() {
}
Batch::~Batch() {
}
void Batch::clear() {
_commands.clear();
_commandCalls.clear();
_commandOffsets.clear();
_params.clear();
_resources.clear();
_data.clear();
}
uint32 Batch::cacheResource(Resource* res) {
uint32 offset = _resources.size();
_resources.push_back(ResourceCache(res));
return offset;
}
uint32 Batch::cacheResource(const void* pointer) {
uint32 offset = _resources.size();
_resources.push_back(ResourceCache(pointer));
return offset;
}
uint32 Batch::cacheData(uint32 size, const void* data) {
uint32 offset = _data.size();
uint32 nbBytes = size;
_data.resize(offset + nbBytes);
memcpy(_data.data() + offset, data, size);
return offset;
}
#define CASE_COMMAND(call) case COMMAND_##call: { do_##call(offset); } break;
void Batch::runCommand(Command com, uint32 offset) {
switch (com) {
CASE_COMMAND(draw);
CASE_COMMAND(drawIndexed);
CASE_COMMAND(drawInstanced);
CASE_COMMAND(drawIndexedInstanced);
CASE_COMMAND(glEnable);
CASE_COMMAND(glDisable);
CASE_COMMAND(glEnableClientState);
CASE_COMMAND(glDisableClientState);
CASE_COMMAND(glCullFace);
CASE_COMMAND(glAlphaFunc);
CASE_COMMAND(glDepthFunc);
CASE_COMMAND(glDepthMask);
CASE_COMMAND(glDepthRange);
CASE_COMMAND(glBindBuffer);
CASE_COMMAND(glBindTexture);
CASE_COMMAND(glActiveTexture);
CASE_COMMAND(glDrawBuffers);
CASE_COMMAND(glUseProgram);
CASE_COMMAND(glUniform1f);
CASE_COMMAND(glUniformMatrix4fv);
CASE_COMMAND(glMatrixMode);
CASE_COMMAND(glPushMatrix);
CASE_COMMAND(glPopMatrix);
CASE_COMMAND(glMultMatrixf);
CASE_COMMAND(glLoadMatrixf);
CASE_COMMAND(glLoadIdentity);
CASE_COMMAND(glRotatef);
CASE_COMMAND(glScalef);
CASE_COMMAND(glTranslatef);
CASE_COMMAND(glDrawArrays);
CASE_COMMAND(glDrawRangeElements);
CASE_COMMAND(glColorPointer);
CASE_COMMAND(glNormalPointer);
CASE_COMMAND(glTexCoordPointer);
CASE_COMMAND(glVertexPointer);
CASE_COMMAND(glVertexAttribPointer);
CASE_COMMAND(glEnableVertexAttribArray);
CASE_COMMAND(glDisableVertexAttribArray);
CASE_COMMAND(glColor4f);
CASE_COMMAND(glMaterialf);
CASE_COMMAND(glMaterialfv);
}
}
void Batch::draw(Primitive primitiveType, int nbVertices, int startVertex) {
ADD_COMMAND(draw);
_params.push_back(startVertex);
_params.push_back(nbVertices);
_params.push_back(primitiveType);
}
void Batch::drawIndexed(Primitive primitiveType, int nbIndices, int startIndex) {
ADD_COMMAND(drawIndexed);
_params.push_back(startIndex);
_params.push_back(nbIndices);
_params.push_back(primitiveType);
}
void Batch::drawInstanced(uint32 nbInstances, Primitive primitiveType, int nbVertices, int startVertex, int startInstance) {
ADD_COMMAND(drawInstanced);
_params.push_back(startInstance);
_params.push_back(startVertex);
_params.push_back(nbVertices);
_params.push_back(primitiveType);
_params.push_back(nbInstances);
}
void Batch::drawIndexedInstanced(uint32 nbInstances, Primitive primitiveType, int nbIndices, int startIndex, int startInstance) {
ADD_COMMAND(drawIndexedInstanced);
_params.push_back(startInstance);
_params.push_back(startIndex);
_params.push_back(nbIndices);
_params.push_back(primitiveType);
_params.push_back(nbInstances);
}
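A note on the recording convention used throughout this file: ADD_COMMAND stores the current _params size before any argument is pushed, and each call then pushes its arguments last-to-first, so the replay side reads them back at ascending paramOffset indices with the GL call's first argument at the highest index. A trace of what the draw() above records, as an illustration rather than new code:

// draw(primitiveType, nbVertices, startVertex) records, in effect:
//   _commandOffsets.push_back(_params.size());  // via ADD_COMMAND, before the pushes below
//   _params.push_back(startVertex);             // read back as _params[paramOffset + 0]
//   _params.push_back(nbVertices);              // _params[paramOffset + 1]
//   _params.push_back(primitiveType);           // _params[paramOffset + 2]
// The do_gl* implementations below follow the same pattern when they unpack their arguments.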
// TODO: As long as we have gl calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
void Batch::_glEnable(GLenum cap) {
ADD_COMMAND(glEnable);
_params.push_back(cap);
DO_IT_NOW(_glEnable, 1);
}
void Batch::do_glEnable(uint32 paramOffset) {
glEnable(_params[paramOffset]._uint);
CHECK_GL_ERROR();
}
void Batch::_glDisable(GLenum cap) {
ADD_COMMAND(glDisable);
_params.push_back(cap);
DO_IT_NOW(_glDisable, 1);
}
void Batch::do_glDisable(uint32 paramOffset) {
glDisable(_params[paramOffset]._uint);
CHECK_GL_ERROR();
}
void Batch::_glEnableClientState(GLenum array) {
ADD_COMMAND(glEnableClientState);
_params.push_back(array);
DO_IT_NOW(_glEnableClientState, 1 );
}
void Batch::do_glEnableClientState(uint32 paramOffset) {
glEnableClientState(_params[paramOffset]._uint);
CHECK_GL_ERROR();
}
void Batch::_glDisableClientState(GLenum array) {
ADD_COMMAND(glDisableClientState);
_params.push_back(array);
DO_IT_NOW(_glDisableClientState, 1);
}
void Batch::do_glDisableClientState(uint32 paramOffset) {
glDisableClientState(_params[paramOffset]._uint);
CHECK_GL_ERROR();
}
void Batch::_glCullFace(GLenum mode) {
ADD_COMMAND(glCullFace);
_params.push_back(mode);
DO_IT_NOW(_glCullFace, 1);
}
void Batch::do_glCullFace(uint32 paramOffset) {
glCullFace(_params[paramOffset]._uint);
CHECK_GL_ERROR();
}
void Batch::_glAlphaFunc(GLenum func, GLclampf ref) {
ADD_COMMAND(glAlphaFunc);
_params.push_back(ref);
_params.push_back(func);
DO_IT_NOW(_glAlphaFunc, 2);
}
void Batch::do_glAlphaFunc(uint32 paramOffset) {
glAlphaFunc(
_params[paramOffset + 1]._uint,
_params[paramOffset + 0]._float);
CHECK_GL_ERROR();
}
void Batch::_glDepthFunc(GLenum func) {
ADD_COMMAND(glDepthFunc);
_params.push_back(func);
DO_IT_NOW(_glDepthFunc, 1);
}
void Batch::do_glDepthFunc(uint32 paramOffset) {
glDepthFunc(_params[paramOffset]._uint);
CHECK_GL_ERROR();
}
void Batch::_glDepthMask(GLboolean flag) {
ADD_COMMAND(glDepthMask);
_params.push_back(flag);
DO_IT_NOW(_glDepthMask, 1);
}
void Batch::do_glDepthMask(uint32 paramOffset) {
glDepthMask(_params[paramOffset]._uint);
CHECK_GL_ERROR();
}
void Batch::_glDepthRange(GLclampd zNear, GLclampd zFar) {
ADD_COMMAND(glDepthRange);
_params.push_back(zFar);
_params.push_back(zNear);
DO_IT_NOW(_glDepthRange, 2);
}
void Batch::do_glDepthRange(uint32 paramOffset) {
glDepthRange(
_params[paramOffset + 1]._double,
_params[paramOffset + 0]._double);
CHECK_GL_ERROR();
}
void Batch::_glBindBuffer(GLenum target, GLuint buffer) {
ADD_COMMAND(glBindBuffer);
_params.push_back(buffer);
_params.push_back(target);
DO_IT_NOW(_glBindBuffer, 2);
}
void Batch::do_glBindBuffer(uint32 paramOffset) {
glBindBuffer(
_params[paramOffset + 1]._uint,
_params[paramOffset + 0]._uint);
CHECK_GL_ERROR();
}
void Batch::_glBindTexture(GLenum target, GLuint texture) {
ADD_COMMAND(glBindTexture);
_params.push_back(texture);
_params.push_back(target);
DO_IT_NOW(_glBindTexture, 2);
}
void Batch::do_glBindTexture(uint32 paramOffset) {
glBindTexture(
_params[paramOffset + 1]._uint,
_params[paramOffset + 0]._uint);
CHECK_GL_ERROR();
}
void Batch::_glActiveTexture(GLenum texture) {
ADD_COMMAND(glActiveTexture);
_params.push_back(texture);
DO_IT_NOW(_glActiveTexture, 1);
}
void Batch::do_glActiveTexture(uint32 paramOffset) {
glActiveTexture(_params[paramOffset]._uint);
CHECK_GL_ERROR();
}
void Batch::_glDrawBuffers(GLsizei n, const GLenum* bufs) {
ADD_COMMAND(glDrawBuffers);
_params.push_back(cacheData(n * sizeof(GLenum), bufs));
_params.push_back(n);
DO_IT_NOW(_glDrawBuffers, 2);
}
void Batch::do_glDrawBuffers(uint32 paramOffset) {
glDrawBuffers(
_params[paramOffset + 1]._uint,
(const GLenum*) editData(_params[paramOffset + 0]._uint));
CHECK_GL_ERROR();
}
void Batch::_glUseProgram(GLuint program) {
ADD_COMMAND(glUseProgram);
_params.push_back(program);
DO_IT_NOW(_glUseProgram, 1);
}
void Batch::do_glUseProgram(uint32 paramOffset) {
glUseProgram(_params[paramOffset]._uint);
CHECK_GL_ERROR();
}
void Batch::_glUniform1f(GLint location, GLfloat v0) {
ADD_COMMAND(glUniform1f);
_params.push_back(v0);
_params.push_back(location);
DO_IT_NOW(_glUniform1f, 2);
}
void Batch::do_glUniform1f(uint32 paramOffset) {
glUniform1f(
_params[paramOffset + 1]._int,
_params[paramOffset + 0]._float);
CHECK_GL_ERROR();
}
void Batch::_glUniformMatrix4fv(GLint location, GLsizei count, GLboolean transpose, const GLfloat* value) {
ADD_COMMAND(glUniformMatrix4fv);
const int MATRIX4_SIZE = 16 * sizeof(float);
_params.push_back(cacheData(count * MATRIX4_SIZE, value));
_params.push_back(transpose);
_params.push_back(count);
_params.push_back(location);
DO_IT_NOW(_glUniformMatrix4fv, 4);
}
void Batch::do_glUniformMatrix4fv(uint32 paramOffset) {
glUniformMatrix4fv(
_params[paramOffset + 3]._int,
_params[paramOffset + 2]._uint,
_params[paramOffset + 1]._uint,
(const GLfloat*) editData(_params[paramOffset + 0]._uint));
CHECK_GL_ERROR();
}
void Batch::_glMatrixMode(GLenum mode) {
ADD_COMMAND(glMatrixMode);
_params.push_back(mode);
DO_IT_NOW(_glMatrixMode, 1);
}
void Batch::do_glMatrixMode(uint32 paramOffset) {
glMatrixMode(_params[paramOffset]._uint);
CHECK_GL_ERROR();
}
void Batch::_glPushMatrix() {
ADD_COMMAND(glPushMatrix);
DO_IT_NOW(_glPushMatrix, 0);
}
void Batch::do_glPushMatrix(uint32 paramOffset) {
glPushMatrix();
CHECK_GL_ERROR();
}
void Batch::_glPopMatrix() {
ADD_COMMAND(glPopMatrix);
DO_IT_NOW(_glPopMatrix, 0);
}
void Batch::do_glPopMatrix(uint32 paramOffset) {
glPopMatrix();
CHECK_GL_ERROR();
}
void Batch::_glMultMatrixf(const GLfloat *m) {
ADD_COMMAND(glMultMatrixf);
const int MATRIX4_SIZE = 16 * sizeof(float);
_params.push_back(cacheData(MATRIX4_SIZE, m));
DO_IT_NOW(_glMultMatrixf, 1);
}
void Batch::do_glMultMatrixf(uint32 paramOffset) {
glMultMatrixf((const GLfloat*) editData(_params[paramOffset]._uint));
CHECK_GL_ERROR();
}
void Batch::_glLoadMatrixf(const GLfloat *m) {
ADD_COMMAND(glLoadMatrixf);
const int MATRIX4_SIZE = 16 * sizeof(float);
_params.push_back(cacheData(MATRIX4_SIZE, m));
DO_IT_NOW(_glLoadMatrixf, 1);
}
void Batch::do_glLoadMatrixf(uint32 paramOffset) {
glLoadMatrixf((const GLfloat*)editData(_params[paramOffset]._uint));
CHECK_GL_ERROR();
}
void Batch::_glLoadIdentity(void) {
ADD_COMMAND(glLoadIdentity);
DO_IT_NOW(_glLoadIdentity, 0);
}
void Batch::do_glLoadIdentity(uint32 paramOffset) {
glLoadIdentity();
CHECK_GL_ERROR();
}
void Batch::_glRotatef(GLfloat angle, GLfloat x, GLfloat y, GLfloat z) {
ADD_COMMAND(glRotatef);
_params.push_back(z);
_params.push_back(y);
_params.push_back(x);
_params.push_back(angle);
DO_IT_NOW(_glRotatef, 4);
}
void Batch::do_glRotatef(uint32 paramOffset) {
glRotatef(
_params[paramOffset + 3]._float,
_params[paramOffset + 2]._float,
_params[paramOffset + 1]._float,
_params[paramOffset + 0]._float);
CHECK_GL_ERROR();
}
void Batch::_glScalef(GLfloat x, GLfloat y, GLfloat z) {
ADD_COMMAND(glScalef);
_params.push_back(z);
_params.push_back(y);
_params.push_back(x);
DO_IT_NOW(_glScalef, 3);
}
void Batch::do_glScalef(uint32 paramOffset) {
glScalef(
_params[paramOffset + 2]._float,
_params[paramOffset + 1]._float,
_params[paramOffset + 0]._float);
CHECK_GL_ERROR();
}
void Batch::_glTranslatef(GLfloat x, GLfloat y, GLfloat z) {
ADD_COMMAND(glTranslatef);
_params.push_back(z);
_params.push_back(y);
_params.push_back(x);
DO_IT_NOW(_glTranslatef, 3);
}
void Batch::do_glTranslatef(uint32 paramOffset) {
glTranslatef(
_params[paramOffset + 2]._float,
_params[paramOffset + 1]._float,
_params[paramOffset + 0]._float);
CHECK_GL_ERROR();
}
void Batch::_glDrawArrays(GLenum mode, GLint first, GLsizei count) {
ADD_COMMAND(glDrawArrays);
_params.push_back(count);
_params.push_back(first);
_params.push_back(mode);
DO_IT_NOW(_glDrawArrays, 3);
}
void Batch::do_glDrawArrays(uint32 paramOffset) {
glDrawArrays(
_params[paramOffset + 2]._uint,
_params[paramOffset + 1]._int,
_params[paramOffset + 0]._int);
CHECK_GL_ERROR();
}
void Batch::_glDrawRangeElements(GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices) {
ADD_COMMAND(glDrawRangeElements);
_params.push_back(cacheResource(indices));
_params.push_back(type);
_params.push_back(count);
_params.push_back(end);
_params.push_back(start);
_params.push_back(mode);
DO_IT_NOW(_glDrawRangeElements, 6);
}
void Batch::do_glDrawRangeElements(uint32 paramOffset) {
glDrawRangeElements(
_params[paramOffset + 5]._uint,
_params[paramOffset + 4]._uint,
_params[paramOffset + 3]._uint,
_params[paramOffset + 2]._int,
_params[paramOffset + 1]._uint,
editResource(_params[paramOffset + 0]._uint)->_pointer);
CHECK_GL_ERROR();
}
void Batch::_glColorPointer(GLint size, GLenum type, GLsizei stride, const void *pointer) {
ADD_COMMAND(glColorPointer);
_params.push_back(cacheResource(pointer));
_params.push_back(stride);
_params.push_back(type);
_params.push_back(size);
DO_IT_NOW(_glColorPointer, 4);
}
void Batch::do_glColorPointer(uint32 paramOffset) {
glColorPointer(
_params[paramOffset + 3]._int,
_params[paramOffset + 2]._uint,
_params[paramOffset + 1]._int,
editResource(_params[paramOffset + 0]._uint)->_pointer);
CHECK_GL_ERROR();
}
void Batch::_glNormalPointer(GLenum type, GLsizei stride, const void *pointer) {
ADD_COMMAND(glNormalPointer);
_params.push_back(cacheResource(pointer));
_params.push_back(stride);
_params.push_back(type);
DO_IT_NOW(_glNormalPointer, 3);
}
void Batch::do_glNormalPointer(uint32 paramOffset) {
glNormalPointer(
_params[paramOffset + 2]._uint,
_params[paramOffset + 1]._int,
editResource(_params[paramOffset + 0]._uint)->_pointer);
CHECK_GL_ERROR();
}
void Batch::_glTexCoordPointer(GLint size, GLenum type, GLsizei stride, const void *pointer) {
ADD_COMMAND(glTexCoordPointer);
_params.push_back(cacheResource(pointer));
_params.push_back(stride);
_params.push_back(type);
_params.push_back(size);
DO_IT_NOW(_glTexCoordPointer, 4);
}
void Batch::do_glTexCoordPointer(uint32 paramOffset) {
glTexCoordPointer(
_params[paramOffset + 3]._int,
_params[paramOffset + 2]._uint,
_params[paramOffset + 1]._int,
editResource(_params[paramOffset + 0]._uint)->_pointer);
CHECK_GL_ERROR();
}
void Batch::_glVertexPointer(GLint size, GLenum type, GLsizei stride, const void *pointer) {
ADD_COMMAND(glVertexPointer);
_params.push_back(cacheResource(pointer));
_params.push_back(stride);
_params.push_back(type);
_params.push_back(size);
DO_IT_NOW(_glVertexPointer, 4);
}
void Batch::do_glVertexPointer(uint32 paramOffset) {
glVertexPointer(
_params[paramOffset + 3]._int,
_params[paramOffset + 2]._uint,
_params[paramOffset + 1]._int,
editResource(_params[paramOffset + 0]._uint)->_pointer);
CHECK_GL_ERROR();
}
void Batch::_glVertexAttribPointer(GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer) {
ADD_COMMAND(glVertexAttribPointer);
_params.push_back(cacheResource(pointer));
_params.push_back(stride);
_params.push_back(normalized);
_params.push_back(type);
_params.push_back(size);
_params.push_back(index);
DO_IT_NOW(_glVertexAttribPointer, 6);
}
void Batch::do_glVertexAttribPointer(uint32 paramOffset) {
glVertexAttribPointer(
_params[paramOffset + 5]._uint,
_params[paramOffset + 4]._int,
_params[paramOffset + 3]._uint,
_params[paramOffset + 2]._uint,
_params[paramOffset + 1]._int,
editResource(_params[paramOffset + 0]._uint)->_pointer);
CHECK_GL_ERROR();
}
void Batch::_glEnableVertexAttribArray(GLint location) {
ADD_COMMAND(glEnableVertexAttribArray);
_params.push_back(location);
DO_IT_NOW(_glEnableVertexAttribArray, 1);
}
void Batch::do_glEnableVertexAttribArray(uint32 paramOffset) {
glEnableVertexAttribArray(_params[paramOffset]._uint);
CHECK_GL_ERROR();
}
void Batch::_glDisableVertexAttribArray(GLint location) {
ADD_COMMAND(glDisableVertexAttribArray);
_params.push_back(location);
DO_IT_NOW(_glDisableVertexAttribArray, 1);
}
void Batch::do_glDisableVertexAttribArray(uint32 paramOffset) {
glDisableVertexAttribArray(_params[paramOffset]._uint);
CHECK_GL_ERROR();
}
void Batch::_glColor4f(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha) {
ADD_COMMAND(glColor4f);
_params.push_back(alpha);
_params.push_back(blue);
_params.push_back(green);
_params.push_back(red);
DO_IT_NOW(_glColor4f, 4);
}
void Batch::do_glColor4f(uint32 paramOffset) {
glColor4f(
_params[paramOffset + 3]._float,
_params[paramOffset + 2]._float,
_params[paramOffset + 1]._float,
_params[paramOffset + 0]._float);
CHECK_GL_ERROR();
}
void Batch::_glMaterialf(GLenum face, GLenum pname, GLfloat param) {
ADD_COMMAND(glMaterialf);
_params.push_back(param);
_params.push_back(pname);
_params.push_back(face);
DO_IT_NOW(_glMaterialf, 3);
}
void Batch::do_glMaterialf(uint32 paramOffset) {
glMaterialf(
_params[paramOffset + 2]._uint,
_params[paramOffset + 1]._uint,
_params[paramOffset + 0]._float);
CHECK_GL_ERROR();
}
void Batch::_glMaterialfv(GLenum face, GLenum pname, const GLfloat *params) {
ADD_COMMAND(glMaterialfv);
_params.push_back(cacheData(4 * sizeof(float), params));
_params.push_back(pname);
_params.push_back(face);
DO_IT_NOW(_glMaterialfv, 3);
}
void Batch::do_glMaterialfv(uint32 paramOffset) {
glMaterialfv(
_params[paramOffset + 2]._uint,
_params[paramOffset + 1]._uint,
(const GLfloat*) editData(_params[paramOffset + 0]._uint));
CHECK_GL_ERROR();
}
void backend::renderBatch(Batch& batch) {
uint32 numCommands = batch._commands.size();
Batch::CommandCall* call = batch._commandCalls.data();
Batch::CommandOffsets::value_type* offset = batch._commandOffsets.data();
for (uint32 i = 0; i < numCommands; i++) {
(batch.*(*call))(*offset);
call++;
offset++;
}
}
void backend::checkGLError() {
GLenum error = glGetError();
if (!error) {
return;
} else {
switch (error) {
case GL_INVALID_ENUM:
qDebug() << "An unacceptable value is specified for an enumerated argument.The offending command is ignored and has no other side effect than to set the error flag.";
break;
case GL_INVALID_VALUE:
qDebug() << "A numeric argument is out of range.The offending command is ignored and has no other side effect than to set the error flag";
break;
case GL_INVALID_OPERATION:
qDebug() << "The specified operation is not allowed in the current state.The offending command is ignored and has no other side effect than to set the error flag..";
break;
case GL_INVALID_FRAMEBUFFER_OPERATION:
qDebug() << "The framebuffer object is not complete.The offending command is ignored and has no other side effect than to set the error flag.";
break;
case GL_OUT_OF_MEMORY:
qDebug() << "There is not enough memory left to execute the command.The state of the GL is undefined, except for the state of the error flags, after this error is recorded.";
break;
case GL_STACK_UNDERFLOW:
qDebug() << "An attempt has been made to perform an operation that would cause an internal stack to underflow.";
break;
case GL_STACK_OVERFLOW:
qDebug() << "An attempt has been made to perform an operation that would cause an internal stack to overflow.";
break;
}
}
}
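Taken together, Batch is a small command recorder: each call appends a command id, a pointer to its do_* member function, and an offset into the shared parameter vector, and backend::renderBatch later replays everything in order against the current GL context. A hypothetical caller, shown only as a sketch (the vertex data and count are made up, and note that _glVertexPointer records the pointer by address, so the data must outlive renderBatch):

// Sketch only; not part of this change. Records a handful of GL calls and replays them.
#include "Batch.h"

void drawTrianglesWithBatch(const float* vertices, int vertexCount) {
    gpu::Batch batch;
    batch._glEnableClientState(GL_VERTEX_ARRAY);
    batch._glVertexPointer(3, GL_FLOAT, 0, vertices);   // pointer is recorded, not copied
    batch._glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
    batch._glDrawArrays(GL_TRIANGLES, 0, vertexCount);
    batch._glDisableClientState(GL_VERTEX_ARRAY);
    gpu::backend::renderBatch(batch);                   // replays the recorded calls in order
}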

335
interface/src/gpu/Batch.h Normal file
View file

@ -0,0 +1,335 @@
//
// Batch.h
// interface/src/gpu
//
// Created by Sam Gateau on 10/14/2014.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_gpu_Batch_h
#define hifi_gpu_Batch_h
#include <assert.h>
#include "InterfaceConfig.h"
#include <vector>
#if defined(NSIGHT_FOUND)
#include "nvToolsExt.h"
class ProfileRange {
public:
ProfileRange(const char *name) {
nvtxRangePush(name);
}
~ProfileRange() {
nvtxRangePop();
}
};
#define PROFILE_RANGE(name) ProfileRange profileRangeThis(name);
#else
#define PROFILE_RANGE(name)
#endif
namespace gpu {
class Batch;
// TODO: move the backend namespace into dedicated files; for now we keep it close to the gpu object definitions for convenience
namespace backend {
void renderBatch(Batch& batch);
void checkGLError();
};
class Buffer;
class Resource;
typedef int Stamp;
typedef unsigned int uint32;
typedef int int32;
enum Primitive {
PRIMITIVE_POINTS = 0,
PRIMITIVE_LINES,
PRIMITIVE_LINE_STRIP,
PRIMITIVE_TRIANGLES,
PRIMITIVE_TRIANGLE_STRIP,
PRIMITIVE_QUADS,
};
class Batch {
public:
Batch();
Batch(const Batch& batch);
~Batch();
void clear();
void draw(Primitive primitiveType, int nbVertices, int startVertex = 0);
void drawIndexed(Primitive primitiveType, int nbIndices, int startIndex = 0);
void drawInstanced(uint32 nbInstances, Primitive primitiveType, int nbVertices, int startVertex = 0, int startInstance = 0);
void drawIndexedInstanced(uint32 nbInstances, Primitive primitiveType, int nbIndices, int startIndex = 0, int startInstance = 0);
    // TODO: As long as we have gl calls explicitly issued from interface
    // code, we need to be able to record and batch these calls. The long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
void _glEnable(GLenum cap);
void _glDisable(GLenum cap);
void _glEnableClientState(GLenum array);
void _glDisableClientState(GLenum array);
void _glCullFace(GLenum mode);
void _glAlphaFunc(GLenum func, GLclampf ref);
void _glDepthFunc(GLenum func);
void _glDepthMask(GLboolean flag);
void _glDepthRange(GLclampd zNear, GLclampd zFar);
void _glBindBuffer(GLenum target, GLuint buffer);
void _glBindTexture(GLenum target, GLuint texture);
void _glActiveTexture(GLenum texture);
void _glDrawBuffers(GLsizei n, const GLenum* bufs);
void _glUseProgram(GLuint program);
void _glUniform1f(GLint location, GLfloat v0);
void _glUniformMatrix4fv(GLint location, GLsizei count, GLboolean transpose, const GLfloat* value);
void _glMatrixMode(GLenum mode);
void _glPushMatrix();
void _glPopMatrix();
void _glMultMatrixf(const GLfloat *m);
void _glLoadMatrixf(const GLfloat *m);
void _glLoadIdentity(void);
void _glRotatef(GLfloat angle, GLfloat x, GLfloat y, GLfloat z);
void _glScalef(GLfloat x, GLfloat y, GLfloat z);
void _glTranslatef(GLfloat x, GLfloat y, GLfloat z);
void _glDrawArrays(GLenum mode, GLint first, GLsizei count);
void _glDrawRangeElements(GLenum mode, GLuint start, GLuint end, GLsizei count, GLenum type, const void *indices);
void _glColorPointer(GLint size, GLenum type, GLsizei stride, const void *pointer);
void _glNormalPointer(GLenum type, GLsizei stride, const void *pointer);
void _glTexCoordPointer(GLint size, GLenum type, GLsizei stride, const void *pointer);
void _glVertexPointer(GLint size, GLenum type, GLsizei stride, const void *pointer);
void _glVertexAttribPointer(GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void *pointer);
void _glEnableVertexAttribArray(GLint location);
void _glDisableVertexAttribArray(GLint location);
void _glColor4f(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha);
void _glMaterialf(GLenum face, GLenum pname, GLfloat param);
void _glMaterialfv(GLenum face, GLenum pname, const GLfloat *params);
protected:
enum Command {
COMMAND_draw = 0,
COMMAND_drawIndexed,
COMMAND_drawInstanced,
COMMAND_drawIndexedInstanced,
COMMAND_SET_PIPE_STATE,
COMMAND_SET_VIEWPORT,
COMMAND_SET_FRAMEBUFFER,
COMMAND_SET_RESOURCE,
COMMAND_SET_VERTEX_STREAM,
COMMAND_SET_INDEX_STREAM,
    // TODO: As long as we have gl calls explicitly issued from interface
    // code, we need to be able to record and batch these calls. The long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
COMMAND_glEnable,
COMMAND_glDisable,
COMMAND_glEnableClientState,
COMMAND_glDisableClientState,
COMMAND_glCullFace,
COMMAND_glAlphaFunc,
COMMAND_glDepthFunc,
COMMAND_glDepthMask,
COMMAND_glDepthRange,
COMMAND_glBindBuffer,
COMMAND_glBindTexture,
COMMAND_glActiveTexture,
COMMAND_glDrawBuffers,
COMMAND_glUseProgram,
COMMAND_glUniform1f,
COMMAND_glUniformMatrix4fv,
COMMAND_glMatrixMode,
COMMAND_glPushMatrix,
COMMAND_glPopMatrix,
COMMAND_glMultMatrixf,
COMMAND_glLoadMatrixf,
COMMAND_glLoadIdentity,
COMMAND_glRotatef,
COMMAND_glScalef,
COMMAND_glTranslatef,
COMMAND_glDrawArrays,
COMMAND_glDrawRangeElements,
COMMAND_glColorPointer,
COMMAND_glNormalPointer,
COMMAND_glTexCoordPointer,
COMMAND_glVertexPointer,
COMMAND_glVertexAttribPointer,
COMMAND_glEnableVertexAttribArray,
COMMAND_glDisableVertexAttribArray,
COMMAND_glColor4f,
COMMAND_glMaterialf,
COMMAND_glMaterialfv,
};
typedef std::vector<Command> Commands;
typedef void (Batch::*CommandCall)(uint32);
typedef std::vector<CommandCall> CommandCalls;
typedef std::vector<uint32> CommandOffsets;
class Param {
public:
union {
int32 _int;
uint32 _uint;
float _float;
char _chars[4];
double _double;
};
Param(int32 val) : _int(val) {}
Param(uint32 val) : _uint(val) {}
Param(float val) : _float(val) {}
Param(double val) : _double(val) {}
};
typedef std::vector<Param> Params;
class ResourceCache {
public:
union {
Resource* _resource;
const void* _pointer;
};
ResourceCache(Resource* res) : _resource(res) {}
ResourceCache(const void* pointer) : _pointer(pointer) {}
};
typedef std::vector<ResourceCache> Resources;
typedef unsigned char Byte;
typedef std::vector<Byte> Bytes;
Commands _commands;
CommandCalls _commandCalls;
CommandOffsets _commandOffsets;
Params _params;
Resources _resources;
Bytes _data;
uint32 cacheResource(Resource* res);
uint32 cacheResource(const void* pointer);
ResourceCache* editResource(uint32 offset) {
if (offset >= _resources.size())
return 0;
return (_resources.data() + offset);
}
uint32 cacheData(uint32 size, const void* data);
Byte* editData(uint32 offset) {
if (offset >= _data.size())
return 0;
return (_data.data() + offset);
}
void runCommand(uint32 index) {
uint32 offset = _commandOffsets[index];
CommandCall call = _commandCalls[index];
(this->*(call))(offset);
}
void runLastCommand() {
uint32 index = _commands.size() - 1;
runCommand(index);
}
void runCommand(Command com, uint32 offset);
void do_draw(uint32 paramOffset) {}
void do_drawIndexed(uint32 paramOffset) {}
void do_drawInstanced(uint32 paramOffset) {}
void do_drawIndexedInstanced(uint32 paramOffset) {}
    // TODO: As long as we have gl calls explicitly issued from interface
    // code, we need to be able to record and batch these calls. The long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
void do_glEnable(uint32 paramOffset);
void do_glDisable(uint32 paramOffset);
void do_glEnableClientState(uint32 paramOffset);
void do_glDisableClientState(uint32 paramOffset);
void do_glCullFace(uint32 paramOffset);
void do_glAlphaFunc(uint32 paramOffset);
void do_glDepthFunc(uint32 paramOffset);
void do_glDepthMask(uint32 paramOffset);
void do_glDepthRange(uint32 paramOffset);
void do_glBindBuffer(uint32 paramOffset);
void do_glBindTexture(uint32 paramOffset);
void do_glActiveTexture(uint32 paramOffset);
void do_glDrawBuffers(uint32 paramOffset);
void do_glUseProgram(uint32 paramOffset);
void do_glUniform1f(uint32 paramOffset);
void do_glUniformMatrix4fv(uint32 paramOffset);
void do_glMatrixMode(uint32 paramOffset);
void do_glPushMatrix(uint32 paramOffset);
void do_glPopMatrix(uint32 paramOffset);
void do_glMultMatrixf(uint32 paramOffset);
void do_glLoadMatrixf(uint32 paramOffset);
void do_glLoadIdentity(uint32 paramOffset);
void do_glRotatef(uint32 paramOffset);
void do_glScalef(uint32 paramOffset);
void do_glTranslatef(uint32 paramOffset);
void do_glDrawArrays(uint32 paramOffset);
void do_glDrawRangeElements(uint32 paramOffset);
void do_glColorPointer(uint32 paramOffset);
void do_glNormalPointer(uint32 paramOffset);
void do_glTexCoordPointer(uint32 paramOffset);
void do_glVertexPointer(uint32 paramOffset);
void do_glVertexAttribPointer(uint32 paramOffset);
void do_glEnableVertexAttribArray(uint32 paramOffset);
void do_glDisableVertexAttribArray(uint32 paramOffset);
void do_glColor4f(uint32 paramOffset);
void do_glMaterialf(uint32 paramOffset);
void do_glMaterialfv(uint32 paramOffset);
friend void backend::renderBatch(Batch& batch);
};
};
#endif
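For contributors extending this API, a sketch (hypothetical, not in the diff) of how a new recorded call would follow the convention visible in Batch.cpp, where parameters are pushed in reverse order and read back by offset; a matching COMMAND_glLineWidth enum entry and the two declarations in this header would also be needed, and ADD_COMMAND / DO_IT_NOW are assumed to behave like they do for the existing entries:
void Batch::_glLineWidth(GLfloat width) {
    ADD_COMMAND(glLineWidth);                     // records the command, as the other _gl* recorders do
    _params.push_back(width);                     // for multi-argument calls the last parameter is pushed first
    DO_IT_NOW(_glLineWidth, 1);
}
void Batch::do_glLineWidth(uint32 paramOffset) {
    glLineWidth(_params[paramOffset + 0]._float); // the first argument of the call sits at offset + 0
    CHECK_GL_ERROR();
}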

View file

@ -612,6 +612,31 @@ void NetworkGeometry::clearLoadPriority(const QPointer<QObject>& owner) {
}
}
void NetworkGeometry::setTextureWithNameToURL(const QString& name, const QUrl& url) {
for (int i = 0; i < _meshes.size(); i++) {
NetworkMesh& mesh = _meshes[i];
for (int j = 0; j < mesh.parts.size(); j++) {
NetworkMeshPart& part = mesh.parts[j];
QSharedPointer<NetworkTexture> matchingTexture = QSharedPointer<NetworkTexture>();
if (part.diffuseTextureName == name) {
part.diffuseTexture =
Application::getInstance()->getTextureCache()->getTexture(url, DEFAULT_TEXTURE,
_geometry.meshes[i].isEye, QByteArray());
part.diffuseTexture->setLoadPriorities(_loadPriorities);
} else if (part.normalTextureName == name) {
part.normalTexture = Application::getInstance()->getTextureCache()->getTexture(url, DEFAULT_TEXTURE,
false, QByteArray());
part.normalTexture->setLoadPriorities(_loadPriorities);
} else if (part.specularTextureName == name) {
part.specularTexture = Application::getInstance()->getTextureCache()->getTexture(url, DEFAULT_TEXTURE,
false, QByteArray());
part.specularTexture->setLoadPriorities(_loadPriorities);
}
}
}
}
/// Reads geometry in a worker thread.
class GeometryReader : public QRunnable {
public:
@ -727,18 +752,21 @@ void NetworkGeometry::setGeometry(const FBXGeometry& geometry) {
networkPart.diffuseTexture = Application::getInstance()->getTextureCache()->getTexture(
_textureBase.resolved(QUrl(part.diffuseTexture.filename)), DEFAULT_TEXTURE,
mesh.isEye, part.diffuseTexture.content);
networkPart.diffuseTextureName = part.diffuseTexture.name;
networkPart.diffuseTexture->setLoadPriorities(_loadPriorities);
}
if (!part.normalTexture.filename.isEmpty()) {
networkPart.normalTexture = Application::getInstance()->getTextureCache()->getTexture(
_textureBase.resolved(QUrl(part.normalTexture.filename)), NORMAL_TEXTURE,
false, part.normalTexture.content);
networkPart.normalTextureName = part.normalTexture.name;
networkPart.normalTexture->setLoadPriorities(_loadPriorities);
}
if (!part.specularTexture.filename.isEmpty()) {
networkPart.specularTexture = Application::getInstance()->getTextureCache()->getTexture(
_textureBase.resolved(QUrl(part.specularTexture.filename)), SPECULAR_TEXTURE,
false, part.specularTexture.content);
networkPart.specularTextureName = part.specularTexture.name;
networkPart.specularTexture->setLoadPriorities(_loadPriorities);
}
networkMesh.parts.append(networkPart);

View file

@ -107,6 +107,8 @@ public:
virtual void setLoadPriorities(const QHash<QPointer<QObject>, float>& priorities);
virtual void clearLoadPriority(const QPointer<QObject>& owner);
void setTextureWithNameToURL(const QString& name, const QUrl& url);
protected:
virtual void init();
@ -136,10 +138,13 @@ private:
/// The state associated with a single mesh part.
class NetworkMeshPart {
public:
QString diffuseTextureName;
QSharedPointer<NetworkTexture> diffuseTexture;
QString normalTextureName;
QSharedPointer<NetworkTexture> normalTexture;
QString specularTextureName;
QSharedPointer<NetworkTexture> specularTexture;
bool isTranslucent() const;

View file

@ -26,6 +26,10 @@
#include "Application.h"
#include "Model.h"
#include "gpu/Batch.h"
#define GLBATCH( call ) batch._##call
//#define GLBATCH( call ) call
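// Editorial note (not part of the change): with the active definition above, a call such as
// GLBATCH(glEnable)(GL_CULL_FACE) expands to batch._glEnable(GL_CULL_FACE) and is recorded on the
// local gpu::Batch, while the commented-out alternative would expand straight to glEnable(GL_CULL_FACE),
// which makes it easy to compare batched against immediate execution.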
using namespace std;
static int modelPointerTypeId = qRegisterMetaType<QPointer<Model> >();
@ -391,6 +395,7 @@ void Model::setJointStates(QVector<JointState> states) {
}
bool Model::render(float alpha, RenderMode mode, RenderArgs* args) {
PROFILE_RANGE(__FUNCTION__);
// render the attachments
foreach (Model* attachment, _attachments) {
attachment->render(alpha, mode);
@ -430,97 +435,138 @@ bool Model::render(float alpha, RenderMode mode, RenderArgs* args) {
segregateMeshGroups();
}
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
glDisable(GL_COLOR_MATERIAL);
// Let's introduce a gpu::Batch to capture all the calls to the graphics api
gpu::Batch batch;
GLBATCH(glEnableClientState)(GL_VERTEX_ARRAY);
GLBATCH(glEnableClientState)(GL_NORMAL_ARRAY);
GLBATCH(glDisable)(GL_COLOR_MATERIAL);
if (mode == DIFFUSE_RENDER_MODE || mode == NORMAL_RENDER_MODE) {
glDisable(GL_CULL_FACE);
GLBATCH(glDisable)(GL_CULL_FACE);
} else {
glEnable(GL_CULL_FACE);
GLBATCH(glEnable)(GL_CULL_FACE);
if (mode == SHADOW_RENDER_MODE) {
glCullFace(GL_FRONT);
GLBATCH(glCullFace)(GL_FRONT);
}
}
// render opaque meshes with alpha testing
glDisable(GL_BLEND);
glEnable(GL_ALPHA_TEST);
GLBATCH(glDisable)(GL_BLEND);
GLBATCH(glEnable)(GL_ALPHA_TEST);
if (mode == SHADOW_RENDER_MODE) {
glAlphaFunc(GL_EQUAL, 0.0f);
GLBATCH(glAlphaFunc)(GL_EQUAL, 0.0f);
}
Application::getInstance()->getTextureCache()->setPrimaryDrawBuffers(
/*Application::getInstance()->getTextureCache()->setPrimaryDrawBuffers(
mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE,
mode == DEFAULT_RENDER_MODE || mode == NORMAL_RENDER_MODE,
mode == DEFAULT_RENDER_MODE);
*/
{
GLenum buffers[3];
int bufferCount = 0;
if (mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE) {
buffers[bufferCount++] = GL_COLOR_ATTACHMENT0;
}
if (mode == DEFAULT_RENDER_MODE || mode == NORMAL_RENDER_MODE) {
buffers[bufferCount++] = GL_COLOR_ATTACHMENT1;
}
if (mode == DEFAULT_RENDER_MODE) {
buffers[bufferCount++] = GL_COLOR_ATTACHMENT2;
}
GLBATCH(glDrawBuffers)(bufferCount, buffers);
}
const float DEFAULT_ALPHA_THRESHOLD = 0.5f;
    //renderMeshes(RenderMode mode, bool translucent, float alphaThreshold, bool hasTangents, bool hasSpecular, bool isSkinned, args);
int opaqueMeshPartsRendered = 0;
opaqueMeshPartsRendered += renderMeshes(mode, false, DEFAULT_ALPHA_THRESHOLD, false, false, false, args);
opaqueMeshPartsRendered += renderMeshes(mode, false, DEFAULT_ALPHA_THRESHOLD, false, false, true, args);
opaqueMeshPartsRendered += renderMeshes(mode, false, DEFAULT_ALPHA_THRESHOLD, false, true, false, args);
opaqueMeshPartsRendered += renderMeshes(mode, false, DEFAULT_ALPHA_THRESHOLD, false, true, true, args);
opaqueMeshPartsRendered += renderMeshes(mode, false, DEFAULT_ALPHA_THRESHOLD, true, false, false, args);
opaqueMeshPartsRendered += renderMeshes(mode, false, DEFAULT_ALPHA_THRESHOLD, true, false, true, args);
opaqueMeshPartsRendered += renderMeshes(mode, false, DEFAULT_ALPHA_THRESHOLD, true, true, false, args);
opaqueMeshPartsRendered += renderMeshes(mode, false, DEFAULT_ALPHA_THRESHOLD, true, false, true, args);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, false, false, args);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, false, true, args);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, true, false, args);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, true, true, args);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, true, false, false, args);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, true, false, true, args);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, true, true, false, args);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, true, false, true, args);
// render translucent meshes afterwards
Application::getInstance()->getTextureCache()->setPrimaryDrawBuffers(false, true, true);
//Application::getInstance()->getTextureCache()->setPrimaryDrawBuffers(false, true, true);
{
GLenum buffers[2];
int bufferCount = 0;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT1;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT2;
GLBATCH(glDrawBuffers)(bufferCount, buffers);
}
int translucentMeshPartsRendered = 0;
const float MOSTLY_OPAQUE_THRESHOLD = 0.75f;
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_OPAQUE_THRESHOLD, false, false, false, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_OPAQUE_THRESHOLD, false, false, true, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_OPAQUE_THRESHOLD, false, true, false, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_OPAQUE_THRESHOLD, false, true, true, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_OPAQUE_THRESHOLD, true, false, false, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_OPAQUE_THRESHOLD, true, false, true, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_OPAQUE_THRESHOLD, true, true, false, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_OPAQUE_THRESHOLD, true, false, true, args);
glDisable(GL_ALPHA_TEST);
glEnable(GL_BLEND);
glDepthMask(false);
glDepthFunc(GL_LEQUAL);
Application::getInstance()->getTextureCache()->setPrimaryDrawBuffers(true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, false, false, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, false, true, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, true, false, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, true, true, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, true, false, false, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, true, false, true, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, true, true, false, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, true, false, true, args);
GLBATCH(glDisable)(GL_ALPHA_TEST);
GLBATCH(glEnable)(GL_BLEND);
GLBATCH(glDepthMask)(false);
GLBATCH(glDepthFunc)(GL_LEQUAL);
//Application::getInstance()->getTextureCache()->setPrimaryDrawBuffers(true);
{
GLenum buffers[1];
int bufferCount = 0;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT0;
GLBATCH(glDrawBuffers)(bufferCount, buffers);
}
if (mode == DEFAULT_RENDER_MODE || mode == DIFFUSE_RENDER_MODE) {
const float MOSTLY_TRANSPARENT_THRESHOLD = 0.0f;
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, false, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, true, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, true, false, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, true, true, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_TRANSPARENT_THRESHOLD, true, false, false, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_TRANSPARENT_THRESHOLD, true, false, true, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_TRANSPARENT_THRESHOLD, true, true, false, args);
translucentMeshPartsRendered += renderMeshes(mode, true, MOSTLY_TRANSPARENT_THRESHOLD, true, false, true, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, false, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, true, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, true, false, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, true, true, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, true, false, false, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, true, false, true, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, true, true, false, args);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, true, false, true, args);
}
glDepthMask(true);
glDepthFunc(GL_LESS);
glDisable(GL_CULL_FACE);
GLBATCH(glDepthMask)(true);
GLBATCH(glDepthFunc)(GL_LESS);
GLBATCH(glDisable)(GL_CULL_FACE);
if (mode == SHADOW_RENDER_MODE) {
glCullFace(GL_BACK);
GLBATCH(glCullFace)(GL_BACK);
}
// deactivate vertex arrays after drawing
glDisableClientState(GL_NORMAL_ARRAY);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
GLBATCH(glDisableClientState)(GL_NORMAL_ARRAY);
GLBATCH(glDisableClientState)(GL_VERTEX_ARRAY);
GLBATCH(glDisableClientState)(GL_TEXTURE_COORD_ARRAY);
// bind with 0 to switch back to normal operation
glBindBuffer(GL_ARRAY_BUFFER, 0);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
glBindTexture(GL_TEXTURE_2D, 0);
GLBATCH(glBindBuffer)(GL_ARRAY_BUFFER, 0);
GLBATCH(glBindBuffer)(GL_ELEMENT_ARRAY_BUFFER, 0);
GLBATCH(glBindTexture)(GL_TEXTURE_2D, 0);
// Render!
{
PROFILE_RANGE("render Batch");
::gpu::backend::renderBatch(batch);
batch.clear();
}
// restore all the default material settings
Application::getInstance()->setupWorldLight();
@ -1506,9 +1552,10 @@ void Model::segregateMeshGroups() {
_meshGroupsKnown = true;
}
int Model::renderMeshes(RenderMode mode, bool translucent, float alphaThreshold,
int Model::renderMeshes(gpu::Batch& batch, RenderMode mode, bool translucent, float alphaThreshold,
bool hasTangents, bool hasSpecular, bool isSkinned, RenderArgs* args) {
PROFILE_RANGE(__FUNCTION__);
bool dontCullOutOfViewMeshParts = Menu::getInstance()->isOptionChecked(MenuOption::DontCullOutOfViewMeshParts);
bool cullTooSmallMeshParts = !Menu::getInstance()->isOptionChecked(MenuOption::DontCullTooSmallMeshParts);
bool dontReduceMaterialSwitches = Menu::getInstance()->isOptionChecked(MenuOption::DontReduceMaterialSwitches);
@ -1606,16 +1653,21 @@ int Model::renderMeshes(RenderMode mode, bool translucent, float alphaThreshold,
ProgramObject* activeProgram = program;
Locations* activeLocations = locations;
if (isSkinned) {
skinProgram->bind();
activeProgram = skinProgram;
activeLocations = skinLocations;
} else {
program->bind();
}
activeProgram->setUniformValue(activeLocations->alphaThreshold, alphaThreshold);
    // This code replaces the "bind()" call on the QGLProgram
if (!activeProgram->isLinked()) {
activeProgram->link();
}
GLBATCH(glUseProgram)(activeProgram->programId());
// activeProgram->setUniformValue(activeLocations->alphaThreshold, alphaThreshold);
GLBATCH(glUniform1f)(activeLocations->alphaThreshold, alphaThreshold);
// i is the "index" from the original networkMeshes QVector...
foreach (int i, list) {
@ -1631,7 +1683,8 @@ int Model::renderMeshes(RenderMode mode, bool translucent, float alphaThreshold,
const NetworkMesh& networkMesh = networkMeshes.at(i);
const FBXMesh& mesh = geometry.meshes.at(i);
const_cast<QOpenGLBuffer&>(networkMesh.indexBuffer).bind();
//const_cast<QOpenGLBuffer&>(networkMesh.indexBuffer).bind();
GLBATCH(glBindBuffer)(GL_ELEMENT_ARRAY_BUFFER, const_cast<QOpenGLBuffer&>(networkMesh.indexBuffer).bufferId());
int vertexCount = mesh.vertices.size();
if (vertexCount == 0) {
@ -1666,56 +1719,70 @@ int Model::renderMeshes(RenderMode mode, bool translucent, float alphaThreshold,
}
}
const_cast<QOpenGLBuffer&>(networkMesh.vertexBuffer).bind();
glPushMatrix();
Application::getInstance()->loadTranslatedViewMatrix(_translation);
//const_cast<QOpenGLBuffer&>(networkMesh.vertexBuffer).bind();
GLBATCH(glBindBuffer)(GL_ARRAY_BUFFER, const_cast<QOpenGLBuffer&>(networkMesh.vertexBuffer).bufferId());
GLBATCH(glPushMatrix)();
//Application::getInstance()->loadTranslatedViewMatrix(_translation);
GLBATCH(glLoadMatrixf)((const GLfloat*)&Application::getInstance()->getUntranslatedViewMatrix());
glm::vec3 viewMatTranslation = Application::getInstance()->getViewMatrixTranslation();
GLBATCH(glTranslatef)(_translation.x + viewMatTranslation.x, _translation.y + viewMatTranslation.y,
_translation.z + viewMatTranslation.z);
const MeshState& state = _meshStates.at(i);
if (state.clusterMatrices.size() > 1) {
glUniformMatrix4fvARB(skinLocations->clusterMatrices, state.clusterMatrices.size(), false,
GLBATCH(glUniformMatrix4fv)(skinLocations->clusterMatrices, state.clusterMatrices.size(), false,
(const float*)state.clusterMatrices.constData());
int offset = (mesh.tangents.size() + mesh.colors.size()) * sizeof(glm::vec3) +
mesh.texCoords.size() * sizeof(glm::vec2) +
(mesh.blendshapes.isEmpty() ? vertexCount * 2 * sizeof(glm::vec3) : 0);
skinProgram->setAttributeBuffer(skinLocations->clusterIndices, GL_FLOAT, offset, 4);
skinProgram->setAttributeBuffer(skinLocations->clusterWeights, GL_FLOAT,
offset + vertexCount * sizeof(glm::vec4), 4);
skinProgram->enableAttributeArray(skinLocations->clusterIndices);
skinProgram->enableAttributeArray(skinLocations->clusterWeights);
//skinProgram->setAttributeBuffer(skinLocations->clusterIndices, GL_FLOAT, offset, 4);
GLBATCH(glVertexAttribPointer)(skinLocations->clusterIndices, 4, GL_FLOAT, GL_TRUE, 0, (const void*) offset);
//skinProgram->setAttributeBuffer(skinLocations->clusterWeights, GL_FLOAT,
// offset + vertexCount * sizeof(glm::vec4), 4);
GLBATCH(glVertexAttribPointer)(skinLocations->clusterWeights, 4, GL_FLOAT, GL_TRUE, 0, (const void*) (offset + vertexCount * sizeof(glm::vec4)));
//skinProgram->enableAttributeArray(skinLocations->clusterIndices);
GLBATCH(glEnableVertexAttribArray)(skinLocations->clusterIndices);
//skinProgram->enableAttributeArray(skinLocations->clusterWeights);
GLBATCH(glEnableVertexAttribArray)(skinLocations->clusterWeights);
} else {
glMultMatrixf((const GLfloat*)&state.clusterMatrices[0]);
GLBATCH(glMultMatrixf)((const GLfloat*)&state.clusterMatrices[0]);
}
if (mesh.blendshapes.isEmpty()) {
if (!(mesh.tangents.isEmpty() || mode == SHADOW_RENDER_MODE)) {
activeProgram->setAttributeBuffer(activeLocations->tangent, GL_FLOAT, vertexCount * 2 * sizeof(glm::vec3), 3);
activeProgram->enableAttributeArray(activeLocations->tangent);
//activeProgram->setAttributeBuffer(activeLocations->tangent, GL_FLOAT, vertexCount * 2 * sizeof(glm::vec3), 3);
GLBATCH(glVertexAttribPointer)(activeLocations->tangent, 3, GL_FLOAT, GL_TRUE, 0, (const void*)(vertexCount * 2 * sizeof(glm::vec3)));
//activeProgram->enableAttributeArray(activeLocations->tangent);
GLBATCH(glEnableVertexAttribArray)(activeLocations->tangent);
}
glColorPointer(3, GL_FLOAT, 0, (void*)(vertexCount * 2 * sizeof(glm::vec3) +
GLBATCH(glColorPointer)(3, GL_FLOAT, 0, (void*)(vertexCount * 2 * sizeof(glm::vec3) +
mesh.tangents.size() * sizeof(glm::vec3)));
glTexCoordPointer(2, GL_FLOAT, 0, (void*)(vertexCount * 2 * sizeof(glm::vec3) +
GLBATCH(glTexCoordPointer)(2, GL_FLOAT, 0, (void*)(vertexCount * 2 * sizeof(glm::vec3) +
(mesh.tangents.size() + mesh.colors.size()) * sizeof(glm::vec3)));
} else {
if (!(mesh.tangents.isEmpty() || mode == SHADOW_RENDER_MODE)) {
activeProgram->setAttributeBuffer(activeLocations->tangent, GL_FLOAT, 0, 3);
activeProgram->enableAttributeArray(activeLocations->tangent);
//activeProgram->setAttributeBuffer(activeLocations->tangent, GL_FLOAT, 0, 3);
GLBATCH(glVertexAttribPointer)(activeLocations->tangent, 3, GL_FLOAT, GL_TRUE, 0, 0);
//activeProgram->enableAttributeArray(activeLocations->tangent);
GLBATCH(glEnableVertexAttribArray)(activeLocations->tangent);
}
glColorPointer(3, GL_FLOAT, 0, (void*)(mesh.tangents.size() * sizeof(glm::vec3)));
glTexCoordPointer(2, GL_FLOAT, 0, (void*)((mesh.tangents.size() + mesh.colors.size()) * sizeof(glm::vec3)));
_blendedVertexBuffers[i].bind();
GLBATCH(glColorPointer)(3, GL_FLOAT, 0, (void*)(mesh.tangents.size() * sizeof(glm::vec3)));
GLBATCH(glTexCoordPointer)(2, GL_FLOAT, 0, (void*)((mesh.tangents.size() + mesh.colors.size()) * sizeof(glm::vec3)));
// _blendedVertexBuffers[i].bind();
GLBATCH(glBindBuffer)(GL_ARRAY_BUFFER, _blendedVertexBuffers[i].bufferId());
}
glVertexPointer(3, GL_FLOAT, 0, 0);
glNormalPointer(GL_FLOAT, 0, (void*)(vertexCount * sizeof(glm::vec3)));
GLBATCH(glVertexPointer)(3, GL_FLOAT, 0, 0);
GLBATCH(glNormalPointer)(GL_FLOAT, 0, (void*)(vertexCount * sizeof(glm::vec3)));
if (!mesh.colors.isEmpty()) {
glEnableClientState(GL_COLOR_ARRAY);
GLBATCH(glEnableClientState)(GL_COLOR_ARRAY);
} else {
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
GLBATCH(glColor4f)(1.0f, 1.0f, 1.0f, 1.0f);
}
if (!mesh.texCoords.isEmpty()) {
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
GLBATCH(glEnableClientState)(GL_TEXTURE_COORD_ARRAY);
}
qint64 offset = 0;
@ -1726,9 +1793,10 @@ int Model::renderMeshes(RenderMode mode, bool translucent, float alphaThreshold,
offset += (part.quadIndices.size() + part.triangleIndices.size()) * sizeof(int);
continue;
}
// apply material properties
if (mode == SHADOW_RENDER_MODE) {
glBindTexture(GL_TEXTURE_2D, 0);
GLBATCH(glBindTexture)(GL_TEXTURE_2D, 0);
} else {
if (dontReduceMaterialSwitches || lastMaterialID != part.materialID) {
@ -1741,36 +1809,36 @@ int Model::renderMeshes(RenderMode mode, bool translucent, float alphaThreshold,
glm::vec4 diffuse = glm::vec4(part.diffuseColor, part.opacity);
if (!(translucent && alphaThreshold == 0.0f)) {
glAlphaFunc(GL_EQUAL, diffuse.a = Application::getInstance()->getGlowEffect()->getIntensity());
GLBATCH(glAlphaFunc)(GL_EQUAL, diffuse.a = Application::getInstance()->getGlowEffect()->getIntensity());
}
glm::vec4 specular = glm::vec4(part.specularColor, 1.0f);
glMaterialfv(GL_FRONT, GL_AMBIENT, (const float*)&diffuse);
glMaterialfv(GL_FRONT, GL_DIFFUSE, (const float*)&diffuse);
glMaterialfv(GL_FRONT, GL_SPECULAR, (const float*)&specular);
glMaterialf(GL_FRONT, GL_SHININESS, part.shininess);
GLBATCH(glMaterialfv)(GL_FRONT, GL_AMBIENT, (const float*)&diffuse);
GLBATCH(glMaterialfv)(GL_FRONT, GL_DIFFUSE, (const float*)&diffuse);
GLBATCH(glMaterialfv)(GL_FRONT, GL_SPECULAR, (const float*)&specular);
GLBATCH(glMaterialf)(GL_FRONT, GL_SHININESS, (part.shininess > 128.f ? 128.f: part.shininess));
Texture* diffuseMap = networkPart.diffuseTexture.data();
if (mesh.isEye && diffuseMap) {
diffuseMap = (_dilatedTextures[i][j] =
static_cast<DilatableNetworkTexture*>(diffuseMap)->getDilatedTexture(_pupilDilation)).data();
}
glBindTexture(GL_TEXTURE_2D, !diffuseMap ?
GLBATCH(glBindTexture)(GL_TEXTURE_2D, !diffuseMap ?
Application::getInstance()->getTextureCache()->getWhiteTextureID() : diffuseMap->getID());
if (!mesh.tangents.isEmpty()) {
glActiveTexture(GL_TEXTURE1);
GLBATCH(glActiveTexture)(GL_TEXTURE1);
Texture* normalMap = networkPart.normalTexture.data();
glBindTexture(GL_TEXTURE_2D, !normalMap ?
GLBATCH(glBindTexture)(GL_TEXTURE_2D, !normalMap ?
Application::getInstance()->getTextureCache()->getBlueTextureID() : normalMap->getID());
glActiveTexture(GL_TEXTURE0);
GLBATCH(glActiveTexture)(GL_TEXTURE0);
}
if (specularTextureUnit) {
glActiveTexture(specularTextureUnit);
GLBATCH(glActiveTexture)(specularTextureUnit);
Texture* specularMap = networkPart.specularTexture.data();
glBindTexture(GL_TEXTURE_2D, !specularMap ?
GLBATCH(glBindTexture)(GL_TEXTURE_2D, !specularMap ?
Application::getInstance()->getTextureCache()->getWhiteTextureID() : specularMap->getID());
glActiveTexture(GL_TEXTURE0);
GLBATCH(glActiveTexture)(GL_TEXTURE0);
}
if (args) {
args->_materialSwitches++;
@ -1783,12 +1851,12 @@ int Model::renderMeshes(RenderMode mode, bool translucent, float alphaThreshold,
meshPartsRendered++;
if (part.quadIndices.size() > 0) {
glDrawRangeElementsEXT(GL_QUADS, 0, vertexCount - 1, part.quadIndices.size(), GL_UNSIGNED_INT, (void*)offset);
GLBATCH(glDrawRangeElements)(GL_QUADS, 0, vertexCount - 1, part.quadIndices.size(), GL_UNSIGNED_INT, (void*)offset);
offset += part.quadIndices.size() * sizeof(int);
}
if (part.triangleIndices.size() > 0) {
glDrawRangeElementsEXT(GL_TRIANGLES, 0, vertexCount - 1, part.triangleIndices.size(),
GLBATCH(glDrawRangeElements)(GL_TRIANGLES, 0, vertexCount - 1, part.triangleIndices.size(),
GL_UNSIGNED_INT, (void*)offset);
offset += part.triangleIndices.size() * sizeof(int);
}
@ -1802,35 +1870,39 @@ int Model::renderMeshes(RenderMode mode, bool translucent, float alphaThreshold,
}
if (!mesh.colors.isEmpty()) {
glDisableClientState(GL_COLOR_ARRAY);
GLBATCH(glDisableClientState)(GL_COLOR_ARRAY);
}
if (!mesh.texCoords.isEmpty()) {
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
GLBATCH(glDisableClientState)(GL_TEXTURE_COORD_ARRAY);
}
if (!(mesh.tangents.isEmpty() || mode == SHADOW_RENDER_MODE)) {
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, 0);
glActiveTexture(GL_TEXTURE0);
GLBATCH(glActiveTexture)(GL_TEXTURE1);
GLBATCH(glBindTexture)(GL_TEXTURE_2D, 0);
GLBATCH(glActiveTexture)(GL_TEXTURE0);
activeProgram->disableAttributeArray(activeLocations->tangent);
// activeProgram->disableAttributeArray(activeLocations->tangent);
GLBATCH(glDisableVertexAttribArray)(activeLocations->tangent);
}
if (specularTextureUnit) {
glActiveTexture(specularTextureUnit);
glBindTexture(GL_TEXTURE_2D, 0);
glActiveTexture(GL_TEXTURE0);
GLBATCH(glActiveTexture)(specularTextureUnit);
GLBATCH(glBindTexture)(GL_TEXTURE_2D, 0);
GLBATCH(glActiveTexture)(GL_TEXTURE0);
}
if (state.clusterMatrices.size() > 1) {
skinProgram->disableAttributeArray(skinLocations->clusterIndices);
skinProgram->disableAttributeArray(skinLocations->clusterWeights);
// skinProgram->disableAttributeArray(skinLocations->clusterIndices);
GLBATCH(glDisableVertexAttribArray)(skinLocations->clusterIndices);
// skinProgram->disableAttributeArray(skinLocations->clusterWeights);
GLBATCH(glDisableVertexAttribArray)(skinLocations->clusterWeights);
}
glPopMatrix();
GLBATCH(glPopMatrix)();
}
activeProgram->release();
//activeProgram->release();
GLBATCH(glUseProgram)(0);
return meshPartsRendered;
}

View file

@ -36,6 +36,10 @@ class ViewFrustum;
typedef QSharedPointer<AnimationHandle> AnimationHandlePointer;
typedef QWeakPointer<AnimationHandle> WeakAnimationHandlePointer;
namespace gpu {
class Batch;
}
/// A generic 3D model displaying geometry loaded from a URL.
class Model : public QObject, public PhysicsEntity {
Q_OBJECT
@ -183,6 +187,9 @@ public:
void inverseKinematics(int jointIndex, glm::vec3 position, const glm::quat& rotation, float priority);
Q_INVOKABLE void setTextureWithNameToURL(const QString& name, const QUrl& url)
{ _geometry->setTextureWithNameToURL(name, url); }
protected:
QSharedPointer<NetworkGeometry> _geometry;
@ -252,7 +259,7 @@ private:
void applyNextGeometry();
void deleteGeometry();
int renderMeshes(RenderMode mode, bool translucent, float alphaThreshold, bool hasTangents, bool hasSpecular, bool isSkinned, RenderArgs* args = NULL);
int renderMeshes(gpu::Batch& batch, RenderMode mode, bool translucent, float alphaThreshold, bool hasTangents, bool hasSpecular, bool isSkinned, RenderArgs* args = NULL);
QVector<JointState> createJointStates(const FBXGeometry& geometry);
void initJointTransforms();

View file

@ -156,7 +156,6 @@ protected:
virtual void imageLoaded(const QImage& image);
private:
TextureType _type;
bool _translucent;
QColor _averageColor;

View file

@ -70,3 +70,11 @@ float AudioDeviceScriptingInterface::getInputVolume() {
void AudioDeviceScriptingInterface::setInputVolume(float volume) {
Application::getInstance()->getAudio()->setInputVolume(volume);
}
void AudioDeviceScriptingInterface::setReverb(bool reverb) {
Application::getInstance()->getAudio()->setReverb(reverb);
}
void AudioDeviceScriptingInterface::setReverbOptions(const AudioEffectOptions* options) {
Application::getInstance()->getAudio()->setReverbOptions(options);
}

View file

@ -39,6 +39,8 @@ public slots:
float getInputVolume();
void setInputVolume(float volume);
void setReverb(bool reverb);
void setReverbOptions(const AudioEffectOptions* options);
};
#endif // hifi_AudioDeviceScriptingInterface_h

View file

@ -10,18 +10,33 @@
//
#include <HandData.h>
#include <HFBackEvent.h>
#include "Application.h"
#include "devices/MotionTracker.h"
#include "devices/SixenseManager.h"
#include "ControllerScriptingInterface.h"
#include "devices/MotionTracker.h"
ControllerScriptingInterface::ControllerScriptingInterface() :
_mouseCaptured(false),
_touchCaptured(false),
_wheelCaptured(false)
{
}
void ControllerScriptingInterface::handleMetaEvent(HFMetaEvent* event) {
if (event->type() == HFActionEvent::startType()) {
emit actionStartEvent(static_cast<HFActionEvent&>(*event));
} else if (event->type() == HFActionEvent::endType()) {
emit actionEndEvent(static_cast<HFActionEvent&>(*event));
} else if (event->type() == HFBackEvent::startType()) {
emit backStartEvent();
} else if (event->type() == HFBackEvent::endType()) {
emit backEndEvent();
}
}
const PalmData* ControllerScriptingInterface::getPrimaryPalm() const {
int leftPalmIndex, rightPalmIndex;
@ -213,10 +228,7 @@ bool ControllerScriptingInterface::isKeyCaptured(QKeyEvent* event) const {
bool ControllerScriptingInterface::isKeyCaptured(const KeyEvent& event) const {
// if we've captured some combination of this key it will be in the map
if (_capturedKeys.contains(event.key, event)) {
return true;
}
return false;
return _capturedKeys.contains(event.key, event);
}
void ControllerScriptingInterface::captureKeyEvents(const KeyEvent& event) {

View file

@ -46,7 +46,7 @@ private:
signals:
};
/// handles scripting of input controller commands from JS
class ControllerScriptingInterface : public AbstractControllerScriptingInterface {
@ -56,6 +56,8 @@ public:
ControllerScriptingInterface();
void emitKeyPressEvent(QKeyEvent* event) { emit keyPressEvent(KeyEvent(*event)); }
void emitKeyReleaseEvent(QKeyEvent* event) { emit keyReleaseEvent(KeyEvent(*event)); }
void handleMetaEvent(HFMetaEvent* event);
void emitMouseMoveEvent(QMouseEvent* event, unsigned int deviceID = 0) { emit mouseMoveEvent(MouseEvent(*event, deviceID)); }
void emitMousePressEvent(QMouseEvent* event, unsigned int deviceID = 0) { emit mousePressEvent(MouseEvent(*event, deviceID)); }

View file

@ -9,6 +9,7 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <qapplication.h>
#include <QtDebug>
#include <QScriptValue>
@ -17,8 +18,12 @@
#undef main
#endif
#include <HFActionEvent.h>
#include <HFBackEvent.h>
#include <PerfStat.h>
#include "Application.h"
#include "JoystickScriptingInterface.h"
#ifdef HAVE_SDL2
@ -108,6 +113,28 @@ void JoystickScriptingInterface::update() {
if (joystick) {
joystick->handleButtonEvent(event.cbutton);
}
if (event.cbutton.button == SDL_CONTROLLER_BUTTON_BACK) {
// this will either start or stop a global back event
QEvent::Type backType = (event.type == SDL_CONTROLLERBUTTONDOWN)
? HFBackEvent::startType()
: HFBackEvent::endType();
HFBackEvent backEvent(backType);
qApp->sendEvent(qApp, &backEvent);
} else if (event.cbutton.button == SDL_CONTROLLER_BUTTON_A) {
// this will either start or stop a global action event
QEvent::Type actionType = (event.type == SDL_CONTROLLERBUTTONDOWN)
? HFActionEvent::startType()
: HFActionEvent::endType();
// global action events fire in the center of the screen
QPointF centerPoint = Application::getInstance()->getViewportCenter();
HFActionEvent actionEvent(actionType, centerPoint);
qApp->sendEvent(qApp, &actionEvent);
}
} else if (event.type == SDL_CONTROLLERDEVICEADDED) {
SDL_GameController* controller = SDL_GameControllerOpen(event.cdevice.which);

View file

@ -9,6 +9,9 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <qnetworkrequest.h>
#include <AddressManager.h>
#include <OAuthNetworkAccessManager.h>
#include "DataWebPage.h"
@ -19,13 +22,22 @@ DataWebPage::DataWebPage(QObject* parent) :
// use an OAuthNetworkAccessManager instead of regular QNetworkAccessManager so our requests are authed
setNetworkAccessManager(OAuthNetworkAccessManager::getInstance());
// have the page delegate external links so they can be captured by the Application in case they are a hifi link
setLinkDelegationPolicy(QWebPage::DelegateExternalLinks);
// give the page an empty stylesheet
settings()->setUserStyleSheetUrl(QUrl());
}
void DataWebPage::javaScriptConsoleMessage(const QString& message, int lineNumber, const QString& sourceID) {
qDebug() << "JS console message at line" << lineNumber << "from" << sourceID << "-" << message;
}
bool DataWebPage::acceptNavigationRequest(QWebFrame* frame, const QNetworkRequest& request, QWebPage::NavigationType type) {
if (!request.url().toString().startsWith(HIFI_URL_SCHEME)) {
return true;
} else {
// this is a hifi URL - have the AddressManager handle it
QMetaObject::invokeMethod(&AddressManager::getInstance(), "handleLookupString",
Qt::AutoConnection, Q_ARG(const QString&, request.url().toString()));
return false;
}
}

View file

@ -19,6 +19,7 @@ public:
DataWebPage(QObject* parent = 0);
protected:
void javaScriptConsoleMessage(const QString & message, int lineNumber, const QString & sourceID);
bool acceptNavigationRequest(QWebFrame* frame, const QNetworkRequest& request, QWebPage::NavigationType type);
};
#endif // hifi_DataWebPage_h

View file

@ -26,7 +26,7 @@
const char* MODEL_TYPE_NAMES[] = { "entities", "heads", "skeletons", "attachments" };
static const QString S3_URL = "https://s3.amazonaws.com/hifi-public";
static const QString S3_URL = "http://s3.amazonaws.com/hifi-public";
static const QString PUBLIC_URL = "http://public.highfidelity.io";
static const QString MODELS_LOCATION = "models/";

View file

@ -147,6 +147,8 @@ void PreferencesDialog::loadPreferences() {
ui.maxVoxelsPPSSpin->setValue(menuInstance->getMaxVoxelPacketsPerSecond());
ui.oculusUIAngularSizeSpin->setValue(menuInstance->getOculusUIAngularSize());
ui.oculusUIMaxFPSSpin->setValue(menuInstance->getOculusUIMaxFPS());
ui.sixenseReticleMoveSpeedSpin->setValue(menuInstance->getSixenseReticleMoveSpeed());
@ -229,6 +231,8 @@ void PreferencesDialog::savePreferences() {
Menu::getInstance()->setMaxVoxelPacketsPerSecond(ui.maxVoxelsPPSSpin->value());
Menu::getInstance()->setOculusUIAngularSize(ui.oculusUIAngularSizeSpin->value());
Menu::getInstance()->setOculusUIMaxFPS(ui.oculusUIMaxFPSSpin->value());
Menu::getInstance()->setSixenseReticleMoveSpeed(ui.sixenseReticleMoveSpeedSpin->value());

View file

@ -102,6 +102,20 @@ void ModelOverlay::setProperties(const QScriptValue &properties) {
}
_updateModel = true;
}
QScriptValue texturesValue = properties.property("textures");
if (texturesValue.isValid()) {
QVariantMap textureMap = texturesValue.toVariant().toMap();
foreach(const QString& key, textureMap.keys()) {
QUrl newTextureURL = textureMap[key].toUrl();
qDebug() << "Updating texture named" << key << "to texture at URL" << newTextureURL;
QMetaObject::invokeMethod(&_model, "setTextureWithNameToURL", Qt::AutoConnection,
Q_ARG(const QString&, key),
Q_ARG(const QUrl&, newTextureURL));
}
}
if (properties.property("position").isValid()) {
_updateModel = true;

View file

@ -32,6 +32,7 @@ public:
float getTopMargin() const { return _topMargin; }
float getRightMargin() const { return _rightMargin; }
float getBottomMargin() const { return _bottomMargin; }
bool getIsFacingAvatar() const { return _isFacingAvatar; }
xColor getBackgroundColor();
// setters
@ -41,6 +42,7 @@ public:
void setTopMargin(float margin) { _topMargin = margin; }
void setRightMargin(float margin) { _rightMargin = margin; }
void setBottomMargin(float margin) { _bottomMargin = margin; }
void setIsFacingAvatar(bool isFacingAvatar) { _isFacingAvatar = isFacingAvatar; }
virtual void setProperties(const QScriptValue& properties);

View file

@ -61,7 +61,7 @@
<x>0</x>
<y>0</y>
<width>500</width>
<height>1386</height>
<height>1459</height>
</rect>
</property>
<layout class="QVBoxLayout" name="verticalLayout_2">
@ -1772,24 +1772,24 @@
</item>
<item>
<widget class="QLineEdit" name="faceshiftHostnameEdit">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="layoutDirection">
<enum>Qt::LeftToRight</enum>
</property>
<property name="styleSheet">
<string notr="true"/>
</property>
<property name="placeholderText">
<string>localhost</string>
</property>
</widget>
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="layoutDirection">
<enum>Qt::LeftToRight</enum>
</property>
<property name="styleSheet">
<string notr="true"/>
</property>
<property name="placeholderText">
<string>localhost</string>
</property>
</widget>
</item>
</layout>
</item>
</item>
<item>
<widget class="QLabel" name="voxelsTitleLabel">
<property name="sizePolicy">
@ -2084,6 +2084,85 @@
</item>
</layout>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_17">
<property name="spacing">
<number>0</number>
</property>
<property name="topMargin">
<number>7</number>
</property>
<property name="rightMargin">
<number>0</number>
</property>
<property name="bottomMargin">
<number>7</number>
</property>
<item>
<widget class="QLabel" name="label_15">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="text">
<string>Oculus Rift FPS</string>
</property>
<property name="indent">
<number>0</number>
</property>
<property name="buddy">
<cstring>maxVoxelsSpin</cstring>
</property>
</widget>
</item>
<item>
<spacer name="horizontalSpacer_18">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
<item>
<widget class="QSpinBox" name="oculusUIMaxFPSSpin">
<property name="minimumSize">
<size>
<width>100</width>
<height>0</height>
</size>
</property>
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="minimum">
<number>30</number>
</property>
<property name="maximum">
<number>95</number>
</property>
<property name="singleStep">
<number>1</number>
</property>
<property name="value">
<number>75</number>
</property>
</widget>
</item>
</layout>
</item>
<item>
<widget class="QLabel" name="sixenseControllersTitleLabel">
<property name="sizePolicy">

View file

@ -0,0 +1,88 @@
//
// AudioEffectOptions.cpp
// libraries/audio/src
//
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "AudioEffectOptions.h"
static const QString MAX_ROOM_SIZE_HANDLE = "maxRoomSize";
static const QString ROOM_SIZE_HANDLE = "roomSize";
static const QString REVERB_TIME_HANDLE = "reverbTime";
static const QString DAMPING_HANDLE = "damping";
static const QString SPREAD_HANDLE = "spread";
static const QString INPUT_BANDWIDTH_HANDLE = "inputBandwidth";
static const QString EARLY_LEVEL_HANDLE = "earlyLevel";
static const QString TAIL_LEVEL_HANDLE = "tailLevel";
static const QString DRY_LEVEL_HANDLE = "dryLevel";
static const QString WET_LEVEL_HANDLE = "wetLevel";
AudioEffectOptions::AudioEffectOptions(QScriptValue arguments) :
_maxRoomSize(50.0f),
_roomSize(50.0f),
_reverbTime(4.0f),
_damping(0.5f),
_spread(15.0f),
_inputBandwidth(0.75f),
_earlyLevel(-22.0f),
_tailLevel(-28.0f),
_dryLevel(0.0f),
_wetLevel(6.0f) {
if (arguments.property(MAX_ROOM_SIZE_HANDLE).isNumber()) {
_maxRoomSize = arguments.property(MAX_ROOM_SIZE_HANDLE).toNumber();
}
if (arguments.property(ROOM_SIZE_HANDLE).isNumber()) {
_roomSize = arguments.property(ROOM_SIZE_HANDLE).toNumber();
}
if (arguments.property(REVERB_TIME_HANDLE).isNumber()) {
_reverbTime = arguments.property(REVERB_TIME_HANDLE).toNumber();
}
    if (arguments.property(DAMPING_HANDLE).isNumber()) {
        _damping = arguments.property(DAMPING_HANDLE).toNumber();
}
if (arguments.property(SPREAD_HANDLE).isNumber()) {
_spread = arguments.property(SPREAD_HANDLE).toNumber();
}
if (arguments.property(INPUT_BANDWIDTH_HANDLE).isNumber()) {
_inputBandwidth = arguments.property(INPUT_BANDWIDTH_HANDLE).toNumber();
}
if (arguments.property(EARLY_LEVEL_HANDLE).isNumber()) {
_earlyLevel = arguments.property(EARLY_LEVEL_HANDLE).toNumber();
}
if (arguments.property(TAIL_LEVEL_HANDLE).isNumber()) {
_tailLevel = arguments.property(TAIL_LEVEL_HANDLE).toNumber();
}
if (arguments.property(DRY_LEVEL_HANDLE).isNumber()) {
_dryLevel = arguments.property(DRY_LEVEL_HANDLE).toNumber();
}
if (arguments.property(WET_LEVEL_HANDLE).isNumber()) {
_wetLevel = arguments.property(WET_LEVEL_HANDLE).toNumber();
}
}
AudioEffectOptions::AudioEffectOptions(const AudioEffectOptions &other) {
*this = other;
}
AudioEffectOptions& AudioEffectOptions::operator=(const AudioEffectOptions &other) {
_maxRoomSize = other._maxRoomSize;
_roomSize = other._roomSize;
_reverbTime = other._reverbTime;
_damping = other._damping;
_spread = other._spread;
_inputBandwidth = other._inputBandwidth;
_earlyLevel = other._earlyLevel;
_tailLevel = other._tailLevel;
_dryLevel = other._dryLevel;
_wetLevel = other._wetLevel;
return *this;
}
QScriptValue AudioEffectOptions::constructor(QScriptContext* context, QScriptEngine* engine) {
return engine->newQObject(new AudioEffectOptions(context->argument(0)));
}
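A minimal sketch (editorial; the engine instance and the global name "AudioEffectOptions" are assumptions, not taken from the diff) of how the static constructor above could be exposed to a QScriptEngine:
QScriptEngine engine;
QScriptValue ctor = engine.newFunction(AudioEffectOptions::constructor);
engine.globalObject().setProperty("AudioEffectOptions", ctor);
// a script could then build options from a plain object, e.g.
//   var options = new AudioEffectOptions({ reverbTime: 4.0, wetLevel: 6.0 });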

View file

@ -0,0 +1,106 @@
//
// AudioEffectOptions.h
// libraries/audio/src
//
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AudioEffectOptions_h
#define hifi_AudioEffectOptions_h
#include <QObject>
#include <QtScript/QScriptContext>
#include <QtScript/QScriptEngine>
class AudioEffectOptions : public QObject {
Q_OBJECT
// Meters Square
Q_PROPERTY(float maxRoomSize READ getMaxRoomSize WRITE setMaxRoomSize)
Q_PROPERTY(float roomSize READ getRoomSize WRITE setRoomSize)
// Seconds
Q_PROPERTY(float reverbTime READ getReverbTime WRITE setReverbTime)
// Ratio between 0 and 1
Q_PROPERTY(float damping READ getDamping WRITE setDamping)
// (?) Does not appear to be set externally very often
Q_PROPERTY(float spread READ getSpread WRITE setSpread)
// Ratio between 0 and 1
Q_PROPERTY(float inputBandwidth READ getInputBandwidth WRITE setInputBandwidth)
// in dB
Q_PROPERTY(float earlyLevel READ getEarlyLevel WRITE setEarlyLevel)
Q_PROPERTY(float tailLevel READ getTailLevel WRITE setTailLevel)
Q_PROPERTY(float dryLevel READ getDryLevel WRITE setDryLevel)
Q_PROPERTY(float wetLevel READ getWetLevel WRITE setWetLevel)
public:
AudioEffectOptions(QScriptValue arguments = QScriptValue());
AudioEffectOptions(const AudioEffectOptions &other);
AudioEffectOptions& operator=(const AudioEffectOptions &other);
static QScriptValue constructor(QScriptContext* context, QScriptEngine* engine);
float getRoomSize() const { return _roomSize; }
void setRoomSize(float roomSize ) { _roomSize = roomSize; }
float getMaxRoomSize() const { return _maxRoomSize; }
void setMaxRoomSize(float maxRoomSize ) { _maxRoomSize = maxRoomSize; }
float getReverbTime() const { return _reverbTime; }
void setReverbTime(float reverbTime ) { _reverbTime = reverbTime; }
float getDamping() const { return _damping; }
void setDamping(float damping ) { _damping = damping; }
float getSpread() const { return _spread; }
void setSpread(float spread ) { _spread = spread; }
float getInputBandwidth() const { return _inputBandwidth; }
void setInputBandwidth(float inputBandwidth ) { _inputBandwidth = inputBandwidth; }
float getEarlyLevel() const { return _earlyLevel; }
void setEarlyLevel(float earlyLevel ) { _earlyLevel = earlyLevel; }
float getTailLevel() const { return _tailLevel; }
void setTailLevel(float tailLevel ) { _tailLevel = tailLevel; }
float getDryLevel() const { return _dryLevel; }
void setDryLevel(float dryLevel) { _dryLevel = dryLevel; }
float getWetLevel() const { return _wetLevel; }
void setWetLevel(float wetLevel) { _wetLevel = wetLevel; }
private:
// http://wiki.audacityteam.org/wiki/GVerb#Instant_Reverberb_settings
// Meters Square
float _maxRoomSize;
float _roomSize;
// Seconds
float _reverbTime;
// Ratio between 0 and 1
float _damping;
// ? (Does not appear to be set externally very often)
float _spread;
// Ratio between 0 and 1
float _inputBandwidth;
// dB
float _earlyLevel;
float _tailLevel;
float _dryLevel;
float _wetLevel;
};
#endif // hifi_AudioEffectOptions_h

View file

@ -44,7 +44,8 @@ InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacit
_framesAvailableStat(),
_currentJitterBufferFrames(0),
_timeGapStatsForStatsPacket(0, STATS_FOR_STATS_PACKET_WINDOW_SECONDS),
_repetitionWithFade(settings._repetitionWithFade)
_repetitionWithFade(settings._repetitionWithFade),
_hasReverb(false)
{
}
@ -162,9 +163,22 @@ int InboundAudioStream::parseData(const QByteArray& packet) {
}
int InboundAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) {
int read = 0;
if (type == PacketTypeMixedAudio) {
memcpy(&_hasReverb, packetAfterSeqNum.data() + read, sizeof(bool));
read += sizeof(bool);
if (_hasReverb) {
memcpy(&_reverbTime, packetAfterSeqNum.data() + read, sizeof(float));
read += sizeof(float);
memcpy(&_wetLevel, packetAfterSeqNum.data() + read, sizeof(float));
read += sizeof(float);
}
}
    // apart from the optional reverb info read above, mixed audio packets have no other info between the seq num and the audio data.
numAudioSamples = packetAfterSeqNum.size() / sizeof(int16_t);
return 0;
numAudioSamples = (packetAfterSeqNum.size() - read) / sizeof(int16_t);
return read;
}
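For reference (editorial addition), the reader above implies the following head-of-payload layout for PacketTypeMixedAudio, and a hypothetical consumer of the parsed values might look like the sketch below; the stream and options variables are illustrative:
// layout immediately after the sequence number, as consumed by parseStreamProperties():
//   [bool hasReverb]                      always present on mixed-audio packets
//   [float reverbTime][float wetLevel]    only when hasReverb is true
//   [int16_t samples ...]                 the remaining bytes are the mixed audio samples
if (stream.hasReverb()) {                             // 'stream' is an InboundAudioStream, name illustrative
    options.setReverbTime(stream.getReverbTime());    // 'options' assumed to be an AudioEffectOptions
    options.setWetLevel(stream.getWetLevel());
}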
int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) {

View file

@ -154,6 +154,10 @@ public:
int getOverflowCount() const { return _ringBuffer.getOverflowCount(); }
int getPacketsReceived() const { return _incomingSequenceNumberStats.getReceived(); }
bool hasReverb() const { return _hasReverb; }
    float getReverbTime() const { return _reverbTime; }
float getWetLevel() const { return _wetLevel; }
public slots:
/// This function should be called every second for all the stats to function properly. If dynamic jitter buffers
@ -243,6 +247,11 @@ protected:
MovingMinMaxAvg<quint64> _timeGapStatsForStatsPacket;
bool _repetitionWithFade;
// Reverb properties
bool _hasReverb;
float _reverbTime;
float _wetLevel;
};
float calculateRepeatedFrameFadeFactor(int indexOfRepeat);

View file

@ -67,7 +67,7 @@ AvatarData::~AvatarData() {
delete _referential;
}
const glm::vec3& AvatarData::getPosition() {
const glm::vec3& AvatarData::getPosition() const {
if (_referential) {
_referential->update();
}

View file

@ -138,15 +138,15 @@ public:
AvatarData();
virtual ~AvatarData();
const QUuid& getSessionUUID() { return _sessionUUID; }
const QUuid& getSessionUUID() const { return _sessionUUID; }
const glm::vec3& getPosition();
const glm::vec3& getPosition() const;
virtual void setPosition(const glm::vec3 position, bool overideReferential = false);
glm::vec3 getHandPosition() const;
void setHandPosition(const glm::vec3& handPosition);
QByteArray toByteArray();
virtual QByteArray toByteArray();
/// \return true if an error should be logged
bool shouldLogError(const quint64& now);
@ -381,6 +381,8 @@ private:
AvatarData& operator= (const AvatarData&);
};
Q_DECLARE_METATYPE(AvatarData*)
class JointData {
public:
bool valid;

View file

@ -21,6 +21,7 @@ AvatarHashMap::AvatarHashMap() :
connect(NodeList::getInstance(), &NodeList::uuidChanged, this, &AvatarHashMap::sessionUUIDChanged);
}
AvatarHash::iterator AvatarHashMap::erase(const AvatarHash::iterator& iterator) {
qDebug() << "Removing Avatar with UUID" << iterator.key() << "from AvatarHashMap.";
return _avatarHash.erase(iterator);
@ -53,29 +54,26 @@ void AvatarHashMap::processAvatarMixerDatagram(const QByteArray& datagram, const
}
bool AvatarHashMap::containsAvatarWithDisplayName(const QString& displayName) {
AvatarHash::iterator avatarIterator = _avatarHash.begin();
while (avatarIterator != _avatarHash.end()) {
AvatarSharedPointer sharedAvatar = avatarIterator.value();
if (avatarIterator.value()->getDisplayName() == displayName) {
    return avatarWithDisplayName(displayName) != NULL;
}
AvatarData* AvatarHashMap::avatarWithDisplayName(const QString& displayName) {
foreach(const AvatarSharedPointer& sharedAvatar, _avatarHash) {
if (sharedAvatar->getDisplayName() == displayName) {
// this is a match
// check if this avatar should still be around
if (!shouldKillAvatar(sharedAvatar)) {
// we have a match, return true
return true;
// we have a match, return the AvatarData
return sharedAvatar.data();
} else {
// we should remove this avatar, do that now
erase(avatarIterator);
// we should remove this avatar, but we might not be on a thread that is allowed
// so we just return NULL to the caller
return NULL;
}
break;
} else {
++avatarIterator;
}
}
// return false, no match
return false;
return NULL;
}
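A short usage sketch (editorial; the avatarHashMap variable is illustrative): the new lookup returns the matching AvatarData, or NULL when there is no live match, so callers must handle both cases:
AvatarData* avatar = avatarHashMap.avatarWithDisplayName("SomeDisplayName");
if (avatar) {
    const glm::vec3& where = avatar->getPosition();
    qDebug() << "Avatar found at" << where.x << where.y << where.z;
} else {
    qDebug() << "No live avatar with that display name";
}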
AvatarSharedPointer AvatarHashMap::newSharedAvatar() {

View file

@ -21,6 +21,7 @@
#include "AvatarData.h"
typedef QSharedPointer<AvatarData> AvatarSharedPointer;
typedef QWeakPointer<AvatarData> AvatarWeakPointer;
typedef QHash<QUuid, AvatarSharedPointer> AvatarHash;
class AvatarHashMap : public QObject {
@ -34,6 +35,7 @@ public:
public slots:
void processAvatarMixerDatagram(const QByteArray& datagram, const QWeakPointer<Node>& mixerWeakPointer);
bool containsAvatarWithDisplayName(const QString& displayName);
    AvatarData* avatarWithDisplayName(const QString& displayName);
private slots:
void sessionUUIDChanged(const QUuid& sessionUUID, const QUuid& oldUUID);

View file

@ -212,7 +212,7 @@ void Player::loadRecording(RecordingPointer recording) {
void Player::play() {
computeCurrentFrame();
if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 1)) {
if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 2)) { // -2 because of interpolation
if (_loop) {
loopRecording();
} else {

View file

@ -984,10 +984,13 @@ public:
QVector<float> values;
};
FBXTexture getTexture(const QString& textureID, const QHash<QString, QByteArray>& textureFilenames,
const QHash<QByteArray, QByteArray>& textureContent) {
FBXTexture getTexture(const QString& textureID,
const QHash<QString, QString>& textureNames,
const QHash<QString, QByteArray>& textureFilenames,
const QHash<QByteArray, QByteArray>& textureContent) {
FBXTexture texture;
texture.filename = textureFilenames.value(textureID);
texture.name = textureNames.value(textureID);
texture.content = textureContent.value(texture.filename);
return texture;
}
@ -1012,6 +1015,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
QHash<QString, FBXModel> models;
QHash<QString, Cluster> clusters;
QHash<QString, AnimationCurve> animationCurves;
QHash<QString, QString> textureNames;
QHash<QString, QByteArray> textureFilenames;
QHash<QByteArray, QByteArray> textureContent;
QHash<QString, Material> materials;
@ -1278,6 +1282,11 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
QByteArray filename = subobject.properties.at(0).toByteArray();
filename = filename.mid(qMax(filename.lastIndexOf('\\'), filename.lastIndexOf('/')) + 1);
textureFilenames.insert(getID(object.properties), filename);
} else if (subobject.name == "TextureName") {
// trim the bracketed timestamp suffix from the name
QString name = QString(subobject.properties.at(0).toByteArray());
name = name.left(name.indexOf('['));
textureNames.insert(getID(object.properties), name);
}
}
} else if (object.name == "Video") {
@ -1612,12 +1621,12 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
FBXTexture diffuseTexture;
QString diffuseTextureID = diffuseTextures.value(childID);
if (!diffuseTextureID.isNull()) {
diffuseTexture = getTexture(diffuseTextureID, textureFilenames, textureContent);
diffuseTexture = getTexture(diffuseTextureID, textureNames, textureFilenames, textureContent);
// FBX files generated by 3DSMax have an intermediate texture parent, apparently
foreach (const QString& childTextureID, childMap.values(diffuseTextureID)) {
if (textureFilenames.contains(childTextureID)) {
diffuseTexture = getTexture(diffuseTextureID, textureFilenames, textureContent);
diffuseTexture = getTexture(diffuseTextureID, textureNames, textureFilenames, textureContent);
}
}
}
@ -1625,14 +1634,14 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
FBXTexture normalTexture;
QString bumpTextureID = bumpTextures.value(childID);
if (!bumpTextureID.isNull()) {
normalTexture = getTexture(bumpTextureID, textureFilenames, textureContent);
normalTexture = getTexture(bumpTextureID, textureNames, textureFilenames, textureContent);
generateTangents = true;
}
FBXTexture specularTexture;
QString specularTextureID = specularTextures.value(childID);
if (!specularTextureID.isNull()) {
specularTexture = getTexture(specularTextureID, textureFilenames, textureContent);
specularTexture = getTexture(specularTextureID, textureNames, textureFilenames, textureContent);
}
for (int j = 0; j < extracted.partMaterialTextures.size(); j++) {
@ -1658,7 +1667,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
materialIndex++;
} else if (textureFilenames.contains(childID)) {
FBXTexture texture = getTexture(childID, textureFilenames, textureContent);
FBXTexture texture = getTexture(childID, textureNames, textureFilenames, textureContent);
for (int j = 0; j < extracted.partMaterialTextures.size(); j++) {
int partTexture = extracted.partMaterialTextures.at(j).second;
if (partTexture == textureIndex && !(partTexture == 0 && materialsHaveTextures)) {

View file

@ -95,7 +95,7 @@ public:
/// A texture map in an FBX document.
class FBXTexture {
public:
QString name;
QByteArray filename;
QByteArray content;
};
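The TextureName handling keeps only the portion of the name before the first '[', stripping the bracketed suffix some exporters append. A standalone sketch of that trimming (the sample string is invented):

#include <QDebug>
#include <QString>

// Sketch of the TextureName trimming above. QString::left() with a negative
// index (no '[' found) returns the whole string, so untimestamped names pass
// through unchanged.
int main() {
    QString raw = "brick_diffuse[2014-10-28 10:50:02]";
    QString name = raw.left(raw.indexOf('['));   // "brick_diffuse"
    qDebug() << name;
    return 0;
}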

View file

@ -301,10 +301,10 @@ void DatagramSequencer::clearReliableChannel(QObject* object) {
void DatagramSequencer::sendRecordAcknowledged(const SendRecord& record) {
// stop acknowledging the recorded packets
while (!_receiveRecords.isEmpty() && _receiveRecords.first().packetNumber <= record.lastReceivedPacketNumber) {
emit receiveAcknowledged(0);
const ReceiveRecord& received = _receiveRecords.first();
_inputStream.persistReadMappings(received.mappings);
_receivedHighPriorityMessages -= received.newHighPriorityMessages;
emit receiveAcknowledged(0);
_receiveRecords.removeFirst();
}
_outputStream.persistWriteMappings(record.mappings);

View file

@ -108,6 +108,9 @@ public:
/// Returns the input channel at the specified index, creating it if necessary.
ReliableChannel* getReliableInputChannel(int index = 0);
/// Returns a reference to the stored receive mappings at the specified index.
const Bitstream::ReadMappings& getReadMappings(int index) const { return _receiveRecords.at(index).mappings; }
/// Adds stats for all reliable channels to the referenced variables.
void addReliableChannelStats(int& sendProgress, int& sendTotal, int& receiveProgress, int& receiveTotal) const;

View file

@ -27,8 +27,10 @@ MetavoxelClientManager::MetavoxelClientManager() :
}
MetavoxelClientManager::~MetavoxelClientManager() {
_updater->thread()->quit();
_updater->thread()->wait();
if (_updater) {
_updater->thread()->quit();
_updater->thread()->wait();
}
}
void MetavoxelClientManager::init() {
@ -188,7 +190,9 @@ MetavoxelClient::MetavoxelClient(const SharedNodePointer& node, MetavoxelUpdater
Endpoint(node, new PacketRecord(), new PacketRecord()),
_updater(updater),
_reliableDeltaChannel(NULL),
_reliableDeltaID(0) {
_reliableDeltaID(0),
_dummyInputStream(_dummyDataStream),
_dummyPacketNumber(0) {
connect(_sequencer.getReliableInputChannel(RELIABLE_DELTA_CHANNEL_INDEX),
SIGNAL(receivedMessage(const QVariant&, Bitstream&)), SLOT(handleMessage(const QVariant&, Bitstream&)));
@ -234,9 +238,9 @@ PacketRecord* MetavoxelClient::getAcknowledgedReceiveRecord(int packetNumber) co
if (lastAcknowledged->getPacketNumber() == packetNumber) {
return lastAcknowledged;
}
foreach (PacketRecord* record, _clearedReceiveRecords) {
if (record->getPacketNumber() == packetNumber) {
return record;
foreach (const ClearedReceiveRecord& record, _clearedReceiveRecords) {
if (record.first->getPacketNumber() == packetNumber) {
return record.first;
}
}
return NULL;
@ -257,8 +261,8 @@ void MetavoxelClient::recordReceive() {
}
_clearedSendRecords.clear();
foreach (PacketRecord* record, _clearedReceiveRecords) {
delete record;
foreach (const ClearedReceiveRecord& record, _clearedReceiveRecords) {
delete record.first;
}
_clearedReceiveRecords.clear();
}
@ -273,10 +277,16 @@ void MetavoxelClient::clearSendRecordsBefore(int index) {
}
void MetavoxelClient::clearReceiveRecordsBefore(int index) {
// copy the mappings on first call per packet
if (_sequencer.getIncomingPacketNumber() > _dummyPacketNumber) {
_dummyPacketNumber = _sequencer.getIncomingPacketNumber();
_dummyInputStream.copyPersistentMappings(_sequencer.getInputStream());
}
// move to cleared list
QList<PacketRecord*>::iterator end = _receiveRecords.begin() + index + 1;
for (QList<PacketRecord*>::const_iterator it = _receiveRecords.begin(); it != end; it++) {
_clearedReceiveRecords.append(*it);
_clearedReceiveRecords.append(ClearedReceiveRecord(*it, _sequencer.getReadMappings(index)));
}
_receiveRecords.erase(_receiveRecords.begin(), end);
}
@ -289,7 +299,6 @@ void MetavoxelClient::writeUpdateMessage(Bitstream& out) {
void MetavoxelClient::handleMessage(const QVariant& message, Bitstream& in) {
int userType = message.userType();
if (userType == MetavoxelDeltaMessage::Type) {
PacketRecord* receiveRecord = getLastAcknowledgedReceiveRecord();
if (_reliableDeltaChannel) {
MetavoxelData reference = _remoteData;
MetavoxelLOD referenceLOD = _remoteDataLOD;
@ -299,6 +308,7 @@ void MetavoxelClient::handleMessage(const QVariant& message, Bitstream& in) {
_reliableDeltaChannel = NULL;
} else {
PacketRecord* receiveRecord = getLastAcknowledgedReceiveRecord();
_remoteData.readDelta(receiveRecord->getData(), receiveRecord->getLOD(), in,
_remoteDataLOD = getLastAcknowledgedSendRecord()->getLOD());
in.reset();
@ -319,8 +329,6 @@ void MetavoxelClient::handleMessage(const QVariant& message, Bitstream& in) {
MetavoxelDeltaPendingMessage pending = message.value<MetavoxelDeltaPendingMessage>();
if (pending.id > _reliableDeltaID) {
_reliableDeltaID = pending.id;
_reliableDeltaChannel = _sequencer.getReliableInputChannel(RELIABLE_DELTA_CHANNEL_INDEX);
_reliableDeltaChannel->getBitstream().copyPersistentMappings(_sequencer.getInputStream());
PacketRecord* sendRecord = getAcknowledgedSendRecord(pending.receivedPacketNumber);
if (!sendRecord) {
qWarning() << "Missing send record for delta" << pending.receivedPacketNumber;
@ -334,6 +342,20 @@ void MetavoxelClient::handleMessage(const QVariant& message, Bitstream& in) {
}
_remoteDataLOD = receiveRecord->getLOD();
_remoteData = receiveRecord->getData();
_reliableDeltaChannel = _sequencer.getReliableInputChannel(RELIABLE_DELTA_CHANNEL_INDEX);
if (receiveRecord == getLastAcknowledgedReceiveRecord()) {
_reliableDeltaChannel->getBitstream().copyPersistentMappings(_sequencer.getInputStream());
} else {
_reliableDeltaChannel->getBitstream().copyPersistentMappings(_dummyInputStream);
foreach (const ClearedReceiveRecord& record, _clearedReceiveRecords) {
_reliableDeltaChannel->getBitstream().persistReadMappings(record.second);
if (record.first == receiveRecord) {
break;
}
}
}
}
} else {
Endpoint::handleMessage(message, in);

View file

@ -145,8 +145,13 @@ protected:
MetavoxelData _dataCopy;
QReadWriteLock _dataCopyLock;
QDataStream _dummyDataStream;
Bitstream _dummyInputStream;
int _dummyPacketNumber;
QList<PacketRecord*> _clearedSendRecords;
QList<PacketRecord*> _clearedReceiveRecords;
typedef QPair<PacketRecord*, Bitstream::ReadMappings> ClearedReceiveRecord;
QList<ClearedReceiveRecord> _clearedReceiveRecords;
};
#endif // hifi_MetavoxelClientManager_h

View file

@ -1040,6 +1040,9 @@ MetavoxelNode* MetavoxelNode::readSubdivision(MetavoxelStreamState& state) {
}
void MetavoxelNode::writeSubdivision(MetavoxelStreamState& state) const {
if (!state.shouldSubdivide()) {
return;
}
bool leaf = isLeaf();
if (!state.shouldSubdivideReference()) {
state.base.stream << leaf;
@ -2486,8 +2489,39 @@ bool Heightfield::intersects(const glm::vec3& start, const glm::vec3& end, float
if (!getBounds().findRayIntersection(start, direction, rayDistance) || rayDistance > 1.0f) {
return false;
}
glm::vec3 entry = (start + direction * rayDistance - getBounds().minimum) / _increment;
direction /= _increment;
glm::vec3 entry = start + direction * rayDistance;
const float DISTANCE_THRESHOLD = 0.001f;
if (glm::abs(entry.x - getBounds().minimum.x) < DISTANCE_THRESHOLD) {
normal = glm::vec3(-1.0f, 0.0f, 0.0f);
distance = rayDistance;
return true;
} else if (glm::abs(entry.x - getBounds().maximum.x) < DISTANCE_THRESHOLD) {
normal = glm::vec3(1.0f, 0.0f, 0.0f);
distance = rayDistance;
return true;
} else if (glm::abs(entry.y - getBounds().minimum.y) < DISTANCE_THRESHOLD) {
normal = glm::vec3(0.0f, -1.0f, 0.0f);
distance = rayDistance;
return true;
} else if (glm::abs(entry.y - getBounds().maximum.y) < DISTANCE_THRESHOLD) {
normal = glm::vec3(0.0f, 1.0f, 0.0f);
distance = rayDistance;
return true;
} else if (glm::abs(entry.z - getBounds().minimum.z) < DISTANCE_THRESHOLD) {
normal = glm::vec3(0.0f, 0.0f, -1.0f);
distance = rayDistance;
return true;
} else if (glm::abs(entry.z - getBounds().maximum.z) < DISTANCE_THRESHOLD) {
normal = glm::vec3(0.0f, 0.0f, 1.0f);
distance = rayDistance;
return true;
}
entry = (entry - getBounds().minimum) / _increment;
glm::vec3 floors = glm::floor(entry);
glm::vec3 ceils = glm::ceil(entry);
if (floors.x == ceils.x) {

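The chain of checks above classifies which face of the bounding box the ray entered by comparing the entry point to each min/max plane within DISTANCE_THRESHOLD. The same idea, factored into an illustrative helper (not code from this commit):

#include <glm/glm.hpp>

// Illustrative helper: returns true and writes the outward face normal if
// `entry` lies within `threshold` of one of the six axis-aligned planes of
// the box [minimum, maximum].
static bool faceNormalAtEntry(const glm::vec3& entry, const glm::vec3& minimum,
                              const glm::vec3& maximum, float threshold,
                              glm::vec3& normal) {
    for (int axis = 0; axis < 3; axis++) {
        if (glm::abs(entry[axis] - minimum[axis]) < threshold) {
            normal = glm::vec3(0.0f);
            normal[axis] = -1.0f;
            return true;
        }
        if (glm::abs(entry[axis] - maximum[axis]) < threshold) {
            normal = glm::vec3(0.0f);
            normal[axis] = 1.0f;
            return true;
        }
    }
    return false;
}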
View file

@ -701,8 +701,7 @@ int VoxelMaterialSpannerEditVisitor::visit(MetavoxelInfo& info) {
int sizeY = (int)overlap.maximum.y - minY + 1;
int sizeZ = (int)overlap.maximum.z - minZ + 1;
QRgb rgb = _color.rgba();
bool flipped = (qAlpha(rgb) == 0);
bool flipped = false;
float step = 1.0f / scale;
glm::vec3 position(0.0f, 0.0f, info.minimum.z + minZ * step);
if (_spanner->hasOwnColors()) {
@ -720,6 +719,8 @@ int VoxelMaterialSpannerEditVisitor::visit(MetavoxelInfo& info) {
}
}
} else {
QRgb rgb = _color.rgba();
flipped = (qAlpha(rgb) == 0);
for (QRgb* destZ = colorContents.data() + minZ * VOXEL_BLOCK_AREA + minY * VOXEL_BLOCK_SAMPLES + minX,
*endZ = destZ + sizeZ * VOXEL_BLOCK_AREA; destZ != endZ; destZ += VOXEL_BLOCK_AREA, position.z += step) {
position.y = info.minimum.y + minY * step;
@ -997,10 +998,13 @@ int HeightfieldClearFetchVisitor::visit(MetavoxelInfo& info) {
_spannerBounds.maximum = (glm::ceil(_bounds.maximum / increment) + glm::vec3(1.0f, 0.0f, 1.0f)) * increment;
_spannerBounds.minimum.y = bounds.minimum.y;
_spannerBounds.maximum.y = bounds.maximum.y;
_heightfieldWidth = (int)glm::round((_spannerBounds.maximum.x - _spannerBounds.minimum.x) / increment) + 1;
_heightfieldHeight = (int)glm::round((_spannerBounds.maximum.z - _spannerBounds.minimum.z) / increment) + 1;
_heightfieldWidth = (int)glm::round((_spannerBounds.maximum.x - _spannerBounds.minimum.x) / increment);
_heightfieldHeight = (int)glm::round((_spannerBounds.maximum.z - _spannerBounds.minimum.z) / increment);
int heightfieldArea = _heightfieldWidth * _heightfieldHeight;
_spanner = spanner = new Heightfield(_spannerBounds, increment, QByteArray(heightfieldArea, 0),
Box innerBounds = _spannerBounds;
innerBounds.maximum.x -= increment;
innerBounds.maximum.z -= increment;
_spanner = spanner = new Heightfield(innerBounds, increment, QByteArray(heightfieldArea, 0),
QByteArray(heightfieldArea * DataBlock::COLOR_BYTES, 0), QByteArray(heightfieldArea, 0),
QVector<SharedObjectPointer>());
}
@ -1048,18 +1052,20 @@ int HeightfieldClearFetchVisitor::visit(MetavoxelInfo& info) {
}
// if all is gone, clear the node
if (!foundNonZero) {
info.outputValues[0] = AttributeValue(_outputs.at(0),
encodeInline<HeightfieldHeightDataPointer>(HeightfieldHeightDataPointer()));
info.outputValues[1] = AttributeValue(_outputs.at(1),
encodeInline<HeightfieldColorDataPointer>(HeightfieldColorDataPointer()));
info.outputValues[2] = AttributeValue(_outputs.at(2),
encodeInline<HeightfieldMaterialDataPointer>(HeightfieldMaterialDataPointer()));
return STOP_RECURSION;
if (foundNonZero) {
HeightfieldHeightDataPointer newHeightPointer(new HeightfieldHeightData(contents));
info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline<HeightfieldHeightDataPointer>(newHeightPointer));
} else {
info.outputValues[0] = AttributeValue(_outputs.at(0));
}
HeightfieldHeightDataPointer newHeightPointer(new HeightfieldHeightData(contents));
info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline<HeightfieldHeightDataPointer>(newHeightPointer));
// allow a border for what we clear in terms of color/material
innerBounds.minimum.x += increment;
innerBounds.minimum.z += increment;
innerBounds.maximum.x -= increment;
innerBounds.maximum.z -= increment;
innerOverlap = bounds.getIntersection(innerBounds);
HeightfieldColorDataPointer colorPointer = info.inputValues.at(1).getInlineValue<HeightfieldColorDataPointer>();
if (colorPointer) {
@ -1082,18 +1088,25 @@ int HeightfieldClearFetchVisitor::visit(MetavoxelInfo& info) {
memcpy(dest, src, destWidth * DataBlock::COLOR_BYTES);
}
destX = (innerOverlap.minimum.x - info.minimum.x) * heightScale;
destY = (innerOverlap.minimum.z - info.minimum.z) * heightScale;
destWidth = glm::ceil((innerOverlap.maximum.x - innerOverlap.minimum.x) * heightScale);
destHeight = glm::ceil((innerOverlap.maximum.z - innerOverlap.minimum.z) * heightScale);
dest = contents.data() + (destY * size + destX) * DataBlock::COLOR_BYTES;
for (int y = 0; y < destHeight; y++, dest += size * DataBlock::COLOR_BYTES) {
memset(dest, 0, destWidth * DataBlock::COLOR_BYTES);
if (foundNonZero) {
destX = (innerOverlap.minimum.x - info.minimum.x) * heightScale;
destY = (innerOverlap.minimum.z - info.minimum.z) * heightScale;
destWidth = glm::ceil((innerOverlap.maximum.x - innerOverlap.minimum.x) * heightScale);
destHeight = glm::ceil((innerOverlap.maximum.z - innerOverlap.minimum.z) * heightScale);
if (destWidth > 0 && destHeight > 0) {
dest = contents.data() + (destY * size + destX) * DataBlock::COLOR_BYTES;
for (int y = 0; y < destHeight; y++, dest += size * DataBlock::COLOR_BYTES) {
memset(dest, 0, destWidth * DataBlock::COLOR_BYTES);
}
HeightfieldColorDataPointer newColorPointer(new HeightfieldColorData(contents));
info.outputValues[1] = AttributeValue(_outputs.at(1),
encodeInline<HeightfieldColorDataPointer>(newColorPointer));
}
} else {
info.outputValues[1] = AttributeValue(_outputs.at(1));
}
HeightfieldColorDataPointer newColorPointer(new HeightfieldColorData(contents));
info.outputValues[1] = AttributeValue(_outputs.at(1), encodeInline<HeightfieldColorDataPointer>(newColorPointer));
}
HeightfieldMaterialDataPointer materialPointer = info.inputValues.at(2).getInlineValue<HeightfieldMaterialDataPointer>();
@ -1129,20 +1142,26 @@ int HeightfieldClearFetchVisitor::visit(MetavoxelInfo& info) {
}
}
destX = (innerOverlap.minimum.x - info.minimum.x) * heightScale;
destY = (innerOverlap.minimum.z - info.minimum.z) * heightScale;
destWidth = glm::ceil((innerOverlap.maximum.x - innerOverlap.minimum.x) * heightScale);
destHeight = glm::ceil((innerOverlap.maximum.z - innerOverlap.minimum.z) * heightScale);
dest = (uchar*)contents.data() + destY * size + destX;
for (int y = 0; y < destHeight; y++, dest += size) {
memset(dest, 0, destWidth);
if (foundNonZero) {
destX = (innerOverlap.minimum.x - info.minimum.x) * heightScale;
destY = (innerOverlap.minimum.z - info.minimum.z) * heightScale;
destWidth = glm::ceil((innerOverlap.maximum.x - innerOverlap.minimum.x) * heightScale);
destHeight = glm::ceil((innerOverlap.maximum.z - innerOverlap.minimum.z) * heightScale);
if (destWidth > 0 && destHeight > 0) {
dest = (uchar*)contents.data() + destY * size + destX;
for (int y = 0; y < destHeight; y++, dest += size) {
memset(dest, 0, destWidth);
}
clearUnusedMaterials(materials, contents);
HeightfieldMaterialDataPointer newMaterialPointer(new HeightfieldMaterialData(contents, materials));
info.outputValues[2] = AttributeValue(_outputs.at(2),
encodeInline<HeightfieldMaterialDataPointer>(newMaterialPointer));
}
} else {
info.outputValues[2] = AttributeValue(_outputs.at(2));
}
clearUnusedMaterials(materials, contents);
HeightfieldMaterialDataPointer newMaterialPointer(new HeightfieldMaterialData(contents, materials));
info.outputValues[2] = AttributeValue(_outputs.at(2),
encodeInline<HeightfieldMaterialDataPointer>(newMaterialPointer));
}
return STOP_RECURSION;

View file

@ -53,7 +53,7 @@ PacketVersion versionForPacketType(PacketType type) {
case PacketTypeSilentAudioFrame:
return 4;
case PacketTypeMixedAudio:
return 1;
return 2;
case PacketTypeAvatarData:
return 3;
case PacketTypeAvatarIdentity:

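The PacketTypeMixedAudio version bump from 1 to 2 presumably tracks the new per-stream reverb properties added elsewhere in this commit. A hedged sketch of gating on that version at the receiver; the typedef is assumed to match the project's PacketVersion and the constant name is illustrative:

typedef char PacketVersion; // assumed to match the project's typedef

// Sketch only: older mixers send version 1 packets without the new
// stream-properties block, so version-dependent parsing should be gated.
inline bool mixedAudioHasStreamProperties(PacketVersion versionInHeader) {
    const PacketVersion MIXED_AUDIO_WITH_STREAM_PROPERTIES = 2;
    return versionInHeader >= MIXED_AUDIO_WITH_STREAM_PROPERTIES;
}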
View file

@ -5,7 +5,7 @@ setup_hifi_library(Gui Network Script Widgets)
include_glm()
link_hifi_libraries(shared octree voxels fbx entities animation)
link_hifi_libraries(shared octree voxels fbx entities animation audio)
# call macro to link our dependencies and bubble them up via a property on our target
link_shared_dependencies()

View file

@ -17,7 +17,12 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include "EventTypes.h"
#include "HFActionEvent.h"
#include "KeyEvent.h"
#include "MouseEvent.h"
#include "SpatialEvent.h"
#include "TouchEvent.h"
#include "WheelEvent.h"
class AbstractInputController : public QObject {
Q_OBJECT
@ -88,6 +93,12 @@ public slots:
signals:
void keyPressEvent(const KeyEvent& event);
void keyReleaseEvent(const KeyEvent& event);
void actionStartEvent(const HFActionEvent& event);
void actionEndEvent(const HFActionEvent& event);
void backStartEvent();
void backEndEvent();
void mouseMoveEvent(const MouseEvent& event, unsigned int deviceID = 0);
void mousePressEvent(const MouseEvent& event, unsigned int deviceID = 0);

View file

@ -9,634 +9,20 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <QDebug>
#include <RegisteredMetaTypes.h>
#include "HFActionEvent.h"
#include "KeyEvent.h"
#include "MouseEvent.h"
#include "SpatialEvent.h"
#include "TouchEvent.h"
#include "WheelEvent.h"
#include "EventTypes.h"
void registerEventTypes(QScriptEngine* engine) {
qScriptRegisterMetaType(engine, keyEventToScriptValue, keyEventFromScriptValue);
qScriptRegisterMetaType(engine, mouseEventToScriptValue, mouseEventFromScriptValue);
qScriptRegisterMetaType(engine, touchEventToScriptValue, touchEventFromScriptValue);
qScriptRegisterMetaType(engine, wheelEventToScriptValue, wheelEventFromScriptValue);
qScriptRegisterMetaType(engine, spatialEventToScriptValue, spatialEventFromScriptValue);
}
KeyEvent::KeyEvent() :
key(0),
text(""),
isShifted(false),
isControl(false),
isMeta(false),
isAlt(false),
isKeypad(false),
isValid(false)
{
};
KeyEvent::KeyEvent(const QKeyEvent& event) {
key = event.key();
text = event.text();
isShifted = event.modifiers().testFlag(Qt::ShiftModifier);
isMeta = event.modifiers().testFlag(Qt::MetaModifier);
isControl = event.modifiers().testFlag(Qt::ControlModifier);
isAlt = event.modifiers().testFlag(Qt::AltModifier);
isKeypad = event.modifiers().testFlag(Qt::KeypadModifier);
isValid = true;
// handle special text for special characters...
if (key == Qt::Key_F1) {
text = "F1";
} else if (key == Qt::Key_F2) {
text = "F2";
} else if (key == Qt::Key_F3) {
text = "F3";
} else if (key == Qt::Key_F4) {
text = "F4";
} else if (key == Qt::Key_F5) {
text = "F5";
} else if (key == Qt::Key_F6) {
text = "F6";
} else if (key == Qt::Key_F7) {
text = "F7";
} else if (key == Qt::Key_F8) {
text = "F8";
} else if (key == Qt::Key_F9) {
text = "F9";
} else if (key == Qt::Key_F10) {
text = "F10";
} else if (key == Qt::Key_F11) {
text = "F11";
} else if (key == Qt::Key_F12) {
text = "F12";
} else if (key == Qt::Key_Up) {
text = "UP";
} else if (key == Qt::Key_Down) {
text = "DOWN";
} else if (key == Qt::Key_Left) {
text = "LEFT";
} else if (key == Qt::Key_Right) {
text = "RIGHT";
} else if (key == Qt::Key_Space) {
text = "SPACE";
} else if (key == Qt::Key_Escape) {
text = "ESC";
} else if (key == Qt::Key_Tab) {
text = "TAB";
} else if (key == Qt::Key_Delete) {
text = "DELETE";
} else if (key == Qt::Key_Backspace) {
text = "BACKSPACE";
} else if (key == Qt::Key_Shift) {
text = "SHIFT";
} else if (key == Qt::Key_Alt) {
text = "ALT";
} else if (key == Qt::Key_Control) {
text = "CONTROL";
} else if (key == Qt::Key_Meta) {
text = "META";
} else if (key == Qt::Key_PageDown) {
text = "PAGE DOWN";
} else if (key == Qt::Key_PageUp) {
text = "PAGE UP";
} else if (key == Qt::Key_Home) {
text = "HOME";
} else if (key == Qt::Key_End) {
text = "END";
} else if (key == Qt::Key_Help) {
text = "HELP";
} else if (key == Qt::Key_CapsLock) {
text = "CAPS LOCK";
} else if (key >= Qt::Key_A && key <= Qt::Key_Z && (isMeta || isControl || isAlt)) {
// this little bit of hackery will fix the text character keys like a-z in cases of control/alt/meta where
// qt doesn't always give you the key characters and will sometimes give you crazy non-printable characters
const int lowerCaseAdjust = 0x20;
QString unicode;
if (isShifted) {
text = QString(QChar(key));
} else {
text = QString(QChar(key + lowerCaseAdjust));
}
}
}
bool KeyEvent::operator==(const KeyEvent& other) const {
return other.key == key
&& other.isShifted == isShifted
&& other.isControl == isControl
&& other.isMeta == isMeta
&& other.isAlt == isAlt
&& other.isKeypad == isKeypad;
}
KeyEvent::operator QKeySequence() const {
int resultCode = 0;
if (text.size() == 1 && text >= "a" && text <= "z") {
resultCode = text.toUpper().at(0).unicode();
} else {
resultCode = key;
}
if (isMeta) {
resultCode |= Qt::META;
}
if (isAlt) {
resultCode |= Qt::ALT;
}
if (isControl) {
resultCode |= Qt::CTRL;
}
if (isShifted) {
resultCode |= Qt::SHIFT;
}
return QKeySequence(resultCode);
}
QScriptValue keyEventToScriptValue(QScriptEngine* engine, const KeyEvent& event) {
QScriptValue obj = engine->newObject();
obj.setProperty("key", event.key);
obj.setProperty("text", event.text);
obj.setProperty("isShifted", event.isShifted);
obj.setProperty("isMeta", event.isMeta);
obj.setProperty("isControl", event.isControl);
obj.setProperty("isAlt", event.isAlt);
obj.setProperty("isKeypad", event.isKeypad);
return obj;
}
void keyEventFromScriptValue(const QScriptValue& object, KeyEvent& event) {
event.isValid = false; // assume the worst
event.isMeta = object.property("isMeta").toVariant().toBool();
event.isControl = object.property("isControl").toVariant().toBool();
event.isAlt = object.property("isAlt").toVariant().toBool();
event.isKeypad = object.property("isKeypad").toVariant().toBool();
QScriptValue key = object.property("key");
if (key.isValid()) {
event.key = key.toVariant().toInt();
event.text = QString(QChar(event.key));
event.isValid = true;
} else {
QScriptValue text = object.property("text");
if (text.isValid()) {
event.text = object.property("text").toVariant().toString();
// if the text is a special command, then map it here...
// TODO: come up with more elegant solution here, a map? is there a Qt function that gives nice names for keys?
if (event.text.toUpper() == "F1") {
event.key = Qt::Key_F1;
} else if (event.text.toUpper() == "F2") {
event.key = Qt::Key_F2;
} else if (event.text.toUpper() == "F3") {
event.key = Qt::Key_F3;
} else if (event.text.toUpper() == "F4") {
event.key = Qt::Key_F4;
} else if (event.text.toUpper() == "F5") {
event.key = Qt::Key_F5;
} else if (event.text.toUpper() == "F6") {
event.key = Qt::Key_F6;
} else if (event.text.toUpper() == "F7") {
event.key = Qt::Key_F7;
} else if (event.text.toUpper() == "F8") {
event.key = Qt::Key_F8;
} else if (event.text.toUpper() == "F9") {
event.key = Qt::Key_F9;
} else if (event.text.toUpper() == "F10") {
event.key = Qt::Key_F10;
} else if (event.text.toUpper() == "F11") {
event.key = Qt::Key_F11;
} else if (event.text.toUpper() == "F12") {
event.key = Qt::Key_F12;
} else if (event.text.toUpper() == "UP") {
event.key = Qt::Key_Up;
event.isKeypad = true;
} else if (event.text.toUpper() == "DOWN") {
event.key = Qt::Key_Down;
event.isKeypad = true;
} else if (event.text.toUpper() == "LEFT") {
event.key = Qt::Key_Left;
event.isKeypad = true;
} else if (event.text.toUpper() == "RIGHT") {
event.key = Qt::Key_Right;
event.isKeypad = true;
} else if (event.text.toUpper() == "SPACE") {
event.key = Qt::Key_Space;
} else if (event.text.toUpper() == "ESC") {
event.key = Qt::Key_Escape;
} else if (event.text.toUpper() == "TAB") {
event.key = Qt::Key_Tab;
} else if (event.text.toUpper() == "DELETE") {
event.key = Qt::Key_Delete;
} else if (event.text.toUpper() == "BACKSPACE") {
event.key = Qt::Key_Backspace;
} else if (event.text.toUpper() == "SHIFT") {
event.key = Qt::Key_Shift;
} else if (event.text.toUpper() == "ALT") {
event.key = Qt::Key_Alt;
} else if (event.text.toUpper() == "CONTROL") {
event.key = Qt::Key_Control;
} else if (event.text.toUpper() == "META") {
event.key = Qt::Key_Meta;
} else if (event.text.toUpper() == "PAGE DOWN") {
event.key = Qt::Key_PageDown;
} else if (event.text.toUpper() == "PAGE UP") {
event.key = Qt::Key_PageUp;
} else if (event.text.toUpper() == "HOME") {
event.key = Qt::Key_Home;
} else if (event.text.toUpper() == "END") {
event.key = Qt::Key_End;
} else if (event.text.toUpper() == "HELP") {
event.key = Qt::Key_Help;
} else if (event.text.toUpper() == "CAPS LOCK") {
event.key = Qt::Key_CapsLock;
} else {
// Key values do not distinguish between uppercase and lowercase
// and use the uppercase key value.
event.key = event.text.toUpper().at(0).unicode();
}
event.isValid = true;
}
}
QScriptValue isShifted = object.property("isShifted");
if (isShifted.isValid()) {
event.isShifted = isShifted.toVariant().toBool();
} else {
// if no isShifted was included, get it from the text
QChar character = event.text.at(0);
if (character.isLetter() && character.isUpper()) {
event.isShifted = true;
} else {
// if it's a symbol, then attempt to detect shifted-ness
if (QString("~!@#$%^&*()_+{}|:\"<>?").contains(character)) {
event.isShifted = true;
}
}
}
const bool wantDebug = false;
if (wantDebug) {
qDebug() << "event.key=" << event.key
<< " event.text=" << event.text
<< " event.isShifted=" << event.isShifted
<< " event.isControl=" << event.isControl
<< " event.isMeta=" << event.isMeta
<< " event.isAlt=" << event.isAlt
<< " event.isKeypad=" << event.isKeypad;
}
}
MouseEvent::MouseEvent() :
x(0.0f),
y(0.0f),
isLeftButton(false),
isRightButton(false),
isMiddleButton(false),
isShifted(false),
isControl(false),
isMeta(false),
isAlt(false)
{
};
MouseEvent::MouseEvent(const QMouseEvent& event, const unsigned int deviceID) :
x(event.x()),
y(event.y()),
deviceID(deviceID),
isLeftButton(event.buttons().testFlag(Qt::LeftButton)),
isRightButton(event.buttons().testFlag(Qt::RightButton)),
isMiddleButton(event.buttons().testFlag(Qt::MiddleButton)),
isShifted(event.modifiers().testFlag(Qt::ShiftModifier)),
isControl(event.modifiers().testFlag(Qt::ControlModifier)),
isMeta(event.modifiers().testFlag(Qt::MetaModifier)),
isAlt(event.modifiers().testFlag(Qt::AltModifier))
{
// single button that caused the event
switch (event.button()) {
case Qt::LeftButton:
button = "LEFT";
isLeftButton = true;
break;
case Qt::RightButton:
button = "RIGHT";
isRightButton = true;
break;
case Qt::MiddleButton:
button = "MIDDLE";
isMiddleButton = true;
break;
default:
button = "NONE";
break;
}
}
QScriptValue mouseEventToScriptValue(QScriptEngine* engine, const MouseEvent& event) {
QScriptValue obj = engine->newObject();
obj.setProperty("x", event.x);
obj.setProperty("y", event.y);
obj.setProperty("button", event.button);
obj.setProperty("deviceID", event.deviceID);
obj.setProperty("isLeftButton", event.isLeftButton);
obj.setProperty("isRightButton", event.isRightButton);
obj.setProperty("isMiddleButton", event.isMiddleButton);
obj.setProperty("isShifted", event.isShifted);
obj.setProperty("isMeta", event.isMeta);
obj.setProperty("isControl", event.isControl);
obj.setProperty("isAlt", event.isAlt);
return obj;
}
void mouseEventFromScriptValue(const QScriptValue& object, MouseEvent& event) {
// nothing for now...
}
TouchEvent::TouchEvent() :
x(0.0f),
y(0.0f),
isPressed(false),
isMoved(false),
isStationary(false),
isReleased(false),
isShifted(false),
isControl(false),
isMeta(false),
isAlt(false),
touchPoints(0),
points(),
radius(0.0f),
isPinching(false),
isPinchOpening(false),
angles(),
angle(0.0f),
deltaAngle(0.0f),
isRotating(false),
rotating("none")
{
};
TouchEvent::TouchEvent(const QTouchEvent& event) :
// these values are not set by initWithQTouchEvent() because they only apply to comparing to other events
isPinching(false),
isPinchOpening(false),
deltaAngle(0.0f),
isRotating(false),
rotating("none")
{
initWithQTouchEvent(event);
}
TouchEvent::TouchEvent(const QTouchEvent& event, const TouchEvent& other) {
initWithQTouchEvent(event);
calculateMetaAttributes(other);
}
// returns the angle (in degrees) between two points (note: 0 degrees is 'east')
float angleBetweenPoints(const glm::vec2& a, const glm::vec2& b ) {
glm::vec2 length = b - a;
float angle = DEGREES_PER_RADIAN * std::atan2(length.y, length.x);
if (angle < 0) {
angle += 360.0f;
};
return angle;
}
void TouchEvent::initWithQTouchEvent(const QTouchEvent& event) {
// convert the touch points into an average
const QList<QTouchEvent::TouchPoint>& tPoints = event.touchPoints();
float touchAvgX = 0.0f;
float touchAvgY = 0.0f;
touchPoints = tPoints.count();
if (touchPoints > 1) {
for (int i = 0; i < touchPoints; ++i) {
touchAvgX += tPoints[i].pos().x();
touchAvgY += tPoints[i].pos().y();
// add it to our points vector
glm::vec2 thisPoint(tPoints[i].pos().x(), tPoints[i].pos().y());
points << thisPoint;
}
touchAvgX /= (float)(touchPoints);
touchAvgY /= (float)(touchPoints);
} else {
// I'm not sure this should ever happen, why would Qt send us a touch event for only one point?
// maybe this happens in the case of a multi-touch where all but the last finger is released?
touchAvgX = tPoints[0].pos().x();
touchAvgY = tPoints[0].pos().y();
}
x = touchAvgX;
y = touchAvgY;
// after calculating the center point (average touch point), determine the maximum radius
// also calculate the rotation angle for each point
float maxRadius = 0.0f;
glm::vec2 center(x,y);
for (int i = 0; i < touchPoints; ++i) {
glm::vec2 touchPoint(tPoints[i].pos().x(), tPoints[i].pos().y());
float thisRadius = glm::distance(center,touchPoint);
if (thisRadius > maxRadius) {
maxRadius = thisRadius;
}
// calculate the angle for this point
float thisAngle = angleBetweenPoints(center,touchPoint);
angles << thisAngle;
}
radius = maxRadius;
// after calculating the angles for each touch point, determine the average angle
float totalAngle = 0.0f;
for (int i = 0; i < touchPoints; ++i) {
totalAngle += angles[i];
}
angle = totalAngle/(float)touchPoints;
isPressed = event.touchPointStates().testFlag(Qt::TouchPointPressed);
isMoved = event.touchPointStates().testFlag(Qt::TouchPointMoved);
isStationary = event.touchPointStates().testFlag(Qt::TouchPointStationary);
isReleased = event.touchPointStates().testFlag(Qt::TouchPointReleased);
// keyboard modifiers
isShifted = event.modifiers().testFlag(Qt::ShiftModifier);
isMeta = event.modifiers().testFlag(Qt::MetaModifier);
isControl = event.modifiers().testFlag(Qt::ControlModifier);
isAlt = event.modifiers().testFlag(Qt::AltModifier);
}
void TouchEvent::calculateMetaAttributes(const TouchEvent& other) {
// calculate comparative event attributes...
if (other.radius > radius) {
isPinching = true;
isPinchOpening = false;
} else if (other.radius < radius) {
isPinchOpening = true;
isPinching = false;
} else {
isPinching = other.isPinching;
isPinchOpening = other.isPinchOpening;
}
// determine if the points are rotating...
// note: if the number of touch points change between events, then we don't consider ourselves to be rotating
if (touchPoints == other.touchPoints) {
deltaAngle = angle - other.angle;
if (other.angle < angle) {
isRotating = true;
rotating = "clockwise";
} else if (other.angle > angle) {
isRotating = true;
rotating = "counterClockwise";
} else {
isRotating = false;
rotating = "none";
}
} else {
deltaAngle = 0.0f;
isRotating = false;
rotating = "none";
}
}
QScriptValue touchEventToScriptValue(QScriptEngine* engine, const TouchEvent& event) {
QScriptValue obj = engine->newObject();
obj.setProperty("x", event.x);
obj.setProperty("y", event.y);
obj.setProperty("isPressed", event.isPressed);
obj.setProperty("isMoved", event.isMoved);
obj.setProperty("isStationary", event.isStationary);
obj.setProperty("isReleased", event.isReleased);
obj.setProperty("isShifted", event.isShifted);
obj.setProperty("isMeta", event.isMeta);
obj.setProperty("isControl", event.isControl);
obj.setProperty("isAlt", event.isAlt);
obj.setProperty("touchPoints", event.touchPoints);
QScriptValue pointsObj = engine->newArray();
int index = 0;
foreach (glm::vec2 point, event.points) {
QScriptValue thisPoint = vec2toScriptValue(engine, point);
pointsObj.setProperty(index, thisPoint);
index++;
}
obj.setProperty("points", pointsObj);
obj.setProperty("radius", event.radius);
obj.setProperty("isPinching", event.isPinching);
obj.setProperty("isPinchOpening", event.isPinchOpening);
obj.setProperty("angle", event.angle);
obj.setProperty("deltaAngle", event.deltaAngle);
QScriptValue anglesObj = engine->newArray();
index = 0;
foreach (float angle, event.angles) {
anglesObj.setProperty(index, angle);
index++;
}
obj.setProperty("angles", anglesObj);
obj.setProperty("isRotating", event.isRotating);
obj.setProperty("rotating", event.rotating);
return obj;
}
void touchEventFromScriptValue(const QScriptValue& object, TouchEvent& event) {
// nothing for now...
}
WheelEvent::WheelEvent() :
x(0.0f),
y(0.0f),
delta(0.0f),
orientation("UNKNOwN"),
isLeftButton(false),
isRightButton(false),
isMiddleButton(false),
isShifted(false),
isControl(false),
isMeta(false),
isAlt(false)
{
};
WheelEvent::WheelEvent(const QWheelEvent& event) {
x = event.x();
y = event.y();
delta = event.delta();
if (event.orientation() == Qt::Horizontal) {
orientation = "HORIZONTAL";
} else {
orientation = "VERTICAL";
}
// button pressed state
isLeftButton = (event.buttons().testFlag(Qt::LeftButton));
isRightButton = (event.buttons().testFlag(Qt::RightButton));
isMiddleButton = (event.buttons().testFlag(Qt::MiddleButton));
// keyboard modifiers
isShifted = event.modifiers().testFlag(Qt::ShiftModifier);
isMeta = event.modifiers().testFlag(Qt::MetaModifier);
isControl = event.modifiers().testFlag(Qt::ControlModifier);
isAlt = event.modifiers().testFlag(Qt::AltModifier);
}
QScriptValue wheelEventToScriptValue(QScriptEngine* engine, const WheelEvent& event) {
QScriptValue obj = engine->newObject();
obj.setProperty("x", event.x);
obj.setProperty("y", event.y);
obj.setProperty("delta", event.delta);
obj.setProperty("orientation", event.orientation);
obj.setProperty("isLeftButton", event.isLeftButton);
obj.setProperty("isRightButton", event.isRightButton);
obj.setProperty("isMiddleButton", event.isMiddleButton);
obj.setProperty("isShifted", event.isShifted);
obj.setProperty("isMeta", event.isMeta);
obj.setProperty("isControl", event.isControl);
obj.setProperty("isAlt", event.isAlt);
return obj;
}
void wheelEventFromScriptValue(const QScriptValue& object, WheelEvent& event) {
// nothing for now...
}
SpatialEvent::SpatialEvent() :
locTranslation(0.0f),
locRotation(),
absTranslation(0.0f),
absRotation()
{
};
SpatialEvent::SpatialEvent(const SpatialEvent& event) {
locTranslation = event.locTranslation;
locRotation = event.locRotation;
absTranslation = event.absTranslation;
absRotation = event.absRotation;
}
QScriptValue spatialEventToScriptValue(QScriptEngine* engine, const SpatialEvent& event) {
QScriptValue obj = engine->newObject();
obj.setProperty("locTranslation", vec3toScriptValue(engine, event.locTranslation) );
obj.setProperty("locRotation", quatToScriptValue(engine, event.locRotation) );
obj.setProperty("absTranslation", vec3toScriptValue(engine, event.absTranslation) );
obj.setProperty("absRotation", quatToScriptValue(engine, event.absRotation) );
return obj;
}
void spatialEventFromScriptValue(const QScriptValue& object,SpatialEvent& event) {
// nothing for now...
qScriptRegisterMetaType(engine, HFActionEvent::toScriptValue, HFActionEvent::fromScriptValue);
qScriptRegisterMetaType(engine, KeyEvent::toScriptValue, KeyEvent::fromScriptValue);
qScriptRegisterMetaType(engine, MouseEvent::toScriptValue, MouseEvent::fromScriptValue);
qScriptRegisterMetaType(engine, TouchEvent::toScriptValue, TouchEvent::fromScriptValue);
qScriptRegisterMetaType(engine, WheelEvent::toScriptValue, WheelEvent::fromScriptValue);
qScriptRegisterMetaType(engine, SpatialEvent::toScriptValue, SpatialEvent::fromScriptValue);
}

View file

@ -12,138 +12,8 @@
#ifndef hifi_EventTypes_h
#define hifi_EventTypes_h
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <QtScript/QScriptEngine>
#include <QKeyEvent>
#include <QMouseEvent>
#include <QTouchEvent>
#include <QWheelEvent>
class KeyEvent {
public:
KeyEvent();
KeyEvent(const QKeyEvent& event);
bool operator==(const KeyEvent& other) const;
operator QKeySequence() const;
int key;
QString text;
bool isShifted;
bool isControl;
bool isMeta;
bool isAlt;
bool isKeypad;
bool isValid;
};
class MouseEvent {
public:
MouseEvent();
MouseEvent(const QMouseEvent& event, const unsigned int deviceID = 0);
int x;
int y;
unsigned int deviceID;
QString button;
bool isLeftButton;
bool isRightButton;
bool isMiddleButton;
bool isShifted;
bool isControl;
bool isMeta;
bool isAlt;
};
class TouchEvent {
public:
TouchEvent();
TouchEvent(const QTouchEvent& event);
TouchEvent(const QTouchEvent& event, const TouchEvent& other);
float x;
float y;
bool isPressed;
bool isMoved;
bool isStationary;
bool isReleased;
bool isShifted;
bool isControl;
bool isMeta;
bool isAlt;
int touchPoints;
QVector<glm::vec2> points;
float radius;
bool isPinching;
bool isPinchOpening;
// angles are in degrees
QVector<float> angles; // angle from center to each point
float angle; // the average of the angles
float deltaAngle; // the change in average angle from last event
bool isRotating;
QString rotating;
private:
void initWithQTouchEvent(const QTouchEvent& event);
void calculateMetaAttributes(const TouchEvent& other);
};
class WheelEvent {
public:
WheelEvent();
WheelEvent(const QWheelEvent& event);
int x;
int y;
int delta;
QString orientation;
bool isLeftButton;
bool isRightButton;
bool isMiddleButton;
bool isShifted;
bool isControl;
bool isMeta;
bool isAlt;
};
class SpatialEvent {
public:
SpatialEvent();
SpatialEvent(const SpatialEvent& other);
glm::vec3 locTranslation;
glm::quat locRotation;
glm::vec3 absTranslation;
glm::quat absRotation;
private:
};
Q_DECLARE_METATYPE(KeyEvent)
Q_DECLARE_METATYPE(MouseEvent)
Q_DECLARE_METATYPE(TouchEvent)
Q_DECLARE_METATYPE(WheelEvent)
Q_DECLARE_METATYPE(SpatialEvent)
#include <qscriptengine.h>
void registerEventTypes(QScriptEngine* engine);
QScriptValue keyEventToScriptValue(QScriptEngine* engine, const KeyEvent& event);
void keyEventFromScriptValue(const QScriptValue& object, KeyEvent& event);
QScriptValue mouseEventToScriptValue(QScriptEngine* engine, const MouseEvent& event);
void mouseEventFromScriptValue(const QScriptValue& object, MouseEvent& event);
QScriptValue touchEventToScriptValue(QScriptEngine* engine, const TouchEvent& event);
void touchEventFromScriptValue(const QScriptValue& object, TouchEvent& event);
QScriptValue wheelEventToScriptValue(QScriptEngine* engine, const WheelEvent& event);
void wheelEventFromScriptValue(const QScriptValue& object, WheelEvent& event);
QScriptValue spatialEventToScriptValue(QScriptEngine* engine, const SpatialEvent& event);
void spatialEventFromScriptValue(const QScriptValue& object, SpatialEvent& event);
#endif // hifi_EventTypes_h

View file

@ -0,0 +1,41 @@
//
// HFActionEvent.cpp
// script-engine/src
//
// Created by Stephen Birarda on 2014-10-27.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "HFActionEvent.h"
HFActionEvent::HFActionEvent(QEvent::Type type, const QPointF& localPosition) :
HFMetaEvent(type),
localPosition(localPosition)
{
}
QEvent::Type HFActionEvent::startType() {
static QEvent::Type startType = HFMetaEvent::newEventType();
return startType;
}
QEvent::Type HFActionEvent::endType() {
static QEvent::Type endType = HFMetaEvent::newEventType();
return endType;
}
QScriptValue HFActionEvent::toScriptValue(QScriptEngine* engine, const HFActionEvent& event) {
QScriptValue obj = engine->newObject();
obj.setProperty("x", event.localPosition.x());
obj.setProperty("y", event.localPosition.y());
return obj;
}
void HFActionEvent::fromScriptValue(const QScriptValue& object, HFActionEvent& event) {
// not yet implemented
}

View file

@ -0,0 +1,35 @@
//
// HFActionEvent.h
// script-engine/src
//
// Created by Stephen Birarda on 2014-10-27.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_HFActionEvent_h
#define hifi_HFActionEvent_h
#include "HFMetaEvent.h"
#include <qscriptengine.h>
class HFActionEvent : public HFMetaEvent {
public:
HFActionEvent() {};
HFActionEvent(QEvent::Type type, const QPointF& localPosition);
static QEvent::Type startType();
static QEvent::Type endType();
static QScriptValue toScriptValue(QScriptEngine* engine, const HFActionEvent& event);
static void fromScriptValue(const QScriptValue& object, HFActionEvent& event);
QPointF localPosition;
};
Q_DECLARE_METATYPE(HFActionEvent)
#endif // hifi_HFActionEvent_h
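HFActionEvent allocates its start and end types once via HFMetaEvent and carries a local position. A usage sketch for posting and recognizing one; the posting function and the receiving object are hypothetical, the HFActionEvent API is from this change:

#include <QCoreApplication>
#include <QPointF>
#include "HFActionEvent.h"

// Post an action-start at a local position; Qt's event queue takes ownership.
void postActionStart(QObject* target, const QPointF& where) {
    QCoreApplication::postEvent(target,
        new HFActionEvent(HFActionEvent::startType(), where));
}

// In the target's event() override, the event can be recognized by type:
// if (event->type() == HFActionEvent::startType()) {
//     QPointF where = static_cast<HFActionEvent*>(event)->localPosition;
//     ...
// }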

View file

@ -0,0 +1,28 @@
//
// HFBackEvent.cpp
// script-engine/src
//
// Created by Stephen Birarda on 2014-10-27.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "HFBackEvent.h"
HFBackEvent::HFBackEvent(QEvent::Type type) :
HFMetaEvent(type)
{
}
QEvent::Type HFBackEvent::startType() {
static QEvent::Type startType = HFMetaEvent::newEventType();
return startType;
}
QEvent::Type HFBackEvent::endType() {
static QEvent::Type endType = HFMetaEvent::newEventType();
return endType;
}

View file

@ -0,0 +1,29 @@
//
// HFBackEvent.h
// script-engine/src
//
// Created by Stephen Birarda on 2014-10-27.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_HFBackEvent_h
#define hifi_HFBackEvent_h
#include <qevent.h>
#include <qscriptengine.h>
#include "HFMetaEvent.h"
class HFBackEvent : public HFMetaEvent {
public:
HFBackEvent() {};
HFBackEvent(QEvent::Type type);
static QEvent::Type startType();
static QEvent::Type endType();
};
#endif // hifi_HFBackEvent_h

View file

@ -0,0 +1,20 @@
//
// HFMetaEvent.cpp
// script-engine/src
//
// Created by Stephen Birarda on 2014-10-27.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "HFMetaEvent.h"
QSet<QEvent::Type> HFMetaEvent::_types = QSet<QEvent::Type>();
QEvent::Type HFMetaEvent::newEventType() {
QEvent::Type newType = static_cast<QEvent::Type>(QEvent::registerEventType());
_types.insert(newType);
return newType;
}

View file

@ -0,0 +1,28 @@
//
// HFMetaEvent.h
// script-engine/src
//
// Created by Stephen Birarda on 2014-10-27.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_HFMetaEvent_h
#define hifi_HFMetaEvent_h
#include <qevent.h>
class HFMetaEvent : public QEvent {
public:
HFMetaEvent() : QEvent(HFMetaEvent::newEventType()) {};
HFMetaEvent(QEvent::Type type) : QEvent(type) {};
static const QSet<QEvent::Type>& types() { return HFMetaEvent::_types; }
protected:
static QEvent::Type newEventType();
static QSet<QEvent::Type> _types;
};
#endif // hifi_HFMetaEvent_h
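HFMetaEvent records every dynamically registered type in a QSet, so callers can recognize any HF meta event without enumerating subclasses. An illustrative event filter built on types() (not part of this commit):

#include <QEvent>
#include <QObject>
#include "HFMetaEvent.h"

// Illustrative filter: intercepts any of the registered HF meta event types.
class HFMetaEventFilter : public QObject {
protected:
    virtual bool eventFilter(QObject* watched, QEvent* event) {
        if (HFMetaEvent::types().contains(event->type())) {
            // handle or forward the custom event here
            return true; // stop normal delivery
        }
        return QObject::eventFilter(watched, event);
    }
};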

View file

@ -0,0 +1,290 @@
//
// KeyEvent.cpp
// script-engine/src
//
// Created by Stephen Birarda on 2014-10-27.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <qdebug.h>
#include <qscriptengine.h>
#include "KeyEvent.h"
KeyEvent::KeyEvent() :
key(0),
text(""),
isShifted(false),
isControl(false),
isMeta(false),
isAlt(false),
isKeypad(false),
isValid(false),
isAutoRepeat(false)
{
}
KeyEvent::KeyEvent(const QKeyEvent& event) {
key = event.key();
text = event.text();
isShifted = event.modifiers().testFlag(Qt::ShiftModifier);
isMeta = event.modifiers().testFlag(Qt::MetaModifier);
isControl = event.modifiers().testFlag(Qt::ControlModifier);
isAlt = event.modifiers().testFlag(Qt::AltModifier);
isKeypad = event.modifiers().testFlag(Qt::KeypadModifier);
isValid = true;
isAutoRepeat = event.isAutoRepeat();
// handle special text for special characters...
if (key == Qt::Key_F1) {
text = "F1";
} else if (key == Qt::Key_F2) {
text = "F2";
} else if (key == Qt::Key_F3) {
text = "F3";
} else if (key == Qt::Key_F4) {
text = "F4";
} else if (key == Qt::Key_F5) {
text = "F5";
} else if (key == Qt::Key_F6) {
text = "F6";
} else if (key == Qt::Key_F7) {
text = "F7";
} else if (key == Qt::Key_F8) {
text = "F8";
} else if (key == Qt::Key_F9) {
text = "F9";
} else if (key == Qt::Key_F10) {
text = "F10";
} else if (key == Qt::Key_F11) {
text = "F11";
} else if (key == Qt::Key_F12) {
text = "F12";
} else if (key == Qt::Key_Up) {
text = "UP";
} else if (key == Qt::Key_Down) {
text = "DOWN";
} else if (key == Qt::Key_Left) {
text = "LEFT";
} else if (key == Qt::Key_Right) {
text = "RIGHT";
} else if (key == Qt::Key_Space) {
text = "SPACE";
} else if (key == Qt::Key_Escape) {
text = "ESC";
} else if (key == Qt::Key_Tab) {
text = "TAB";
} else if (key == Qt::Key_Delete) {
text = "DELETE";
} else if (key == Qt::Key_Backspace) {
text = "BACKSPACE";
} else if (key == Qt::Key_Shift) {
text = "SHIFT";
} else if (key == Qt::Key_Alt) {
text = "ALT";
} else if (key == Qt::Key_Control) {
text = "CONTROL";
} else if (key == Qt::Key_Meta) {
text = "META";
} else if (key == Qt::Key_PageDown) {
text = "PAGE DOWN";
} else if (key == Qt::Key_PageUp) {
text = "PAGE UP";
} else if (key == Qt::Key_Home) {
text = "HOME";
} else if (key == Qt::Key_End) {
text = "END";
} else if (key == Qt::Key_Help) {
text = "HELP";
} else if (key == Qt::Key_CapsLock) {
text = "CAPS LOCK";
} else if (key >= Qt::Key_A && key <= Qt::Key_Z && (isMeta || isControl || isAlt)) {
// this little bit of hackery will fix the text character keys like a-z in cases of control/alt/meta where
// qt doesn't always give you the key characters and will sometimes give you crazy non-printable characters
const int lowerCaseAdjust = 0x20;
QString unicode;
if (isShifted) {
text = QString(QChar(key));
} else {
text = QString(QChar(key + lowerCaseAdjust));
}
}
}
bool KeyEvent::operator==(const KeyEvent& other) const {
return other.key == key
&& other.isShifted == isShifted
&& other.isControl == isControl
&& other.isMeta == isMeta
&& other.isAlt == isAlt
&& other.isKeypad == isKeypad
&& other.isAutoRepeat == isAutoRepeat;
}
KeyEvent::operator QKeySequence() const {
int resultCode = 0;
if (text.size() == 1 && text >= "a" && text <= "z") {
resultCode = text.toUpper().at(0).unicode();
} else {
resultCode = key;
}
if (isMeta) {
resultCode |= Qt::META;
}
if (isAlt) {
resultCode |= Qt::ALT;
}
if (isControl) {
resultCode |= Qt::CTRL;
}
if (isShifted) {
resultCode |= Qt::SHIFT;
}
return QKeySequence(resultCode);
}
QScriptValue KeyEvent::toScriptValue(QScriptEngine* engine, const KeyEvent& event) {
QScriptValue obj = engine->newObject();
obj.setProperty("key", event.key);
obj.setProperty("text", event.text);
obj.setProperty("isShifted", event.isShifted);
obj.setProperty("isMeta", event.isMeta);
obj.setProperty("isControl", event.isControl);
obj.setProperty("isAlt", event.isAlt);
obj.setProperty("isKeypad", event.isKeypad);
obj.setProperty("isAutoRepeat", event.isAutoRepeat);
return obj;
}
void KeyEvent::fromScriptValue(const QScriptValue& object, KeyEvent& event) {
event.isValid = false; // assume the worst
event.isMeta = object.property("isMeta").toVariant().toBool();
event.isControl = object.property("isControl").toVariant().toBool();
event.isAlt = object.property("isAlt").toVariant().toBool();
event.isKeypad = object.property("isKeypad").toVariant().toBool();
event.isAutoRepeat = object.property("isAutoRepeat").toVariant().toBool();
QScriptValue key = object.property("key");
if (key.isValid()) {
event.key = key.toVariant().toInt();
event.text = QString(QChar(event.key));
event.isValid = true;
} else {
QScriptValue text = object.property("text");
if (text.isValid()) {
event.text = object.property("text").toVariant().toString();
// if the text is a special command, then map it here...
// TODO: come up with more elegant solution here, a map? is there a Qt function that gives nice names for keys?
if (event.text.toUpper() == "F1") {
event.key = Qt::Key_F1;
} else if (event.text.toUpper() == "F2") {
event.key = Qt::Key_F2;
} else if (event.text.toUpper() == "F3") {
event.key = Qt::Key_F3;
} else if (event.text.toUpper() == "F4") {
event.key = Qt::Key_F4;
} else if (event.text.toUpper() == "F5") {
event.key = Qt::Key_F5;
} else if (event.text.toUpper() == "F6") {
event.key = Qt::Key_F6;
} else if (event.text.toUpper() == "F7") {
event.key = Qt::Key_F7;
} else if (event.text.toUpper() == "F8") {
event.key = Qt::Key_F8;
} else if (event.text.toUpper() == "F9") {
event.key = Qt::Key_F9;
} else if (event.text.toUpper() == "F10") {
event.key = Qt::Key_F10;
} else if (event.text.toUpper() == "F11") {
event.key = Qt::Key_F11;
} else if (event.text.toUpper() == "F12") {
event.key = Qt::Key_F12;
} else if (event.text.toUpper() == "UP") {
event.key = Qt::Key_Up;
event.isKeypad = true;
} else if (event.text.toUpper() == "DOWN") {
event.key = Qt::Key_Down;
event.isKeypad = true;
} else if (event.text.toUpper() == "LEFT") {
event.key = Qt::Key_Left;
event.isKeypad = true;
} else if (event.text.toUpper() == "RIGHT") {
event.key = Qt::Key_Right;
event.isKeypad = true;
} else if (event.text.toUpper() == "SPACE") {
event.key = Qt::Key_Space;
} else if (event.text.toUpper() == "ESC") {
event.key = Qt::Key_Escape;
} else if (event.text.toUpper() == "TAB") {
event.key = Qt::Key_Tab;
} else if (event.text.toUpper() == "DELETE") {
event.key = Qt::Key_Delete;
} else if (event.text.toUpper() == "BACKSPACE") {
event.key = Qt::Key_Backspace;
} else if (event.text.toUpper() == "SHIFT") {
event.key = Qt::Key_Shift;
} else if (event.text.toUpper() == "ALT") {
event.key = Qt::Key_Alt;
} else if (event.text.toUpper() == "CONTROL") {
event.key = Qt::Key_Control;
} else if (event.text.toUpper() == "META") {
event.key = Qt::Key_Meta;
} else if (event.text.toUpper() == "PAGE DOWN") {
event.key = Qt::Key_PageDown;
} else if (event.text.toUpper() == "PAGE UP") {
event.key = Qt::Key_PageUp;
} else if (event.text.toUpper() == "HOME") {
event.key = Qt::Key_Home;
} else if (event.text.toUpper() == "END") {
event.key = Qt::Key_End;
} else if (event.text.toUpper() == "HELP") {
event.key = Qt::Key_Help;
} else if (event.text.toUpper() == "CAPS LOCK") {
event.key = Qt::Key_CapsLock;
} else {
// Key values do not distinguish between uppercase and lowercase
// and use the uppercase key value.
event.key = event.text.toUpper().at(0).unicode();
}
event.isValid = true;
}
}
QScriptValue isShifted = object.property("isShifted");
if (isShifted.isValid()) {
event.isShifted = isShifted.toVariant().toBool();
} else {
// if no isShifted was included, get it from the text
QChar character = event.text.at(0);
if (character.isLetter() && character.isUpper()) {
event.isShifted = true;
} else {
// if it's a symbol, then attempt to detect shifted-ness
if (QString("~!@#$%^&*()_+{}|:\"<>?").contains(character)) {
event.isShifted = true;
}
}
}
const bool wantDebug = false;
if (wantDebug) {
qDebug() << "event.key=" << event.key
<< " event.text=" << event.text
<< " event.isShifted=" << event.isShifted
<< " event.isControl=" << event.isControl
<< " event.isMeta=" << event.isMeta
<< " event.isAlt=" << event.isAlt
<< " event.isKeypad=" << event.isKeypad
<< " event.isAutoRepeat=" << event.isAutoRepeat;
}
}

View file

@ -0,0 +1,41 @@
//
// KeyEvent.h
// script-engine/src
//
// Created by Stephen Birarda on 2014-10-27.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_KeyEvent_h
#define hifi_KeyEvent_h
#include <QKeyEvent>
#include <qscriptvalue.h>
class KeyEvent {
public:
KeyEvent();
KeyEvent(const QKeyEvent& event);
bool operator==(const KeyEvent& other) const;
operator QKeySequence() const;
static QScriptValue toScriptValue(QScriptEngine* engine, const KeyEvent& event);
static void fromScriptValue(const QScriptValue& object, KeyEvent& event);
int key;
QString text;
bool isShifted;
bool isControl;
bool isMeta;
bool isAlt;
bool isKeypad;
bool isValid;
bool isAutoRepeat;
};
Q_DECLARE_METATYPE(KeyEvent)
#endif // hifi_KeyEvent_h

View file

@ -82,7 +82,7 @@ void menuItemPropertiesFromScriptValue(const QScriptValue& object, MenuItemPrope
} else {
QScriptValue shortcutKeyEventValue = object.property("shortcutKeyEvent");
if (shortcutKeyEventValue.isValid()) {
keyEventFromScriptValue(shortcutKeyEventValue, properties.shortcutKeyEvent);
KeyEvent::fromScriptValue(shortcutKeyEventValue, properties.shortcutKeyEvent);
properties.shortcutKeySequence = properties.shortcutKeyEvent;
}
}

View file

@ -14,7 +14,7 @@
#include <QtScript/QScriptEngine>
#include "EventTypes.h"
#include "KeyEvent.h"
const int UNSPECIFIED_POSITION = -1;

View file

@ -0,0 +1,83 @@
//
// MouseEvent.cpp
// script-engine/src
//
// Created by Stephen Birarda on 2014-10-27.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <qscriptengine.h>
#include <qscriptvalue.h>
#include "MouseEvent.h"
MouseEvent::MouseEvent() :
    x(0),
    y(0),
    deviceID(0),
    button("NONE"),
    isLeftButton(false),
    isRightButton(false),
    isMiddleButton(false),
    isShifted(false),
    isControl(false),
    isMeta(false),
    isAlt(false)
{
}
MouseEvent::MouseEvent(const QMouseEvent& event, const unsigned int deviceID) :
x(event.x()),
y(event.y()),
deviceID(deviceID),
isLeftButton(event.buttons().testFlag(Qt::LeftButton)),
isRightButton(event.buttons().testFlag(Qt::RightButton)),
isMiddleButton(event.buttons().testFlag(Qt::MiddleButton)),
isShifted(event.modifiers().testFlag(Qt::ShiftModifier)),
isControl(event.modifiers().testFlag(Qt::ControlModifier)),
isMeta(event.modifiers().testFlag(Qt::MetaModifier)),
isAlt(event.modifiers().testFlag(Qt::AltModifier))
{
// single button that caused the event
switch (event.button()) {
case Qt::LeftButton:
button = "LEFT";
isLeftButton = true;
break;
case Qt::RightButton:
button = "RIGHT";
isRightButton = true;
break;
case Qt::MiddleButton:
button = "MIDDLE";
isMiddleButton = true;
break;
default:
button = "NONE";
break;
}
}
QScriptValue MouseEvent::toScriptValue(QScriptEngine* engine, const MouseEvent& event) {
QScriptValue obj = engine->newObject();
obj.setProperty("x", event.x);
obj.setProperty("y", event.y);
obj.setProperty("button", event.button);
obj.setProperty("deviceID", event.deviceID);
obj.setProperty("isLeftButton", event.isLeftButton);
obj.setProperty("isRightButton", event.isRightButton);
obj.setProperty("isMiddleButton", event.isMiddleButton);
obj.setProperty("isShifted", event.isShifted);
obj.setProperty("isMeta", event.isMeta);
obj.setProperty("isControl", event.isControl);
obj.setProperty("isAlt", event.isAlt);
return obj;
}
void MouseEvent::fromScriptValue(const QScriptValue& object, MouseEvent& event) {
// nothing for now...
}
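The empty fromScriptValue above is a stub; purely as an assumption (this is not what the commit does), a symmetric read-back might look like the following sketch:

#include <qscriptvalue.h>

#include "MouseEvent.h"

// Hypothetical sketch only -- the commit deliberately leaves MouseEvent::fromScriptValue empty.
void mouseEventFromScriptValueSketch(const QScriptValue& object, MouseEvent& event) {
    event.x = object.property("x").toInt32();
    event.y = object.property("y").toInt32();
    event.button = object.property("button").toString();
    event.isLeftButton = object.property("isLeftButton").toBool();
    event.isShifted = object.property("isShifted").toBool();
    // ...the remaining flags would be read back the same way.
}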

View file

@ -0,0 +1,40 @@
//
// MouseEvent.h
// script-engine/src
//
// Created by Stephen Birarda on 2014-10-27.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_MouseEvent_h
#define hifi_MouseEvent_h
#include <QMouseEvent>
#include <qscriptvalue.h>
class MouseEvent {
public:
MouseEvent();
MouseEvent(const QMouseEvent& event, const unsigned int deviceID = 0);
static QScriptValue toScriptValue(QScriptEngine* engine, const MouseEvent& event);
static void fromScriptValue(const QScriptValue& object, MouseEvent& event);
int x;
int y;
unsigned int deviceID;
QString button;
bool isLeftButton;
bool isRightButton;
bool isMiddleButton;
bool isShifted;
bool isControl;
bool isMeta;
bool isAlt;
};
Q_DECLARE_METATYPE(MouseEvent)
#endif // hifi_MouseEvent_h
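A small illustrative sketch, not part of the diff, of how the constructor and toScriptValue behave (assumes Qt 5; the synthetic QMouseEvent and demoMouseEvent are made up for the example):

#include <QMouseEvent>
#include <QScriptEngine>

#include "MouseEvent.h"

void demoMouseEvent(QScriptEngine* engine) {
    // A synthetic left-button press at (12, 34) with Shift held.
    QMouseEvent qtEvent(QEvent::MouseButtonPress, QPointF(12, 34),
                        Qt::LeftButton, Qt::LeftButton, Qt::ShiftModifier);

    MouseEvent event(qtEvent);  // deviceID defaults to 0
    QScriptValue value = MouseEvent::toScriptValue(engine, event);
    // value.property("button").toString() == "LEFT"
    // value.property("isShifted").toBool() == true
}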

View file

@ -17,6 +17,7 @@
#include <QtNetwork/QNetworkReply>
#include <QScriptEngine>
#include <AudioEffectOptions.h>
#include <AudioInjector.h>
#include <AudioRingBuffer.h>
#include <AvatarData.h>
@ -34,13 +35,15 @@
#include "AnimationObject.h"
#include "ArrayBufferViewClass.h"
#include "DataViewClass.h"
#include "EventTypes.h"
#include "MenuItemProperties.h"
#include "MIDIEvent.h"
#include "LocalVoxels.h"
#include "ScriptEngine.h"
#include "TypedArrays.h"
#include "XMLHttpRequestClass.h"
#include "MIDIEvent.h"
VoxelsScriptingInterface ScriptEngine::_voxelsScriptingInterface;
EntityScriptingInterface ScriptEngine::_entityScriptingInterface;
@ -63,7 +66,15 @@ static QScriptValue debugPrint(QScriptContext* context, QScriptEngine* engine){
return QScriptValue();
}
QScriptValue avatarDataToScriptValue(QScriptEngine* engine, AvatarData* const &in) {
return engine->newQObject(in);
}
void avatarDataFromScriptValue(const QScriptValue &object, AvatarData* &out) {
out = qobject_cast<AvatarData*>(object.toQObject());
}
QScriptValue injectorToScriptValue(QScriptEngine* engine, AudioInjector* const &in) {
return engine->newQObject(in);
}
@ -269,10 +280,13 @@ void ScriptEngine::init() {
QScriptValue localVoxelsValue = scriptValueFromQMetaObject<LocalVoxels>();
globalObject().setProperty("LocalVoxels", localVoxelsValue);
QScriptValue audioEffectOptionsConstructorValue = newFunction(AudioEffectOptions::constructor);
globalObject().setProperty("AudioEffectOptions", audioEffectOptionsConstructorValue);
qScriptRegisterMetaType(this, injectorToScriptValue, injectorFromScriptValue);
qScriptRegisterMetaType(this, inputControllerToScriptValue, inputControllerFromScriptValue);
qScriptRegisterMetaType(this, avatarDataToScriptValue, avatarDataFromScriptValue);
qScriptRegisterMetaType(this, animationDetailsToScriptValue, animationDetailsFromScriptValue);
registerGlobalObject("Script", this);
