
Merge remote-tracking branch 'upstream/master' into ovr6

Commit 4f8ea3f624 by Brad Davis, 2015-06-16 09:58:20 -07:00
114 changed files with 2752 additions and 1392 deletions
CMakeLists.txt
assignment-client/src
cmake
examples
interface
libraries

View file

@ -53,7 +53,7 @@ else ()
endif ()
endif(WIN32)
if (NOT MSVC12)
if ((NOT MSVC12) AND (NOT MSVC14))
include(CheckCXXCompilerFlag)
CHECK_CXX_COMPILER_FLAG("-std=c++11" COMPILER_SUPPORTS_CXX11)
CHECK_CXX_COMPILER_FLAG("-std=c++0x" COMPILER_SUPPORTS_CXX0X)

View file

@ -66,6 +66,9 @@ AssignmentClient::AssignmentClient(Assignment::Type requestAssignmentType, QStri
// set the logging target to the CHILD_TARGET_NAME
LogHandler::getInstance().setTargetName(ASSIGNMENT_CLIENT_TARGET_NAME);
// make sure we output process IDs for a child AC otherwise it's insane to parse
LogHandler::getInstance().setShouldOutputPID(true);
// setup our _requestAssignment member variable from the passed arguments
_requestAssignment = Assignment(Assignment::RequestCommand, requestAssignmentType, assignmentPool);

View file

@ -39,9 +39,9 @@ AssignmentClientMonitor::AssignmentClientMonitor(const unsigned int numAssignmen
_walletUUID(walletUUID),
_assignmentServerHostname(assignmentServerHostname),
_assignmentServerPort(assignmentServerPort)
{
{
qDebug() << "_requestAssignmentType =" << _requestAssignmentType;
// start the Logging class with the parent's target name
LogHandler::getInstance().setTargetName(ASSIGNMENT_CLIENT_MONITOR_TARGET_NAME);
@ -77,13 +77,13 @@ void AssignmentClientMonitor::simultaneousWaitOnChildren(int waitMsecs) {
while(_childProcesses.size() > 0 && !waitTimer.hasExpired(waitMsecs)) {
// continue processing events so we can handle a process finishing up
QCoreApplication::processEvents();
}
}
}
void AssignmentClientMonitor::childProcessFinished() {
QProcess* childProcess = qobject_cast<QProcess*>(sender());
qint64 processID = _childProcesses.key(childProcess);
if (processID > 0) {
qDebug() << "Child process" << processID << "has finished. Removing from internal map.";
_childProcesses.remove(processID);
@ -98,17 +98,17 @@ void AssignmentClientMonitor::stopChildProcesses() {
qDebug() << "Attempting to terminate child process" << childProcess->processId();
childProcess->terminate();
}
simultaneousWaitOnChildren(WAIT_FOR_CHILD_MSECS);
if (_childProcesses.size() > 0) {
// ask even more firmly
foreach(QProcess* childProcess, _childProcesses) {
qDebug() << "Attempting to kill child process" << childProcess->processId();
childProcess->kill();
}
simultaneousWaitOnChildren(WAIT_FOR_CHILD_MSECS);
simultaneousWaitOnChildren(WAIT_FOR_CHILD_MSECS);
}
}
@ -122,7 +122,7 @@ void AssignmentClientMonitor::aboutToQuit() {
void AssignmentClientMonitor::spawnChildClient() {
QProcess* assignmentClient = new QProcess(this);
// unparse the parts of the command-line that the child cares about
QStringList _childArguments;
if (_assignmentPool != "") {
@ -153,7 +153,7 @@ void AssignmentClientMonitor::spawnChildClient() {
// make sure that the output from the child process appears in our output
assignmentClient->setProcessChannelMode(QProcess::ForwardedChannels);
assignmentClient->start(QCoreApplication::applicationFilePath(), _childArguments);
// make sure we hear that this process has finished when it does
@ -194,7 +194,7 @@ void AssignmentClientMonitor::checkSpares() {
qDebug() << "asking child" << aSpareId << "to exit.";
SharedNodePointer childNode = nodeList->nodeWithUUID(aSpareId);
childNode->activateLocalSocket();
QByteArray diePacket = nodeList->byteArrayWithPopulatedHeader(PacketTypeStopNode);
nodeList->writeUnverifiedDatagram(diePacket, childNode);
}
@ -239,7 +239,7 @@ void AssignmentClientMonitor::readPendingDatagrams() {
// update our records about how to reach this child
matchingNode->setLocalSocket(senderSockAddr);
QVariantMap packetVariantMap =
QVariantMap packetVariantMap =
JSONBreakableMarshal::fromStringBuffer(receivedPacket.mid(numBytesForPacketHeader(receivedPacket)));
QJsonObject unpackedStatsJSON = QJsonObject::fromVariantMap(packetVariantMap);

View file

@ -850,7 +850,9 @@ void AudioMixer::run() {
++_numStatFrames;
// since we're a while loop we need to help Qt's event processing
QCoreApplication::processEvents();
QCoreApplication::sendPostedEvents(this, 0);
if (_isFinished) {
break;

View file

@ -5,7 +5,7 @@ ExternalProject_Add(
${EXTERNAL_NAME}
URL http://hifi-public.s3.amazonaws.com/dependencies/polyvox.zip
URL_MD5 904b840328278c9b36fa7a14be730c34
CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
CMAKE_ARGS -DENABLE_EXAMPLES=OFF -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
LOG_DOWNLOAD 1
LOG_CONFIGURE 1

View file

@ -14,12 +14,12 @@ macro(SETUP_EXTERNALS_BINARY_DIR)
# get a short name for the generator to use in the path
STRING(REGEX REPLACE " " "-" CMAKE_GENERATOR_FOLDER_NAME ${CMAKE_GENERATOR})
if (MSVC12)
if (MSVC12)
set(CMAKE_GENERATOR_FOLDER_NAME "vc12")
else ()
if (CMAKE_GENERATOR_FOLDER_NAME STREQUAL "Unix-Makefiles")
set(CMAKE_GENERATOR_FOLDER_NAME "makefiles")
endif ()
elseif (MSVC14)
set(CMAKE_GENERATOR_FOLDER_NAME "vc14")
elseif(CMAKE_GENERATOR_FOLDER_NAME STREQUAL "Unix-Makefiles")
set(CMAKE_GENERATOR_FOLDER_NAME "makefiles")
endif ()
set(EXTERNALS_BINARY_ROOT_DIR "${CMAKE_CURRENT_BINARY_DIR}/ext")

View file

@ -99,6 +99,13 @@ var NUM_BUTTONS = 3;
var screenSize = Controller.getViewportDimensions();
var startX = screenSize.x / 2 - (NUM_BUTTONS * (BUTTON_SIZE + PADDING)) / 2;
Script.include(["../../libraries/toolBars.js"]);
const persistKey = "highfidelity.gun.toolbar.position";
var toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL);
toolBar.save = function () {
Settings.setValue(persistKey, JSON.stringify([toolBar.x, toolBar.y]));
};
var old = JSON.parse(Settings.getValue(persistKey) || '0');
var reticle = Overlays.addOverlay("image", {
x: screenSize.x / 2 - (BUTTON_SIZE / 2),
y: screenSize.y / 2 - (BUTTON_SIZE / 2),
@ -108,9 +115,9 @@ var reticle = Overlays.addOverlay("image", {
alpha: 1
});
var offButton = Overlays.addOverlay("image", {
x: startX,
y: screenSize.y - (BUTTON_SIZE + PADDING),
var offButton = toolBar.addOverlay("image", {
x: old ? old[0] : startX,
y: old ? old[1] : (screenSize.y - (BUTTON_SIZE + PADDING)),
width: BUTTON_SIZE,
height: BUTTON_SIZE,
imageURL: HIFI_PUBLIC_BUCKET + "images/gun/close.svg",
@ -118,7 +125,7 @@ var offButton = Overlays.addOverlay("image", {
});
startX += BUTTON_SIZE + PADDING;
var platformButton = Overlays.addOverlay("image", {
var platformButton = toolBar.addOverlay("image", {
x: startX,
y: screenSize.y - (BUTTON_SIZE + PADDING),
width: BUTTON_SIZE,
@ -128,7 +135,7 @@ var platformButton = Overlays.addOverlay("image", {
});
startX += BUTTON_SIZE + PADDING;
var gridButton = Overlays.addOverlay("image", {
var gridButton = toolBar.addOverlay("image", {
x: startX,
y: screenSize.y - (BUTTON_SIZE + PADDING),
width: BUTTON_SIZE,
@ -493,10 +500,8 @@ function mousePressEvent(event) {
}
function scriptEnding() {
Overlays.deleteOverlay(reticle);
Overlays.deleteOverlay(offButton);
Overlays.deleteOverlay(platformButton);
Overlays.deleteOverlay(gridButton);
Overlays.deleteOverlay(reticle);
toolBar.cleanup();
Overlays.deleteOverlay(pointer[0]);
Overlays.deleteOverlay(pointer[1]);
Overlays.deleteOverlay(text);
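
The gun.js changes above introduce a pattern that repeats in dice.js, pointer.js, and edit.js later in this commit: the script overrides the toolbar's save hook to write its position into Settings, then reads that value back on startup to place the first button. A minimal sketch of the pattern, assuming the ToolBar and Settings APIs behave as they are used in this diff; the key, default position, and image URL are illustrative:

Script.include(["libraries/toolBars.js"]);

var persistKey = "highfidelity.example.toolbar.position"; // illustrative key
var toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL);

// ToolBar.move() calls save() after repositioning, so overriding it persists every drag.
toolBar.save = function () {
    Settings.setValue(persistKey, JSON.stringify([toolBar.x, toolBar.y]));
};

// '0' parses to a falsy value, so a missing setting falls back to the default position.
var old = JSON.parse(Settings.getValue(persistKey) || '0');
var button = toolBar.addOverlay("image", {
    x: old ? old[0] : 100, // illustrative default position
    y: old ? old[1] : 100,
    width: 32,
    height: 32,
    imageURL: "http://s3.amazonaws.com/hifi-public/images/close.png",
    alpha: 1
});

Script.scriptEnding.connect(function () {
    toolBar.cleanup(); // deletes the overlays the toolbar created
});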

View file

@ -18,3 +18,4 @@ Script.load("notifications.js");
Script.load("users.js");
Script.load("grab.js");
Script.load("pointer.js");
Script.load("directory.js");

View file

@ -3,6 +3,7 @@
// examples
//
// Created by Stephen Birarda on 06/08/15.
// Added disconnect HRS 6/11/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
@ -10,14 +11,19 @@
//
// setup the local sound we're going to use
var connectSound = SoundCache.getSound("file://" + Paths.resources + "sounds/short1.wav");
var connectSound = SoundCache.getSound("file://" + Paths.resources + "sounds/hello.wav");
var disconnectSound = SoundCache.getSound("file://" + Paths.resources + "sounds/goodbye.wav");
// setup the options needed for that sound
var connectSoundOptions = {
localOnly: true
}
localOnly: true
};
// play the sound locally once we get the first audio packet from a mixer
Audio.receivedFirstPacket.connect(function(){
Audio.playSound(connectSound, connectSoundOptions);
Audio.playSound(connectSound, connectSoundOptions);
});
Audio.disconnected.connect(function(){
Audio.playSound(disconnectSound, connectSoundOptions);
});

View file

@ -3,6 +3,7 @@
// examples
//
// Created by Philip Rosedale on February 2, 2015
// Persist toolbar by HRS 6/11/15.
// Copyright 2015 High Fidelity, Inc.
//
// Press the dice button to throw some dice from the center of the screen.
@ -31,9 +32,16 @@ var screenSize = Controller.getViewportDimensions();
var BUTTON_SIZE = 32;
var PADDING = 3;
var offButton = Overlays.addOverlay("image", {
x: screenSize.x / 2 - BUTTON_SIZE * 2 + PADDING,
y: screenSize.y - (BUTTON_SIZE + PADDING),
Script.include(["libraries/toolBars.js"]);
var toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL);
const persistKey = "highfidelity.dice.toolbar.position";
toolBar.save = function () {
Settings.setValue(persistKey, JSON.stringify([toolBar.x, toolBar.y]));
};
var old = JSON.parse(Settings.getValue(persistKey) || '0');
var offButton = toolBar.addOverlay("image", {
x: old ? old[0] : (screenSize.x / 2 - BUTTON_SIZE * 2 + PADDING),
y: old ? old[1] : (screenSize.y - (BUTTON_SIZE + PADDING)),
width: BUTTON_SIZE,
height: BUTTON_SIZE,
imageURL: HIFI_PUBLIC_BUCKET + "images/close.png",
@ -45,7 +53,7 @@ var offButton = Overlays.addOverlay("image", {
alpha: 1
});
var deleteButton = Overlays.addOverlay("image", {
var deleteButton = toolBar.addOverlay("image", {
x: screenSize.x / 2 - BUTTON_SIZE,
y: screenSize.y - (BUTTON_SIZE + PADDING),
width: BUTTON_SIZE,
@ -59,7 +67,7 @@ var deleteButton = Overlays.addOverlay("image", {
alpha: 1
});
var diceButton = Overlays.addOverlay("image", {
var diceButton = toolBar.addOverlay("image", {
x: screenSize.x / 2 + PADDING,
y: screenSize.y - (BUTTON_SIZE + PADDING),
width: BUTTON_SIZE,
@ -140,10 +148,8 @@ function mousePressEvent(event) {
}
function scriptEnding() {
Overlays.deleteOverlay(offButton);
Overlays.deleteOverlay(diceButton);
Overlays.deleteOverlay(deleteButton);
toolBar.cleanup();
}
Controller.mousePressEvent.connect(mousePressEvent);
Script.scriptEnding.connect(scriptEnding);
Script.scriptEnding.connect(scriptEnding);

examples/directory.js Normal file (92 lines)
View file

@ -0,0 +1,92 @@
//
// directory.js
// examples
//
// Created by David Rowe on 8 Jun 2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include("libraries/globals.js");
var directory = (function () {
var DIRECTORY_URL = "https://metaverse.highfidelity.com/directory",
directoryWindow,
DIRECTORY_BUTTON_URL = HIFI_PUBLIC_BUCKET + "images/tools/directory.svg",
BUTTON_WIDTH = 50,
BUTTON_HEIGHT = 50,
BUTTON_ALPHA = 0.9,
BUTTON_MARGIN = 8,
directoryButton,
EDIT_TOOLBAR_BUTTONS = 10, // Number of buttons in edit.js toolbar
viewport;
function updateButtonPosition() {
Overlays.editOverlay(directoryButton, {
x: viewport.x - BUTTON_WIDTH - BUTTON_MARGIN,
y: (viewport.y - (EDIT_TOOLBAR_BUTTONS + 1) * (BUTTON_HEIGHT + BUTTON_MARGIN) - BUTTON_MARGIN) / 2 - 1
});
}
function onMousePressEvent(event) {
var clickedOverlay;
clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
if (clickedOverlay === directoryButton) {
if (directoryWindow.url !== DIRECTORY_URL) {
directoryWindow.setURL(DIRECTORY_URL);
}
directoryWindow.setVisible(true);
directoryWindow.raise();
}
}
function onDomainChanged() {
directoryWindow.setVisible(false);
}
function onScriptUpdate() {
var oldViewport = viewport;
viewport = Controller.getViewportDimensions();
if (viewport.x !== oldViewport.x || viewport.y !== oldViewport.y) {
updateButtonPosition();
}
}
function setUp() {
viewport = Controller.getViewportDimensions();
directoryWindow = new WebWindow('Directory', DIRECTORY_URL, 900, 700, false);
directoryWindow.setVisible(false);
directoryButton = Overlays.addOverlay("image", {
imageURL: DIRECTORY_BUTTON_URL,
width: BUTTON_WIDTH,
height: BUTTON_HEIGHT,
x: viewport.x - BUTTON_WIDTH - BUTTON_MARGIN,
y: BUTTON_MARGIN,
alpha: BUTTON_ALPHA,
visible: true
});
updateButtonPosition();
Controller.mousePressEvent.connect(onMousePressEvent);
Window.domainChanged.connect(onDomainChanged);
Script.update.connect(onScriptUpdate);
}
function tearDown() {
Overlays.deleteOverlay(directoryButton);
}
setUp();
Script.scriptEnding.connect(tearDown);
}());

View file

@ -2,6 +2,7 @@
// examples
//
// Created by Brad Hefta-Gaub on 10/2/14.
// Persist toolbar by HRS 6/11/15.
// Copyright 2014 High Fidelity, Inc.
//
// This script allows you to edit entities with a new UI/UX for mouse and trackpad based editing
@ -320,6 +321,7 @@ var toolBar = (function () {
}
}
const persistKey = "highfidelity.edit.toolbar.position";
that.move = function () {
var newViewPort,
toolsX,
@ -330,6 +332,15 @@ var toolBar = (function () {
if (toolBar === undefined) {
initialize();
toolBar.save = function () {
Settings.setValue(persistKey, JSON.stringify([toolBar.x, toolBar.y]));
};
var old = JSON.parse(Settings.getValue(persistKey) || '0');
if (old) {
windowDimensions = newViewPort;
toolBar.move(old[0], old[1]);
return;
}
} else if (windowDimensions.x === newViewPort.x &&
windowDimensions.y === newViewPort.y) {
return;

View file

@ -0,0 +1,96 @@
//
// controllerScriptingExamples.js
// examples
//
// Created by Sam Gondelman on 6/2/15
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Assumes you only have the default keyboard connected
// Resets every device to its default key bindings:
Controller.resetAllDeviceBindings();
// Query all actions
print("All Actions: \n" + Controller.getAllActions());
// Each action stores:
// action: int representation of enum
print("Action 5 int: \n" + Controller.getAllActions()[5].action);
// actionName: string representation of enum
print("Action 5 name: \n" + Controller.getAllActions()[5].actionName);
// inputChannels: list of all inputchannels that control that action
print("Action 5 input channels: \n" + Controller.getAllActions()[5].inputChannels + "\n");
// Each input channel stores:
// action: Action that this InputChannel maps to
print("Input channel action: \n" + Controller.getAllActions()[5].inputChannels[0].action);
// scale: sensitivity of input
print("Input channel scale: \n" + Controller.getAllActions()[5].inputChannels[0].scale);
// input and modifier: Inputs
print("Input channel input and modifier: \n" + Controller.getAllActions()[5].inputChannels[0].input + "\n" + Controller.getAllActions()[5].inputChannels[0].modifier + "\n");
// Each Input stores:
// device: device of input
print("Input device: \n" + Controller.getAllActions()[5].inputChannels[0].input.device);
// channel: channel of input
print("Input channel: \n" + Controller.getAllActions()[5].inputChannels[0].input.channel);
// type: type of input (Unknown, Button, Axis, Joint)
print("Input type: \n" + Controller.getAllActions()[5].inputChannels[0].input.type);
// id: id of input
print("Input id: \n" + Controller.getAllActions()[5].inputChannels[0].input.id + "\n");
// You can get the name of a device from its id
print("Device 1 name: \n" + Controller.getDeviceName(Controller.getAllActions()[5].inputChannels[0].input.id));
// You can also get all of a devices input channels
print("Device 1's input channels: \n" + Controller.getAllInputsForDevice(1) + "\n");
// Modifying properties:
// The following code will switch the "w" and "s" key functionality and adjust their scales
var s = Controller.getAllActions()[0].inputChannels[0];
var w = Controller.getAllActions()[1].inputChannels[0];
// You must remove an input controller before modifying it so the old input controller isn't registered anymore
// removeInputChannel and addInputChannel return true if successful, false otherwise
Controller.removeInputChannel(s);
Controller.removeInputChannel(w);
print(s.scale);
s.action = 1;
s.scale = .01;
w.action = 0;
w.scale = 10000;
Controller.addInputChannel(s);
Controller.addInputChannel(w);
print(s.scale);
// You can get all the available inputs for any device
// Each AvailableInput has:
// input: the Input itself
// inputName: string representing the input
var availableInputs = Controller.getAvailableInputs(1);
for (i = 0; i < availableInputs.length; i++) {
print(availableInputs[i].inputName);
}
// You can modify key bindings by using these available inputs
// This will replace e (up) with 6
var e = Controller.getAllActions()[5].inputChannels[0];
Controller.removeInputChannel(e);
e.input = availableInputs[6].input;
Controller.addInputChannel(e);

View file

@ -4,7 +4,7 @@
// Created by Eric Levin on May 1, 2015
// Copyright 2015 High Fidelity, Inc.
//
// Grab's physically moveable entities with the mouse, by applying a spring force.
// Grab's physically moveable entities with the mouse, by applying a spring force.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
@ -20,7 +20,7 @@ var ANGULAR_DAMPING_RATE = 0.40;
// NOTE: to improve readability global variable names start with 'g'
var gIsGrabbing = false;
var gGrabbedEntity = null;
var gPrevMouse = {x: 0, y: 0};
var gActionID = null;
var gEntityProperties;
var gStartPosition;
var gStartRotation;
@ -31,20 +31,20 @@ var gPlaneNormal = ZERO_VEC3;
// gMaxGrabDistance is a function of the size of the object.
var gMaxGrabDistance;
// gGrabMode defines the degrees of freedom of the grab target positions
// relative to gGrabStartPosition options include:
// gGrabMode defines the degrees of freedom of the grab target positions
// relative to gGrabStartPosition options include:
// xzPlane (default)
// verticalCylinder (SHIFT)
// rotate (CONTROL)
// Modes to eventually support?:
// xyPlane
// yzPlane
// xyPlane
// yzPlane
// polar
// elevationAzimuth
var gGrabMode = "xzplane";
var gGrabMode = "xzplane";
// gGrabOffset allows the user to grab an object off-center. It points from ray's intersection
// with the move-plane to object center (at the moment the grab is initiated). Future target positions
// gGrabOffset allows the user to grab an object off-center. It points from ray's intersection
// with the move-plane to object center (at the moment the grab is initiated). Future target positions
// are relative to the ray's intersection by the same offset.
var gGrabOffset = { x: 0, y: 0, z: 0 };
@ -53,13 +53,14 @@ var gTargetRotation;
var gLiftKey = false; // SHIFT
var gRotateKey = false; // CONTROL
var gInitialMouse = { x: 0, y: 0 };
var gPreviousMouse = { x: 0, y: 0 };
var gMouseCursorLocation = { x: 0, y: 0 };
var gMouseAtRotateStart = { x: 0, y: 0 };
var gBeaconHeight = 0.10;
var gAngularVelocity = ZERO_VEC3;
// var gAngularVelocity = ZERO_VEC3;
// TODO: play sounds again when we aren't leaking AudioInjector threads
// var grabSound = SoundCache.getSound("https://hifi-public.s3.amazonaws.com/eric/sounds/CloseClamp.wav");
@ -140,6 +141,10 @@ function mouseIntersectionWithPlane(pointOnPlane, planeNormal, event) {
}
function computeNewGrabPlane() {
if (!gIsGrabbing) {
return;
}
var maybeResetMousePosition = false;
if (gGrabMode !== "rotate") {
gMouseAtRotateStart = gMouseCursorLocation;
@ -162,7 +167,7 @@ function computeNewGrabPlane() {
var xzOffset = Vec3.subtract(gPointOnPlane, Camera.getPosition());
xzOffset.y = 0;
gXzDistanceToGrab = Vec3.length(xzOffset);
if (gGrabMode !== "rotate" && maybeResetMousePosition) {
// we reset the mouse position whenever we stop rotating
Window.setCursorPosition(gMouseAtRotateStart.x, gMouseAtRotateStart.y);
@ -173,6 +178,7 @@ function mousePressEvent(event) {
if (!event.isLeftButton) {
return;
}
gInitialMouse = {x: event.x, y: event.y };
gPreviousMouse = {x: event.x, y: event.y };
var pickRay = Camera.computePickRay(event.x, event.y);
@ -189,12 +195,13 @@ function mousePressEvent(event) {
var clickedEntity = pickResults.entityID;
var entityProperties = Entities.getEntityProperties(clickedEntity)
var objectPosition = entityProperties.position;
gStartPosition = entityProperties.position;
gStartRotation = entityProperties.rotation;
var cameraPosition = Camera.getPosition();
gBeaconHeight = Vec3.length(entityProperties.dimensions);
gMaxGrabDistance = gBeaconHeight / MAX_SOLID_ANGLE;
if (Vec3.distance(objectPosition, cameraPosition) > gMaxGrabDistance) {
if (Vec3.distance(gStartPosition, cameraPosition) > gMaxGrabDistance) {
// don't allow grabs of things far away
return;
}
@ -205,20 +212,20 @@ function mousePressEvent(event) {
gGrabbedEntity = clickedEntity;
gCurrentPosition = entityProperties.position;
gOriginalGravity = entityProperties.gravity;
gTargetPosition = objectPosition;
gTargetPosition = gStartPosition;
// compute the grab point
var nearestPoint = Vec3.subtract(objectPosition, cameraPosition);
var nearestPoint = Vec3.subtract(gStartPosition, cameraPosition);
var distanceToGrab = Vec3.dot(nearestPoint, pickRay.direction);
nearestPoint = Vec3.multiply(distanceToGrab, pickRay.direction);
gPointOnPlane = Vec3.sum(cameraPosition, nearestPoint);
// compute the grab offset
gGrabOffset = Vec3.subtract(objectPosition, gPointOnPlane);
gGrabOffset = Vec3.subtract(gStartPosition, gPointOnPlane);
computeNewGrabPlane();
updateDropLine(objectPosition);
updateDropLine(gStartPosition);
// TODO: play sounds again when we aren't leaking AudioInjector threads
//Audio.playSound(grabSound, { position: entityProperties.position, volume: VOLUME });
@ -231,6 +238,8 @@ function mouseReleaseEvent() {
}
gIsGrabbing = false
Entities.deleteAction(gGrabbedEntity, gActionID);
gActionID = null;
Overlays.editOverlay(gBeacon, { visible: false });
@ -250,18 +259,24 @@ function mouseMoveEvent(event) {
gOriginalGravity = entityProperties.gravity;
}
var actionArgs = {};
if (gGrabMode === "rotate") {
var deltaMouse = { x: 0, y: 0 };
var dx = event.x - gPreviousMouse.x;
var dy = event.y - gPreviousMouse.y;
var dx = event.x - gInitialMouse.x;
var dy = event.y - gInitialMouse.y;
var orientation = Camera.getOrientation();
var dragOffset = Vec3.multiply(dx, Quat.getRight(orientation));
dragOffset = Vec3.sum(dragOffset, Vec3.multiply(-dy, Quat.getUp(orientation)));
var axis = Vec3.cross(dragOffset, Quat.getFront(orientation));
var axis = Vec3.normalize(axis);
var ROTATE_STRENGTH = 8.0; // magic number tuned by hand
gAngularVelocity = Vec3.multiply(ROTATE_STRENGTH, axis);
axis = Vec3.normalize(axis);
var ROTATE_STRENGTH = 0.4; // magic number tuned by hand
var angle = ROTATE_STRENGTH * Math.sqrt((dx * dx) + (dy * dy));
var deltaQ = Quat.angleAxis(angle, axis);
// var qZero = entityProperties.rotation;
var qZero = gStartRotation;
var qOne = Quat.multiply(deltaQ, qZero);
actionArgs = {targetRotation: qOne, angularTimeScale: 0.1};
} else {
var newTargetPosition;
if (gGrabMode === "verticalCylinder") {
@ -284,9 +299,18 @@ function mouseMoveEvent(event) {
}
}
gTargetPosition = Vec3.sum(newTargetPosition, gGrabOffset);
actionArgs = {targetPosition: gTargetPosition, linearTimeScale: 0.1};
}
gPreviousMouse = { x: event.x, y: event.y };
gMouseCursorLocation = { x: Window.getCursorPositionX(), y: Window.getCursorPositionY() };
if (!gActionID) {
gActionID = Entities.addAction("spring", gGrabbedEntity, actionArgs);
} else {
Entities.updateAction(gGrabbedEntity, gActionID, actionArgs);
}
updateDropLine(gTargetPosition);
}
function keyReleaseEvent(event) {
@ -309,38 +333,8 @@ function keyPressEvent(event) {
computeNewGrabPlane();
}
function update(deltaTime) {
if (!gIsGrabbing) {
return;
}
var entityProperties = Entities.getEntityProperties(gGrabbedEntity);
gCurrentPosition = entityProperties.position;
if (gGrabMode === "rotate") {
gAngularVelocity = Vec3.subtract(gAngularVelocity, Vec3.multiply(gAngularVelocity, ANGULAR_DAMPING_RATE));
Entities.editEntity(gGrabbedEntity, { angularVelocity: gAngularVelocity, });
}
// always push toward linear grab position, even when rotating
var newVelocity = ZERO_VEC3;
var dPosition = Vec3.subtract(gTargetPosition, gCurrentPosition);
var delta = Vec3.length(dPosition);
if (delta > CLOSE_ENOUGH) {
var MAX_POSITION_DELTA = 4.0;
if (delta > MAX_POSITION_DELTA) {
dPosition = Vec3.multiply(dPosition, MAX_POSITION_DELTA / delta);
}
// desired speed is proportional to displacement by the inverse of timescale
// (for critically damped motion)
newVelocity = Vec3.multiply(dPosition, INV_MOVE_TIMESCALE);
}
Entities.editEntity(gGrabbedEntity, { velocity: newVelocity, });
updateDropLine(gTargetPosition);
}
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Controller.keyPressEvent.connect(keyPressEvent);
Controller.keyReleaseEvent.connect(keyReleaseEvent);
Script.update.connect(update);
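
The grab.js rewrite above drops the per-frame velocity updates and instead drives the grabbed entity with a physics action: a single "spring" action is created lazily on the first mouse move, updated on every subsequent move, and deleted on release. A condensed sketch of that lifecycle, using the Entities action calls exactly as they appear in this diff; the surrounding state and handler names are illustrative:

var gGrabbedEntity = null; // illustrative state
var gActionID = null;

function onGrabMove(targetPosition) {
    var actionArgs = { targetPosition: targetPosition, linearTimeScale: 0.1 };
    // rotate mode would instead pass { targetRotation: ..., angularTimeScale: 0.1 }
    if (!gActionID) {
        gActionID = Entities.addAction("spring", gGrabbedEntity, actionArgs);
    } else {
        Entities.updateAction(gGrabbedEntity, gActionID, actionArgs);
    }
}

function onGrabRelease() {
    Entities.deleteAction(gGrabbedEntity, gActionID);
    gActionID = null;
}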

View file

@ -360,6 +360,10 @@
var elVoxelVolumeSizeZ = document.getElementById("property-voxel-volume-size-z");
var elVoxelSurfaceStyle = document.getElementById("property-voxel-surface-style");
var elHyperlinkHref = document.getElementById("property-hyperlink-href");
var elHyperlinkDescription = document.getElementById("property-hyperlink-description");
if (window.EventBridge !== undefined) {
EventBridge.scriptEventReceived.connect(function(data) {
@ -467,6 +471,9 @@
elScriptURL.value = properties.script;
elUserData.value = properties.userData;
elHyperlinkHref.value = properties.href;
elHyperlinkDescription.value = properties.description;
for (var i = 0; i < allSections.length; i++) {
for (var j = 0; j < allSections[i].length; j++) {
allSections[i][j].style.display = 'none';
@ -612,6 +619,8 @@
elLocked.addEventListener('change', createEmitCheckedPropertyUpdateFunction('locked'));
elName.addEventListener('change', createEmitTextPropertyUpdateFunction('name'));
elHyperlinkHref.addEventListener('change', createEmitTextPropertyUpdateFunction('href'));
elHyperlinkDescription.addEventListener('change', createEmitTextPropertyUpdateFunction('description'));
elVisible.addEventListener('change', createEmitCheckedPropertyUpdateFunction('visible'));
var positionChangeFunction = createEmitVec3PropertyUpdateFunction(
@ -850,7 +859,6 @@
elVoxelVolumeSizeZ.addEventListener('change', voxelVolumeSizeChangeFunction);
elVoxelSurfaceStyle.addEventListener('change', createEmitTextPropertyUpdateFunction('voxelSurfaceStyle'));
elMoveSelectionToGrid.addEventListener("click", function() {
EventBridge.emitWebEvent(JSON.stringify({
type: "action",
@ -937,6 +945,18 @@
<input type="text" id="property-name"></input>
</div>
</div>
<div class="property">
<div class="label">Hyperlink</div>
<div class="input-area">Href<br></div>
<div class="value">
<input id="property-hyperlink-href" class="url"></input>
</div>
<div class="input-area">Description<br></div> <div class="value">
<input id="property-hyperlink-description" class="url"></input>
</div>
</div>
<div class="property">
<span class="label">Locked</span>
<span class="value">

View file

@ -304,7 +304,8 @@ SelectionDisplay = (function () {
visible: false,
dashed: true,
lineWidth: 2.0,
ignoreRayIntersection: true // this never ray intersects
ignoreRayIntersection: true, // this never ray intersects
drawInFront: true
});
var selectionBox = Overlays.addOverlay("cube", {

View file

@ -3,6 +3,7 @@
// examples
//
// Created by Clément Brisset on 5/7/14.
// Persistable drag position by HRS 6/11/15.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
@ -236,6 +237,7 @@ ToolBar = function(x, y, direction) {
y: y - ToolBar.SPACING
});
}
this.save();
}
this.setAlpha = function(alpha, tool) {
@ -313,9 +315,8 @@ ToolBar = function(x, y, direction) {
this.cleanup = function() {
for(var tool in this.tools) {
this.tools[tool].cleanup();
delete this.tools[tool];
}
if (this.back != null) {
Overlays.deleteOverlay(this.back);
this.back = null;
@ -327,7 +328,71 @@ ToolBar = function(x, y, direction) {
this.width = 0;
this.height = 0;
}
var that = this;
this.contains = function (xOrPoint, optionalY) {
var x = (optionalY === undefined) ? xOrPoint.x : xOrPoint,
y = (optionalY === undefined) ? xOrPoint.y : optionalY;
return (that.x <= x) && (x <= (that.x + that.width)) &&
(that.y <= y) && (y <= (that.y + that.height));
}
that.hover = function (enable) {
that.isHovering = enable;
if (that.back) {
if (enable) {
that.oldAlpha = Overlays.getProperty(that.back, 'backgroundAlpha');
}
Overlays.editOverlay(this.back, {
visible: enable,
backgroundAlpha: enable ? 0.5 : that.oldAlpha
});
}
};
// These are currently only doing that which is necessary for toolbar hover and toolbar drag.
// They have not yet been extended to tool hover/click/release, etc.
this.mousePressEvent = function (event) {
if (!that.contains(event)) {
that.mightBeDragging = false;
return;
}
that.mightBeDragging = true;
that.dragOffsetX = that.x - event.x;
that.dragOffsetY = that.y - event.y;
};
this.mouseMove = function (event) {
if (!that.mightBeDragging || !event.isLeftButton) {
that.mightBeDragging = false;
if (!that.contains(event)) {
if (that.isHovering) {
that.hover(false);
}
return;
}
if (!that.isHovering) {
that.hover(true);
}
return;
}
that.move(that.dragOffsetX + event.x, that.dragOffsetY + event.y);
};
Controller.mousePressEvent.connect(this.mousePressEvent);
Controller.mouseMoveEvent.connect(this.mouseMove);
// Called on move. A different approach would be to have all this on the prototype,
// and let apps extend where needed. Ex. app defines its toolbar.move() to call this.__proto__.move and then save.
this.save = function () { };
// This compatibility hack breaks the model, but makes converting existing scripts easier:
this.addOverlay = function (ignored, oldSchoolProperties) {
var properties = JSON.parse(JSON.stringify(oldSchoolProperties)); // a copy
if (that.numberOfTools() === 0) {
that.move(properties.x, properties.y);
}
delete properties.x;
delete properties.y;
var index = that.addTool(properties);
var id = that.tools[index].overlay();
return id;
}
}
ToolBar.SPACING = 4;
ToolBar.VERTICAL = 0;
ToolBar.HORIZONTAL = 1;
ToolBar.HORIZONTAL = 1;
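
The empty save() hook above, and the comment before it, point at an alternative design that this commit does not adopt: keep the shared behaviour on ToolBar.prototype and let each app extend move() to call through and then persist. A hypothetical sketch of that alternative, assuming move() and save() lived on the prototype rather than on each instance:

// Hypothetical: assumes ToolBar's shared move()/save() live on ToolBar.prototype,
// which is not how libraries/toolBars.js is structured in this commit.
var toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL);

toolBar.move = function (x, y) {
    ToolBar.prototype.move.call(this, x, y); // the comment writes this as this.__proto__.move
    this.save(); // persist only after the shared move logic has run
};

toolBar.save = function () {
    Settings.setValue("highfidelity.example.toolbar.position", // illustrative key
        JSON.stringify([this.x, this.y]));
};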

View file

@ -13,7 +13,7 @@
//
Script.include('lineRider.js')
var MAX_POINTS_PER_LINE = 30;
var LINE_LIFETIME = 60 * 5 //5 minute lifetime
var colorPalette = [{
red: 236,
@ -120,10 +120,12 @@ function MousePaint() {
y: 10,
z: 10
},
lineWidth: LINE_WIDTH
lineWidth: LINE_WIDTH,
lifetime: LINE_LIFETIME
});
points = [];
if (point) {
points.push(point);
path.push(point);
}
@ -133,22 +135,22 @@ function MousePaint() {
function mouseMoveEvent(event) {
if (!isDrawing) {
return;
}
var pickRay = Camera.computePickRay(event.x, event.y);
var addVector = Vec3.multiply(Vec3.normalize(pickRay.direction), DRAWING_DISTANCE);
var point = Vec3.sum(Camera.getPosition(), addVector);
points.push(point);
path.push(point);
Entities.editEntity(line, {
linePoints: points
});
Entities.editEntity(brush, {
position: point
});
if (!isDrawing) {
return;
}
points.push(point);
path.push(point);
if (points.length === MAX_POINTS_PER_LINE) {
//We need to start a new line!
@ -253,7 +255,6 @@ function HydraPaint() {
var maxLineWidth = 10;
var currentLineWidth = minLineWidth;
var MIN_PAINT_TRIGGER_THRESHOLD = .01;
var LINE_LIFETIME = 20;
var COLOR_CHANGE_TIME_FACTOR = 0.1;
var RIGHT_BUTTON_1 = 7
@ -330,7 +331,7 @@ function HydraPaint() {
z: 10
},
lineWidth: 5,
// lifetime: LINE_LIFETIME
lifetime: LINE_LIFETIME
});
this.points = [];
if (point) {
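
The change above gives mouse-drawn lines a finite lifetime (five minutes) instead of leaving them in the domain forever, and the Hydra variant now reuses the same constant. A minimal sketch of creating such a line entity with the properties that appear in this diff; the "Line" entity type is assumed, since the addEntity call's type sits outside the hunk:

var LINE_LIFETIME = 60 * 5; // 5 minute lifetime, as in the diff

var line = Entities.addEntity({
    type: "Line", // assumed; only the properties below are visible in the hunk
    position: MyAvatar.position,
    dimensions: { x: 10, y: 10, z: 10 },
    linePoints: [],
    lineWidth: 7, // illustrative width
    lifetime: LINE_LIFETIME // seconds until the entity is automatically deleted
});

// Points are then appended as the mouse moves:
Entities.editEntity(line, { linePoints: [{ x: 0, y: 1, z: 0 }] });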

View file

@ -3,6 +3,7 @@
//
// Created by Seth Alves on May 15th
// Modified by Eric Levin on June 4
// Persist toolbar by HRS 6/11/15.
// Copyright 2015 High Fidelity, Inc.
//
// Provides a pointer with option to draw on surfaces
@ -31,9 +32,16 @@ HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
var screenSize = Controller.getViewportDimensions();
var userCanPoint = false;
var pointerButton = Overlays.addOverlay("image", {
x: screenSize.x / 2 - BUTTON_SIZE * 2 + PADDING,
y: screenSize.y - (BUTTON_SIZE + PADDING),
Script.include(["libraries/toolBars.js"]);
const persistKey = "highfidelity.pointer.toolbar.position";
var toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL);
toolBar.save = function () {
Settings.setValue(persistKey, JSON.stringify([toolBar.x, toolBar.y]));
};
var old = JSON.parse(Settings.getValue(persistKey) || '0');
var pointerButton = toolBar.addOverlay("image", {
x: old ? old[0] : screenSize.x / 2 - BUTTON_SIZE * 2 + PADDING,
y: old ? old[1] : screenSize.y - (BUTTON_SIZE + PADDING),
width: BUTTON_SIZE,
height: BUTTON_SIZE,
imageURL: HIFI_PUBLIC_BUCKET + "images/laser.png",
@ -150,4 +158,4 @@ Script.scriptEnding.connect(cleanup);
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);

examples/stick.js Normal file (92 lines)
View file

@ -0,0 +1,92 @@
// stick.js
// examples
//
// Created by Seth Alves on 2015-6-10
// Copyright 2015 High Fidelity, Inc.
//
// Allow avatar to hold a stick
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var hand = "left";
var nullActionID = "00000000-0000-0000-0000-000000000000";
var controllerID;
var controllerActive;
var stickID = null;
var actionID = nullActionID;
// sometimes if this is run immediately the stick doesn't get created? use a timer.
Script.setTimeout(function() {
stickID = Entities.addEntity({
type: "Model",
modelURL: "https://hifi-public.s3.amazonaws.com/eric/models/stick.fbx",
compoundShapeURL: "https://hifi-public.s3.amazonaws.com/eric/models/stick.obj",
dimensions: {x: .11, y: .11, z: .59},
position: MyAvatar.getRightPalmPosition(), // initial position doesn't matter, as long as it's close
rotation: MyAvatar.orientation,
damping: .1,
collisionsWillMove: true
});
actionID = Entities.addAction("hold", stickID, {relativePosition: {x: 0.0, y: 0.0, z: -0.9},
hand: hand,
timeScale: 0.15});
}, 3000);
function cleanUp() {
Entities.deleteEntity(stickID);
}
function positionStick(stickOrientation) {
var baseOffset = {x: 0.0, y: 0.0, z: -0.9};
var offset = Vec3.multiplyQbyV(stickOrientation, baseOffset);
Entities.updateAction(stickID, actionID, {relativePosition: offset,
relativeRotation: stickOrientation});
}
function mouseMoveEvent(event) {
if (!stickID || actionID == nullActionID) {
return;
}
var windowCenterX = Window.innerWidth / 2;
var windowCenterY = Window.innerHeight / 2;
var mouseXCenterOffset = event.x - windowCenterX;
var mouseYCenterOffset = event.y - windowCenterY;
var mouseXRatio = mouseXCenterOffset / windowCenterX;
var mouseYRatio = mouseYCenterOffset / windowCenterY;
var stickOrientation = Quat.fromPitchYawRollDegrees(mouseYRatio * -90, mouseXRatio * -90, 0);
positionStick(stickOrientation);
}
function initControls(){
if (hand == "right") {
controllerID = 3; // right handed
} else {
controllerID = 4; // left handed
}
}
function update(deltaTime){
var palmPosition = Controller.getSpatialControlPosition(controllerID);
controllerActive = (Vec3.length(palmPosition) > 0);
if(!controllerActive){
return;
}
stickOrientation = Controller.getSpatialControlRawRotation(controllerID);
var adjustment = Quat.fromPitchYawRollDegrees(180, 0, 0);
stickOrientation = Quat.multiply(stickOrientation, adjustment);
positionStick(stickOrientation);
}
Script.scriptEnding.connect(cleanUp);
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Script.update.connect(update);

Binary file not shown. (new image, 6.2 KiB)

Binary file not shown. (new image, 4.6 KiB)

Binary file not shown.

Binary file not shown.

View file

@ -57,6 +57,7 @@
#include <AccountManager.h>
#include <AddressManager.h>
#include <CursorManager.h>
#include <AmbientOcclusionEffect.h>
#include <AudioInjector.h>
#include <DeferredLightingEffect.h>
@ -100,6 +101,7 @@
#include "ModelPackager.h"
#include "Util.h"
#include "InterfaceLogging.h"
#include "InterfaceActionFactory.h"
#include "avatar/AvatarManager.h"
@ -257,6 +259,7 @@ bool setupEssentials(int& argc, char** argv) {
DependencyManager::registerInheritance<LimitedNodeList, NodeList>();
DependencyManager::registerInheritance<AvatarHashMap, AvatarManager>();
DependencyManager::registerInheritance<EntityActionFactoryInterface, InterfaceActionFactory>();
Setting::init();
@ -293,7 +296,8 @@ bool setupEssentials(int& argc, char** argv) {
auto discoverabilityManager = DependencyManager::set<DiscoverabilityManager>();
auto sceneScriptingInterface = DependencyManager::set<SceneScriptingInterface>();
auto offscreenUi = DependencyManager::set<OffscreenUi>();
auto pathUtils = DependencyManager::set<PathUtils>();
auto pathUtils = DependencyManager::set<PathUtils>();
auto actionFactory = DependencyManager::set<InterfaceActionFactory>();
return true;
}
@ -336,7 +340,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_enableProcessOctreeThread(true),
_octreeProcessor(),
_nodeBoundsDisplay(this),
_applicationOverlay(),
_runningScriptsWidget(NULL),
_runningScriptsWidgetWasVisible(false),
_trayIcon(new QSystemTrayIcon(_window)),
@ -347,7 +350,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_notifiedPacketVersionMismatchThisDomain(false),
_domainConnectionRefusals(QList<QString>()),
_maxOctreePPS(maxOctreePacketsPerSecond.get()),
_lastFaceTrackerUpdate(0)
_lastFaceTrackerUpdate(0),
_applicationOverlay()
{
setInstance(this);
#ifdef Q_OS_WIN
@ -421,6 +425,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
connect(audioIO.data(), &AudioClient::muteToggled, this, &Application::audioMuteToggled);
connect(audioIO.data(), &AudioClient::receivedFirstPacket,
&AudioScriptingInterface::getInstance(), &AudioScriptingInterface::receivedFirstPacket);
connect(audioIO.data(), &AudioClient::disconnected,
&AudioScriptingInterface::getInstance(), &AudioScriptingInterface::disconnected);
audioThread->start();
@ -524,7 +530,16 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_window->setVisible(true);
_glWidget->setFocusPolicy(Qt::StrongFocus);
_glWidget->setFocus();
#ifdef Q_OS_MAC
// OSX doesn't seem to provide for hiding the cursor only on the GL widget
_window->setCursor(Qt::BlankCursor);
#else
// On windows and linux, hiding the top level cursor also means it's invisible
// when hovering over the window menu, which is a pain, so only hide it for
// the GL surface
_glWidget->setCursor(Qt::BlankCursor);
#endif
// enable mouse tracking; otherwise, we only get drag events
_glWidget->setMouseTracking(true);
@ -942,12 +957,15 @@ void Application::paintGL() {
displaySide(&renderArgs, _myCamera);
glPopMatrix();
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
if (Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror)) {
_rearMirrorTools->render(&renderArgs, true, _glWidget->mapFromGlobal(QCursor::pos()));
} else if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
renderRearViewMirror(&renderArgs, _mirrorViewRect);
}
renderArgs._renderMode = RenderArgs::NORMAL_RENDER_MODE;
auto finalFbo = DependencyManager::get<GlowEffect>()->render(&renderArgs);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
@ -1228,9 +1246,20 @@ void Application::keyPressEvent(QKeyEvent* event) {
}
break;
case Qt::Key_Apostrophe:
resetSensors();
case Qt::Key_Apostrophe: {
if (isMeta) {
auto cursor = Cursor::Manager::instance().getCursor();
auto curIcon = cursor->getIcon();
if (curIcon == Cursor::Icon::DEFAULT) {
cursor->setIcon(Cursor::Icon::LINK);
} else {
cursor->setIcon(Cursor::Icon::DEFAULT);
}
} else {
resetSensors();
}
break;
}
case Qt::Key_A:
if (isShifted) {
@ -1354,12 +1383,27 @@ void Application::keyPressEvent(QKeyEvent* event) {
case Qt::Key_Slash:
Menu::getInstance()->triggerOption(MenuOption::Stats);
break;
case Qt::Key_Plus:
_myAvatar->increaseSize();
case Qt::Key_Plus: {
if (isMeta && event->modifiers().testFlag(Qt::KeypadModifier)) {
auto& cursorManager = Cursor::Manager::instance();
cursorManager.setScale(cursorManager.getScale() * 1.1f);
} else {
_myAvatar->increaseSize();
}
break;
case Qt::Key_Minus:
_myAvatar->decreaseSize();
}
case Qt::Key_Minus: {
if (isMeta && event->modifiers().testFlag(Qt::KeypadModifier)) {
auto& cursorManager = Cursor::Manager::instance();
cursorManager.setScale(cursorManager.getScale() / 1.1f);
} else {
_myAvatar->decreaseSize();
}
break;
}
case Qt::Key_Equal:
_myAvatar->resetSize();
break;
@ -1895,8 +1939,6 @@ void Application::setEnableVRMode(bool enableVRMode) {
}
resizeGL();
updateCursorVisibility();
}
void Application::setLowVelocityFilter(bool lowVelocityFilter) {
@ -2398,19 +2440,8 @@ void Application::updateCursor(float deltaTime) {
lastMousePos = QCursor::pos();
}
void Application::updateCursorVisibility() {
if (!_cursorVisible ||
Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode) ||
Menu::getInstance()->isOptionChecked(MenuOption::Enable3DTVMode)) {
_window->setCursor(Qt::BlankCursor);
} else {
_window->unsetCursor();
}
}
void Application::setCursorVisible(bool visible) {
_cursorVisible = visible;
updateCursorVisibility();
}
void Application::update(float deltaTime) {
@ -3409,7 +3440,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
"Application::displaySide() ... entities...");
RenderArgs::DebugFlags renderDebugFlags = RenderArgs::RENDER_DEBUG_NONE;
RenderArgs::RenderMode renderMode = RenderArgs::DEFAULT_RENDER_MODE;
if (Menu::getInstance()->isOptionChecked(MenuOption::PhysicsShowHulls)) {
renderDebugFlags = (RenderArgs::DebugFlags) (renderDebugFlags | (int) RenderArgs::RENDER_DEBUG_HULLS);
@ -3418,10 +3448,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
renderDebugFlags =
(RenderArgs::DebugFlags) (renderDebugFlags | (int) RenderArgs::RENDER_DEBUG_SIMULATION_OWNERSHIP);
}
if (theCamera.getMode() == CAMERA_MODE_MIRROR) {
renderMode = RenderArgs::MIRROR_RENDER_MODE;
}
renderArgs->_renderMode = renderMode;
renderArgs->_debugFlags = renderDebugFlags;
_entities.render(renderArgs);
}

View file

@ -149,6 +149,7 @@ public:
static glm::quat getOrientationForPath() { return getInstance()->_myAvatar->getOrientation(); }
static glm::vec3 getPositionForAudio() { return getInstance()->_myAvatar->getHead()->getPosition(); }
static glm::quat getOrientationForAudio() { return getInstance()->_myAvatar->getHead()->getFinalOrientationInWorldFrame(); }
static UserInputMapper* getUserInputMapper() { return &getInstance()->_userInputMapper; }
static void initPlugins();
static void shutdownPlugins();
@ -475,8 +476,6 @@ private:
void updateProjectionMatrix();
void updateProjectionMatrix(Camera& camera, bool updateViewFrustum = true);
void updateCursorVisibility();
void sendPingPackets();
void initDisplay();

View file

@ -0,0 +1,49 @@
//
// InterfaceActionFactory.cpp
// libraries/entities/src
//
// Created by Seth Alves on 2015-6-2
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <avatar/AvatarActionHold.h>
#include <ObjectActionPullToPoint.h>
#include <ObjectActionSpring.h>
#include "InterfaceActionFactory.h"
EntityActionPointer InterfaceActionFactory::factory(EntitySimulation* simulation,
EntityActionType type,
QUuid id,
EntityItemPointer ownerEntity,
QVariantMap arguments) {
EntityActionPointer action = nullptr;
switch (type) {
case ACTION_TYPE_NONE:
return nullptr;
case ACTION_TYPE_PULL_TO_POINT:
action = (EntityActionPointer) new ObjectActionPullToPoint(id, ownerEntity);
break;
case ACTION_TYPE_SPRING:
action = (EntityActionPointer) new ObjectActionSpring(id, ownerEntity);
break;
case ACTION_TYPE_HOLD:
action = (EntityActionPointer) new AvatarActionHold(id, ownerEntity);
break;
}
bool ok = action->updateArguments(arguments);
if (ok) {
ownerEntity->addAction(simulation, action);
return action;
}
action = nullptr;
return action;
}

View file

@ -0,0 +1,28 @@
//
// InterfaceActionFactory.cpp
// interface/src/
//
// Created by Seth Alves on 2015-6-10
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_InterfaceActionFactory_h
#define hifi_InterfaceActionFactory_h
#include "EntityActionFactoryInterface.h"
class InterfaceActionFactory : public EntityActionFactoryInterface {
public:
InterfaceActionFactory() : EntityActionFactoryInterface() { }
virtual ~InterfaceActionFactory() { }
virtual EntityActionPointer factory(EntitySimulation* simulation,
EntityActionType type,
QUuid id,
EntityItemPointer ownerEntity,
QVariantMap arguments);
};
#endif // hifi_InterfaceActionFactory_h

View file

@ -107,6 +107,9 @@ Avatar::Avatar() :
}
Avatar::~Avatar() {
for(auto attachment : _unusedAttachments) {
delete attachment;
}
}
const float BILLBOARD_LOD_DISTANCE = 40.0f;
@ -298,12 +301,21 @@ bool Avatar::addToScene(AvatarSharedPointer self, std::shared_ptr<render::Scene>
pendingChanges.resetItem(_renderItemID, avatarPayloadPointer);
_skeletonModel.addToScene(scene, pendingChanges);
getHead()->getFaceModel().addToScene(scene, pendingChanges);
for (auto attachmentModel : _attachmentModels) {
attachmentModel->addToScene(scene, pendingChanges);
}
return true;
}
void Avatar::removeFromScene(AvatarSharedPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
pendingChanges.removeItem(_renderItemID);
_skeletonModel.removeFromScene(scene, pendingChanges);
getHead()->getFaceModel().removeFromScene(scene, pendingChanges);
for (auto attachmentModel : _attachmentModels) {
attachmentModel->removeFromScene(scene, pendingChanges);
}
}
void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, bool postLighting) {
@ -316,6 +328,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
if (postLighting &&
glm::distance(DependencyManager::get<AvatarManager>()->getMyAvatar()->getPosition(), _position) < 10.0f) {
auto geometryCache = DependencyManager::get<GeometryCache>();
auto deferredLighting = DependencyManager::get<DeferredLightingEffect>();
// render pointing lasers
glm::vec3 laserColor = glm::vec3(1.0f, 0.0f, 1.0f);
@ -342,6 +355,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
pointerTransform.setTranslation(position);
pointerTransform.setRotation(rotation);
batch->setModelTransform(pointerTransform);
deferredLighting->bindSimpleProgram(*batch);
geometryCache->renderLine(*batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, laserLength, 0.0f), laserColor);
}
}
@ -364,6 +378,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
pointerTransform.setTranslation(position);
pointerTransform.setRotation(rotation);
batch->setModelTransform(pointerTransform);
deferredLighting->bindSimpleProgram(*batch);
geometryCache->renderLine(*batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, laserLength, 0.0f), laserColor);
}
}
@ -450,7 +465,8 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
Transform transform;
transform.setTranslation(position);
batch->setModelTransform(transform);
DependencyManager::get<GeometryCache>()->renderSphere(*batch, LOOK_AT_INDICATOR_RADIUS, 15, 15, LOOK_AT_INDICATOR_COLOR);
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(*batch, LOOK_AT_INDICATOR_RADIUS
, 15, 15, LOOK_AT_INDICATOR_COLOR);
}
}
@ -482,6 +498,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
_voiceSphereID = DependencyManager::get<GeometryCache>()->allocateID();
}
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(*batch);
DependencyManager::get<GeometryCache>()->renderSphere(*batch, sphereRadius, 15, 15,
glm::vec4(SPHERE_COLOR[0], SPHERE_COLOR[1], SPHERE_COLOR[2], 1.0f - angle / MAX_SPHERE_ANGLE), true,
_voiceSphereID);
@ -516,7 +533,7 @@ glm::quat Avatar::computeRotationFromBodyToWorldUp(float proportion) const {
return glm::angleAxis(angle * proportion, axis);
}
void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel) {
void Avatar::fixupModelsInScene() {
// check to see if when we added our models to the scene they were ready, if they were not ready, then
// fix them up in the scene
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
@ -529,8 +546,24 @@ void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool
getHead()->getFaceModel().removeFromScene(scene, pendingChanges);
getHead()->getFaceModel().addToScene(scene, pendingChanges);
}
for (auto attachmentModel : _attachmentModels) {
if (attachmentModel->needsFixupInScene()) {
attachmentModel->removeFromScene(scene, pendingChanges);
attachmentModel->addToScene(scene, pendingChanges);
}
}
for (auto attachmentModelToRemove : _attachmentsToRemove) {
attachmentModelToRemove->removeFromScene(scene, pendingChanges);
_unusedAttachments << attachmentModelToRemove;
}
_attachmentsToRemove.clear();
scene->enqueuePendingChanges(pendingChanges);
}
void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel) {
fixupModelsInScene();
{
Glower glower(renderArgs, glowLevel);
@ -544,10 +577,6 @@ void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool
if (postLighting) {
getHand()->render(renderArgs, false);
} else {
// NOTE: we no longer call this here, because we've added all the model parts as renderable items in the scene
//_skeletonModel.render(renderArgs, 1.0f);
renderAttachments(renderArgs);
}
}
getHead()->render(renderArgs, 1.0f, renderFrustum, postLighting);
@ -571,22 +600,14 @@ void Avatar::simulateAttachments(float deltaTime) {
_skeletonModel.getJointCombinedRotation(jointIndex, jointRotation)) {
model->setTranslation(jointPosition + jointRotation * attachment.translation * _scale);
model->setRotation(jointRotation * attachment.rotation);
model->setScaleToFit(true, _scale * attachment.scale);
model->setScaleToFit(true, _scale * attachment.scale, true); // hack to force rescale
model->setSnapModelToCenter(false); // hack to force resnap
model->setSnapModelToCenter(true);
model->simulate(deltaTime);
}
}
}
void Avatar::renderAttachments(RenderArgs* args) {
// RenderArgs::RenderMode modelRenderMode = (renderMode == RenderArgs::SHADOW_RENDER_MODE) ?
// RenderArgs::SHADOW_RENDER_MODE : RenderArgs::DEFAULT_RENDER_MODE;
/*
foreach (Model* model, _attachmentModels) {
model->render(args, 1.0f);
}
*/
}
void Avatar::updateJointMappings() {
// no-op; joint mappings come from skeleton model
}
@ -944,12 +965,18 @@ void Avatar::setAttachmentData(const QVector<AttachmentData>& attachmentData) {
}
// make sure we have as many models as attachments
while (_attachmentModels.size() < attachmentData.size()) {
Model* model = new Model(this);
Model* model = nullptr;
if (_unusedAttachments.size() > 0) {
model = _unusedAttachments.takeFirst();
} else {
model = new Model(this);
}
model->init();
_attachmentModels.append(model);
}
while (_attachmentModels.size() > attachmentData.size()) {
delete _attachmentModels.takeLast();
auto attachmentModel = _attachmentModels.takeLast();
_attachmentsToRemove << attachmentModel;
}
// update the urls

View file

@ -195,6 +195,8 @@ protected:
SkeletonModel _skeletonModel;
glm::vec3 _skeletonOffset;
QVector<Model*> _attachmentModels;
QVector<Model*> _attachmentsToRemove;
QVector<Model*> _unusedAttachments;
float _bodyYawDelta;
// These position histories and derivatives are in the world-frame.
@ -234,9 +236,9 @@ protected:
void renderDisplayName(RenderArgs* renderArgs);
virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel = 0.0f);
virtual bool shouldRenderHead(const RenderArgs* renderArgs, const glm::vec3& cameraPosition) const;
virtual void fixupModelsInScene();
void simulateAttachments(float deltaTime);
virtual void renderAttachments(RenderArgs* args);
virtual void updateJointMappings();

View file

@ -0,0 +1,108 @@
//
// AvatarActionHold.cpp
// interface/src/avatar/
//
// Created by Seth Alves 2015-6-9
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "avatar/MyAvatar.h"
#include "avatar/AvatarManager.h"
#include "AvatarActionHold.h"
AvatarActionHold::AvatarActionHold(QUuid id, EntityItemPointer ownerEntity) :
ObjectActionSpring(id, ownerEntity) {
#if WANT_DEBUG
qDebug() << "AvatarActionHold::AvatarActionHold";
#endif
}
AvatarActionHold::~AvatarActionHold() {
#if WANT_DEBUG
qDebug() << "AvatarActionHold::~AvatarActionHold";
#endif
}
void AvatarActionHold::updateActionWorker(float deltaTimeStep) {
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::vec3 palmPosition;
if (_hand == "right") {
palmPosition = myAvatar->getRightPalmPosition();
} else {
palmPosition = myAvatar->getLeftPalmPosition();
}
auto rotation = myAvatar->getWorldAlignedOrientation();
auto offset = rotation * _relativePosition;
auto position = palmPosition + offset;
rotation *= _relativeRotation;
lockForWrite();
_positionalTarget = position;
_rotationalTarget = rotation;
unlock();
ObjectActionSpring::updateActionWorker(deltaTimeStep);
}
bool AvatarActionHold::updateArguments(QVariantMap arguments) {
bool rPOk = true;
glm::vec3 relativePosition =
EntityActionInterface::extractVec3Argument("hold", arguments, "relativePosition", rPOk, false);
bool rROk = true;
glm::quat relativeRotation =
EntityActionInterface::extractQuatArgument("hold", arguments, "relativeRotation", rROk, false);
bool tSOk = true;
float timeScale =
EntityActionInterface::extractFloatArgument("hold", arguments, "timeScale", tSOk, false);
bool hOk = true;
QString hand =
EntityActionInterface::extractStringArgument("hold", arguments, "hand", hOk, false);
lockForWrite();
if (rPOk) {
_relativePosition = relativePosition;
} else if (!_parametersSet) {
_relativePosition = glm::vec3(0.0f, 0.0f, 1.0f);
}
if (rROk) {
_relativeRotation = relativeRotation;
} else if (!_parametersSet) {
_relativeRotation = glm::quat(0.0f, 0.0f, 0.0f, 1.0f);
}
if (tSOk) {
_linearTimeScale = timeScale;
_angularTimeScale = timeScale;
} else if (!_parametersSet) {
_linearTimeScale = 0.2;
_angularTimeScale = 0.2;
}
if (hOk) {
hand = hand.toLower();
if (hand == "left") {
_hand = "left";
} else if (hand == "right") {
_hand = "right";
} else {
qDebug() << "hold action -- invalid hand argument:" << hand;
_hand = "right";
}
} else if (!_parametersSet) {
_hand = "right";
}
_parametersSet = true;
_positionalTargetSet = true;
_rotationalTargetSet = true;
_active = true;
unlock();
return true;
}
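
updateArguments() above accepts relativePosition, relativeRotation, timeScale, and hand, and only falls back to the defaults the first time the action is configured. From the script side this matches how stick.js (earlier in this commit) creates its "hold" action; the values below are illustrative:

// Illustrative values; the argument names come from updateArguments() above.
var actionID = Entities.addAction("hold", stickID, {
    relativePosition: { x: 0.0, y: 0.0, z: -0.9 }, // offset from the palm
    relativeRotation: { x: 0, y: 0, z: 0, w: 1 },  // optional; defaults apply if never set
    timeScale: 0.15,                               // applied to both the linear and angular time scales
    hand: "left"                                   // "left" or "right"; anything else falls back to "right"
});

// A later partial update keeps the previously set values for the omitted arguments:
Entities.updateAction(stickID, actionID, { relativeRotation: Quat.fromPitchYawRollDegrees(0, 90, 0) });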

View file

@ -0,0 +1,35 @@
//
// AvatarActionHold.h
// interface/src/avatar/
//
// Created by Seth Alves 2015-6-9
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AvatarActionHold_h
#define hifi_AvatarActionHold_h
#include <QUuid>
#include <EntityItem.h>
#include <ObjectActionSpring.h>
class AvatarActionHold : public ObjectActionSpring {
public:
AvatarActionHold(QUuid id, EntityItemPointer ownerEntity);
virtual ~AvatarActionHold();
virtual bool updateArguments(QVariantMap arguments);
virtual void updateActionWorker(float deltaTimeStep);
private:
glm::vec3 _relativePosition;
glm::quat _relativeRotation;
QString _hand;
bool _parametersSet = false;
};
#endif // hifi_AvatarActionHold_h

View file

@ -55,7 +55,7 @@ AvatarManager::AvatarManager(QObject* parent) :
_avatarFades() {
// register a meta type for the weak pointer we'll use for the owning avatar mixer for each avatar
qRegisterMetaType<QWeakPointer<Node> >("NodeWeakPointer");
_myAvatar = std::shared_ptr<MyAvatar>(new MyAvatar());
_myAvatar = std::make_shared<MyAvatar>();
}
void AvatarManager::init() {
@ -97,9 +97,9 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
// simulate avatars
AvatarHash::iterator avatarIterator = _avatarHash.begin();
while (avatarIterator != _avatarHash.end()) {
Avatar* avatar = reinterpret_cast<Avatar*>(avatarIterator.value().get());
auto avatar = std::dynamic_pointer_cast<Avatar>(avatarIterator.value());
if (avatar == _myAvatar.get() || !avatar->isInitialized()) {
if (avatar == _myAvatar || !avatar->isInitialized()) {
// DO NOT update _myAvatar! Its update has already been done earlier in the main loop.
// DO NOT update or fade out uninitialized Avatars
++avatarIterator;
@ -121,26 +121,30 @@ void AvatarManager::simulateAvatarFades(float deltaTime) {
const float SHRINK_RATE = 0.9f;
const float MIN_FADE_SCALE = 0.001f;
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
while (fadingIterator != _avatarFades.end()) {
Avatar* avatar = static_cast<Avatar*>(fadingIterator->get());
auto avatar = std::static_pointer_cast<Avatar>(*fadingIterator);
avatar->setTargetScale(avatar->getScale() * SHRINK_RATE, true);
if (avatar->getTargetScale() < MIN_FADE_SCALE) {
avatar->removeFromScene(*fadingIterator, scene, pendingChanges);
fadingIterator = _avatarFades.erase(fadingIterator);
} else {
avatar->simulate(deltaTime);
++fadingIterator;
}
}
scene->enqueuePendingChanges(pendingChanges);
}
AvatarSharedPointer AvatarManager::newSharedAvatar() {
return AvatarSharedPointer(new Avatar());
return AvatarSharedPointer(std::make_shared<Avatar>());
}
// virtual
AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) {
std::shared_ptr<Avatar> avatar = std::dynamic_pointer_cast<Avatar>(AvatarHashMap::addAvatar(sessionUUID, mixerWeakPointer));
auto avatar = std::dynamic_pointer_cast<Avatar>(AvatarHashMap::addAvatar(sessionUUID, mixerWeakPointer));
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
avatar->addToScene(avatar, scene, pendingChanges);
@ -171,10 +175,6 @@ void AvatarManager::removeAvatar(const QUuid& sessionUUID) {
_avatarFades.push_back(avatarIterator.value());
_avatarHash.erase(avatarIterator);
}
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
avatar->removeFromScene(avatar, scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
}
}
@ -182,12 +182,12 @@ void AvatarManager::clearOtherAvatars() {
// clear any avatars that came from an avatar-mixer
AvatarHash::iterator avatarIterator = _avatarHash.begin();
while (avatarIterator != _avatarHash.end()) {
Avatar* avatar = reinterpret_cast<Avatar*>(avatarIterator.value().get());
if (avatar == _myAvatar.get() || !avatar->isInitialized()) {
auto avatar = std::static_pointer_cast<Avatar>(avatarIterator.value());
if (avatar == _myAvatar || !avatar->isInitialized()) {
// don't remove myAvatar or uninitialized avatars from the list
++avatarIterator;
} else {
removeAvatarMotionState(avatar);
removeAvatarMotionState(avatar.get());
_avatarFades.push_back(avatarIterator.value());
avatarIterator = _avatarHash.erase(avatarIterator);
}
@ -250,7 +250,7 @@ void AvatarManager::handleCollisionEvents(CollisionEvents& collisionEvents) {
void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
AvatarHash::iterator avatarItr = _avatarHash.find(id);
if (avatarItr != _avatarHash.end()) {
Avatar* avatar = static_cast<Avatar*>(avatarItr.value().get());
auto avatar = std::static_pointer_cast<Avatar>(avatarItr.value());
AvatarMotionState* motionState = avatar->_motionState;
if (motionState) {
motionState->addDirtyFlags(EntityItem::DIRTY_SHAPE);
@ -259,7 +259,7 @@ void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
avatar->computeShapeInfo(shapeInfo);
btCollisionShape* shape = ObjectMotionState::getShapeManager()->getShape(shapeInfo);
if (shape) {
AvatarMotionState* motionState = new AvatarMotionState(avatar, shape);
AvatarMotionState* motionState = new AvatarMotionState(avatar.get(), shape);
avatar->_motionState = motionState;
_motionStatesToAdd.insert(motionState);
_avatarMotionStates.insert(motionState);
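
Several hunks in this file replace raw casts of `.get()` with `std::static_pointer_cast`/`std::dynamic_pointer_cast` on the stored shared pointers. A minimal standalone sketch (assumed simplified stand-ins, not the engine's Avatar classes) of why casting the shared pointer itself is preferable:

```cpp
// Minimal sketch, assuming simplified stand-ins for the Avatar classes.
#include <iostream>
#include <memory>

struct AvatarData { virtual ~AvatarData() = default; };
struct Avatar : AvatarData { bool initialized = true; };

int main() {
    std::shared_ptr<AvatarData> stored = std::make_shared<Avatar>();   // what the hash holds
    std::shared_ptr<Avatar> myAvatar = std::make_shared<Avatar>();

    // dynamic_pointer_cast yields a shared_ptr<Avatar> that shares ownership with
    // 'stored' (empty if the object is not actually an Avatar), so it can be compared
    // against another shared_ptr directly instead of reinterpret_cast on .get().
    auto avatar = std::dynamic_pointer_cast<Avatar>(stored);
    if (avatar && avatar != myAvatar && avatar->initialized) {
        std::cout << "simulate this avatar\n";
    }
    return 0;
}
```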

View file

@ -1178,44 +1178,10 @@ void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bo
// check to see if when we added our models to the scene they were ready, if they were not ready, then
// fix them up in the scene
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
render::PendingChanges pendingChanges;
if (_skeletonModel.needsFixupInScene()) {
_skeletonModel.removeFromScene(scene, pendingChanges);
_skeletonModel.addToScene(scene, pendingChanges);
}
if (getHead()->getFaceModel().needsFixupInScene()) {
getHead()->getFaceModel().removeFromScene(scene, pendingChanges);
getHead()->getFaceModel().addToScene(scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
fixupModelsInScene();
Camera *camera = Application::getInstance()->getCamera();
const glm::vec3 cameraPos = camera->getPosition();
const glm::vec3 cameraPos = Application::getInstance()->getCamera()->getPosition();
    // HACK: this block is commented out because it possibly changes the near clip and breaks the rendering 5/6/2015
// Only tweak the frustum near far if it's not shadow
/* if (renderMode != RenderArgs::SHADOW_RENDER_MODE) {
// Set near clip distance according to skeleton model dimensions if first person and there is no separate head model.
if (shouldRenderHead(cameraPos, renderMode) || !getHead()->getFaceModel().getURL().isEmpty()) {
renderFrustum->setNearClip(DEFAULT_NEAR_CLIP);
} else {
float clipDistance = _skeletonModel.getHeadClipDistance();
clipDistance = glm::length(getEyePosition()
+ camera->getOrientation() * glm::vec3(0.0f, 0.0f, -clipDistance) - cameraPos);
renderFrustum->setNearClip(clipDistance);
}
}*/
// Render the body's voxels and head
if (!postLighting) {
// NOTE: we no longer call this here, because we've added all the model parts as renderable items in the scene
//_skeletonModel.render(renderArgs, 1.0f);
renderAttachments(renderArgs);
}
// Render head so long as the camera isn't inside it
if (shouldRenderHead(renderArgs, cameraPos)) {
getHead()->render(renderArgs, 1.0f, renderFrustum, postLighting);
@ -1571,27 +1537,6 @@ void MyAvatar::updateMotionBehavior() {
_feetTouchFloor = menu->isOptionChecked(MenuOption::ShiftHipsForIdleAnimations);
}
void MyAvatar::renderAttachments(RenderArgs* args) {
if (Application::getInstance()->getCamera()->getMode() != CAMERA_MODE_FIRST_PERSON || args->_renderMode == RenderArgs::MIRROR_RENDER_MODE) {
Avatar::renderAttachments(args);
return;
}
const FBXGeometry& geometry = _skeletonModel.getGeometry()->getFBXGeometry();
QString headJointName = (geometry.headJointIndex == -1) ? QString() : geometry.joints.at(geometry.headJointIndex).name;
// RenderArgs::RenderMode modelRenderMode = (renderMode == RenderArgs::SHADOW_RENDER_MODE) ?
// RenderArgs::SHADOW_RENDER_MODE : RenderArgs::DEFAULT_RENDER_MODE;
// FIX ME - attachments need to be added to scene too...
/*
for (int i = 0; i < _attachmentData.size(); i++) {
const QString& jointName = _attachmentData.at(i).jointName;
if (jointName != headJointName && jointName != "Head") {
_attachmentModels.at(i)->render(args, 1.0f);
}
}
*/
}
//Renders sixense laser pointers for UI selection with controllers
void MyAvatar::renderLaserPointers() {
const float PALM_TIP_ROD_RADIUS = 0.002f;

View file

@ -197,9 +197,6 @@ public slots:
signals:
void transformChanged();
protected:
virtual void renderAttachments(RenderArgs* args);
private:
// These are made private for MyAvatar so that you will use the "use" methods instead

View file

@ -159,9 +159,25 @@ void KeyboardMouseDevice::registerToUserInputMapper(UserInputMapper& mapper) {
// Grab the current free device ID
_deviceID = mapper.getFreeDeviceID();
auto proxy = UserInputMapper::DeviceProxy::Pointer(new UserInputMapper::DeviceProxy());
auto proxy = UserInputMapper::DeviceProxy::Pointer(new UserInputMapper::DeviceProxy("Keyboard"));
proxy->getButton = [this] (const UserInputMapper::Input& input, int timestamp) -> bool { return this->getButton(input._channel); };
proxy->getAxis = [this] (const UserInputMapper::Input& input, int timestamp) -> float { return this->getAxis(input._channel); };
proxy->getAvailabeInputs = [this] () -> QVector<UserInputMapper::InputPair> {
QVector<UserInputMapper::InputPair> availableInputs;
for (int i = (int) Qt::Key_0; i <= (int) Qt::Key_9; i++) {
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::Key(i)), QKeySequence(Qt::Key(i)).toString()));
}
for (int i = (int) Qt::Key_A; i <= (int) Qt::Key_Z; i++) {
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::Key(i)), QKeySequence(Qt::Key(i)).toString()));
}
availableInputs.append(UserInputMapper::InputPair(makeInput(Qt::Key_Space), QKeySequence(Qt::Key_Space).toString()));
return availableInputs;
};
proxy->resetDeviceBindings = [this, &mapper] () -> bool {
mapper.removeAllInputChannelsForDevice(_deviceID);
this->assignDefaultInputMapping(mapper);
return true;
};
mapper.registerDevice(_deviceID, proxy);
}
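
The keyboard proxy above derives display names for its available inputs from QKeySequence. A small standalone sketch of that lookup (Qt only, no engine types; the key range is illustrative):

```cpp
// Minimal sketch, Qt only; the key range and output are illustrative.
#include <QDebug>
#include <QKeySequence>

int main() {
    // Enumerate a key range and derive a human-readable name for each entry,
    // the same way the keyboard proxy builds its available-inputs list.
    for (int key = Qt::Key_A; key <= Qt::Key_E; ++key) {
        qDebug() << key << QKeySequence(Qt::Key(key)).toString();   // 65 "A", 66 "B", ...
    }
    return 0;
}
```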

View file

@ -14,7 +14,18 @@
#include "DeviceTracker.h"
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-compare"
#endif
#include <glm/glm.hpp>
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif
#include <glm/gtc/quaternion.hpp>
#include <glm/gtc/matrix_transform.hpp>

View file

@ -28,6 +28,94 @@ ControllerScriptingInterface::ControllerScriptingInterface() :
{
}
static int actionMetaTypeId = qRegisterMetaType<UserInputMapper::Action>();
static int inputChannelMetaTypeId = qRegisterMetaType<UserInputMapper::InputChannel>();
static int inputMetaTypeId = qRegisterMetaType<UserInputMapper::Input>();
static int inputPairMetaTypeId = qRegisterMetaType<UserInputMapper::InputPair>();
QScriptValue inputToScriptValue(QScriptEngine* engine, const UserInputMapper::Input& input);
void inputFromScriptValue(const QScriptValue& object, UserInputMapper::Input& input);
QScriptValue inputChannelToScriptValue(QScriptEngine* engine, const UserInputMapper::InputChannel& inputChannel);
void inputChannelFromScriptValue(const QScriptValue& object, UserInputMapper::InputChannel& inputChannel);
QScriptValue actionToScriptValue(QScriptEngine* engine, const UserInputMapper::Action& action);
void actionFromScriptValue(const QScriptValue& object, UserInputMapper::Action& action);
QScriptValue inputPairToScriptValue(QScriptEngine* engine, const UserInputMapper::InputPair& inputPair);
void inputPairFromScriptValue(const QScriptValue& object, UserInputMapper::InputPair& inputPair);
QScriptValue inputToScriptValue(QScriptEngine* engine, const UserInputMapper::Input& input) {
QScriptValue obj = engine->newObject();
obj.setProperty("device", input.getDevice());
obj.setProperty("channel", input.getChannel());
obj.setProperty("type", (unsigned short) input.getType());
obj.setProperty("id", input.getID());
return obj;
}
void inputFromScriptValue(const QScriptValue& object, UserInputMapper::Input& input) {
input.setDevice(object.property("device").toUInt16());
input.setChannel(object.property("channel").toUInt16());
input.setType(object.property("type").toUInt16());
input.setID(object.property("id").toInt32());
}
QScriptValue inputChannelToScriptValue(QScriptEngine* engine, const UserInputMapper::InputChannel& inputChannel) {
QScriptValue obj = engine->newObject();
obj.setProperty("input", inputToScriptValue(engine, inputChannel.getInput()));
obj.setProperty("modifier", inputToScriptValue(engine, inputChannel.getModifier()));
obj.setProperty("action", inputChannel.getAction());
obj.setProperty("scale", inputChannel.getScale());
return obj;
}
void inputChannelFromScriptValue(const QScriptValue& object, UserInputMapper::InputChannel& inputChannel) {
UserInputMapper::Input input;
UserInputMapper::Input modifier;
inputFromScriptValue(object.property("input"), input);
inputChannel.setInput(input);
inputFromScriptValue(object.property("modifier"), modifier);
inputChannel.setModifier(modifier);
inputChannel.setAction(UserInputMapper::Action(object.property("action").toVariant().toInt()));
inputChannel.setScale(object.property("scale").toVariant().toFloat());
}
QScriptValue actionToScriptValue(QScriptEngine* engine, const UserInputMapper::Action& action) {
QScriptValue obj = engine->newObject();
QVector<UserInputMapper::InputChannel> inputChannels = Application::getUserInputMapper()->getInputChannelsForAction(action);
QScriptValue _inputChannels = engine->newArray(inputChannels.size());
for (int i = 0; i < inputChannels.size(); i++) {
_inputChannels.setProperty(i, inputChannelToScriptValue(engine, inputChannels[i]));
}
obj.setProperty("action", (int) action);
obj.setProperty("actionName", Application::getUserInputMapper()->getActionName(action));
obj.setProperty("inputChannels", _inputChannels);
return obj;
}
void actionFromScriptValue(const QScriptValue& object, UserInputMapper::Action& action) {
action = UserInputMapper::Action(object.property("action").toVariant().toInt());
}
QScriptValue inputPairToScriptValue(QScriptEngine* engine, const UserInputMapper::InputPair& inputPair) {
QScriptValue obj = engine->newObject();
obj.setProperty("input", inputToScriptValue(engine, inputPair.first));
obj.setProperty("inputName", inputPair.second);
return obj;
}
void inputPairFromScriptValue(const QScriptValue& object, UserInputMapper::InputPair& inputPair) {
inputFromScriptValue(object.property("input"), inputPair.first);
inputPair.second = QString(object.property("inputName").toVariant().toString());
}
void ControllerScriptingInterface::registerControllerTypes(QScriptEngine* engine) {
qScriptRegisterSequenceMetaType<QVector<UserInputMapper::Action> >(engine);
qScriptRegisterSequenceMetaType<QVector<UserInputMapper::InputChannel> >(engine);
qScriptRegisterSequenceMetaType<QVector<UserInputMapper::InputPair> >(engine);
qScriptRegisterMetaType(engine, actionToScriptValue, actionFromScriptValue);
qScriptRegisterMetaType(engine, inputChannelToScriptValue, inputChannelFromScriptValue);
qScriptRegisterMetaType(engine, inputToScriptValue, inputFromScriptValue);
qScriptRegisterMetaType(engine, inputPairToScriptValue, inputPairFromScriptValue);
}
void ControllerScriptingInterface::handleMetaEvent(HFMetaEvent* event) {
if (event->type() == HFActionEvent::startType()) {
@ -337,6 +425,37 @@ void ControllerScriptingInterface::updateInputControllers() {
}
}
QVector<UserInputMapper::Action> ControllerScriptingInterface::getAllActions() {
return Application::getUserInputMapper()->getAllActions();
}
QVector<UserInputMapper::InputChannel> ControllerScriptingInterface::getInputChannelsForAction(UserInputMapper::Action action) {
return Application::getUserInputMapper()->getInputChannelsForAction(action);
}
QString ControllerScriptingInterface::getDeviceName(unsigned int device) {
return Application::getUserInputMapper()->getDeviceName((unsigned short) device);
}
QVector<UserInputMapper::InputChannel> ControllerScriptingInterface::getAllInputsForDevice(unsigned int device) {
return Application::getUserInputMapper()->getAllInputsForDevice(device);
}
bool ControllerScriptingInterface::addInputChannel(UserInputMapper::InputChannel inputChannel) {
return Application::getUserInputMapper()->addInputChannel(inputChannel._action, inputChannel._input, inputChannel._modifier, inputChannel._scale);
}
bool ControllerScriptingInterface::removeInputChannel(UserInputMapper::InputChannel inputChannel) {
return Application::getUserInputMapper()->removeInputChannel(inputChannel);
}
QVector<UserInputMapper::InputPair> ControllerScriptingInterface::getAvailableInputs(unsigned int device) {
return Application::getUserInputMapper()->getAvailableInputs((unsigned short) device);
}
void ControllerScriptingInterface::resetAllDeviceBindings() {
Application::getUserInputMapper()->resetAllDeviceBindings();
}
InputController::InputController(int deviceTrackerId, int subTrackerId, QObject* parent) :
AbstractInputController(),
@ -373,4 +492,4 @@ const unsigned int INPUTCONTROLLER_KEY_DEVICE_MASK = 16;
InputController::Key InputController::getKey() const {
return (((_deviceTrackerId & INPUTCONTROLLER_KEY_DEVICE_MASK) << INPUTCONTROLLER_KEY_DEVICE_OFFSET) | _subTrackerId);
}
}
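
The block above registers to/from-script converters so UserInputMapper types can cross the script boundary. A minimal standalone sketch of the qScriptRegisterMetaType pattern, using a hypothetical InputSketch struct rather than the engine's types (assumes the Qt Script module is available):

```cpp
// Minimal sketch, assuming the Qt Script module; InputSketch is a hypothetical type,
// not the engine's UserInputMapper::Input.
#include <QCoreApplication>
#include <QDebug>
#include <QMetaType>
#include <QScriptEngine>

struct InputSketch {
    int device = 0;
    int channel = 0;
};
Q_DECLARE_METATYPE(InputSketch)

QScriptValue inputToScriptValue(QScriptEngine* engine, const InputSketch& input) {
    QScriptValue obj = engine->newObject();
    obj.setProperty("device", input.device);
    obj.setProperty("channel", input.channel);
    return obj;
}

void inputFromScriptValue(const QScriptValue& object, InputSketch& input) {
    input.device = object.property("device").toInt32();
    input.channel = object.property("channel").toInt32();
}

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    QScriptEngine engine;
    qScriptRegisterMetaType(&engine, inputToScriptValue, inputFromScriptValue);

    InputSketch input;
    input.device = 3;
    input.channel = 7;
    engine.globalObject().setProperty("input", engine.toScriptValue(input));
    qDebug() << engine.evaluate("input.device + ':' + input.channel").toString();   // "3:7"
    return 0;
}
```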

View file

@ -14,10 +14,11 @@
#include <QtCore/QObject>
#include "ui/UserInputMapper.h"
#include <AbstractControllerScriptingInterface.h>
class PalmData;
class InputController : public AbstractInputController {
Q_OBJECT
@ -54,6 +55,9 @@ class ControllerScriptingInterface : public AbstractControllerScriptingInterface
public:
ControllerScriptingInterface();
virtual void registerControllerTypes(QScriptEngine* engine);
void emitKeyPressEvent(QKeyEvent* event) { emit keyPressEvent(KeyEvent(*event)); }
void emitKeyReleaseEvent(QKeyEvent* event) { emit keyReleaseEvent(KeyEvent(*event)); }
@ -79,6 +83,14 @@ public:
void updateInputControllers();
public slots:
Q_INVOKABLE virtual QVector<UserInputMapper::Action> getAllActions();
Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getInputChannelsForAction(UserInputMapper::Action action);
Q_INVOKABLE virtual QString getDeviceName(unsigned int device);
Q_INVOKABLE virtual QVector<UserInputMapper::InputChannel> getAllInputsForDevice(unsigned int device);
Q_INVOKABLE virtual bool addInputChannel(UserInputMapper::InputChannel inputChannel);
Q_INVOKABLE virtual bool removeInputChannel(UserInputMapper::InputChannel inputChannel);
Q_INVOKABLE virtual QVector<UserInputMapper::InputPair> getAvailableInputs(unsigned int device);
Q_INVOKABLE virtual void resetAllDeviceBindings();
virtual bool isPrimaryButtonPressed() const;
virtual glm::vec2 getPrimaryJoystickPosition() const;

View file

@ -31,7 +31,7 @@ WindowScriptingInterface::WindowScriptingInterface() :
_formResult(QDialog::Rejected)
{
const DomainHandler& domainHandler = DependencyManager::get<NodeList>()->getDomainHandler();
connect(&domainHandler, &DomainHandler::hostnameChanged, this, &WindowScriptingInterface::domainChanged);
connect(&domainHandler, &DomainHandler::connectedToDomain, this, &WindowScriptingInterface::domainChanged);
connect(Application::getInstance(), &Application::svoImportRequested, this, &WindowScriptingInterface::svoImportRequested);
connect(Application::getInstance(), &Application::domainConnectionRefused, this, &WindowScriptingInterface::domainConnectionRefused);
}

View file

@ -143,7 +143,6 @@ ApplicationOverlay::ApplicationOverlay() :
_alpha(1.0f),
_oculusUIRadius(1.0f),
_trailingAudioLoudness(0.0f),
_crosshairTexture(0),
_previousBorderWidth(-1),
_previousBorderHeight(-1),
_previousMagnifierBottomLeft(),
@ -267,9 +266,8 @@ void ApplicationOverlay::displayOverlayTexture() {
glLoadIdentity();
glDisable(GL_DEPTH_TEST);
glDisable(GL_LIGHTING);
if (_alpha < 1.0) {
glEnable(GL_BLEND);
}
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glViewport(0, 0, qApp->getDeviceSize().width(), qApp->getDeviceSize().height());
static const glm::vec2 topLeft(-1, 1);
@ -277,9 +275,38 @@ void ApplicationOverlay::displayOverlayTexture() {
static const glm::vec2 texCoordTopLeft(0.0f, 1.0f);
static const glm::vec2 texCoordBottomRight(1.0f, 0.0f);
with_each_texture(_overlays.getTexture(), _newUiTexture, [&] {
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight,
DependencyManager::get<GeometryCache>()->renderQuad(
topLeft, bottomRight,
texCoordTopLeft, texCoordBottomRight,
glm::vec4(1.0f, 1.0f, 1.0f, _alpha));
});
if (!_crosshairTexture) {
_crosshairTexture = DependencyManager::get<TextureCache>()->
getImageTexture(PathUtils::resourcesPath() + "images/sixense-reticle.png");
}
//draw the mouse pointer
glm::vec2 canvasSize = qApp->getCanvasSize();
glm::vec2 mouseSize = 32.0f / canvasSize;
auto mouseTopLeft = topLeft * mouseSize;
auto mouseBottomRight = bottomRight * mouseSize;
vec2 mousePosition = vec2(qApp->getMouseX(), qApp->getMouseY());
mousePosition /= canvasSize;
mousePosition *= 2.0f;
mousePosition -= 1.0f;
mousePosition.y *= -1.0f;
glEnable(GL_TEXTURE_2D);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
glm::vec4 reticleColor = { RETICLE_COLOR[0], RETICLE_COLOR[1], RETICLE_COLOR[2], 1.0f };
DependencyManager::get<GeometryCache>()->renderQuad(
mouseTopLeft + mousePosition, mouseBottomRight + mousePosition,
texCoordTopLeft, texCoordBottomRight,
reticleColor);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glDisable(GL_TEXTURE_2D);
} glPopMatrix();
}
@ -428,6 +455,9 @@ void ApplicationOverlay::displayOverlayTextureStereo(Camera& whichCamera, float
}
//draw the mouse pointer
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
glm::vec2 canvasSize = qApp->getCanvasSize();
const float reticleSize = 40.0f / canvasSize.x * quadWidth;
@ -557,9 +587,11 @@ void ApplicationOverlay::renderPointers() {
_crosshairTexture = TextureCache::getImageTexture(PathUtils::resourcesPath() + "images/sixense-reticle.png");
}
glEnable(GL_TEXTURE_2D);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
//glActiveTexture(GL_TEXTURE0);
//bindCursorTexture();
if (qApp->isHMDMode() && !qApp->getLastMouseMoveWasSimulated() && !qApp->isMouseHidden()) {
//If we are in oculus, render reticle later
@ -603,8 +635,8 @@ void ApplicationOverlay::renderPointers() {
_magActive[MOUSE] = false;
renderControllerPointers();
}
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
//glBindTexture(GL_TEXTURE_2D, 0);
//glDisable(GL_TEXTURE_2D);
}
void ApplicationOverlay::renderControllerPointers() {
@ -718,8 +750,14 @@ void ApplicationOverlay::renderControllerPointers() {
}
void ApplicationOverlay::renderPointersOculus() {
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(_crosshairTexture));
glDisable(GL_DEPTH_TEST);
glMatrixMode(GL_MODELVIEW);
//Controller Pointers
@ -744,6 +782,8 @@ void ApplicationOverlay::renderPointersOculus() {
}
glEnable(GL_DEPTH_TEST);
glDisable(GL_TEXTURE_2D);
glDisable(GL_BLEND);
}
//Renders a small magnification of the currently bound texture at the coordinates
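
The reticle drawing above converts the mouse position from window pixels to normalized device coordinates before rendering the quad. A standalone arithmetic sketch of that mapping (no GL, assumed canvas size and mouse position):

```cpp
// Minimal sketch, no GL; canvas size and mouse position are assumed example values.
#include <iostream>

struct Vec2 { float x, y; };

Vec2 pixelToNdc(Vec2 mousePx, Vec2 canvasPx) {
    Vec2 ndc{ mousePx.x / canvasPx.x, mousePx.y / canvasPx.y };   // [0, 1]
    ndc.x = ndc.x * 2.0f - 1.0f;                                  // [-1, 1]
    ndc.y = -(ndc.y * 2.0f - 1.0f);                               // flip y: window y grows down, NDC y grows up
    return ndc;
}

int main() {
    Vec2 ndc = pixelToNdc({960.0f, 270.0f}, {1920.0f, 1080.0f});
    std::cout << ndc.x << ", " << ndc.y << "\n";   // 0, 0.5
    return 0;
}
```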

View file

@ -96,7 +96,6 @@ private:
};
float _hmdUIAngularSize = DEFAULT_HMD_UI_ANGULAR_SIZE;
void renderReticle(glm::quat orientation, float alpha);
void renderPointers();;
void renderMagnifier(glm::vec2 magPos, float sizeMult, bool showBorder);
@ -108,6 +107,7 @@ private:
void renderCameraToggle();
void renderStatsAndLogs();
void renderDomainConnectionStatusBorder();
void bindCursorTexture(gpu::Batch& batch, uint8_t cursorId = 0);
TexturedHemisphere _overlays;
@ -125,9 +125,12 @@ private:
float _alpha = 1.0f;
float _oculusUIRadius;
float _trailingAudioLoudness;
gpu::TexturePointer _crosshairTexture;
gpu::TexturePointer _crosshairTexture;
QMap<uint16_t, gpu::TexturePointer> _cursors;
GLuint _newUiTexture{ 0 };
int _reticleQuad;

View file

@ -161,6 +161,10 @@ void Stats::drawBackground(unsigned int rgba, int x, int y, int width, int heigh
((rgba >> 8) & 0xff) / 255.0f,
(rgba & 0xff) / 255.0f);
// FIX ME: is this correct? It seems to work to fix textures bleeding into us...
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
DependencyManager::get<GeometryCache>()->renderQuad(x, y, width, height, color);
}
@ -460,7 +464,7 @@ void Stats::display(
verticalOffset = STATS_PELS_INITIALOFFSET;
horizontalOffset = _lastHorizontalOffset + _generalStatsWidth + _pingStatsWidth + _geoStatsWidth + 3;
lines = _expanded ? 10 : 2;
lines = _expanded ? 10 : 3;
drawBackground(backgroundColor, horizontalOffset, 0, canvasSize.x - horizontalOffset,
(lines + 1) * STATS_PELS_PER_LINE);
@ -608,12 +612,10 @@ void Stats::display(
}
// LOD Details
if (_expanded) {
octreeStats.str("");
QString displayLODDetails = DependencyManager::get<LODManager>()->getLODFeedbackText();
octreeStats << "LOD: You can see " << qPrintable(displayLODDetails.trimmed());
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}
octreeStats.str("");
QString displayLODDetails = DependencyManager::get<LODManager>()->getLODFeedbackText();
octreeStats << "LOD: You can see " << qPrintable(displayLODDetails.trimmed());
verticalOffset += STATS_PELS_PER_LINE;
drawText(horizontalOffset, verticalOffset, scale, rotation, font, (char*)octreeStats.str().c_str(), color);
}

View file

@ -13,7 +13,15 @@
// UserInputMapper Class
// Default construct allocates the output size with the current hardcoded action channels
UserInputMapper::UserInputMapper() {
assignDefaulActionScales();
createActionNames();
}
bool UserInputMapper::registerDevice(uint16 deviceID, const DeviceProxy::Pointer& proxy){
proxy->_name += " (" + QString::number(deviceID) + ")";
_registeredDevices[deviceID] = proxy;
return true;
}
@ -27,6 +35,12 @@ UserInputMapper::DeviceProxy::Pointer UserInputMapper::getDeviceProxy(const Inpu
}
}
void UserInputMapper::resetAllDeviceBindings() {
for (auto device : _registeredDevices) {
device.second->resetDeviceBindings();
}
}
bool UserInputMapper::addInputChannel(Action action, const Input& input, float scale) {
return addInputChannel(action, input, Input(), scale);
}
@ -37,7 +51,7 @@ bool UserInputMapper::addInputChannel(Action action, const Input& input, const I
qDebug() << "UserInputMapper::addInputChannel: The input comes from a device #" << input.getDevice() << "is unknown. no inputChannel mapped.";
return false;
}
auto inputChannel = InputChannel(input, modifier, action, scale);
// Insert or replace the input to modifiers
@ -61,6 +75,37 @@ int UserInputMapper::addInputChannels(const InputChannels& channels) {
return nbAdded;
}
bool UserInputMapper::removeInputChannel(InputChannel inputChannel) {
// Remove from Input to Modifiers map
if (inputChannel.hasModifier()) {
_inputToModifiersMap.erase(inputChannel._input.getID());
}
// Remove from Action to Inputs map
std::pair<ActionToInputsMap::iterator, ActionToInputsMap::iterator> ret;
ret = _actionToInputsMap.equal_range(inputChannel._action);
for (ActionToInputsMap::iterator it=ret.first; it!=ret.second; ++it) {
if (it->second == inputChannel) {
_actionToInputsMap.erase(it);
return true;
}
}
return false;
}
void UserInputMapper::removeAllInputChannels() {
_inputToModifiersMap.clear();
_actionToInputsMap.clear();
}
void UserInputMapper::removeAllInputChannelsForDevice(uint16 device) {
QVector<InputChannel> channels = getAllInputsForDevice(device);
for (auto& channel : channels) {
removeInputChannel(channel);
}
}
int UserInputMapper::getInputChannels(InputChannels& channels) const {
for (auto& channel : _actionToInputsMap) {
channels.push_back(channel.second);
@ -69,6 +114,20 @@ int UserInputMapper::getInputChannels(InputChannels& channels) const {
return _actionToInputsMap.size();
}
QVector<UserInputMapper::InputChannel> UserInputMapper::getAllInputsForDevice(uint16 device) {
InputChannels allChannels;
getInputChannels(allChannels);
QVector<InputChannel> channels;
for (InputChannel inputChannel : allChannels) {
if (inputChannel._input._device == device) {
channels.push_back(inputChannel);
}
}
return channels;
}
void UserInputMapper::update(float deltaTime) {
// Reset the axis state for next loop
@ -130,6 +189,24 @@ void UserInputMapper::update(float deltaTime) {
}
}
QVector<UserInputMapper::Action> UserInputMapper::getAllActions() {
QVector<Action> actions;
for (auto i = 0; i < NUM_ACTIONS; i++) {
actions.append(Action(i));
}
return actions;
}
QVector<UserInputMapper::InputChannel> UserInputMapper::getInputChannelsForAction(UserInputMapper::Action action) {
QVector<InputChannel> inputChannels;
std::pair <ActionToInputsMap::iterator, ActionToInputsMap::iterator> ret;
ret = _actionToInputsMap.equal_range(action);
for (ActionToInputsMap::iterator it=ret.first; it!=ret.second; ++it) {
inputChannels.append(it->second);
}
return inputChannels;
}
void UserInputMapper::assignDefaulActionScales() {
_actionScales[LONGITUDINAL_BACKWARD] = 1.0f; // 1m per unit
_actionScales[LONGITUDINAL_FORWARD] = 1.0f; // 1m per unit
@ -144,3 +221,20 @@ void UserInputMapper::assignDefaulActionScales() {
_actionScales[BOOM_IN] = 1.0f; // 1m per unit
_actionScales[BOOM_OUT] = 1.0f; // 1m per unit
}
// This is only necessary as long as the actions are hardcoded
// Eventually you can just add the string when you add the action
void UserInputMapper::createActionNames() {
_actionNames[LONGITUDINAL_BACKWARD] = "LONGITUDINAL_BACKWARD";
_actionNames[LONGITUDINAL_FORWARD] = "LONGITUDINAL_FORWARD";
_actionNames[LATERAL_LEFT] = "LATERAL_LEFT";
_actionNames[LATERAL_RIGHT] = "LATERAL_RIGHT";
_actionNames[VERTICAL_DOWN] = "VERTICAL_DOWN";
_actionNames[VERTICAL_UP] = "VERTICAL_UP";
_actionNames[YAW_LEFT] = "YAW_LEFT";
_actionNames[YAW_RIGHT] = "YAW_RIGHT";
_actionNames[PITCH_DOWN] = "PITCH_DOWN";
_actionNames[PITCH_UP] = "PITCH_UP";
_actionNames[BOOM_IN] = "BOOM_IN";
_actionNames[BOOM_OUT] = "BOOM_OUT";
}
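
removeInputChannel above walks the equal_range of the action-to-inputs multimap and erases the first matching binding. A minimal standalone sketch of that registry pattern with simplified, assumed types (not the engine's UserInputMapper):

```cpp
// Minimal sketch of a multimap-backed action registry; types are assumed simplifications.
#include <iostream>
#include <map>
#include <string>

struct InputChannel {
    int device = 0;
    int channel = 0;
    bool operator==(const InputChannel& o) const { return device == o.device && channel == o.channel; }
};

using ActionToInputsMap = std::multimap<std::string, InputChannel>;

bool removeInputChannel(ActionToInputsMap& map, const std::string& action, const InputChannel& channel) {
    auto range = map.equal_range(action);
    for (auto it = range.first; it != range.second; ++it) {
        if (it->second == channel) {
            map.erase(it);          // erase only the first matching binding
            return true;
        }
    }
    return false;
}

int main() {
    ActionToInputsMap bindings;
    bindings.insert({"YAW_LEFT", {1, 10}});
    bindings.insert({"YAW_LEFT", {2, 4}});
    std::cout << removeInputChannel(bindings, "YAW_LEFT", {2, 4}) << "\n";   // 1
    std::cout << bindings.count("YAW_LEFT") << "\n";                         // 1
    return 0;
}
```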

View file

@ -21,6 +21,7 @@
class UserInputMapper : public QObject {
Q_OBJECT
Q_ENUMS(Action)
public:
typedef unsigned short uint16;
typedef unsigned int uint32;
@ -51,8 +52,13 @@ public:
uint16 getDevice() const { return _device; }
uint16 getChannel() const { return _channel; }
uint32 getID() const { return _id; }
ChannelType getType() const { return (ChannelType) _type; }
void setDevice(uint16 device) { _device = device; }
void setChannel(uint16 channel) { _channel = channel; }
void setType(uint16 type) { _type = type; }
void setID(uint32 ID) { _id = ID; }
bool isButton() const { return getType() == ChannelType::BUTTON; }
bool isAxis() const { return getType() == ChannelType::AXIS; }
bool isJoint() const { return getType() == ChannelType::JOINT; }
@ -64,6 +70,7 @@ public:
explicit Input(uint16 device, uint16 channel, ChannelType type) : _device(device), _channel(channel), _type(uint16(type)) {}
Input(const Input& src) : _id(src._id) {}
Input& operator = (const Input& src) { _id = src._id; return (*this); }
bool operator ==(const Input& right) const { return _id == right._id; }
bool operator < (const Input& src) const { return _id < src._id; }
};
@ -83,22 +90,32 @@ public:
typedef std::function<bool (const Input& input, int timestamp)> ButtonGetter;
typedef std::function<float (const Input& input, int timestamp)> AxisGetter;
typedef std::function<JointValue (const Input& input, int timestamp)> JointGetter;
typedef QPair<Input, QString> InputPair;
typedef std::function<QVector<InputPair> ()> AvailableInputGetter;
typedef std::function<bool ()> ResetBindings;
typedef QVector<InputPair> AvailableInput;
class DeviceProxy {
public:
DeviceProxy() {}
ButtonGetter getButton = [] (const Input& input, int timestamp) -> bool { return false; };
AxisGetter getAxis = [] (const Input& input, int timestamp) -> bool { return 0.0f; };
JointGetter getJoint = [] (const Input& input, int timestamp) -> JointValue { return JointValue(); };
typedef std::shared_ptr<DeviceProxy> Pointer;
DeviceProxy(QString name) { _name = name; }
QString _name;
ButtonGetter getButton = [] (const Input& input, int timestamp) -> bool { return false; };
AxisGetter getAxis = [] (const Input& input, int timestamp) -> bool { return 0.0f; };
JointGetter getJoint = [] (const Input& input, int timestamp) -> JointValue { return JointValue(); };
AvailableInputGetter getAvailabeInputs = [] () -> AvailableInput { return QVector<InputPair>(); };
ResetBindings resetDeviceBindings = [] () -> bool { return true; };
typedef std::shared_ptr<DeviceProxy> Pointer;
};
// GetFreeDeviceID should be called before registering a device to use an ID not used by a different device.
uint16 getFreeDeviceID() { return _nextFreeDeviceID++; }
bool registerDevice(uint16 deviceID, const DeviceProxy::Pointer& device);
DeviceProxy::Pointer getDeviceProxy(const Input& input);
QString getDeviceName(uint16 deviceID) { return _registeredDevices[deviceID]->_name; }
QVector<InputPair> getAvailableInputs(uint16 deviceID) { return _registeredDevices[deviceID]->getAvailabeInputs(); }
void resetAllDeviceBindings();
// Actions are the output channels of the Mapper, that's what the InputChannel map to
// For now the Actions are hardcoded, this is bad, but we will fix that in the near future
@ -123,7 +140,12 @@ public:
NUM_ACTIONS,
};
std::vector<QString> _actionNames = std::vector<QString>(NUM_ACTIONS);
void createActionNames();
QVector<Action> getAllActions();
QString getActionName(Action action) { return UserInputMapper::_actionNames[(int) action]; }
float getActionState(Action action) const { return _actionStates[action]; }
void assignDefaulActionScales();
@ -140,27 +162,43 @@ public:
Input _modifier = Input(); // make it invalid by default, meaning no modifier
Action _action = LONGITUDINAL_BACKWARD;
float _scale = 0.0f;
Input getInput() const { return _input; }
Input getModifier() const { return _modifier; }
Action getAction() const { return _action; }
float getScale() const { return _scale; }
void setInput(Input input) { _input = input; }
void setModifier(Input modifier) { _modifier = modifier; }
void setAction(Action action) { _action = action; }
void setScale(float scale) { _scale = scale; }
InputChannel() {}
InputChannel(const Input& input, const Input& modifier, Action action, float scale = 1.0f) :
_input(input), _modifier(modifier), _action(action), _scale(scale) {}
InputChannel(const InputChannel& src) : InputChannel(src._input, src._modifier, src._action, src._scale) {}
InputChannel& operator = (const InputChannel& src) { _input = src._input; _modifier = src._modifier; _action = src._action; _scale = src._scale; return (*this); }
bool operator ==(const InputChannel& right) const { return _input == right._input && _modifier == right._modifier && _action == right._action && _scale == right._scale; }
bool hasModifier() { return _modifier.isValid(); }
};
typedef std::vector< InputChannel > InputChannels;
// Add a bunch of input channels, return the true number of channels that successfully were added
int addInputChannels(const InputChannels& channels);
// Remove the first found instance of the input channel from the input mapper, true if found
bool removeInputChannel(InputChannel channel);
void removeAllInputChannels();
void removeAllInputChannelsForDevice(uint16 device);
//Grab all the input channels currently in use, return the number
int getInputChannels(InputChannels& channels) const;
QVector<InputChannel> getAllInputsForDevice(uint16 device);
QVector<InputChannel> getInputChannelsForAction(UserInputMapper::Action action);
std::multimap<Action, InputChannel> getActionToInputsMap() { return _actionToInputsMap; }
// Update means go grab all the device input channels and update the output channel values
void update(float deltaTime);
// Default construct allocates the output size with the current hardcoded action channels
UserInputMapper() { assignDefaulActionScales(); }
UserInputMapper();
protected:
typedef std::map<int, DeviceProxy::Pointer> DevicesMap;
@ -177,4 +215,12 @@ protected:
std::vector<float> _actionScales = std::vector<float>(NUM_ACTIONS, 1.0f);
};
Q_DECLARE_METATYPE(UserInputMapper::InputPair)
Q_DECLARE_METATYPE(QVector<UserInputMapper::InputPair>)
Q_DECLARE_METATYPE(UserInputMapper::Input)
Q_DECLARE_METATYPE(UserInputMapper::InputChannel)
Q_DECLARE_METATYPE(QVector<UserInputMapper::InputChannel>)
Q_DECLARE_METATYPE(UserInputMapper::Action)
Q_DECLARE_METATYPE(QVector<UserInputMapper::Action>)
#endif // hifi_UserInputMapper_h
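
The DeviceProxy additions above give every callback a safe no-op default and let each device override only what it supports at registration time. A standalone sketch of that pattern (assumed simplified names, std::function only, not the engine's classes):

```cpp
// Minimal sketch of the device-proxy pattern; names and types are assumptions.
#include <functional>
#include <iostream>
#include <map>
#include <memory>
#include <string>

struct DeviceProxy {
    using Pointer = std::shared_ptr<DeviceProxy>;
    explicit DeviceProxy(std::string name) : name(std::move(name)) {}
    std::string name;
    // Safe defaults: an unregistered callback simply reports "no input".
    std::function<bool(int channel)> getButton = [](int) { return false; };
    std::function<float(int channel)> getAxis = [](int) { return 0.0f; };
};

class InputMapper {
public:
    int getFreeDeviceID() { return _nextFreeDeviceID++; }
    void registerDevice(int id, DeviceProxy::Pointer proxy) {
        proxy->name += " (" + std::to_string(id) + ")";   // disambiguate duplicate device names
        _devices[id] = std::move(proxy);
    }
    bool button(int id, int channel) { return _devices.at(id)->getButton(channel); }
private:
    int _nextFreeDeviceID = 1;
    std::map<int, DeviceProxy::Pointer> _devices;
};

int main() {
    InputMapper mapper;
    int id = mapper.getFreeDeviceID();
    auto proxy = std::make_shared<DeviceProxy>("Keyboard");
    proxy->getButton = [](int channel) { return channel == 32; };   // pretend the space bar is down
    mapper.registerDevice(id, proxy);
    std::cout << mapper.button(id, 32) << "\n";   // 1
    return 0;
}
```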

View file

@ -123,6 +123,7 @@ protected:
namespace render {
template <> const ItemKey payloadGetKey(const Overlay::Pointer& overlay);
template <> const Item::Bound payloadGetBound(const Overlay::Pointer& overlay);
template <> int payloadGetLayer(const Overlay::Pointer& overlay);
template <> void payloadRender(const Overlay::Pointer& overlay, RenderArgs* args);
}

View file

@ -37,7 +37,7 @@ namespace render {
template <> const ItemKey payloadGetKey(const Overlay::Pointer& overlay) {
if (overlay->is3D() && !static_cast<Base3DOverlay*>(overlay.get())->getDrawOnHUD()) {
if (static_cast<Base3DOverlay*>(overlay.get())->getDrawInFront()) {
return ItemKey::Builder().withTypeShape().withNoDepthSort().build();
return ItemKey::Builder().withTypeShape().withLayered().build();
} else {
return ItemKey::Builder::opaqueShape();
}
@ -53,6 +53,17 @@ namespace render {
return AABox(glm::vec3(bounds.x(), bounds.y(), 0.0f), glm::vec3(bounds.width(), bounds.height(), 0.1f));
}
}
template <> int payloadGetLayer(const Overlay::Pointer& overlay) {
// Magic numbers while we are defining the layering mechanism:
const int LAYER_2D = 2;
const int LAYER_3D_FRONT = 1;
const int LAYER_3D = 0;
if (overlay->is3D()) {
return (static_cast<Base3DOverlay*>(overlay.get())->getDrawInFront() ? LAYER_3D_FRONT : LAYER_3D);
} else {
return LAYER_2D;
}
}
template <> void payloadRender(const Overlay::Pointer& overlay, RenderArgs* args) {
if (args) {
glPushMatrix();
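
payloadGetLayer above is one more explicit specialization of the render payload function templates. A standalone sketch of that dispatch style with an assumed, simplified overlay type (not the render library's):

```cpp
// Minimal sketch of the payload template-specialization pattern; types are assumptions.
#include <iostream>
#include <memory>

struct OverlaySketch {
    bool is3D;
    bool drawInFront;
};
using OverlayPointer = std::shared_ptr<OverlaySketch>;

// Generic declaration, as the render library would provide.
template <typename T> int payloadGetLayer(const T& payload);

// Specialization for overlays, mirroring the LAYER_* constants above.
template <> int payloadGetLayer(const OverlayPointer& overlay) {
    const int LAYER_2D = 2;
    const int LAYER_3D_FRONT = 1;
    const int LAYER_3D = 0;
    if (overlay->is3D) {
        return overlay->drawInFront ? LAYER_3D_FRONT : LAYER_3D;
    }
    return LAYER_2D;
}

int main() {
    auto hudOverlay = std::make_shared<OverlaySketch>(OverlaySketch{false, false});
    auto frontOverlay = std::make_shared<OverlaySketch>(OverlaySketch{true, true});
    std::cout << payloadGetLayer(hudOverlay) << " " << payloadGetLayer(frontOverlay) << "\n";   // 2 1
    return 0;
}
```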

View file

@ -156,6 +156,7 @@ void AudioClient::audioMixerKilled() {
_hasReceivedFirstPacket = false;
_outgoingAvatarAudioSequenceNumber = 0;
_stats.reset();
emit disconnected();
}

View file

@ -186,6 +186,7 @@ signals:
void deviceChanged();
void receivedFirstPacket();
void disconnected();
protected:
AudioClient();

View file

@ -1012,7 +1012,9 @@ void EntityTreeRenderer::deletingEntity(const EntityItemID& entityID) {
void EntityTreeRenderer::addingEntity(const EntityItemID& entityID) {
checkAndCallPreload(entityID);
auto entity = static_cast<EntityTree*>(_tree)->findEntityByID(entityID);
addEntityToScene(entity);
if (entity) {
addEntityToScene(entity);
}
}
void EntityTreeRenderer::addEntityToScene(EntityItemPointer entity) {

View file

@ -9,15 +9,18 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "RenderableBoxEntityItem.h"
#include <glm/gtx/quaternion.hpp>
#include <gpu/GPUConfig.h>
#include <gpu/Batch.h>
#include <DeferredLightingEffect.h>
#include <ObjectMotionState.h>
#include <PerfStat.h>
#include "RenderableBoxEntityItem.h"
#include "RenderableDebugableEntityItem.h"
EntityItemPointer RenderableBoxEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
return EntityItemPointer(new RenderableBoxEntityItem(entityID, properties));
@ -27,23 +30,11 @@ void RenderableBoxEntityItem::render(RenderArgs* args) {
PerformanceTimer perfTimer("RenderableBoxEntityItem::render");
Q_ASSERT(getType() == EntityTypes::Box);
glm::vec4 cubeColor(toGlm(getXColor()), getLocalRenderAlpha());
bool debugSimulationOwnership = args->_debugFlags & RenderArgs::RENDER_DEBUG_SIMULATION_OWNERSHIP;
bool highlightSimulationOwnership = false;
if (debugSimulationOwnership) {
auto nodeList = DependencyManager::get<NodeList>();
const QUuid& myNodeID = nodeList->getSessionUUID();
highlightSimulationOwnership = (getSimulatorID() == myNodeID);
}
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
batch.setModelTransform(getTransformToCenter());
if (highlightSimulationOwnership) {
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(batch, 1.0f, cubeColor);
} else {
DependencyManager::get<DeferredLightingEffect>()->renderSolidCube(batch, 1.0f, cubeColor);
}
batch.setModelTransform(getTransformToCenter()); // we want to include the scale as well
DependencyManager::get<DeferredLightingEffect>()->renderSolidCube(batch, 1.0f, cubeColor);
RenderableDebugableEntityItem::render(this, args);
};

View file

@ -13,7 +13,6 @@
#define hifi_RenderableBoxEntityItem_h
#include <BoxEntityItem.h>
#include "RenderableDebugableEntityItem.h"
#include "RenderableEntityItem.h"
class RenderableBoxEntityItem : public BoxEntityItem {

View file

@ -10,58 +10,54 @@
//
#include "RenderableDebugableEntityItem.h"
#include <glm/gtx/quaternion.hpp>
#include <gpu/GPUConfig.h>
#include <gpu/Batch.h>
#include <DeferredLightingEffect.h>
#include <PhysicsEngine.h>
#include "RenderableDebugableEntityItem.h"
#include <ObjectMotionState.h>
void RenderableDebugableEntityItem::renderBoundingBox(EntityItem* entity, RenderArgs* args,
float puffedOut, glm::vec4& color) {
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
batch.setModelTransform(entity->getTransformToCenter());
batch.setModelTransform(entity->getTransformToCenter()); // we want to include the scale as well
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(batch, 1.0f + puffedOut, color);
}
void RenderableDebugableEntityItem::renderHoverDot(EntityItem* entity, RenderArgs* args) {
const int SLICES = 8, STACKS = 8;
float radius = 0.05f;
glm::vec4 blueColor(0.0f, 0.0f, 1.0f, 1.0f);
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
Transform transform = entity->getTransformToCenter();
// Cancel true dimensions and set scale to 2 * radius (diameter)
transform.postScale(2.0f * glm::vec3(radius, radius, radius) / entity->getDimensions());
batch.setModelTransform(transform);
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(batch, 0.5f, SLICES, STACKS, blueColor);
}
void RenderableDebugableEntityItem::render(EntityItem* entity, RenderArgs* args) {
bool debugSimulationOwnership = args->_debugFlags & RenderArgs::RENDER_DEBUG_SIMULATION_OWNERSHIP;
if (args->_debugFlags & RenderArgs::RENDER_DEBUG_SIMULATION_OWNERSHIP) {
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
batch.setModelTransform(entity->getTransformToCenter()); // we want to include the scale as well
auto nodeList = DependencyManager::get<NodeList>();
const QUuid& myNodeID = nodeList->getSessionUUID();
bool highlightSimulationOwnership = (entity->getSimulatorID() == myNodeID);
if (highlightSimulationOwnership) {
glm::vec4 greenColor(0.0f, 1.0f, 0.2f, 1.0f);
renderBoundingBox(entity, args, 0.08f, greenColor);
}
if (debugSimulationOwnership) {
quint64 now = usecTimestampNow();
if (now - entity->getLastEditedFromRemote() < 0.1f * USECS_PER_SECOND) {
glm::vec4 redColor(1.0f, 0.0f, 0.0f, 1.0f);
renderBoundingBox(entity, args, 0.2f, redColor);
renderBoundingBox(entity, args, 0.16f, redColor);
}
if (now - entity->getLastBroadcast() < 0.2f * USECS_PER_SECOND) {
glm::vec4 yellowColor(1.0f, 1.0f, 0.2f, 1.0f);
renderBoundingBox(entity, args, 0.3f, yellowColor);
renderBoundingBox(entity, args, 0.24f, yellowColor);
}
ObjectMotionState* motionState = static_cast<ObjectMotionState*>(entity->getPhysicsInfo());
if (motionState && motionState->isActive()) {
renderHoverDot(entity, args);
glm::vec4 blueColor(0.0f, 0.0f, 1.0f, 1.0f);
renderBoundingBox(entity, args, 0.32f, blueColor);
}
}
}

View file

@ -17,7 +17,6 @@
class RenderableDebugableEntityItem {
public:
static void renderBoundingBox(EntityItem* entity, RenderArgs* args, float puffedOut, glm::vec4& color);
static void renderHoverDot(EntityItem* entity, RenderArgs* args);
static void render(EntityItem* entity, RenderArgs* args);
};

View file

@ -49,6 +49,7 @@ void RenderableLineEntityItem::render(RenderArgs* args) {
batch._glLineWidth(getLineWidth());
if (getLinePoints().size() > 1) {
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch);
DependencyManager::get<GeometryCache>()->renderVertices(batch, gpu::LINE_STRIP, _lineVerticesID);
}
batch._glLineWidth(1.0f);

View file

@ -201,14 +201,6 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
glm::vec3 position = getPosition();
glm::vec3 dimensions = getDimensions();
bool debugSimulationOwnership = args->_debugFlags & RenderArgs::RENDER_DEBUG_SIMULATION_OWNERSHIP;
bool highlightSimulationOwnership = false;
if (debugSimulationOwnership) {
auto nodeList = DependencyManager::get<NodeList>();
const QUuid& myNodeID = nodeList->getSessionUUID();
highlightSimulationOwnership = (getSimulatorID() == myNodeID);
}
if (hasModel()) {
if (_model) {
if (QUrl(getModelURL()) != _model->getURL()) {
@ -232,7 +224,7 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
remapTextures();
{
float alpha = getLocalRenderAlpha();
// float alpha = getLocalRenderAlpha();
if (!_model || _needsModelReload) {
// TODO: this getModel() appears to be about 3% of model render time. We should optimize
@ -274,11 +266,6 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
}
}
}
if (highlightSimulationOwnership) {
glm::vec4 greenColor(0.0f, 1.0f, 0.0f, 1.0f);
RenderableDebugableEntityItem::renderBoundingBox(this, args, 0.0f, greenColor);
}
} else {
glm::vec4 greenColor(0.0f, 1.0f, 0.0f, 1.0f);
RenderableDebugableEntityItem::renderBoundingBox(this, args, 0.0f, greenColor);

View file

@ -53,6 +53,7 @@ void RenderableParticleEffectEntityItem::render(RenderArgs* args) {
batch.setUniformTexture(0, _texture->getGPUTexture());
}
batch.setModelTransform(getTransformToCenter());
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch);
DependencyManager::get<GeometryCache>()->renderVertices(batch, gpu::QUADS, _cacheID);
};

View file

@ -213,7 +213,7 @@ uint8_t RenderablePolyVoxEntityItem::getVoxel(int x, int y, int z) {
// if _voxelSurfaceStyle is SURFACE_EDGED_CUBIC, we maintain an extra layer of
// voxels all around the requested voxel space. Having the empty voxels around
// the edges changes how the surface extractor behaves.
if (_voxelSurfaceStyle == SURFACE_EDGED_CUBIC) {
return _volData->getVoxelAt(x + 1, y + 1, z + 1);
}
@ -239,7 +239,7 @@ void RenderablePolyVoxEntityItem::updateOnCount(int x, int y, int z, uint8_t toV
if (!inUserBounds(_volData, _voxelSurfaceStyle, x, y, z)) {
return;
}
uint8_t uVoxelValue = getVoxel(x, y, z);
if (toValue != 0) {
if (uVoxelValue == 0) {
@ -347,8 +347,8 @@ void RenderablePolyVoxEntityItem::getModel() {
sizeof(PolyVox::PositionMaterialNormal),
gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::RAW)));
// auto normalAttrib = mesh->getAttributeBuffer(gpu::Stream::NORMAL);
// for (auto normal = normalAttrib.begin<glm::vec3>(); normal != normalAttrib.end<glm::vec3>(); normal++) {
// (*normal) = -(*normal);
@ -363,7 +363,7 @@ void RenderablePolyVoxEntityItem::getModel() {
// gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::RAW)));
#ifdef WANT_DEBUG
qDebug() << "---- vecIndices.size() =" << vecIndices.size();
qDebug() << "---- vecVertices.size() =" << vecVertices.size();
@ -379,7 +379,7 @@ void RenderablePolyVoxEntityItem::render(RenderArgs* args) {
if (_needsModelReload) {
getModel();
}
Transform transform;
transform.setTranslation(getPosition() - getRegistrationPoint() * getDimensions());
transform.setRotation(getRotation());
@ -398,7 +398,7 @@ void RenderablePolyVoxEntityItem::render(RenderArgs* args) {
mesh->getVertexBuffer()._stride);
batch.setIndexBuffer(gpu::UINT32, mesh->getIndexBuffer()._buffer, 0);
batch.drawIndexed(gpu::TRIANGLES, mesh->getNumIndices(), 0);
RenderableDebugableEntityItem::render(this, args);
}
@ -448,14 +448,13 @@ bool RenderablePolyVoxEntityItem::findDetailedRayIntersection(const glm::vec3& o
glm::vec3 normDirection = glm::normalize(direction);
// the PolyVox ray intersection code requires a near and far point.
glm::vec3 scale = getDimensions() / _voxelVolumeSize; // meters / voxel-units
// set ray cast length to long enough to cover all of the voxel space
// set ray cast length to long enough to cover all of the voxel space
float distanceToEntity = glm::distance(origin, getPosition());
float largestDimension = glm::max(getDimensions().x, getDimensions().y, getDimensions().z) * 2.0f;
glm::vec3 farPoint = origin + normDirection * (distanceToEntity + largestDimension);
glm::vec4 originInVoxel = wtvMatrix * glm::vec4(origin, 1.0f);
glm::vec4 farInVoxel = wtvMatrix * glm::vec4(farPoint, 1.0f);
PolyVox::Vector3DFloat startPoint(originInVoxel.x, originInVoxel.y, originInVoxel.z);
PolyVox::Vector3DFloat endPoint(farInVoxel.x, farInVoxel.y, farInVoxel.z);
@ -479,7 +478,7 @@ bool RenderablePolyVoxEntityItem::findDetailedRayIntersection(const glm::vec3& o
}
result -= glm::vec4(0.5f, 0.5f, 0.5f, 0.0f);
glm::vec4 intersectedWorldPosition = voxelToWorldMatrix() * result;
distance = glm::distance(glm::vec3(intersectedWorldPosition), origin);
@ -556,9 +555,9 @@ void RenderablePolyVoxEntityItem::decompressVolumeData() {
<< voxelXSize << voxelYSize << voxelZSize;
return;
}
int rawSize = voxelXSize * voxelYSize * voxelZSize;
QByteArray compressedData;
reader >> compressedData;
QByteArray uncompressedData = qUncompress(compressedData);
@ -635,9 +634,6 @@ void RenderablePolyVoxEntityItem::computeShapeInfo(ShapeInfo& info) {
float offL = -0.5f;
float offH = 0.5f;
// float offL = 0.0f;
// float offH = 1.0f;
glm::vec3 p000 = glm::vec3(wToM * glm::vec4(x + offL, y + offL, z + offL, 1.0f));
glm::vec3 p001 = glm::vec3(wToM * glm::vec4(x + offL, y + offL, z + offH, 1.0f));
glm::vec3 p010 = glm::vec3(wToM * glm::vec4(x + offL, y + offH, z + offL, 1.0f));
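
The ray intersection hunk in this file sizes the cast so it reaches from the camera through the whole voxel volume. A standalone arithmetic sketch of that length choice (assumed example values, no glm):

```cpp
// Minimal sketch of the ray-length choice: camera-to-entity distance plus twice the
// largest dimension guarantees the cast crosses the whole volume. Values are assumed.
#include <algorithm>
#include <iostream>

int main() {
    float distanceToEntity = 10.0f;                          // metres from ray origin to entity
    float dims[3] = {2.0f, 1.0f, 3.0f};                      // entity dimensions in metres
    float largestDimension = *std::max_element(dims, dims + 3) * 2.0f;
    float rayLength = distanceToEntity + largestDimension;
    std::cout << rayLength << "\n";                          // 16
    return 0;
}
```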

View file

@ -9,6 +9,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "RenderableSphereEntityItem.h"
#include <glm/gtx/quaternion.hpp>
#include <gpu/GPUConfig.h>
@ -18,7 +20,7 @@
#include <DeferredLightingEffect.h>
#include <PerfStat.h>
#include "RenderableSphereEntityItem.h"
#include "RenderableDebugableEntityItem.h"
EntityItemPointer RenderableSphereEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
return EntityItemPointer(new RenderableSphereEntityItem(entityID, properties));
@ -37,6 +39,8 @@ void RenderableSphereEntityItem::render(RenderArgs* args) {
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
batch.setModelTransform(getTransformToCenter());
batch.setModelTransform(getTransformToCenter()); // use a transform with scale, rotation, registration point and translation
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(batch, 0.5f, SLICES, STACKS, sphereColor);
RenderableDebugableEntityItem::render(this, args);
};

View file

@ -47,12 +47,13 @@ void RenderableTextEntityItem::render(RenderArgs* args) {
glm::vec3 maxCorner = glm::vec3(dimensions.x, 0.0f, SLIGHTLY_BEHIND);
DependencyManager::get<DeferredLightingEffect>()->renderQuad(batch, minCorner, maxCorner, backgroundColor);
float scale = _lineHeight / _textRenderer->getRowHeight();
float scale = _lineHeight / _textRenderer->getFontSize();
transformToTopLeft.setScale(scale); // Scale to have the correct line height
batch.setModelTransform(transformToTopLeft);
float leftMargin = 0.5f * _lineHeight, topMargin = 0.5f * _lineHeight;
glm::vec2 bounds = glm::vec2(dimensions.x - 2.0f * leftMargin, dimensions.y - 2.0f * topMargin);
float leftMargin = 0.1f * _lineHeight, topMargin = 0.1f * _lineHeight;
glm::vec2 bounds = glm::vec2(dimensions.x - 2.0f * leftMargin,
dimensions.y - 2.0f * topMargin);
_textRenderer->draw(batch, leftMargin / scale, -topMargin / scale, _text, textColor, bounds / scale);
}
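
The text rendering hunk scales the transform so one renderer font unit equals the requested line height, then expresses margins and bounds back in font units by dividing by that scale. A standalone arithmetic sketch with assumed example values:

```cpp
// Minimal sketch of the text layout arithmetic; all values are assumed examples.
#include <iostream>

int main() {
    float lineHeight = 0.1f;       // metres per text line (entity property)
    float fontSize = 40.0f;        // font units per line reported by the renderer
    float scale = lineHeight / fontSize;

    float leftMargin = 0.1f * lineHeight;            // metres
    float dimensionsX = 1.0f;                        // entity width in metres
    float boundsX = dimensionsX - 2.0f * leftMargin;

    std::cout << scale << "\n";                      // 0.0025 metres per font unit
    std::cout << boundsX / scale << "\n";            // width handed to the text renderer, in font units
    return 0;
}
```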

View file

@ -191,8 +191,8 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
}
void RenderableWebEntityItem::setSourceUrl(const QString& value) {
qDebug() << "Setting web entity source URL to " << value;
if (_sourceUrl != value) {
qDebug() << "Setting web entity source URL to " << value;
_sourceUrl = value;
if (_webSurface) {
AbstractViewStateInterface::instance()->postLambdaEvent([this] {

View file

@ -14,6 +14,7 @@
#include <gpu/GPUConfig.h>
#include <gpu/Batch.h>
#include <AbstractViewStateInterface.h>
#include <DeferredLightingEffect.h>
#include <DependencyManager.h>
#include <GeometryCache.h>
@ -100,10 +101,17 @@ void RenderableZoneEntityItem::render(RenderArgs* args) {
case SHAPE_TYPE_COMPOUND: {
PerformanceTimer perfTimer("zone->renderCompound");
updateGeometry();
if (_model && _model->isActive()) {
// FIX ME: this is no longer available... we need to switch to payloads
//_model->renderInScene(getLocalRenderAlpha(), args);
if (_model && _model->needsFixupInScene()) {
// check to see if when we added our models to the scene they were ready, if they were not ready, then
// fix them up in the scene
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
render::PendingChanges pendingChanges;
_model->removeFromScene(scene, pendingChanges);
_model->addToScene(scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
_model->setVisibleInScene(getVisible(), scene);
}
break;
}
@ -131,6 +139,15 @@ void RenderableZoneEntityItem::render(RenderArgs* args) {
break;
}
}
if ((!_drawZoneBoundaries || getShapeType() != SHAPE_TYPE_COMPOUND) &&
_model && !_model->needsFixupInScene()) {
// If the model is in the scene but doesn't need to be, remove it.
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();
render::PendingChanges pendingChanges;
_model->removeFromScene(scene, pendingChanges);
scene->enqueuePendingChanges(pendingChanges);
}
}
bool RenderableZoneEntityItem::contains(const glm::vec3& point) const {
@ -145,3 +162,51 @@ bool RenderableZoneEntityItem::contains(const glm::vec3& point) const {
return false;
}
class RenderableZoneEntityItemMeta {
public:
RenderableZoneEntityItemMeta(EntityItemPointer entity) : entity(entity){ }
typedef render::Payload<RenderableZoneEntityItemMeta> Payload;
typedef Payload::DataPointer Pointer;
EntityItemPointer entity;
};
namespace render {
template <> const ItemKey payloadGetKey(const RenderableZoneEntityItemMeta::Pointer& payload) {
return ItemKey::Builder::opaqueShape();
}
template <> const Item::Bound payloadGetBound(const RenderableZoneEntityItemMeta::Pointer& payload) {
if (payload && payload->entity) {
return payload->entity->getAABox();
}
return render::Item::Bound();
}
template <> void payloadRender(const RenderableZoneEntityItemMeta::Pointer& payload, RenderArgs* args) {
if (args) {
if (payload && payload->entity) {
payload->entity->render(args);
}
}
}
}
bool RenderableZoneEntityItem::addToScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene,
render::PendingChanges& pendingChanges) {
_myMetaItem = scene->allocateID();
auto renderData = RenderableZoneEntityItemMeta::Pointer(new RenderableZoneEntityItemMeta(self));
auto renderPayload = render::PayloadPointer(new RenderableZoneEntityItemMeta::Payload(renderData));
pendingChanges.resetItem(_myMetaItem, renderPayload);
return true;
}
void RenderableZoneEntityItem::removeFromScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene,
render::PendingChanges& pendingChanges) {
pendingChanges.removeItem(_myMetaItem);
if (_model) {
_model->removeFromScene(scene, pendingChanges);
}
}
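
addToScene/removeFromScene above wrap the zone in a meta payload and route it through the scene's pending changes. A standalone sketch of that meta-item pattern against an assumed, simplified scene API (not the render library):

```cpp
// Minimal sketch of the meta-item pattern; the scene API here is an assumption.
#include <iostream>
#include <map>
#include <memory>

struct EntitySketch {
    void render() const { std::cout << "render zone\n"; }
};
using EntityPointer = std::shared_ptr<EntitySketch>;

struct MetaPayload {
    explicit MetaPayload(EntityPointer entity) : entity(std::move(entity)) {}
    EntityPointer entity;
};

class SceneSketch {
public:
    int allocateID() { return _nextID++; }
    void resetItem(int id, std::shared_ptr<MetaPayload> payload) { _items[id] = std::move(payload); }
    void removeItem(int id) { _items.erase(id); }
    void renderAll() const {
        for (const auto& item : _items) {
            if (item.second && item.second->entity) {
                item.second->entity->render();   // payloadRender forwards to the entity
            }
        }
    }
private:
    int _nextID = 0;
    std::map<int, std::shared_ptr<MetaPayload>> _items;
};

int main() {
    SceneSketch scene;
    auto zone = std::make_shared<EntitySketch>();
    int metaItem = scene.allocateID();                               // addToScene
    scene.resetItem(metaItem, std::make_shared<MetaPayload>(zone));
    scene.renderAll();                                               // "render zone"
    scene.removeItem(metaItem);                                      // removeFromScene
    return 0;
}
```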

View file

@ -35,6 +35,9 @@ public:
virtual void render(RenderArgs* args);
virtual bool contains(const glm::vec3& point) const;
virtual bool addToScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
virtual void removeFromScene(EntityItemPointer self, std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
private:
Model* getModel();
void initialSimulation();
@ -45,6 +48,8 @@ private:
Model* _model;
bool _needsInitialSimulation;
render::ItemID _myMetaItem;
};
#endif // hifi_RenderableZoneEntityItem_h

View file

@ -0,0 +1,33 @@
//
// EntityActionFactoryInterface.h
// libraries/entities/src
//
// Created by Seth Alves on 2015-6-2
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_EntityActionFactoryInterface_h
#define hifi_EntityActionFactoryInterface_h
#include <DependencyManager.h>
#include "EntityActionInterface.h"
class EntityActionFactoryInterface : public QObject, public Dependency {
Q_OBJECT
SINGLETON_DEPENDENCY
public:
EntityActionFactoryInterface() { }
virtual ~EntityActionFactoryInterface() { }
virtual EntityActionPointer factory(EntitySimulation* simulation,
EntityActionType type,
QUuid id,
EntityItemPointer ownerEntity,
QVariantMap arguments) { assert(false); return nullptr; }
};
#endif // hifi_EntityActionFactoryInterface_h

View file

@ -22,6 +22,12 @@ EntityActionType EntityActionInterface::actionTypeFromString(QString actionTypeS
if (normalizedActionTypeString == "pulltopoint") {
return ACTION_TYPE_PULL_TO_POINT;
}
if (normalizedActionTypeString == "spring") {
return ACTION_TYPE_SPRING;
}
if (normalizedActionTypeString == "hold") {
return ACTION_TYPE_HOLD;
}
qDebug() << "Warning -- EntityActionInterface::actionTypeFromString got unknown action-type name" << actionTypeString;
return ACTION_TYPE_NONE;
@ -33,31 +39,37 @@ QString EntityActionInterface::actionTypeToString(EntityActionType actionType) {
return "none";
case ACTION_TYPE_PULL_TO_POINT:
return "pullToPoint";
case ACTION_TYPE_SPRING:
return "spring";
case ACTION_TYPE_HOLD:
return "hold";
}
assert(false);
return "none";
}
glm::vec3 EntityActionInterface::extractVec3Argument(QString objectName, QVariantMap arguments,
QString argumentName, bool& ok) {
QString argumentName, bool& ok, bool required) {
if (!arguments.contains(argumentName)) {
qDebug() << objectName << "requires argument:" << argumentName;
if (required) {
qDebug() << objectName << "requires argument:" << argumentName;
}
ok = false;
return vec3();
return glm::vec3();
}
QVariant resultV = arguments[argumentName];
if (resultV.type() != (QVariant::Type) QMetaType::QVariantMap) {
qDebug() << objectName << "argument" << argumentName << "must be a map";
ok = false;
return vec3();
return glm::vec3();
}
QVariantMap resultVM = resultV.toMap();
if (!resultVM.contains("x") || !resultVM.contains("y") || !resultVM.contains("z")) {
qDebug() << objectName << "argument" << argumentName << "must be a map with keys of x, y, z";
ok = false;
return vec3();
return glm::vec3();
}
QVariant xV = resultVM["x"];
@ -73,17 +85,65 @@ glm::vec3 EntityActionInterface::extractVec3Argument(QString objectName, QVarian
if (!xOk || !yOk || !zOk) {
qDebug() << objectName << "argument" << argumentName << "must be a map with keys of x, y, z and values of type float.";
ok = false;
return vec3();
return glm::vec3();
}
return vec3(x, y, z);
return glm::vec3(x, y, z);
}
glm::quat EntityActionInterface::extractQuatArgument(QString objectName, QVariantMap arguments,
QString argumentName, bool& ok, bool required) {
if (!arguments.contains(argumentName)) {
if (required) {
qDebug() << objectName << "requires argument:" << argumentName;
}
ok = false;
return glm::quat();
}
QVariant resultV = arguments[argumentName];
if (resultV.type() != (QVariant::Type) QMetaType::QVariantMap) {
qDebug() << objectName << "argument" << argumentName << "must be a map, not" << resultV.typeName();
ok = false;
return glm::quat();
}
QVariantMap resultVM = resultV.toMap();
if (!resultVM.contains("x") || !resultVM.contains("y") || !resultVM.contains("z")) {
qDebug() << objectName << "argument" << argumentName << "must be a map with keys of x, y, z";
ok = false;
return glm::quat();
}
QVariant xV = resultVM["x"];
QVariant yV = resultVM["y"];
QVariant zV = resultVM["z"];
QVariant wV = resultVM["w"];
bool xOk = true;
bool yOk = true;
bool zOk = true;
bool wOk = true;
float x = xV.toFloat(&xOk);
float y = yV.toFloat(&yOk);
float z = zV.toFloat(&zOk);
float w = wV.toFloat(&wOk);
if (!xOk || !yOk || !zOk || !wOk) {
qDebug() << objectName << "argument" << argumentName
<< "must be a map with keys of x, y, z, w and values of type float.";
ok = false;
return glm::quat();
}
return glm::quat(w, x, y, z);
}
float EntityActionInterface::extractFloatArgument(QString objectName, QVariantMap arguments,
QString argumentName, bool& ok) {
QString argumentName, bool& ok, bool required) {
if (!arguments.contains(argumentName)) {
qDebug() << objectName << "requires argument:" << argumentName;
if (required) {
qDebug() << objectName << "requires argument:" << argumentName;
}
ok = false;
return 0.0f;
}
@ -99,3 +159,18 @@ float EntityActionInterface::extractFloatArgument(QString objectName, QVariantMa
return v;
}
QString EntityActionInterface::extractStringArgument(QString objectName, QVariantMap arguments,
QString argumentName, bool& ok, bool required) {
if (!arguments.contains(argumentName)) {
if (required) {
qDebug() << objectName << "requires argument:" << argumentName;
}
ok = false;
return "";
}
QVariant vV = arguments[argumentName];
QString v = vV.toString();
return v;
}

View file

@ -14,12 +14,16 @@
#include <QUuid>
#include "EntityItem.h"
class EntitySimulation;
enum EntityActionType {
// keep these synchronized with actionTypeFromString and actionTypeToString
ACTION_TYPE_NONE,
ACTION_TYPE_PULL_TO_POINT
ACTION_TYPE_PULL_TO_POINT,
ACTION_TYPE_SPRING,
ACTION_TYPE_HOLD
};
@ -32,18 +36,35 @@ public:
virtual const EntityItemPointer& getOwnerEntity() const = 0;
virtual void setOwnerEntity(const EntityItemPointer ownerEntity) = 0;
virtual bool updateArguments(QVariantMap arguments) = 0;
// virtual QByteArray serialize() = 0;
// static EntityActionPointer deserialize(EntityItemPointer ownerEntity, QByteArray data);
static EntityActionType actionTypeFromString(QString actionTypeString);
static QString actionTypeToString(EntityActionType actionType);
protected:
virtual glm::vec3 getPosition() = 0;
virtual void setPosition(glm::vec3 position) = 0;
virtual glm::quat getRotation() = 0;
virtual void setRotation(glm::quat rotation) = 0;
virtual glm::vec3 getLinearVelocity() = 0;
virtual void setLinearVelocity(glm::vec3 linearVelocity) = 0;
virtual glm::vec3 getAngularVelocity() = 0;
virtual void setAngularVelocity(glm::vec3 angularVelocity) = 0;
// these look in the arguments map for a named argument. if it's not found or isn't well formed,
// ok will be set to false (note that it's never set to true -- set it to true before calling these).
// if required is true, failure to extract an argument will cause a warning to be printed.
static glm::vec3 extractVec3Argument (QString objectName, QVariantMap arguments,
QString argumentName, bool& ok, bool required = true);
static glm::quat extractQuatArgument (QString objectName, QVariantMap arguments,
QString argumentName, bool& ok, bool required = true);
static float extractFloatArgument(QString objectName, QVariantMap arguments,
QString argumentName, bool& ok, bool required = true);
static QString extractStringArgument(QString objectName, QVariantMap arguments,
QString argumentName, bool& ok, bool required = true);
static glm::vec3 extractVec3Argument(QString objectName, QVariantMap arguments, QString argumentName, bool& ok);
static float extractFloatArgument(QString objectName, QVariantMap arguments, QString argumentName, bool& ok);
};
typedef std::shared_ptr<EntityActionInterface> EntityActionPointer;
#endif // hifi_EntityActionInterface_h
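
As a usage sketch of the calling pattern described in the comment above (the caller seeds ok with true, and required = false keeps optional arguments from printing a warning); the action name and argument names here are illustrative:

#include <QVariantMap>
#include <glm/glm.hpp>
#include "EntityActionInterface.h"

// Sketch: how an updateArguments() override might drive these helpers.
static bool parseExampleArguments(QVariantMap arguments, glm::vec3& targetOut, float& timeScaleOut) {
    bool ok = true;  // the helpers only ever set this to false, so start it at true
    targetOut = EntityActionInterface::extractVec3Argument("example action", arguments, "targetPosition", ok);
    if (!ok) {
        return false;  // required argument missing or malformed (a warning was printed)
    }
    bool haveTimeScale = true;
    timeScaleOut = EntityActionInterface::extractFloatArgument("example action", arguments,
                                                               "linearTimeScale", haveTimeScale, false);
    if (!haveTimeScale) {
        timeScaleOut = 0.1f;  // optional argument absent; fall back to a default
    }
    return true;
}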

View file

@ -70,7 +70,9 @@ EntityItem::EntityItem(const EntityItemID& entityItemID) :
_dirtyFlags(0),
_element(nullptr),
_physicsInfo(nullptr),
_simulated(false)
_simulated(false),
_href(""),
_description("")
{
quint64 now = usecTimestampNow();
_lastSimulated = now;
@ -117,6 +119,8 @@ EntityPropertyFlags EntityItem::getEntityProperties(EncodeBitstreamParams& param
requestedProperties += PROP_MARKETPLACE_ID;
requestedProperties += PROP_NAME;
requestedProperties += PROP_SIMULATOR_ID;
requestedProperties += PROP_HREF;
requestedProperties += PROP_DESCRIPTION;
return requestedProperties;
}
@ -246,6 +250,9 @@ OctreeElement::AppendState EntityItem::appendEntityData(OctreePacketData* packet
APPEND_ENTITY_PROPERTY(PROP_MARKETPLACE_ID, getMarketplaceID());
APPEND_ENTITY_PROPERTY(PROP_NAME, getName());
APPEND_ENTITY_PROPERTY(PROP_COLLISION_SOUND_URL, getCollisionSoundURL());
APPEND_ENTITY_PROPERTY(PROP_HREF, getHref());
APPEND_ENTITY_PROPERTY(PROP_DESCRIPTION, getDescription());
appendSubclassData(packetData, params, entityTreeElementExtraEncodeData,
requestedProperties,
@ -573,6 +580,9 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
READ_ENTITY_PROPERTY(PROP_NAME, QString, setName);
READ_ENTITY_PROPERTY(PROP_COLLISION_SOUND_URL, QString, setCollisionSoundURL);
READ_ENTITY_PROPERTY(PROP_HREF, QString, setHref);
READ_ENTITY_PROPERTY(PROP_DESCRIPTION, QString, setDescription);
bytesRead += readEntitySubclassDataFromBuffer(dataAt, (bytesLeftToRead - bytesRead), args, propertyFlags, overwriteLocalData);
////////////////////////////////////
@ -905,6 +915,8 @@ EntityItemProperties EntityItem::getProperties() const {
COPY_ENTITY_PROPERTY_TO_PROPERTIES(simulatorID, getSimulatorID);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(marketplaceID, getMarketplaceID);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(name, getName);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(href, getHref);
COPY_ENTITY_PROPERTY_TO_PROPERTIES(description, getDescription);
properties._defaultSettings = false;
@ -963,6 +975,8 @@ bool EntityItem::setProperties(const EntityItemProperties& properties) {
SET_ENTITY_PROPERTY_FROM_PROPERTIES(userData, setUserData);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(marketplaceID, setMarketplaceID);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(name, setName);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(href, setHref);
SET_ENTITY_PROPERTY_FROM_PROPERTIES(description, setDescription);
if (somethingChanged) {
uint64_t now = usecTimestampNow();

View file

@ -28,13 +28,16 @@
#include "EntityItemID.h"
#include "EntityItemProperties.h"
#include "EntityItemPropertiesDefaults.h"
#include "EntityActionInterface.h"
#include "EntityTypes.h"
class EntitySimulation;
class EntityTreeElement;
class EntityTreeElementExtraEncodeData;
class EntityActionInterface;
typedef std::shared_ptr<EntityActionInterface> EntityActionPointer;
namespace render {
class Scene;
class PendingChanges;
@ -203,7 +206,14 @@ public:
inline const glm::quat& getRotation() const { return _transform.getRotation(); }
inline void setRotation(const glm::quat& rotation) { _transform.setRotation(rotation); }
// Hyperlink related getters and setters
QString getHref() const { return _href; }
void setHref(QString value) { _href = value; }
QString getDescription() const { return _description; }
void setDescription(QString value) { _description = value; }
/// Dimensions in meters (0.0 - TREE_SCALE)
inline const glm::vec3& getDimensions() const { return _transform.getScale(); }
virtual void setDimensions(const glm::vec3& value);
@ -415,6 +425,8 @@ protected:
quint64 _simulatorIDChangedTime; // when was _simulatorID last updated?
QString _marketplaceID;
QString _name;
QString _href; //Hyperlink href
QString _description; //Hyperlink description
// NOTE: Damping is applied like this: v *= pow(1 - damping, dt)
//

View file

@ -347,6 +347,9 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
CHECK_PROPERTY_CHANGE(PROP_VOXEL_SURFACE_STYLE, voxelSurfaceStyle);
CHECK_PROPERTY_CHANGE(PROP_LINE_WIDTH, lineWidth);
CHECK_PROPERTY_CHANGE(PROP_LINE_POINTS, linePoints);
CHECK_PROPERTY_CHANGE(PROP_HREF, href);
CHECK_PROPERTY_CHANGE(PROP_DESCRIPTION, description);
changedProperties += _stage.getChangedProperties();
changedProperties += _atmosphere.getChangedProperties();
changedProperties += _skybox.getChangedProperties();
@ -439,7 +442,9 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE(voxelSurfaceStyle);
COPY_PROPERTY_TO_QSCRIPTVALUE(lineWidth);
COPY_PROPERTY_TO_QSCRIPTVALUE(linePoints);
COPY_PROPERTY_TO_QSCRIPTVALUE(href);
COPY_PROPERTY_TO_QSCRIPTVALUE(description);
// Sitting properties support
if (!skipDefaults) {
QScriptValue sittingPoints = engine->newObject();
@ -548,6 +553,9 @@ void EntityItemProperties::copyFromScriptValue(const QScriptValue& object, bool
COPY_PROPERTY_FROM_QSCRIPTVALUE(voxelSurfaceStyle, uint16_t, setVoxelSurfaceStyle);
COPY_PROPERTY_FROM_QSCRIPTVALUE(lineWidth, float, setLineWidth);
COPY_PROPERTY_FROM_QSCRIPTVALUE(linePoints, qVectorVec3, setLinePoints);
COPY_PROPERTY_FROM_QSCRIPTVALUE(href, QString, setHref);
COPY_PROPERTY_FROM_QSCRIPTVALUE(description, QString, setDescription);
if (!honorReadOnly) {
// this is used by the json reader to set things that we don't want javascript to be able to affect.
@ -712,6 +720,8 @@ bool EntityItemProperties::encodeEntityEditPacket(PacketType command, EntityItem
APPEND_ENTITY_PROPERTY(PROP_LOCKED, properties.getLocked());
APPEND_ENTITY_PROPERTY(PROP_USER_DATA, properties.getUserData());
APPEND_ENTITY_PROPERTY(PROP_SIMULATOR_ID, properties.getSimulatorID());
APPEND_ENTITY_PROPERTY(PROP_HREF, properties.getHref());
APPEND_ENTITY_PROPERTY(PROP_DESCRIPTION, properties.getDescription());
if (properties.getType() == EntityTypes::Web) {
APPEND_ENTITY_PROPERTY(PROP_SOURCE_URL, properties.getSourceUrl());
@ -962,6 +972,8 @@ bool EntityItemProperties::decodeEntityEditPacket(const unsigned char* data, int
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_LOCKED, bool, setLocked);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_USER_DATA, QString, setUserData);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_SIMULATOR_ID, QUuid, setSimulatorID);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_HREF, QString, setHref);
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_DESCRIPTION, QString, setDescription);
if (properties.getType() == EntityTypes::Web) {
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_SOURCE_URL, QString, setSourceUrl);
@ -1147,6 +1159,9 @@ void EntityItemProperties::markAllChanged() {
_lineWidthChanged = true;
_linePointsChanged = true;
_hrefChanged = true;
_descriptionChanged = true;
}
/// The maximum bounding cube for the entity, independent of its rotation.

View file

@ -148,6 +148,8 @@ public:
DEFINE_PROPERTY_REF(PROP_SOURCE_URL, SourceUrl, sourceUrl, QString);
DEFINE_PROPERTY(PROP_LINE_WIDTH, LineWidth, lineWidth, float);
DEFINE_PROPERTY_REF(PROP_LINE_POINTS, LinePoints, linePoints, QVector<glm::vec3>);
DEFINE_PROPERTY_REF(PROP_HREF, Href, href, QString);
DEFINE_PROPERTY_REF(PROP_DESCRIPTION, Description, description, QString);
static QString getBackgroundModeString(BackgroundMode mode);
@ -295,6 +297,8 @@ inline QDebug operator<<(QDebug debug, const EntityItemProperties& properties) {
DEBUG_PROPERTY_IF_CHANGED(debug, properties, VoxelVolumeSize, voxelVolumeSize, "");
DEBUG_PROPERTY_IF_CHANGED(debug, properties, VoxelData, voxelData, "");
DEBUG_PROPERTY_IF_CHANGED(debug, properties, VoxelSurfaceStyle, voxelSurfaceStyle, "");
DEBUG_PROPERTY_IF_CHANGED(debug, properties, Href, href, "");
DEBUG_PROPERTY_IF_CHANGED(debug, properties, Description, description, "");
properties.getStage().debugDump();
properties.getAtmosphere().debugDump();

View file

@ -117,6 +117,10 @@ enum EntityPropertyList {
//for lines
PROP_LINE_WIDTH,
PROP_LINE_POINTS,
// used by hyperlinks
PROP_HREF,
PROP_DESCRIPTION,
////////////////////////////////////////////////////////////////////////////////////////////////////
// ATTENTION: add new properties ABOVE this line

View file

@ -17,6 +17,8 @@
#include "ZoneEntityItem.h"
#include "EntitiesLogging.h"
#include "EntitySimulation.h"
#include "EntityActionInterface.h"
#include "EntityActionFactoryInterface.h"
#include "EntityScriptingInterface.h"
@ -491,12 +493,19 @@ QUuid EntityScriptingInterface::addAction(const QString& actionTypeString,
const QUuid& entityID,
const QVariantMap& arguments) {
QUuid actionID = QUuid::createUuid();
auto actionFactory = DependencyManager::get<EntityActionFactoryInterface>();
bool success = actionWorker(entityID, [&](EntitySimulation* simulation, EntityItemPointer entity) {
// create this action even if the entity doesn't have physics info. it will often be the
// case that a script adds an action immediately after an object is created, and the physicsInfo
// is computed asynchronously.
// if (!entity->getPhysicsInfo()) {
// return false;
// }
EntityActionType actionType = EntityActionInterface::actionTypeFromString(actionTypeString);
if (actionType == ACTION_TYPE_NONE) {
return false;
}
if (simulation->actionFactory(actionType, actionID, entity, arguments)) {
if (actionFactory->factory(simulation, actionType, actionID, entity, arguments)) {
return true;
}
return false;

View file

@ -18,6 +18,7 @@
#include <PerfStat.h>
#include "EntityActionInterface.h"
#include "EntityItem.h"
#include "EntityTree.h"
@ -56,10 +57,6 @@ public:
friend class EntityTree;
virtual EntityActionPointer actionFactory(EntityActionType type,
QUuid id,
EntityItemPointer ownerEntity,
QVariantMap arguments) { return nullptr; }
virtual void addAction(EntityActionPointer action) { _actionsToAdd += action; }
virtual void removeAction(const QUuid actionID) { _actionsToRemove += actionID; }
virtual void removeActions(QList<QUuid> actionIDsToRemove) { _actionsToRemove += actionIDsToRemove; }

View file

@ -392,17 +392,20 @@ OctreeElement::AppendState EntityTreeElement::appendElementData(OctreePacketData
// If we wrote fewer entities than we expected, update the number of entities in our packet
bool successUpdateEntityCount = true;
if (!noEntitiesFit && numberOfEntities != actualNumberOfEntities) {
if (numberOfEntities != actualNumberOfEntities) {
successUpdateEntityCount = packetData->updatePriorBytes(numberOfEntitiesOffset,
(const unsigned char*)&actualNumberOfEntities, sizeof(actualNumberOfEntities));
}
// If we weren't able to update our entity count then we should discard our element
// and return a result of NONE; if the element fit but not all entities did, return PARTIAL
if (!successUpdateEntityCount || noEntitiesFit) {
if (!successUpdateEntityCount) {
packetData->discardLevel(elementLevel);
appendElementState = OctreeElement::NONE;
} else {
if (noEntitiesFit) {
appendElementState = OctreeElement::PARTIAL;
}
packetData->endLevel(elementLevel);
}
return appendElementState;

View file

@ -18,6 +18,7 @@
#include "EntityTree.h"
#include "EntitiesLogging.h"
#include "EntityTreeElement.h"
#include "OctreeConstants.h"
@ -86,16 +87,20 @@ bool LineEntityItem::setProperties(const EntityItemProperties& properties) {
void LineEntityItem::setLinePoints(const QVector<glm::vec3>& points) {
QVector<glm::vec3> sanitizedPoints;
int invalidPoints = 0;
for (int i = 0; i < points.size(); i++) {
glm::vec3 point = points.at(i);
// Make sure all of our points are valid numbers.
// Must be greater than 0 because vector component is set to 0 if it is invalid data
if (point.x > 0 && point.y > 0 && point.z > 0){
// Must be greater than 0 because vector component is set to 0 if it is invalid data. Also should never be greater than TREE_SCALE
if ( (point.x > 0 && point.x < TREE_SCALE) && (point.y > 0 && point.y < TREE_SCALE) && (point.z > 0 && point.z < TREE_SCALE) ) {
sanitizedPoints << point;
} else {
qDebug() << "INVALID POINT";
++invalidPoints;
}
}
if (invalidPoints > 0) {
qDebug() << "Line with" << invalidPoints << "INVALID POINTS";
}
_points = sanitizedPoints;
_pointsChanged = true;
}

View file

@ -1902,8 +1902,8 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
} else {
material._material->setDiffuse(material.diffuse);
}
material._material->setSpecular(material.specular);
material._material->setShininess(material.shininess);
material._material->setMetallic(glm::length(material.specular));
material._material->setGloss(material.shininess);
if (material.opacity <= 0.0f) {
material._material->setOpacity(1.0f);
@ -2646,34 +2646,6 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
}
}
geometry.palmDirection = parseVec3(mapping.value("palmDirection", "0, -1, 0").toString());
// process attachments
QVariantHash attachments = mapping.value("attach").toHash();
for (QVariantHash::const_iterator it = attachments.constBegin(); it != attachments.constEnd(); it++) {
FBXAttachment attachment;
attachment.jointIndex = modelIDs.indexOf(processID(it.key()));
attachment.scale = glm::vec3(1.0f, 1.0f, 1.0f);
QVariantList properties = it->toList();
if (properties.isEmpty()) {
attachment.url = it->toString();
} else {
attachment.url = properties.at(0).toString();
if (properties.size() >= 2) {
attachment.translation = parseVec3(properties.at(1).toString());
if (properties.size() >= 3) {
attachment.rotation = glm::quat(glm::radians(parseVec3(properties.at(2).toString())));
if (properties.size() >= 4) {
attachment.scale = parseVec3(properties.at(3).toString());
}
}
}
}
geometry.attachments.append(attachment);
}
// Add sitting points
QVariantHash sittingPoints = mapping.value("sit").toHash();

View file

@ -189,17 +189,6 @@ public:
Q_DECLARE_METATYPE(FBXAnimationFrame)
Q_DECLARE_METATYPE(QVector<FBXAnimationFrame>)
/// An attachment to an FBX document.
class FBXAttachment {
public:
int jointIndex;
QUrl url;
glm::vec3 translation;
glm::quat rotation;
glm::vec3 scale;
};
/// A point where an avatar can sit
class SittingPoint {
public:
@ -256,9 +245,7 @@ public:
Extents meshExtents;
QVector<FBXAnimationFrame> animationFrames;
QVector<FBXAttachment> attachments;
int getJointIndex(const QString& name) const { return jointIndices.value(name) - 1; }
QStringList getJointNames() const;

View file

@ -124,7 +124,9 @@ FSTReader::ModelType FSTReader::getTypeFromName(const QString& name) {
_namesToTypes["head"] = HEAD_MODEL ;
_namesToTypes["body"] = BODY_ONLY_MODEL;
_namesToTypes["body+head"] = HEAD_AND_BODY_MODEL;
_namesToTypes["attachment"] = ATTACHMENT_MODEL;
// NOTE: this is not yet implemented, but will be used to allow you to attach fully independent models to your avatar
_namesToTypes["attachment"] = ATTACHMENT_MODEL;
}
return _namesToTypes[name];
}

View file

@ -134,8 +134,8 @@ void setMeshPartDefaults(FBXMeshPart& meshPart, QString materialID) {
meshPart._material = model::MaterialPointer(new model::Material());
meshPart._material->setDiffuse(glm::vec3(1.0, 1.0, 1.0));
meshPart._material->setOpacity(1.0);
meshPart._material->setSpecular(glm::vec3(1.0, 1.0, 1.0));
meshPart._material->setShininess(96.0);
meshPart._material->setMetallic(0.0);
meshPart._material->setGloss(96.0);
meshPart._material->setEmissive(glm::vec3(0.0, 0.0, 0.0));
}
@ -481,8 +481,8 @@ FBXGeometry OBJReader::readOBJ(QIODevice* device, const QVariantHash& mapping, Q
meshPart.specularTexture.filename = material->specularTextureFilename;
// ... and some things are set in the underlying material.
meshPart._material->setDiffuse(material->diffuseColor);
meshPart._material->setSpecular(material->specularColor);
meshPart._material->setShininess(material->shininess);
meshPart._material->setMetallic(glm::length(material->specularColor));
meshPart._material->setGloss(material->shininess);
meshPart._material->setOpacity(material->opacity);
}
// qCDebug(modelformat) << "OBJ Reader part:" << meshPartCount << "name:" << leadFace.groupName << "material:" << groupMaterialName << "diffuse:" << meshPart._material->getDiffuse() << "faces:" << faceGroup.count() << "triangle indices will start with:" << mesh.vertices.count();
@ -544,7 +544,6 @@ void fbxDebugDump(const FBXGeometry& fbxgeo) {
qCDebug(modelformat) << "---------------- fbxGeometry ----------------";
qCDebug(modelformat) << " hasSkeletonJoints =" << fbxgeo.hasSkeletonJoints;
qCDebug(modelformat) << " offset =" << fbxgeo.offset;
qCDebug(modelformat) << " attachments.count() = " << fbxgeo.attachments.count();
qCDebug(modelformat) << " meshes.count() =" << fbxgeo.meshes.count();
foreach (FBXMesh mesh, fbxgeo.meshes) {
qCDebug(modelformat) << " vertices.count() =" << mesh.vertices.count();
@ -567,10 +566,10 @@ void fbxDebugDump(const FBXGeometry& fbxgeo) {
qCDebug(modelformat) << " quadIndices.count() =" << meshPart.quadIndices.count();
qCDebug(modelformat) << " triangleIndices.count() =" << meshPart.triangleIndices.count();
qCDebug(modelformat) << " diffuseColor =" << meshPart.diffuseColor << "mat =" << meshPart._material->getDiffuse();
qCDebug(modelformat) << " specularColor =" << meshPart.specularColor << "mat =" << meshPart._material->getSpecular();
qCDebug(modelformat) << " specularColor =" << meshPart.specularColor << "mat =" << meshPart._material->getMetallic();
qCDebug(modelformat) << " emissiveColor =" << meshPart.emissiveColor << "mat =" << meshPart._material->getEmissive();
qCDebug(modelformat) << " emissiveParams =" << meshPart.emissiveParams;
qCDebug(modelformat) << " shininess =" << meshPart.shininess << "mat =" << meshPart._material->getShininess();
qCDebug(modelformat) << " gloss =" << meshPart.shininess << "mat =" << meshPart._material->getGloss();
qCDebug(modelformat) << " opacity =" << meshPart.opacity << "mat =" << meshPart._material->getOpacity();
qCDebug(modelformat) << " materialID =" << meshPart.materialID;
qCDebug(modelformat) << " diffuse texture =" << meshPart.diffuseTexture.filename;

View file

@ -437,6 +437,8 @@ public:
explicit operator bool() const { return bool(_texture); }
bool operator !() const { return (!_texture); }
bool isValid() const { return bool(_texture); }
};
typedef std::vector<TextureView> TextureViews;

View file

@ -14,7 +14,7 @@ using namespace model;
using namespace gpu;
Material::Material() :
_flags(0),
_key(0),
_schemaBuffer(),
_textureMap() {
@ -26,13 +26,13 @@ Material::Material() :
}
Material::Material(const Material& material) :
_flags(material._flags),
_key(material._key),
_schemaBuffer(material._schemaBuffer),
_textureMap(material._textureMap) {
}
Material& Material::operator= (const Material& material) {
_flags = (material._flags);
_key = (material._key);
_schemaBuffer = (material._schemaBuffer);
_textureMap = (material._textureMap);
@ -43,52 +43,32 @@ Material::~Material() {
}
void Material::setDiffuse(const Color& diffuse) {
if (glm::any(glm::greaterThan(diffuse, Color(0.0f)))) {
_flags.set(DIFFUSE_BIT);
} else {
_flags.reset(DIFFUSE_BIT);
}
_key.setDiffuse(glm::any(glm::greaterThan(diffuse, Color(0.0f))));
_schemaBuffer.edit<Schema>()._diffuse = diffuse;
}
void Material::setSpecular(const Color& specular) {
if (glm::any(glm::greaterThan(specular, Color(0.0f)))) {
_flags.set(SPECULAR_BIT);
} else {
_flags.reset(SPECULAR_BIT);
}
_schemaBuffer.edit<Schema>()._specular = specular;
void Material::setMetallic(float metallic) {
_key.setMetallic(metallic > 0.0f);
_schemaBuffer.edit<Schema>()._metallic = glm::vec3(metallic);
}
void Material::setEmissive(const Color& emissive) {
if (glm::any(glm::greaterThan(emissive, Color(0.0f)))) {
_flags.set(EMISSIVE_BIT);
} else {
_flags.reset(EMISSIVE_BIT);
}
_key.setEmissive(glm::any(glm::greaterThan(emissive, Color(0.0f))));
_schemaBuffer.edit<Schema>()._emissive = emissive;
}
void Material::setShininess(float shininess) {
if (shininess > 0.0f) {
_flags.set(SHININESS_BIT);
} else {
_flags.reset(SHININESS_BIT);
}
_schemaBuffer.edit<Schema>()._shininess = shininess;
void Material::setGloss(float gloss) {
_key.setGloss((gloss > 0.0f));
_schemaBuffer.edit<Schema>()._gloss = gloss;
}
void Material::setOpacity(float opacity) {
if (opacity >= 1.0f) {
_flags.reset(TRANSPARENT_BIT);
} else {
_flags.set(TRANSPARENT_BIT);
}
_key.setTransparent((opacity < 1.0f));
_schemaBuffer.edit<Schema>()._opacity = opacity;
}
void Material::setTextureView(MapChannel channel, const gpu::TextureView& view) {
_flags.set(DIFFUSE_MAP_BIT + channel);
_key.setMapChannel(channel, (view.isValid()));
_textureMap[channel] = view;
}

View file

@ -23,6 +23,177 @@
namespace model {
// Material Key is a coarse trait description of a material used to classify the materials
class MaterialKey {
public:
enum FlagBit {
EMISSIVE_VAL_BIT = 0,
DIFFUSE_VAL_BIT,
METALLIC_VAL_BIT,
GLOSS_VAL_BIT,
TRANSPARENT_VAL_BIT,
EMISSIVE_MAP_BIT,
DIFFUSE_MAP_BIT,
METALLIC_MAP_BIT,
GLOSS_MAP_BIT,
TRANSPARENT_MAP_BIT,
NORMAL_MAP_BIT,
NUM_FLAGS,
};
typedef std::bitset<NUM_FLAGS> Flags;
enum MapChannel {
EMISSIVE_MAP = 0,
DIFFUSE_MAP,
METALLIC_MAP,
GLOSS_MAP,
TRANSPARENT_MAP,
NORMAL_MAP,
NUM_MAP_CHANNELS,
};
// The signature is the Flags
Flags _flags;
MaterialKey() : _flags(0) {}
MaterialKey(const Flags& flags) : _flags(flags) {}
class Builder {
Flags _flags{ 0 };
public:
Builder() {}
MaterialKey build() const { return MaterialKey(_flags); }
Builder& withEmissive() { _flags.set(EMISSIVE_VAL_BIT); return (*this); }
Builder& withDiffuse() { _flags.set(DIFFUSE_VAL_BIT); return (*this); }
Builder& withMetallic() { _flags.set(METALLIC_VAL_BIT); return (*this); }
Builder& withGloss() { _flags.set(GLOSS_VAL_BIT); return (*this); }
Builder& withTransparent() { _flags.set(TRANSPARENT_VAL_BIT); return (*this); }
Builder& withEmissiveMap() { _flags.set(EMISSIVE_MAP_BIT); return (*this); }
Builder& withDiffuseMap() { _flags.set(DIFFUSE_MAP_BIT); return (*this); }
Builder& withMetallicMap() { _flags.set(METALLIC_MAP_BIT); return (*this); }
Builder& withGlossMap() { _flags.set(GLOSS_MAP_BIT); return (*this); }
Builder& withTransparentMap() { _flags.set(TRANSPARENT_MAP_BIT); return (*this); }
Builder& withNormalMap() { _flags.set(NORMAL_MAP_BIT); return (*this); }
// Convenient standard keys that we will keep on using all over the place
static MaterialKey opaqueDiffuse() { return Builder().withDiffuse().build(); }
};
void setEmissive(bool value) { _flags.set(EMISSIVE_VAL_BIT, value); }
bool isEmissive() const { return _flags[EMISSIVE_VAL_BIT]; }
void setEmissiveMap(bool value) { _flags.set(EMISSIVE_MAP_BIT, value); }
bool isEmissiveMap() const { return _flags[EMISSIVE_MAP_BIT]; }
void setDiffuse(bool value) { _flags.set(DIFFUSE_VAL_BIT, value); }
bool isDiffuse() const { return _flags[DIFFUSE_VAL_BIT]; }
void setDiffuseMap(bool value) { _flags.set(DIFFUSE_MAP_BIT, value); }
bool isDiffuseMap() const { return _flags[DIFFUSE_MAP_BIT]; }
void setMetallic(bool value) { _flags.set(METALLIC_VAL_BIT, value); }
bool isMetallic() const { return _flags[METALLIC_VAL_BIT]; }
void setMetallicMap(bool value) { _flags.set(METALLIC_MAP_BIT, value); }
bool isMetallicMap() const { return _flags[METALLIC_MAP_BIT]; }
void setGloss(bool value) { _flags.set(GLOSS_VAL_BIT, value); }
bool isGloss() const { return _flags[GLOSS_VAL_BIT]; }
void setGlossMap(bool value) { _flags.set(GLOSS_MAP_BIT, value); }
bool isGlossMap() const { return _flags[GLOSS_MAP_BIT]; }
void setTransparent(bool value) { _flags.set(TRANSPARENT_VAL_BIT, value); }
bool isTransparent() const { return _flags[TRANSPARENT_VAL_BIT]; }
bool isOpaque() const { return !_flags[TRANSPARENT_VAL_BIT]; }
void setTransparentMap(bool value) { _flags.set(TRANSPARENT_MAP_BIT, value); }
bool isTransparentMap() const { return _flags[TRANSPARENT_MAP_BIT]; }
void setNormalMap(bool value) { _flags.set(NORMAL_MAP_BIT, value); }
bool isNormalMap() const { return _flags[NORMAL_MAP_BIT]; }
void setMapChannel(MapChannel channel, bool value) { _flags.set(EMISSIVE_MAP_BIT + channel, value); }
bool isMapChannel(MapChannel channel) const { return _flags[EMISSIVE_MAP_BIT + channel]; }
};
class MaterialFilter {
public:
MaterialKey::Flags _value{ 0 };
MaterialKey::Flags _mask{ 0 };
MaterialFilter(const MaterialKey::Flags& value = MaterialKey::Flags(0), const MaterialKey::Flags& mask = MaterialKey::Flags(0)) : _value(value), _mask(mask) {}
class Builder {
MaterialKey::Flags _value{ 0 };
MaterialKey::Flags _mask{ 0 };
public:
Builder() {}
MaterialFilter build() const { return MaterialFilter(_value, _mask); }
Builder& withoutEmissive() { _value.reset(MaterialKey::EMISSIVE_VAL_BIT); _mask.set(MaterialKey::EMISSIVE_VAL_BIT); return (*this); }
Builder& withEmissive() { _value.set(MaterialKey::EMISSIVE_VAL_BIT); _mask.set(MaterialKey::EMISSIVE_VAL_BIT); return (*this); }
Builder& withoutEmissiveMap() { _value.reset(MaterialKey::EMISSIVE_MAP_BIT); _mask.set(MaterialKey::EMISSIVE_MAP_BIT); return (*this); }
Builder& withEmissiveMap() { _value.set(MaterialKey::EMISSIVE_MAP_BIT); _mask.set(MaterialKey::EMISSIVE_MAP_BIT); return (*this); }
Builder& withoutDiffuse() { _value.reset(MaterialKey::DIFFUSE_VAL_BIT); _mask.set(MaterialKey::DIFFUSE_VAL_BIT); return (*this); }
Builder& withDiffuse() { _value.set(MaterialKey::DIFFUSE_VAL_BIT); _mask.set(MaterialKey::DIFFUSE_VAL_BIT); return (*this); }
Builder& withoutDiffuseMap() { _value.reset(MaterialKey::DIFFUSE_MAP_BIT); _mask.set(MaterialKey::DIFFUSE_MAP_BIT); return (*this); }
Builder& withDiffuseMap() { _value.set(MaterialKey::DIFFUSE_MAP_BIT); _mask.set(MaterialKey::DIFFUSE_MAP_BIT); return (*this); }
Builder& withoutMetallic() { _value.reset(MaterialKey::METALLIC_VAL_BIT); _mask.set(MaterialKey::METALLIC_VAL_BIT); return (*this); }
Builder& withMetallic() { _value.set(MaterialKey::METALLIC_VAL_BIT); _mask.set(MaterialKey::METALLIC_VAL_BIT); return (*this); }
Builder& withoutMetallicMap() { _value.reset(MaterialKey::METALLIC_MAP_BIT); _mask.set(MaterialKey::METALLIC_MAP_BIT); return (*this); }
Builder& withMetallicMap() { _value.set(MaterialKey::METALLIC_MAP_BIT); _mask.set(MaterialKey::METALLIC_MAP_BIT); return (*this); }
Builder& withoutGloss() { _value.reset(MaterialKey::GLOSS_VAL_BIT); _mask.set(MaterialKey::GLOSS_VAL_BIT); return (*this); }
Builder& withGloss() { _value.set(MaterialKey::GLOSS_VAL_BIT); _mask.set(MaterialKey::GLOSS_VAL_BIT); return (*this); }
Builder& withoutGlossMap() { _value.reset(MaterialKey::GLOSS_MAP_BIT); _mask.set(MaterialKey::GLOSS_MAP_BIT); return (*this); }
Builder& withGlossMap() { _value.set(MaterialKey::GLOSS_MAP_BIT); _mask.set(MaterialKey::GLOSS_MAP_BIT); return (*this); }
Builder& withoutTransparent() { _value.reset(MaterialKey::TRANSPARENT_VAL_BIT); _mask.set(MaterialKey::TRANSPARENT_VAL_BIT); return (*this); }
Builder& withTransparent() { _value.set(MaterialKey::TRANSPARENT_VAL_BIT); _mask.set(MaterialKey::TRANSPARENT_VAL_BIT); return (*this); }
Builder& withoutTransparentMap() { _value.reset(MaterialKey::TRANSPARENT_MAP_BIT); _mask.set(MaterialKey::TRANSPARENT_MAP_BIT); return (*this); }
Builder& withTransparentMap() { _value.set(MaterialKey::TRANSPARENT_MAP_BIT); _mask.set(MaterialKey::TRANSPARENT_MAP_BIT); return (*this); }
Builder& withoutNormalMap() { _value.reset(MaterialKey::NORMAL_MAP_BIT); _mask.set(MaterialKey::NORMAL_MAP_BIT); return (*this); }
Builder& withNormalMap() { _value.set(MaterialKey::NORMAL_MAP_BIT); _mask.set(MaterialKey::NORMAL_MAP_BIT); return (*this); }
// Convenient standard keys that we will keep on using all over the place
static MaterialFilter opaqueDiffuse() { return Builder().withDiffuse().withoutTransparent().build(); }
};
// Item Filter operator testing if a key pass the filter
bool test(const MaterialKey& key) const { return (key._flags & _mask) == (_value & _mask); }
class Less {
public:
bool operator() (const MaterialFilter& left, const MaterialFilter& right) const {
if (left._value.to_ulong() == right._value.to_ulong()) {
return left._mask.to_ulong() < right._mask.to_ulong();
} else {
return left._value.to_ulong() < right._value.to_ulong();
}
}
};
};
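
A small usage sketch of the key/filter pair defined above: the Builder sets trait bits on a key, and MaterialFilter::test() compares only the bits the filter's mask constrains, so unrelated traits (like the diffuse map bit here) are ignored. The include path is illustrative:

#include "model/Material.h"

// Sketch: classify a material with a MaterialKey and test it against a MaterialFilter.
static bool exampleOpaqueDiffuseCheck() {
    model::MaterialKey key = model::MaterialKey::Builder().withDiffuse().withDiffuseMap().build();
    // match any material with a diffuse value that is not transparent, mapped or not
    model::MaterialFilter filter = model::MaterialFilter::Builder().withDiffuse().withoutTransparent().build();
    return filter.test(key);  // true for this key
}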
class Material {
public:
typedef gpu::BufferView UniformBufferView;
@ -30,52 +201,27 @@ public:
typedef glm::vec3 Color;
enum MapChannel {
DIFFUSE_MAP = 0,
SPECULAR_MAP,
SHININESS_MAP,
EMISSIVE_MAP,
OPACITY_MAP,
NORMAL_MAP,
NUM_MAPS,
};
typedef MaterialKey::MapChannel MapChannel;
typedef std::map<MapChannel, TextureView> TextureMap;
typedef std::bitset<NUM_MAPS> MapFlags;
enum FlagBit {
DIFFUSE_BIT = 0,
SPECULAR_BIT,
SHININESS_BIT,
EMISSIVE_BIT,
TRANSPARENT_BIT,
DIFFUSE_MAP_BIT,
SPECULAR_MAP_BIT,
SHININESS_MAP_BIT,
EMISSIVE_MAP_BIT,
OPACITY_MAP_BIT,
NORMAL_MAP_BIT,
NUM_FLAGS,
};
typedef std::bitset<NUM_FLAGS> Flags;
typedef std::bitset<MaterialKey::NUM_MAP_CHANNELS> MapFlags;
Material();
Material(const Material& material);
Material& operator= (const Material& material);
virtual ~Material();
const MaterialKey& getKey() const { return _key; }
const Color& getEmissive() const { return _schemaBuffer.get<Schema>()._emissive; }
const Color& getDiffuse() const { return _schemaBuffer.get<Schema>()._diffuse; }
const Color& getSpecular() const { return _schemaBuffer.get<Schema>()._specular; }
float getShininess() const { return _schemaBuffer.get<Schema>()._shininess; }
float getMetallic() const { return _schemaBuffer.get<Schema>()._metallic.x; }
float getGloss() const { return _schemaBuffer.get<Schema>()._gloss; }
float getOpacity() const { return _schemaBuffer.get<Schema>()._opacity; }
void setDiffuse(const Color& diffuse);
void setSpecular(const Color& specular);
void setEmissive(const Color& emissive);
void setShininess(float shininess);
void setDiffuse(const Color& diffuse);
void setMetallic(float metallic);
void setGloss(float gloss);
void setOpacity(float opacity);
// Schema to access the attribute values of the material
@ -84,8 +230,8 @@ public:
Color _diffuse{0.5f};
float _opacity{1.f};
Color _specular{0.03f};
float _shininess{0.1f};
Color _metallic{0.03f};
float _gloss{0.1f};
Color _emissive{0.0f};
float _spare0{0.0f};
glm::vec4 _spareVec4{0.0f}; // for alignment beauty, Material size == Mat4x4
@ -100,7 +246,7 @@ public:
protected:
Flags _flags;
MaterialKey _key;
UniformBufferView _schemaBuffer;
TextureMap _textureMap;

View file

@ -44,70 +44,73 @@ void Skybox::setCubemap(const gpu::TexturePointer& cubemap) {
void Skybox::render(gpu::Batch& batch, const ViewFrustum& viewFrustum, const Skybox& skybox) {
if (skybox.getCubemap() && skybox.getCubemap()->isDefined()) {
if (skybox.getCubemap()) {
if (skybox.getCubemap()->isDefined()) {
static gpu::PipelinePointer thePipeline;
static gpu::BufferPointer theBuffer;
static gpu::Stream::FormatPointer theFormat;
static gpu::BufferPointer theConstants;
int SKYBOX_CONSTANTS_SLOT = 0; // need to be defined by the compilation of the shader
if (!thePipeline) {
auto skyVS = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(Skybox_vert)));
auto skyFS = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(Skybox_frag)));
auto skyShader = gpu::ShaderPointer(gpu::Shader::createProgram(skyVS, skyFS));
static gpu::PipelinePointer thePipeline;
static gpu::BufferPointer theBuffer;
static gpu::Stream::FormatPointer theFormat;
static gpu::BufferPointer theConstants;
int SKYBOX_CONSTANTS_SLOT = 0; // need to be defined by the compilation of the shader
if (!thePipeline) {
auto skyVS = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(Skybox_vert)));
auto skyFS = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(Skybox_frag)));
auto skyShader = gpu::ShaderPointer(gpu::Shader::createProgram(skyVS, skyFS));
gpu::Shader::BindingSet bindings;
bindings.insert(gpu::Shader::Binding(std::string("cubeMap"), 0));
if (!gpu::Shader::makeProgram(*skyShader, bindings)) {
gpu::Shader::BindingSet bindings;
bindings.insert(gpu::Shader::Binding(std::string("cubeMap"), 0));
if (!gpu::Shader::makeProgram(*skyShader, bindings)) {
}
}
SKYBOX_CONSTANTS_SLOT = skyShader->getBuffers().findLocation("skyboxBuffer");
if (SKYBOX_CONSTANTS_SLOT == gpu::Shader::INVALID_LOCATION) {
SKYBOX_CONSTANTS_SLOT = skyShader->getUniforms().findLocation("skyboxBuffer");
}
SKYBOX_CONSTANTS_SLOT = skyShader->getBuffers().findLocation("skyboxBuffer");
if (SKYBOX_CONSTANTS_SLOT == gpu::Shader::INVALID_LOCATION) {
SKYBOX_CONSTANTS_SLOT = skyShader->getUniforms().findLocation("skyboxBuffer");
}
auto skyState = gpu::StatePointer(new gpu::State());
auto skyState = gpu::StatePointer(new gpu::State());
thePipeline = gpu::PipelinePointer(gpu::Pipeline::create(skyShader, skyState));
thePipeline = gpu::PipelinePointer(gpu::Pipeline::create(skyShader, skyState));
const float CLIP = 1.0;
const glm::vec2 vertices[4] = { {-CLIP, -CLIP}, {CLIP, -CLIP}, {-CLIP, CLIP}, {CLIP, CLIP}};
theBuffer.reset(new gpu::Buffer(sizeof(vertices), (const gpu::Byte*) vertices));
const float CLIP = 1.0;
const glm::vec2 vertices[4] = { {-CLIP, -CLIP}, {CLIP, -CLIP}, {-CLIP, CLIP}, {CLIP, CLIP}};
theBuffer.reset(new gpu::Buffer(sizeof(vertices), (const gpu::Byte*) vertices));
theFormat.reset(new gpu::Stream::Format());
theFormat->setAttribute(gpu::Stream::POSITION, gpu::Stream::POSITION, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::XYZ));
theFormat.reset(new gpu::Stream::Format());
theFormat->setAttribute(gpu::Stream::POSITION, gpu::Stream::POSITION, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::XYZ));
auto color = glm::vec4(1.0f);
theConstants.reset(new gpu::Buffer(sizeof(color), (const gpu::Byte*) &color));
auto color = glm::vec4(1.0f);
theConstants.reset(new gpu::Buffer(sizeof(color), (const gpu::Byte*) &color));
}
glm::mat4 projMat;
viewFrustum.evalProjectionMatrix(projMat);
Transform viewTransform;
viewFrustum.evalViewTransform(viewTransform);
if (glm::all(glm::equal(skybox.getColor(), glm::vec3(0.0f)))) {
auto color = glm::vec4(1.0f);
theConstants->setSubData(0, sizeof(color), (const gpu::Byte*) &color);
} else {
theConstants->setSubData(0, sizeof(Color), (const gpu::Byte*) &skybox.getColor());
}
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewTransform);
batch.setModelTransform(Transform()); // only for Mac
batch.setPipeline(thePipeline);
batch.setInputBuffer(gpu::Stream::POSITION, theBuffer, 0, 8);
batch.setUniformBuffer(SKYBOX_CONSTANTS_SLOT, theConstants, 0, theConstants->getSize());
batch.setInputFormat(theFormat);
batch.setUniformTexture(0, skybox.getCubemap());
batch.draw(gpu::TRIANGLE_STRIP, 4);
}
glm::mat4 projMat;
viewFrustum.evalProjectionMatrix(projMat);
Transform viewTransform;
viewFrustum.evalViewTransform(viewTransform);
if (glm::all(glm::equal(skybox.getColor(), glm::vec3(0.0f)))) {
auto color = glm::vec4(1.0f);
theConstants->setSubData(0, sizeof(color), (const gpu::Byte*) &color);
} else {
theConstants->setSubData(0, sizeof(Color), (const gpu::Byte*) &skybox.getColor());
}
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewTransform);
batch.setModelTransform(Transform()); // only for Mac
batch.setPipeline(thePipeline);
batch.setInputBuffer(gpu::Stream::POSITION, theBuffer, 0, 8);
batch.setUniformBuffer(SKYBOX_CONSTANTS_SLOT, theConstants, 0, theConstants->getSize());
batch.setInputFormat(theFormat);
batch.setUniformTexture(0, skybox.getCubemap());
batch.draw(gpu::TRIANGLE_STRIP, 4);
} else {
// skybox has no cubemap, just clear the color buffer
auto color = skybox.getColor();
batch.clearFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(color, 1.0f), 0.f, 0);
batch.clearFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(color, 0.0f), 0.f, 0);
}
}

View file

@ -24,7 +24,7 @@ typedef glm::vec3 Color;
class TextureUsage {
public:
gpu::Texture::Type _type{ gpu::Texture::TEX_2D };
Material::MapFlags _materialUsage{ Material::DIFFUSE_MAP };
Material::MapFlags _materialUsage{ MaterialKey::DIFFUSE_MAP };
int _environmentUsage = 0;
};

View file

@ -35,6 +35,7 @@ AddressManager::AddressManager() :
_positionGetter(NULL),
_orientationGetter(NULL)
{
}
bool AddressManager::isConnected() {
@ -415,6 +416,9 @@ bool AddressManager::handleViewpoint(const QString& viewpointString, bool should
positionRegex.cap(2).toFloat(),
positionRegex.cap(3).toFloat());
// we're about to jump positions - store the current address in our history
addCurrentAddressToHistory();
if (!isNaN(newPosition.x) && !isNaN(newPosition.y) && !isNaN(newPosition.z)) {
glm::quat newOrientation;
@ -467,6 +471,10 @@ bool AddressManager::handleUsername(const QString& lookupString) {
void AddressManager::setHost(const QString& host) {
if (host != _host) {
// if the host is being changed we should store current address in the history
addCurrentAddressToHistory();
_host = host;
emit hostChanged(_host);
}
@ -474,7 +482,8 @@ void AddressManager::setHost(const QString& host) {
void AddressManager::setDomainInfo(const QString& hostname, quint16 port) {
_host = hostname;
setHost(hostname);
_rootPlaceID = QUuid();
qCDebug(networking) << "Possible domain change required to connect to domain at" << hostname << "on" << port;
@ -500,3 +509,22 @@ void AddressManager::copyAddress() {
void AddressManager::copyPath() {
QApplication::clipboard()->setText(currentPath());
}
void AddressManager::addCurrentAddressToHistory() {
if (_lastHistoryAppend == 0) {
// we don't store the first address on application load
// just update the last append time so the next is stored
_lastHistoryAppend = usecTimestampNow();
} else {
const quint64 DOUBLE_STORE_THRESHOLD_USECS = 500000;
// avoid double storing when the host changes and the viewpoint changes immediately after
if (usecTimestampNow() - _lastHistoryAppend > DOUBLE_STORE_THRESHOLD_USECS) {
// add the current address to the history
_history.append(currentAddress());
// change our last history append to now
_lastHistoryAppend = usecTimestampNow();
}
}
}

View file

@ -98,10 +98,15 @@ private:
bool handleUsername(const QString& lookupString);
bool handleDomainID(const QString& host);
void addCurrentAddressToHistory();
QString _host;
QUuid _rootPlaceID;
PositionGetter _positionGetter;
OrientationGetter _orientationGetter;
QList<QUrl> _history;
quint64 _lastHistoryAppend = 0;
};
#endif // hifi_AddressManager_h

View file

@ -605,9 +605,6 @@ const int NUM_BYTES_STUN_HEADER = 20;
void LimitedNodeList::sendSTUNRequest() {
static quint64 lastTimeStamp = usecTimestampNow();
lastTimeStamp = usecTimestampNow();
const int NUM_INITIAL_STUN_REQUESTS_BEFORE_FAIL = 10;
if (!_hasCompletedInitialSTUN) {

View file

@ -180,10 +180,6 @@ btCollisionShape* EntityMotionState::computeNewShape() {
return nullptr;
}
// RELIABLE_SEND_HACK: until we have truly reliable resends of non-moving updates
// we always resend packets for objects that have stopped moving up to some max limit.
const int MAX_NUM_NON_MOVING_UPDATES = 5;
bool EntityMotionState::isCandidateForOwnership(const QUuid& sessionID) const {
if (!_body || !_entity) {
return false;
@ -495,6 +491,10 @@ void EntityMotionState::measureBodyAcceleration() {
glm::vec3 velocity = bulletToGLM(_body->getLinearVelocity());
_measuredAcceleration = (velocity / powf(1.0f - _body->getLinearDamping(), dt) - _lastVelocity) * invDt;
_lastVelocity = velocity;
if (numSubsteps > PHYSICS_ENGINE_MAX_NUM_SUBSTEPS && !_candidateForOwnership) {
_loopsSinceOwnershipBid = 0;
_loopsWithoutOwner = 0;
}
}
}
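
For reference, the acceleration measured above has to undo the damping Bullet applied during the step (velocity is damped as v *= powf(1 - damping, dt), as noted in EntityItem.h); a restated sketch of that arithmetic with illustrative names:

#include <cmath>
#include <glm/glm.hpp>

// Sketch: divide the observed velocity by the damping factor to recover the
// undamped velocity, then take a finite difference to estimate acceleration.
static glm::vec3 exampleMeasuredAcceleration(const glm::vec3& observedVelocity,
                                             const glm::vec3& previousVelocity,
                                             float linearDamping, float dt) {
    float dampingFactor = powf(1.0f - linearDamping, dt);    // factor Bullet applied over dt
    glm::vec3 undamped = observedVelocity / dampingFactor;   // velocity with damping removed
    return (undamped - previousVelocity) / dt;               // finite-difference acceleration
}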
glm::vec3 EntityMotionState::getObjectLinearVelocityChange() const {

View file

@ -24,7 +24,14 @@ ObjectAction::~ObjectAction() {
}
void ObjectAction::updateAction(btCollisionWorld* collisionWorld, btScalar deltaTimeStep) {
qDebug() << "ObjectAction::updateAction called";
if (!_active) {
return;
}
if (!_ownerEntity) {
qDebug() << "ObjectActionPullToPoint::updateAction no owner entity";
return;
}
updateActionWorker(deltaTimeStep);
}
void ObjectAction::debugDraw(btIDebugDraw* debugDrawer) {
@ -33,3 +40,87 @@ void ObjectAction::debugDraw(btIDebugDraw* debugDrawer) {
void ObjectAction::removeFromSimulation(EntitySimulation* simulation) const {
simulation->removeAction(_id);
}
btRigidBody* ObjectAction::getRigidBody() {
if (!_ownerEntity) {
return nullptr;
}
void* physicsInfo = _ownerEntity->getPhysicsInfo();
if (!physicsInfo) {
return nullptr;
}
ObjectMotionState* motionState = static_cast<ObjectMotionState*>(physicsInfo);
return motionState->getRigidBody();
}
glm::vec3 ObjectAction::getPosition() {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return glm::vec3(0.0f);
}
return bulletToGLM(rigidBody->getCenterOfMassPosition());
}
void ObjectAction::setPosition(glm::vec3 position) {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return;
}
// XXX
// void setWorldTransform (const btTransform &worldTrans)
assert(false);
rigidBody->activate();
}
glm::quat ObjectAction::getRotation() {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return glm::quat(); // identity (glm::quat's constructor order is w, x, y, z)
}
return bulletToGLM(rigidBody->getOrientation());
}
void ObjectAction::setRotation(glm::quat rotation) {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return;
}
// XXX
// void setWorldTransform (const btTransform &worldTrans)
assert(false);
rigidBody->activate();
}
glm::vec3 ObjectAction::getLinearVelocity() {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return glm::vec3(0.0f);
}
return bulletToGLM(rigidBody->getLinearVelocity());
}
void ObjectAction::setLinearVelocity(glm::vec3 linearVelocity) {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return;
}
rigidBody->setLinearVelocity(glmToBullet(linearVelocity));
rigidBody->activate();
}
glm::vec3 ObjectAction::getAngularVelocity() {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return glm::vec3(0.0f);
}
return bulletToGLM(rigidBody->getAngularVelocity());
}
void ObjectAction::setAngularVelocity(glm::vec3 angularVelocity) {
auto rigidBody = getRigidBody();
if (!rigidBody) {
return;
}
rigidBody->setAngularVelocity(glmToBullet(angularVelocity));
rigidBody->activate();
}

View file

@ -13,12 +13,17 @@
#ifndef hifi_ObjectAction_h
#define hifi_ObjectAction_h
#include <btBulletDynamicsCommon.h>
#include <QUuid>
#include <btBulletDynamicsCommon.h>
#include <EntityItem.h>
#include "ObjectMotionState.h"
#include "BulletUtil.h"
#include "EntityActionInterface.h"
class ObjectAction : public btActionInterface, public EntityActionInterface {
public:
ObjectAction(QUuid id, EntityItemPointer ownerEntity);
@ -30,6 +35,9 @@ public:
virtual void setOwnerEntity(const EntityItemPointer ownerEntity) { _ownerEntity = ownerEntity; }
virtual bool updateArguments(QVariantMap arguments) { return false; }
// this is called from updateAction and should be overridden by subclasses
virtual void updateActionWorker(float deltaTimeStep) {}
// these are from btActionInterface
virtual void updateAction(btCollisionWorld* collisionWorld, btScalar deltaTimeStep);
virtual void debugDraw(btIDebugDraw* debugDrawer);
@ -39,6 +47,16 @@ private:
QReadWriteLock _lock;
protected:
virtual btRigidBody* getRigidBody();
virtual glm::vec3 getPosition();
virtual void setPosition(glm::vec3 position);
virtual glm::quat getRotation();
virtual void setRotation(glm::quat rotation);
virtual glm::vec3 getLinearVelocity();
virtual void setLinearVelocity(glm::vec3 linearVelocity);
virtual glm::vec3 getAngularVelocity();
virtual void setAngularVelocity(glm::vec3 angularVelocity);
bool tryLockForRead() { return _lock.tryLockForRead(); }
void lockForWrite() { _lock.lockForWrite(); }
void unlock() { _lock.unlock(); }

View file

@ -9,9 +9,6 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "ObjectMotionState.h"
#include "BulletUtil.h"
#include "ObjectActionPullToPoint.h"
ObjectActionPullToPoint::ObjectActionPullToPoint(QUuid id, EntityItemPointer ownerEntity) :
@ -27,28 +24,34 @@ ObjectActionPullToPoint::~ObjectActionPullToPoint() {
#endif
}
void ObjectActionPullToPoint::updateAction(btCollisionWorld* collisionWorld, btScalar deltaTimeStep) {
void ObjectActionPullToPoint::updateActionWorker(btScalar deltaTimeStep) {
if (!tryLockForRead()) {
// don't risk hanging the thread running the physics simulation
return;
}
void* physicsInfo = _ownerEntity->getPhysicsInfo();
if (_active && physicsInfo) {
ObjectMotionState* motionState = static_cast<ObjectMotionState*>(physicsInfo);
btRigidBody* rigidBody = motionState->getRigidBody();
if (rigidBody) {
glm::vec3 offset = _target - bulletToGLM(rigidBody->getCenterOfMassPosition());
float offsetLength = glm::length(offset);
if (offsetLength > IGNORE_POSITION_DELTA) {
glm::vec3 newVelocity = glm::normalize(offset) * _speed;
rigidBody->setLinearVelocity(glmToBullet(newVelocity));
rigidBody->activate();
} else {
rigidBody->setLinearVelocity(glmToBullet(glm::vec3()));
}
}
void* physicsInfo = _ownerEntity->getPhysicsInfo();
if (!physicsInfo) {
unlock();
return;
}
ObjectMotionState* motionState = static_cast<ObjectMotionState*>(physicsInfo);
btRigidBody* rigidBody = motionState->getRigidBody();
if (!rigidBody) {
unlock();
return;
}
glm::vec3 offset = _target - bulletToGLM(rigidBody->getCenterOfMassPosition());
float offsetLength = glm::length(offset);
if (offsetLength > IGNORE_POSITION_DELTA) {
glm::vec3 newVelocity = glm::normalize(offset) * _speed;
rigidBody->setLinearVelocity(glmToBullet(newVelocity));
rigidBody->activate();
} else {
rigidBody->setLinearVelocity(glmToBullet(glm::vec3()));
}
unlock();
}

View file

@ -23,7 +23,7 @@ public:
virtual ~ObjectActionPullToPoint();
virtual bool updateArguments(QVariantMap arguments);
virtual void updateAction(btCollisionWorld* collisionWorld, btScalar deltaTimeStep);
virtual void updateActionWorker(float deltaTimeStep);
private:

View file

@ -0,0 +1,144 @@
//
// ObjectActionSpring.cpp
// libraries/physics/src
//
// Created by Seth Alves 2015-6-5
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "ObjectActionSpring.h"
ObjectActionSpring::ObjectActionSpring(QUuid id, EntityItemPointer ownerEntity) :
ObjectAction(id, ownerEntity) {
#if WANT_DEBUG
qDebug() << "ObjectActionSpring::ObjectActionSpring";
#endif
}
ObjectActionSpring::~ObjectActionSpring() {
#if WANT_DEBUG
qDebug() << "ObjectActionSpring::~ObjectActionSpring";
#endif
}
void ObjectActionSpring::updateActionWorker(btScalar deltaTimeStep) {
if (!tryLockForRead()) {
// don't risk hanging the thread running the physics simulation
qDebug() << "ObjectActionSpring::updateActionWorker lock failed";
return;
}
void* physicsInfo = _ownerEntity->getPhysicsInfo();
if (!physicsInfo) {
unlock();
return;
}
ObjectMotionState* motionState = static_cast<ObjectMotionState*>(physicsInfo);
btRigidBody* rigidBody = motionState->getRigidBody();
if (!rigidBody) {
unlock();
qDebug() << "ObjectActionSpring::updateActionWorker no rigidBody";
return;
}
// handle the linear part
if (_positionalTargetSet) {
glm::vec3 offset = _positionalTarget - bulletToGLM(rigidBody->getCenterOfMassPosition());
float offsetLength = glm::length(offset);
float speed = offsetLength / _linearTimeScale;
if (offsetLength > IGNORE_POSITION_DELTA) {
glm::vec3 newVelocity = glm::normalize(offset) * speed;
rigidBody->setLinearVelocity(glmToBullet(newVelocity));
rigidBody->activate();
} else {
rigidBody->setLinearVelocity(glmToBullet(glm::vec3(0.0f)));
}
}
// handle rotation
if (_rotationalTargetSet) {
glm::quat bodyRotation = bulletToGLM(rigidBody->getOrientation());
// if qZero and qOne are too close to each other, we can get NaN for angle.
auto alignmentDot = glm::dot(bodyRotation, _rotationalTarget);
const float almostOne = 0.99999f;
if (glm::abs(alignmentDot) < almostOne) {
glm::quat target = _rotationalTarget;
if (alignmentDot < 0) {
target = -target;
}
glm::quat qZeroInverse = glm::inverse(bodyRotation);
glm::quat deltaQ = target * qZeroInverse;
glm::vec3 axis = glm::axis(deltaQ);
float angle = glm::angle(deltaQ);
assert(!isNaN(angle));
glm::vec3 newAngularVelocity = (angle / _angularTimeScale) * glm::normalize(axis);
rigidBody->setAngularVelocity(glmToBullet(newAngularVelocity));
rigidBody->activate();
} else {
rigidBody->setAngularVelocity(glmToBullet(glm::vec3(0.0f)));
}
}
unlock();
}
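
Restating the rotational branch above as a standalone sketch: find the remaining rotation from the body's orientation to the target, convert it to axis/angle, and request an angular velocity that covers that angle in roughly timeScale seconds (q and -q describe the same rotation, hence the sign flip):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Sketch of the angular update; names are illustrative.
static glm::vec3 exampleSpringAngularVelocity(glm::quat current, glm::quat target, float timeScale) {
    const float ALMOST_ONE = 0.99999f;
    if (glm::abs(glm::dot(current, target)) >= ALMOST_ONE) {
        return glm::vec3(0.0f);  // already (nearly) aligned; avoid NaN from axis/angle extraction
    }
    if (glm::dot(current, target) < 0.0f) {
        target = -target;  // flip sign so we take the short way around
    }
    glm::quat deltaQ = target * glm::inverse(current);  // rotation still needed
    float angle = glm::angle(deltaQ);                   // radians
    glm::vec3 axis = glm::normalize(glm::axis(deltaQ));
    return (angle / timeScale) * axis;                  // rad/s about that axis
}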
bool ObjectActionSpring::updateArguments(QVariantMap arguments) {
// targets are required, spring-constants are optional
bool ptOk = true;
glm::vec3 positionalTarget =
EntityActionInterface::extractVec3Argument("spring action", arguments, "targetPosition", ptOk, false);
bool pscOk = true;
float linearTimeScale =
EntityActionInterface::extractFloatArgument("spring action", arguments, "linearTimeScale", pscOk, false);
if (ptOk && pscOk && linearTimeScale <= 0.0f) {
qDebug() << "spring action -- linearTimeScale must be greater than zero.";
return false;
}
bool rtOk = true;
glm::quat rotationalTarget =
EntityActionInterface::extractQuatArgument("spring action", arguments, "targetRotation", rtOk, false);
bool rscOk = true;
float angularTimeScale =
EntityActionInterface::extractFloatArgument("spring action", arguments, "angularTimeScale", rscOk, false);
if (!ptOk && !rtOk) {
qDebug() << "spring action requires either targetPosition or targetRotation argument";
return false;
}
lockForWrite();
_positionalTargetSet = _rotationalTargetSet = false;
if (ptOk) {
_positionalTarget = positionalTarget;
_positionalTargetSet = true;
if (pscOk) {
_linearTimeScale = linearTimeScale;
} else {
_linearTimeScale = 0.1f;
}
}
if (rtOk) {
_rotationalTarget = rotationalTarget;
_rotationalTargetSet = true;
if (rscOk) {
_angularTimeScale = angularTimeScale;
} else {
_angularTimeScale = 0.1f;
}
}
_active = true;
unlock();
return true;
}
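
For completeness, the arguments map this parser accepts (handed in through the scripting interface's addAction path) could be built like the following sketch; only one of targetPosition / targetRotation is strictly required, and the time scales are optional with the 0.1f defaults applied above:

#include <QVariantMap>

// Sketch: an arguments map the spring action's updateArguments() would accept.
static QVariantMap exampleSpringArguments() {
    QVariantMap position;
    position["x"] = 0.0f; position["y"] = 1.5f; position["z"] = 0.0f;

    QVariantMap rotation;  // rebuilt above as glm::quat(w, x, y, z)
    rotation["w"] = 1.0f; rotation["x"] = 0.0f; rotation["y"] = 0.0f; rotation["z"] = 0.0f;

    QVariantMap arguments;
    arguments["targetPosition"] = position;
    arguments["targetRotation"] = rotation;
    arguments["linearTimeScale"] = 0.2f;   // optional; must be > 0 when given
    arguments["angularTimeScale"] = 0.2f;  // optional
    return arguments;
}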

View file

@ -0,0 +1,39 @@
//
// ObjectActionSpring.h
// libraries/physics/src
//
// Created by Seth Alves 2015-6-5
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ObjectActionSpring_h
#define hifi_ObjectActionSpring_h
#include <QUuid>
#include <EntityItem.h>
#include "ObjectAction.h"
class ObjectActionSpring : public ObjectAction {
public:
ObjectActionSpring(QUuid id, EntityItemPointer ownerEntity);
virtual ~ObjectActionSpring();
virtual bool updateArguments(QVariantMap arguments);
virtual void updateActionWorker(float deltaTimeStep);
protected:
glm::vec3 _positionalTarget;
float _linearTimeScale;
bool _positionalTargetSet;
glm::quat _rotationalTarget;
float _angularTimeScale;
bool _rotationalTargetSet;
};
#endif // hifi_ObjectActionSpring_h

View file

@ -53,8 +53,6 @@ const uint32_t OUTGOING_DIRTY_PHYSICS_FLAGS = EntityItem::DIRTY_TRANSFORM | Enti
class OctreeEditPacketSender;
class PhysicsEngine;
extern const int MAX_NUM_NON_MOVING_UPDATES;
class ObjectMotionState : public btMotionState {
public:
// These properties of the PhysicsEngine are "global" within the context of all ObjectMotionStates

View file

@ -9,10 +9,11 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "PhysicsHelpers.h"
#include "PhysicsLogging.h"
#include "ShapeManager.h"
#include "ObjectActionPullToPoint.h"
#include "PhysicalEntitySimulation.h"
@ -234,29 +235,6 @@ void PhysicalEntitySimulation::handleCollisionEvents(CollisionEvents& collisionE
}
}
EntityActionPointer PhysicalEntitySimulation::actionFactory(EntityActionType type,
QUuid id,
EntityItemPointer ownerEntity,
QVariantMap arguments) {
EntityActionPointer action = nullptr;
switch (type) {
case ACTION_TYPE_NONE:
return nullptr;
case ACTION_TYPE_PULL_TO_POINT:
action = (EntityActionPointer) new ObjectActionPullToPoint(id, ownerEntity);
break;
}
bool ok = action->updateArguments(arguments);
if (ok) {
ownerEntity->addAction(this, action);
return action;
}
action = nullptr;
return action;
}
void PhysicalEntitySimulation::applyActionChanges() {
if (_physicsEngine) {
foreach (EntityActionPointer actionToAdd, _actionsToAdd) {

View file

@ -32,10 +32,6 @@ public:
void init(EntityTree* tree, PhysicsEngine* engine, EntityEditPacketSender* packetSender);
virtual EntityActionPointer actionFactory(EntityActionType type,
QUuid id,
EntityItemPointer ownerEntity,
QVariantMap arguments);
virtual void applyActionChanges();
protected: // only called by EntitySimulation

View file

@ -227,8 +227,7 @@ void PhysicsEngine::stepSimulation() {
// (3) synchronize outgoing motion states
// (4) send outgoing packets
const int MAX_NUM_SUBSTEPS = 4;
const float MAX_TIMESTEP = (float)MAX_NUM_SUBSTEPS * PHYSICS_ENGINE_FIXED_SUBSTEP;
const float MAX_TIMESTEP = (float)PHYSICS_ENGINE_MAX_NUM_SUBSTEPS * PHYSICS_ENGINE_FIXED_SUBSTEP;
float dt = 1.0e-6f * (float)(_clock.getTimeMicroseconds());
_clock.reset();
float timeStep = btMin(dt, MAX_TIMESTEP);
@ -245,7 +244,7 @@ void PhysicsEngine::stepSimulation() {
_characterController->preSimulation(timeStep);
}
int numSubsteps = _dynamicsWorld->stepSimulation(timeStep, MAX_NUM_SUBSTEPS, PHYSICS_ENGINE_FIXED_SUBSTEP);
int numSubsteps = _dynamicsWorld->stepSimulation(timeStep, PHYSICS_ENGINE_MAX_NUM_SUBSTEPS, PHYSICS_ENGINE_FIXED_SUBSTEP);
if (numSubsteps > 0) {
BT_PROFILE("postSimulation");
_numSubsteps += (uint32_t)numSubsteps;
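
To make the clamp above concrete, here is the arithmetic it performs, with illustrative values only (the real constants are defined in the physics headers, not shown in this diff):

// illustrative values, not the engine's actual definitions
const float fixedSubstep = 1.0f / 90.0f;                  // ~11.1 ms per fixed substep
const int maxNumSubsteps = 4;
const float maxTimestep = maxNumSubsteps * fixedSubstep;  // ~44.4 ms
// a 100 ms frame hitch gets clamped, so at most maxNumSubsteps substeps are simulated
float timeStep = btMin(0.1f, maxTimestep);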

View file

@ -78,8 +78,8 @@ Model::Model(QObject* parent) :
_showTrueJointTransforms(true),
_lodDistance(0.0f),
_pupilDilation(0.0f),
_isVisible(true),
_url("http://invalid.com"),
_isVisible(true),
_blendNumber(0),
_appliedBlendNumber(0),
_calculatedMeshPartBoxesValid(false),
@ -405,9 +405,6 @@ void Model::reset() {
if (_jointStates.isEmpty()) {
return;
}
foreach (Model* attachment, _attachments) {
attachment->reset();
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
for (int i = 0; i < _jointStates.size(); i++) {
_jointStates[i].setRotationInConstrainedFrame(geometry.joints.at(i).rotation, 0.0f);
@ -419,14 +416,7 @@ void Model::reset() {
}
bool Model::updateGeometry() {
// NOTE: this is a recursive call that walks all attachments, and their attachments
bool needFullUpdate = false;
for (int i = 0; i < _attachments.size(); i++) {
Model* model = _attachments.at(i);
if (model->updateGeometry()) {
needFullUpdate = true;
}
}
bool needToRebuild = false;
if (_nextGeometry) {
@ -445,6 +435,7 @@ bool Model::updateGeometry() {
QSharedPointer<NetworkGeometry> geometry = _geometry->getLODOrFallback(_lodDistance, _lodHysteresis);
if (_geometry != geometry) {
// NOTE: it is theoretically impossible to reach here after passing through the applyNextGeometry() call above.
// This means we don't need to worry about calling deleteGeometry() below immediately after creating new geometry.
@ -499,12 +490,6 @@ bool Model::updateGeometry() {
}
_blendedVertexBuffers.push_back(buffer);
}
foreach (const FBXAttachment& attachment, fbxGeometry.attachments) {
Model* model = new Model(this);
model->init();
model->setURL(attachment.url);
_attachments.append(model);
}
needFullUpdate = true;
}
return needFullUpdate;
@ -827,70 +812,43 @@ void Model::renderSetup(RenderArgs* args) {
}
class TransparentMeshPart {
class MeshPartPayload {
public:
TransparentMeshPart(Model* model, int meshIndex, int partIndex) : model(model), meshIndex(meshIndex), partIndex(partIndex) { }
typedef render::Payload<TransparentMeshPart> Payload;
MeshPartPayload(bool transparent, Model* model, int meshIndex, int partIndex) :
transparent(transparent), model(model), url(model->getURL()), meshIndex(meshIndex), partIndex(partIndex) { }
typedef render::Payload<MeshPartPayload> Payload;
typedef Payload::DataPointer Pointer;
Model* model;
bool transparent;
Model* model;
QUrl url;
int meshIndex;
int partIndex;
};
namespace render {
template <> const ItemKey payloadGetKey(const TransparentMeshPart::Pointer& payload) {
template <> const ItemKey payloadGetKey(const MeshPartPayload::Pointer& payload) {
if (!payload->model->isVisible()) {
return ItemKey::Builder().withInvisible().build();
}
return ItemKey::Builder::transparentShape();
return payload->transparent ? ItemKey::Builder::transparentShape() : ItemKey::Builder::opaqueShape();
}
template <> const Item::Bound payloadGetBound(const TransparentMeshPart::Pointer& payload) {
template <> const Item::Bound payloadGetBound(const MeshPartPayload::Pointer& payload) {
if (payload) {
return payload->model->getPartBounds(payload->meshIndex, payload->partIndex);
}
return render::Item::Bound();
}
template <> void payloadRender(const TransparentMeshPart::Pointer& payload, RenderArgs* args) {
template <> void payloadRender(const MeshPartPayload::Pointer& payload, RenderArgs* args) {
if (args) {
return payload->model->renderPart(args, payload->meshIndex, payload->partIndex, true);
return payload->model->renderPart(args, payload->meshIndex, payload->partIndex, payload->transparent);
}
}
}
class OpaqueMeshPart {
public:
OpaqueMeshPart(Model* model, int meshIndex, int partIndex) : model(model), meshIndex(meshIndex), partIndex(partIndex) { }
typedef render::Payload<OpaqueMeshPart> Payload;
typedef Payload::DataPointer Pointer;
Model* model;
int meshIndex;
int partIndex;
};
namespace render {
template <> const ItemKey payloadGetKey(const OpaqueMeshPart::Pointer& payload) {
if (!payload->model->isVisible()) {
return ItemKey::Builder().withInvisible().build();
}
return ItemKey::Builder::opaqueShape();
}
template <> const Item::Bound payloadGetBound(const OpaqueMeshPart::Pointer& payload) {
if (payload) {
Item::Bound result = payload->model->getPartBounds(payload->meshIndex, payload->partIndex);
//qDebug() << "payloadGetBound(OpaqueMeshPart) " << result;
return result;
}
return render::Item::Bound();
}
template <> void payloadRender(const OpaqueMeshPart::Pointer& payload, RenderArgs* args) {
if (args) {
return payload->model->renderPart(args, payload->meshIndex, payload->partIndex, false);
}
}
/* template <> const model::MaterialKey& shapeGetMaterialKey(const MeshPartPayload::Pointer& payload) {
return payload->model->getPartMaterial(payload->meshIndex, payload->partIndex);
}*/
}
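
For orientation, these template specializations are the hooks the render scene invokes through render::Payload; a rough sketch of the call path on the engine side (purely illustrative, the scene internals are not part of this diff):

// illustrative only: what the scene effectively does with each MeshPartPayload
render::ItemKey key = render::payloadGetKey(payload);          // opaque, transparent, or invisible bucket
render::Item::Bound bound = render::payloadGetBound(payload);  // feeds frustum culling
render::payloadRender(payload, args);                          // forwards to Model::renderPart(...)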
void Model::setVisibleInScene(bool newValue, std::shared_ptr<render::Scene> scene) {
@ -913,26 +871,19 @@ bool Model::addToScene(std::shared_ptr<render::Scene> scene, render::PendingChan
bool somethingAdded = false;
qDebug() << "Model::addToScene : " << this->getURL().toString();
// allow the attachments to add to scene
foreach (Model* attachment, _attachments) {
bool attachementSomethingAdded = attachment->addToScene(scene, pendingChanges);
somethingAdded = somethingAdded || attachementSomethingAdded;
}
foreach (auto renderItem, _transparentRenderItems) {
auto item = scene->allocateID();
auto renderData = TransparentMeshPart::Pointer(renderItem);
auto renderPayload = render::PayloadPointer(new TransparentMeshPart::Payload(renderData));
auto renderData = MeshPartPayload::Pointer(renderItem);
auto renderPayload = render::PayloadPointer(new MeshPartPayload::Payload(renderData));
pendingChanges.resetItem(item, renderPayload);
_renderItems.insert(item, renderPayload);
somethingAdded = true;
}
foreach (auto renderItem, _opaqueRenderItems) {
auto item = scene->allocateID();
auto renderData = OpaqueMeshPart::Pointer(renderItem);
auto renderPayload = render::PayloadPointer(new OpaqueMeshPart::Payload(renderData));
auto renderData = MeshPartPayload::Pointer(renderItem);
auto renderPayload = render::PayloadPointer(new MeshPartPayload::Payload(renderData));
pendingChanges.resetItem(item, renderPayload);
_renderItems.insert(item, renderPayload);
somethingAdded = true;
@ -944,221 +895,11 @@ bool Model::addToScene(std::shared_ptr<render::Scene> scene, render::PendingChan
}
void Model::removeFromScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
// allow the attachments to remove to scene
foreach (Model* attachment, _attachments) {
attachment->removeFromScene(scene, pendingChanges);
}
foreach (auto item, _renderItems.keys()) {
pendingChanges.removeItem(item);
}
_renderItems.clear();
_readyWhenAdded = false;
qDebug() << "Model::removeFromScene : " << this->getURL().toString();
}
bool Model::render(RenderArgs* renderArgs, float alpha) {
return true; //
PROFILE_RANGE(__FUNCTION__);
// render the attachments
foreach (Model* attachment, _attachments) {
attachment->render(renderArgs, alpha);
}
if (_meshStates.isEmpty()) {
return false;
}
renderSetup(renderArgs);
return renderCore(renderArgs, alpha);
}
bool Model::renderCore(RenderArgs* args, float alpha) {
return true;
PROFILE_RANGE(__FUNCTION__);
if (!_viewState) {
return false;
}
auto mode = args->_renderMode;
// Let's introduce a gpu::Batch to capture all the calls to the graphics api
_renderBatch.clear();
gpu::Batch& batch = _renderBatch;
// Setup the projection matrix
if (args && args->_viewFrustum) {
glm::mat4 proj;
// For easier debugging, depending on the pass
if (mode == RenderArgs::SHADOW_RENDER_MODE) {
args->_viewFrustum->evalProjectionMatrix(proj);
} else {
args->_viewFrustum->evalProjectionMatrix(proj);
}
batch.setProjectionTransform(proj);
}
// Capture the view matrix once for the rendering of this model
if (_transforms.empty()) {
_transforms.push_back(Transform());
}
_transforms[0] = _viewState->getViewTransform();
// apply entity translation offset to the viewTransform in one go (it's a preTranslate because viewTransform goes from world to eye space)
_transforms[0].preTranslate(-_translation);
batch.setViewTransform(_transforms[0]);
/*DependencyManager::get<TextureCache>()->setPrimaryDrawBuffers(
mode == RenderArgs::DEFAULT_RENDER_MODE || mode == RenderArgs::DIFFUSE_RENDER_MODE,
mode == RenderArgs::DEFAULT_RENDER_MODE || mode == RenderArgs::NORMAL_RENDER_MODE,
mode == RenderArgs::DEFAULT_RENDER_MODE);
*/
/*if (mode != RenderArgs::SHADOW_RENDER_MODE)*/ {
GLenum buffers[3];
int bufferCount = 0;
// if (mode == RenderArgs::DEFAULT_RENDER_MODE || mode == RenderArgs::DIFFUSE_RENDER_MODE) {
if (mode != RenderArgs::SHADOW_RENDER_MODE) {
buffers[bufferCount++] = GL_COLOR_ATTACHMENT0;
}
// if (mode == RenderArgs::DEFAULT_RENDER_MODE || mode == RenderArgs::NORMAL_RENDER_MODE) {
if (mode != RenderArgs::SHADOW_RENDER_MODE) {
buffers[bufferCount++] = GL_COLOR_ATTACHMENT1;
}
// if (mode == RenderArgs::DEFAULT_RENDER_MODE) {
if (mode != RenderArgs::SHADOW_RENDER_MODE) {
buffers[bufferCount++] = GL_COLOR_ATTACHMENT2;
}
GLBATCH(glDrawBuffers)(bufferCount, buffers);
// batch.setFramebuffer(DependencyManager::get<TextureCache>()->getPrimaryOpaqueFramebuffer());
}
const float DEFAULT_ALPHA_THRESHOLD = 0.5f;
//renderMeshes(batch, mode, translucent, alphaThreshold, hasTangents, hasSpecular, isSkinned, args, forceRenderMeshes);
int opaqueMeshPartsRendered = 0;
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, false, false, false, false, args, true);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, false, false, true, false, args, true);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, false, true, false, false, args, true);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, false, true, true, false, args, true);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, true, false, false, false, args, true);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, true, false, true, false, args, true);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, true, true, false, false, args, true);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, false, true, true, true, false, args, true);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, true, false, false, false, false, args, true);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, true, false, true, false, false, args, true);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, true, true, false, false, false, args, true);
opaqueMeshPartsRendered += renderMeshes(batch, mode, false, DEFAULT_ALPHA_THRESHOLD, true, true, true, false, false, args, true);
// render translucent meshes afterwards
//DependencyManager::get<TextureCache>()->setPrimaryDrawBuffers(false, true, true);
{
GLenum buffers[2];
int bufferCount = 0;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT1;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT2;
GLBATCH(glDrawBuffers)(bufferCount, buffers);
}
int translucentMeshPartsRendered = 0;
const float MOSTLY_OPAQUE_THRESHOLD = 0.75f;
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, false, false, false, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, false, false, true, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, false, true, false, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, false, true, true, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, true, false, false, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, true, false, true, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, true, true, false, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_OPAQUE_THRESHOLD, false, true, true, true, false, args, true);
{
GLenum buffers[1];
int bufferCount = 0;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT0;
GLBATCH(glDrawBuffers)(bufferCount, buffers);
}
// if (mode == RenderArgs::DEFAULT_RENDER_MODE || mode == RenderArgs::DIFFUSE_RENDER_MODE) {
if (mode != RenderArgs::SHADOW_RENDER_MODE) {
// batch.setFramebuffer(DependencyManager::get<TextureCache>()->getPrimaryTransparentFramebuffer());
const float MOSTLY_TRANSPARENT_THRESHOLD = 0.0f;
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, false, false, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, false, true, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, true, false, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, false, true, true, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, true, false, false, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, true, false, true, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, true, true, false, false, args, true);
translucentMeshPartsRendered += renderMeshes(batch, mode, true, MOSTLY_TRANSPARENT_THRESHOLD, false, true, true, true, false, args, true);
// batch.setFramebuffer(DependencyManager::get<TextureCache>()->getPrimaryOpaqueFramebuffer());
}
GLBATCH(glDepthMask)(true);
GLBATCH(glDepthFunc)(GL_LESS);
GLBATCH(glDisable)(GL_CULL_FACE);
if (mode == RenderArgs::SHADOW_RENDER_MODE) {
GLBATCH(glCullFace)(GL_BACK);
}
GLBATCH(glActiveTexture)(GL_TEXTURE0 + 1);
GLBATCH(glBindTexture)(GL_TEXTURE_2D, 0);
GLBATCH(glActiveTexture)(GL_TEXTURE0 + 2);
GLBATCH(glBindTexture)(GL_TEXTURE_2D, 0);
GLBATCH(glActiveTexture)(GL_TEXTURE0 + 3);
GLBATCH(glBindTexture)(GL_TEXTURE_2D, 0);
GLBATCH(glActiveTexture)(GL_TEXTURE0);
GLBATCH(glBindTexture)(GL_TEXTURE_2D, 0);
// deactivate vertex arrays after drawing
GLBATCH(glDisableClientState)(GL_NORMAL_ARRAY);
GLBATCH(glDisableClientState)(GL_VERTEX_ARRAY);
GLBATCH(glDisableClientState)(GL_TEXTURE_COORD_ARRAY);
GLBATCH(glDisableClientState)(GL_COLOR_ARRAY);
GLBATCH(glDisableVertexAttribArray)(gpu::Stream::TANGENT);
GLBATCH(glDisableVertexAttribArray)(gpu::Stream::SKIN_CLUSTER_INDEX);
GLBATCH(glDisableVertexAttribArray)(gpu::Stream::SKIN_CLUSTER_WEIGHT);
// bind with 0 to switch back to normal operation
GLBATCH(glBindBuffer)(GL_ARRAY_BUFFER, 0);
GLBATCH(glBindBuffer)(GL_ELEMENT_ARRAY_BUFFER, 0);
GLBATCH(glBindTexture)(GL_TEXTURE_2D, 0);
// Back to no program
GLBATCH(glUseProgram)(0);
// Render!
{
PROFILE_RANGE("render Batch");
#if defined(ANDROID)
#else
glPushMatrix();
#endif
::gpu::GLBackend::renderBatch(batch, true); // force sync with gl state here
#if defined(ANDROID)
#else
glPopMatrix();
#endif
}
// restore all the default material settings
_viewState->setupWorldLight();
#ifdef WANT_DEBUG_MESHBOXES
renderDebugMeshBoxes();
#endif
return true;
}
void Model::renderDebugMeshBoxes() {
@ -1267,12 +1008,12 @@ Extents Model::calculateScaledOffsetExtents(const Extents& extents) const {
Extents translatedExtents = { rotatedExtents.minimum + _translation,
rotatedExtents.maximum + _translation };
return translatedExtents;
}
/// Returns the world space equivalent of some box in model space.
AABox Model::calculateScaledOffsetAABox(const AABox& box) const {
return AABox(calculateScaledOffsetExtents(Extents(box)));
}
@ -1341,9 +1082,10 @@ void Model::setURL(const QUrl& url, const QUrl& fallback, bool retainCurrent, bo
if (_url == url && _geometry && _geometry->getURL() == url) {
return;
}
_readyWhenAdded = false; // reset our render items.
_needsReload = true;
invalidCalculatedMeshBoxes();
_url = url;
@ -1532,7 +1274,7 @@ void Model::setScaleToFit(bool scaleToFit, const glm::vec3& dimensions) {
}
}
void Model::setScaleToFit(bool scaleToFit, float largestDimension) {
void Model::setScaleToFit(bool scaleToFit, float largestDimension, bool forceRescale) {
// NOTE: if the model is not active, then it means we don't actually know the true/natural dimensions of the
// mesh, and so we can't do the needed calculations for scaling to fit to a single largest dimension. In this
// case we will record that we do want to do this, but we will stick our desired single dimension into the
@ -1545,7 +1287,7 @@ void Model::setScaleToFit(bool scaleToFit, float largestDimension) {
return;
}
if (_scaleToFit != scaleToFit || glm::length(_scaleToFitDimensions) != largestDimension) {
if (forceRescale || _scaleToFit != scaleToFit || glm::length(_scaleToFitDimensions) != largestDimension) {
_scaleToFit = scaleToFit;
// we only need to do this work if we're "turning on" scale to fit.
@ -1555,7 +1297,7 @@ void Model::setScaleToFit(bool scaleToFit, float largestDimension) {
float maxScale = largestDimension / maxDimension;
glm::vec3 modelMeshDimensions = modelMeshExtents.maximum - modelMeshExtents.minimum;
glm::vec3 dimensions = modelMeshDimensions * maxScale;
_scaleToFitDimensions = dimensions;
_scaledToFit = false; // force rescaling
}
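
A small usage sketch of the new forceRescale parameter (the model pointer and dimension here are hypothetical):

// re-run scale-to-fit even when the requested dimension has not changed,
// e.g. after the underlying geometry finished loading (names are illustrative)
model->setScaleToFit(true, 2.0f /* largest dimension, meters */, true /* forceRescale */);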
@ -1626,7 +1368,6 @@ void Model::simulate(float deltaTime, bool fullUpdate) {
}
void Model::simulateInternal(float deltaTime) {
// NOTE: this is a recursive call that walks all attachments, and their attachments
// update the world space transforms for all joints
// update animations
@ -1643,31 +1384,7 @@ void Model::simulateInternal(float deltaTime) {
_shapesAreDirty = !_shapes.isEmpty();
// update the attachment transforms and simulate them
const FBXGeometry& geometry = _geometry->getFBXGeometry();
for (int i = 0; i < _attachments.size(); i++) {
const FBXAttachment& attachment = geometry.attachments.at(i);
Model* model = _attachments.at(i);
glm::vec3 jointTranslation = _translation;
glm::quat jointRotation = _rotation;
if (_showTrueJointTransforms) {
getJointPositionInWorldFrame(attachment.jointIndex, jointTranslation);
getJointRotationInWorldFrame(attachment.jointIndex, jointRotation);
} else {
getVisibleJointPositionInWorldFrame(attachment.jointIndex, jointTranslation);
getVisibleJointRotationInWorldFrame(attachment.jointIndex, jointRotation);
}
model->setTranslation(jointTranslation + jointRotation * attachment.translation * _scale);
model->setRotation(jointRotation * attachment.rotation);
model->setScale(_scale * attachment.scale);
if (model->isActive()) {
model->simulateInternal(deltaTime);
}
}
glm::mat4 modelToWorld = glm::mat4_cast(_rotation);
for (int i = 0; i < _meshStates.size(); i++) {
MeshState& state = _meshStates[i];
@ -2005,10 +1722,6 @@ void Model::applyNextGeometry() {
}
void Model::deleteGeometry() {
foreach (Model* attachment, _attachments) {
delete attachment;
}
_attachments.clear();
_blendedVertexBuffers.clear();
_jointStates.clear();
_meshStates.clear();
@ -2048,23 +1761,48 @@ void Model::setupBatchTransform(gpu::Batch& batch, RenderArgs* args) {
}
AABox Model::getPartBounds(int meshIndex, int partIndex) {
if (!_calculatedMeshPartBoxesValid) {
if (!_calculatedMeshPartBoxesValid || !_calculatedMeshBoxesValid) {
recalculateMeshBoxes(true);
}
if (meshIndex < _meshStates.size()) {
const MeshState& state = _meshStates.at(meshIndex);
bool isSkinned = state.clusterMatrices.size() > 1;
if (isSkinned) {
// if we're skinned, return the entire mesh extents because we can't know for sure our clusters don't move us
return calculateScaledOffsetAABox(_geometry->getFBXGeometry().meshExtents);
}
}
if (_calculatedMeshPartBoxesValid && _calculatedMeshPartBoxes.contains(QPair<int,int>(meshIndex, partIndex))) {
return calculateScaledOffsetAABox(_calculatedMeshPartBoxes[QPair<int,int>(meshIndex, partIndex)]);
// FIX ME! - This is currently a hack because for some mesh parts our efforts to calculate the bounding
// box of the mesh part fail. It seems to create boxes that are not consistent with where the
// geometry actually renders. If instead we make all the parts share the bounds of the entire subMesh
// things will render properly.
//
// return calculateScaledOffsetAABox(_calculatedMeshPartBoxes[QPair<int,int>(meshIndex, partIndex)]);
//
// NOTE: we also don't want to use the _calculatedMeshBoxes[] because they don't handle avatar moving correctly
// without recalculating them...
// return _calculatedMeshBoxes[meshIndex];
//
// If we're not skinned, use the bounds of the subMesh for all its parts
const FBXMesh& mesh = _geometry->getFBXGeometry().meshes.at(meshIndex);
return calculateScaledOffsetExtents(mesh.meshExtents);
}
return AABox();
}
void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool translucent) {
if (!_readyWhenAdded) {
return; // bail asap
}
// we always need these properly calculated before we can render; this will likely already have been done
// since the engine will call our getPartBounds() before rendering us.
if (!_calculatedMeshPartBoxesValid) {
if (!_calculatedMeshPartBoxesValid || !_calculatedMeshBoxesValid) {
recalculateMeshBoxes(true);
}
auto textureCache = DependencyManager::get<TextureCache>();
@ -2072,20 +1810,6 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
gpu::Batch& batch = *(args->_batch);
auto mode = args->_renderMode;
// render the part bounding box
#ifdef DEBUG_BOUNDING_PARTS
{
glm::vec4 cubeColor(1.0f,0.0f,0.0f,1.0f);
AABox partBounds = getPartBounds(meshIndex, partIndex);
glm::mat4 translation = glm::translate(partBounds.calcCenter());
glm::mat4 scale = glm::scale(partBounds.getDimensions());
glm::mat4 modelToWorldMatrix = translation * scale;
batch.setModelTransform(modelToWorldMatrix);
//qDebug() << "partBounds:" << partBounds;
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(batch, 1.0f, cubeColor);
}
#endif //def DEBUG_BOUNDING_PARTS
// Capture the view matrix once for the rendering of this model
if (_transforms.empty()) {
@ -2112,14 +1836,37 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
bool hasLightmap = mesh.hasEmissiveTexture();
bool isSkinned = state.clusterMatrices.size() > 1;
bool wireframe = isWireframe();
// render the part bounding box
#ifdef DEBUG_BOUNDING_PARTS
{
AABox partBounds = getPartBounds(meshIndex, partIndex);
bool inView = args->_viewFrustum->boxInFrustum(partBounds) != ViewFrustum::OUTSIDE;
glm::vec4 cubeColor;
if (isSkinned) {
cubeColor = glm::vec4(0.0f, 1.0f, 1.0f, 1.0f);
} else if (inView) {
cubeColor = glm::vec4(1.0f, 0.0f, 1.0f, 1.0f);
} else {
cubeColor = glm::vec4(1.0f, 1.0f, 0.0f, 1.0f);
}
Transform transform;
transform.setTranslation(partBounds.calcCenter());
transform.setScale(partBounds.getDimensions());
batch.setModelTransform(transform);
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(batch, 1.0f, cubeColor);
}
#endif //def DEBUG_BOUNDING_PARTS
if (wireframe) {
translucentMesh = hasTangents = hasSpecular = hasLightmap = isSkinned = false;
}
Locations* locations = nullptr;
pickPrograms(batch, mode, translucent, alphaThreshold, hasLightmap, hasTangents, hasSpecular, isSkinned, wireframe,
args, locations);
pickPrograms(batch, mode, translucentMesh, alphaThreshold, hasLightmap, hasTangents, hasSpecular, isSkinned, wireframe,
args, locations);
updateVisibleJointStates();
@ -2172,16 +1919,18 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
// guard against partially loaded meshes
if (partIndex >= networkMesh.parts.size() || partIndex >= mesh.parts.size()) {
return;
return;
}
const NetworkMeshPart& networkPart = networkMesh.parts.at(partIndex);
const FBXMeshPart& part = mesh.parts.at(partIndex);
model::MaterialPointer material = part._material;
#ifdef WANT_DEBUG
if (material == nullptr) {
// qCDebug(renderutils) << "WARNING: material == nullptr!!!";
qCDebug(renderutils) << "WARNING: material == nullptr!!!";
}
#endif
if (material != nullptr) {
@ -2263,7 +2012,7 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
}
qint64 offset = _calculatedMeshPartOffet[QPair<int,int>(meshIndex, partIndex)];
if (part.quadIndices.size() > 0) {
batch.drawIndexed(gpu::QUADS, part.quadIndices.size(), offset);
offset += part.quadIndices.size() * sizeof(int);
@ -2283,8 +2032,6 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
}
void Model::segregateMeshGroups() {
_renderBuckets.clear();
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const QVector<NetworkMesh>& networkMeshes = _geometry->getMeshes();
@ -2294,6 +2041,9 @@ void Model::segregateMeshGroups() {
qDebug() << "WARNING!!!! Mesh Sizes don't match! We will not segregate mesh groups yet.";
return;
}
_transparentRenderItems.clear();
_opaqueRenderItems.clear();
// Run through all of the meshes, and place them into their segregated, but unsorted buckets
for (int i = 0; i < networkMeshes.size(); i++) {
@ -2318,67 +2068,23 @@ void Model::segregateMeshGroups() {
for (int partIndex = 0; partIndex < totalParts; partIndex++) {
// this is a good place to create our renderPayloads
if (translucentMesh) {
_transparentRenderItems << std::shared_ptr<TransparentMeshPart>(new TransparentMeshPart(this, i, partIndex));
_transparentRenderItems << std::shared_ptr<MeshPartPayload>(new MeshPartPayload(true, this, i, partIndex));
} else {
_opaqueRenderItems << std::shared_ptr<OpaqueMeshPart>(new OpaqueMeshPart(this, i, partIndex));
_opaqueRenderItems << std::shared_ptr<MeshPartPayload>(new MeshPartPayload(false, this, i, partIndex));
}
}
QString materialID;
// create a material name from all the parts. If there's one part, this will be a single material and its
// true name. If, however, the mesh has multiple parts, the name will be all the parts' materials mashed together
// which will result in those parts being sorted away from single material parts.
QString lastPartMaterialID;
foreach(FBXMeshPart part, mesh.parts) {
if (part.materialID != lastPartMaterialID) {
materialID += part.materialID;
}
lastPartMaterialID = part.materialID;
}
const bool wantDebug = false;
if (wantDebug) {
qCDebug(renderutils) << "materialID:" << materialID << "parts:" << mesh.parts.size();
}
RenderKey key(translucentMesh, hasLightmap, hasTangents, hasSpecular, isSkinned, wireframe);
// reuse or create the bucket corresponding to that key and insert the mesh as unsorted
_renderBuckets[key.getRaw()]._unsortedMeshes.insertMulti(materialID, i);
}
for(auto& b : _renderBuckets) {
foreach(auto i, b.second._unsortedMeshes) {
b.second._meshes.append(i);
}
b.second._unsortedMeshes.clear();
}
_meshGroupsKnown = true;
}
QVector<int>* Model::pickMeshList(bool translucent, float alphaThreshold, bool hasLightmap, bool hasTangents, bool hasSpecular, bool isSkinned, bool isWireframe) {
PROFILE_RANGE(__FUNCTION__);
// depending on which parameters we were called with, pick the correct mesh group to render
QVector<int>* whichList = NULL;
RenderKey key(translucent, hasLightmap, hasTangents, hasSpecular, isSkinned, isWireframe);
auto bucket = _renderBuckets.find(key.getRaw());
if (bucket != _renderBuckets.end()) {
whichList = &(*bucket).second._meshes;
}
return whichList;
}
void Model::pickPrograms(gpu::Batch& batch, RenderMode mode, bool translucent, float alphaThreshold,
bool hasLightmap, bool hasTangents, bool hasSpecular, bool isSkinned, bool isWireframe, RenderArgs* args,
Locations*& locations) {
RenderKey key(mode, translucent, alphaThreshold, hasLightmap, hasTangents, hasSpecular, isSkinned, isWireframe);
if (mode == RenderArgs::MIRROR_RENDER_MODE) {
key = RenderKey(key.getRaw() | RenderKey::IS_MIRROR);
}
auto pipeline = _renderPipelineLib.find(key.getRaw());
if (pipeline == _renderPipelineLib.end()) {
qDebug() << "No good, couldn't find a pipeline from the key ?" << key.getRaw();
@ -2402,212 +2108,6 @@ void Model::pickPrograms(gpu::Batch& batch, RenderMode mode, bool translucent, f
}
}
int Model::renderMeshes(gpu::Batch& batch, RenderMode mode, bool translucent, float alphaThreshold,
bool hasLightmap, bool hasTangents, bool hasSpecular, bool isSkinned, bool isWireframe, RenderArgs* args,
bool forceRenderSomeMeshes) {
PROFILE_RANGE(__FUNCTION__);
int meshPartsRendered = 0;
//Pick the mesh list with the requested render flags
QVector<int>* whichList = pickMeshList(translucent, alphaThreshold, hasLightmap, hasTangents, hasSpecular, isSkinned, isWireframe);
if (!whichList) {
return 0;
}
QVector<int>& list = *whichList;
// If this list has nothing to render, then don't bother proceeding. This saves us on binding to programs
if (list.empty()) {
return 0;
}
Locations* locations = nullptr;
pickPrograms(batch, mode, translucent, alphaThreshold, hasLightmap, hasTangents, hasSpecular, isSkinned, isWireframe,
args, locations);
meshPartsRendered = renderMeshesFromList(list, batch, mode, translucent, alphaThreshold,
args, locations, forceRenderSomeMeshes);
return meshPartsRendered;
}
int Model::renderMeshesFromList(QVector<int>& list, gpu::Batch& batch, RenderMode mode, bool translucent, float alphaThreshold, RenderArgs* args,
Locations* locations, bool forceRenderMeshes) {
PROFILE_RANGE(__FUNCTION__);
auto textureCache = DependencyManager::get<TextureCache>();
QString lastMaterialID;
int meshPartsRendered = 0;
updateVisibleJointStates();
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const QVector<NetworkMesh>& networkMeshes = _geometry->getMeshes();
// i is the "index" from the original networkMeshes QVector...
foreach (int i, list) {
// if our index is ever out of range for either meshes or networkMeshes, then skip it, and set our _meshGroupsKnown
// to false to rebuild our mesh groups.
if (i < 0 || i >= networkMeshes.size() || i > geometry.meshes.size()) {
_meshGroupsKnown = false; // regenerate these lists next time around.
_readyWhenAdded = false; // in case any of our users are using scenes
invalidCalculatedMeshBoxes(); // if we have to reload, we need to assume our mesh boxes are all invalid
continue;
}
// exit early if the translucency doesn't match what we're drawing
const NetworkMesh& networkMesh = networkMeshes.at(i);
const FBXMesh& mesh = geometry.meshes.at(i);
batch.setIndexBuffer(gpu::UINT32, (networkMesh._indexBuffer), 0);
int vertexCount = mesh.vertices.size();
if (vertexCount == 0) {
// sanity check
continue;
}
// if we got here, then check to see if this mesh is in view
if (args) {
bool shouldRender = true;
if (args->_viewFrustum) {
shouldRender = forceRenderMeshes ||
args->_viewFrustum->boxInFrustum(_calculatedMeshBoxes.at(i)) != ViewFrustum::OUTSIDE;
if (shouldRender && !forceRenderMeshes) {
float distance = args->_viewFrustum->distanceToCamera(_calculatedMeshBoxes.at(i).calcCenter());
shouldRender = !_viewState ? false : _viewState->shouldRenderMesh(_calculatedMeshBoxes.at(i).getLargestDimension(),
distance);
}
}
if (!shouldRender) {
continue; // skip this mesh
}
}
const MeshState& state = _meshStates.at(i);
if (state.clusterMatrices.size() > 1) {
GLBATCH(glUniformMatrix4fv)(locations->clusterMatrices, state.clusterMatrices.size(), false,
(const float*)state.clusterMatrices.constData());
batch.setModelTransform(Transform());
} else {
batch.setModelTransform(Transform(state.clusterMatrices[0]));
}
if (mesh.blendshapes.isEmpty()) {
batch.setInputFormat(networkMesh._vertexFormat);
batch.setInputStream(0, *networkMesh._vertexStream);
} else {
batch.setInputFormat(networkMesh._vertexFormat);
batch.setInputBuffer(0, _blendedVertexBuffers[i], 0, sizeof(glm::vec3));
batch.setInputBuffer(1, _blendedVertexBuffers[i], vertexCount * sizeof(glm::vec3), sizeof(glm::vec3));
batch.setInputStream(2, *networkMesh._vertexStream);
}
if (mesh.colors.isEmpty()) {
GLBATCH(glColor4f)(1.0f, 1.0f, 1.0f, 1.0f);
}
qint64 offset = 0;
for (int j = 0; j < networkMesh.parts.size(); j++) {
const NetworkMeshPart& networkPart = networkMesh.parts.at(j);
const FBXMeshPart& part = mesh.parts.at(j);
model::MaterialPointer material = part._material;
if ((networkPart.isTranslucent() || part.opacity != 1.0f) != translucent) {
offset += (part.quadIndices.size() + part.triangleIndices.size()) * sizeof(int);
continue;
}
// apply material properties
if (mode == RenderArgs::SHADOW_RENDER_MODE) {
/// GLBATCH(glBindTexture)(GL_TEXTURE_2D, 0);
} else {
if (lastMaterialID != part.materialID) {
const bool wantDebug = false;
if (wantDebug) {
qCDebug(renderutils) << "Material Changed ---------------------------------------------";
qCDebug(renderutils) << "part INDEX:" << j;
qCDebug(renderutils) << "NEW part.materialID:" << part.materialID;
}
if (locations->materialBufferUnit >= 0) {
batch.setUniformBuffer(locations->materialBufferUnit, material->getSchemaBuffer());
}
Texture* diffuseMap = networkPart.diffuseTexture.data();
if (mesh.isEye && diffuseMap) {
diffuseMap = (_dilatedTextures[i][j] =
static_cast<DilatableNetworkTexture*>(diffuseMap)->getDilatedTexture(_pupilDilation)).data();
}
static bool showDiffuse = true;
if (showDiffuse && diffuseMap) {
batch.setUniformTexture(0, diffuseMap->getGPUTexture());
} else {
batch.setUniformTexture(0, textureCache->getWhiteTexture());
}
if (locations->texcoordMatrices >= 0) {
glm::mat4 texcoordTransform[2];
if (!part.diffuseTexture.transform.isIdentity()) {
part.diffuseTexture.transform.getMatrix(texcoordTransform[0]);
}
if (!part.emissiveTexture.transform.isIdentity()) {
part.emissiveTexture.transform.getMatrix(texcoordTransform[1]);
}
GLBATCH(glUniformMatrix4fv)(locations->texcoordMatrices, 2, false, (const float*) &texcoordTransform);
}
if (!mesh.tangents.isEmpty()) {
Texture* normalMap = networkPart.normalTexture.data();
batch.setUniformTexture(1, !normalMap ?
textureCache->getBlueTexture() : normalMap->getGPUTexture());
}
if (locations->specularTextureUnit >= 0) {
Texture* specularMap = networkPart.specularTexture.data();
batch.setUniformTexture(locations->specularTextureUnit, !specularMap ?
textureCache->getWhiteTexture() : specularMap->getGPUTexture());
}
}
// HACK: For an unknown reason (yet!), this code, which should only run when the material changes, needs to be called for every
// drawcall with an emissive, so let's do it for now.
if (locations->emissiveTextureUnit >= 0) {
// assert(locations->emissiveParams >= 0); // we should have the emissiveParams defined in the shader
float emissiveOffset = part.emissiveParams.x;
float emissiveScale = part.emissiveParams.y;
GLBATCH(glUniform2f)(locations->emissiveParams, emissiveOffset, emissiveScale);
Texture* emissiveMap = networkPart.emissiveTexture.data();
batch.setUniformTexture(locations->emissiveTextureUnit, !emissiveMap ?
textureCache->getWhiteTexture() : emissiveMap->getGPUTexture());
}
lastMaterialID = part.materialID;
}
meshPartsRendered++;
if (part.quadIndices.size() > 0) {
batch.drawIndexed(gpu::QUADS, part.quadIndices.size(), offset);
offset += part.quadIndices.size() * sizeof(int);
}
if (part.triangleIndices.size() > 0) {
batch.drawIndexed(gpu::TRIANGLES, part.triangleIndices.size(), offset);
offset += part.triangleIndices.size() * sizeof(int);
}
}
}
return meshPartsRendered;
}
ModelBlender::ModelBlender() :
_pendingBlenders(0) {

View file

@ -51,17 +51,11 @@ namespace render {
class PendingChanges;
typedef unsigned int ItemID;
}
class OpaqueMeshPart;
class TransparentMeshPart;
class MeshPartPayload;
inline uint qHash(const std::shared_ptr<TransparentMeshPart>& a, uint seed) {
inline uint qHash(const std::shared_ptr<MeshPartPayload>& a, uint seed) {
return qHash(a.get(), seed);
}
inline uint qHash(const std::shared_ptr<OpaqueMeshPart>& a, uint seed) {
return qHash(a.get(), seed);
}
/// A generic 3D model displaying geometry loaded from a URL.
class Model : public QObject, public PhysicsEntity {
@ -77,7 +71,7 @@ public:
virtual ~Model();
/// enables/disables scale to fit behavior, the model will be automatically scaled to the specified largest dimension
void setScaleToFit(bool scaleToFit, float largestDimension = 0.0f);
void setScaleToFit(bool scaleToFit, float largestDimension = 0.0f, bool forceRescale = false);
bool getScaleToFit() const { return _scaleToFit; } /// is scale to fit enabled
bool getIsScaledToFit() const { return _scaledToFit; } /// is model scaled to fit
const glm::vec3& getScaleToFitDimensions() const { return _scaleToFitDimensions; } /// the dimensions model is scaled to
@ -312,8 +306,7 @@ protected:
float getLimbLength(int jointIndex) const;
/// Allow sub classes to force invalidating the bboxes
void invalidCalculatedMeshBoxes() {
qDebug() << "invalidCalculatedMeshBoxes()";
void invalidCalculatedMeshBoxes() {
_calculatedMeshBoxesValid = false;
_calculatedMeshPartBoxesValid = false;
_calculatedMeshTrianglesValid = false;
@ -351,8 +344,6 @@ private:
QVector<QVector<QSharedPointer<Texture> > > _dilatedTextures;
QVector<Model*> _attachments;
QSet<WeakAnimationHandlePointer> _animationHandles;
QList<AnimationHandlePointer> _runningAnimations;
@ -402,18 +393,8 @@ private:
int _debugMeshBoxesID = GeometryCache::UNKNOWN_ID;
// helper functions used by render() or renderInScene()
bool renderCore(RenderArgs* args, float alpha);
int renderMeshes(gpu::Batch& batch, RenderArgs::RenderMode mode, bool translucent, float alphaThreshold,
bool hasLightmap, bool hasTangents, bool hasSpecular, bool isSkinned, bool isWireframe, RenderArgs* args = NULL,
bool forceRenderMeshes = false);
void setupBatchTransform(gpu::Batch& batch, RenderArgs* args);
QVector<int>* pickMeshList(bool translucent, float alphaThreshold, bool hasLightmap, bool hasTangents, bool hasSpecular, bool isSkinned, bool isWireframe);
int renderMeshesFromList(QVector<int>& list, gpu::Batch& batch, RenderArgs::RenderMode mode, bool translucent, float alphaThreshold,
RenderArgs* args, Locations* locations,
bool forceRenderSomeMeshes = false);
static void pickPrograms(gpu::Batch& batch, RenderArgs::RenderMode mode, bool translucent, float alphaThreshold,
bool hasLightmap, bool hasTangents, bool hasSpecular, bool isSkinned, bool isWireframe, RenderArgs* args,
Locations*& locations);
@ -524,35 +505,15 @@ private:
};
static RenderPipelineLib _renderPipelineLib;
class RenderBucket {
public:
QVector<int> _meshes;
QMap<QString, int> _unsortedMeshes;
};
typedef std::unordered_map<int, RenderBucket> BaseRenderBucketMap;
class RenderBucketMap : public BaseRenderBucketMap {
public:
typedef RenderKey Key;
};
RenderBucketMap _renderBuckets;
bool _renderCollisionHull;
QSet<std::shared_ptr<TransparentMeshPart>> _transparentRenderItems;
QSet<std::shared_ptr<OpaqueMeshPart>> _opaqueRenderItems;
QSet<std::shared_ptr<MeshPartPayload>> _transparentRenderItems;
QSet<std::shared_ptr<MeshPartPayload>> _opaqueRenderItems;
QMap<render::ItemID, render::PayloadPointer> _renderItems;
bool _readyWhenAdded = false;
bool _needsReload = true;
private:
// FIX ME - We want to get rid of this interface for rendering...
// right now the only remaining user are Avatar attachments.
// that usage has been temporarily disabled...
bool render(RenderArgs* renderArgs, float alpha = 1.0f);
};
Q_DECLARE_METATYPE(QPointer<Model>)

Some files were not shown because too many files have changed in this diff.