Mirror of https://github.com/overte-org/overte.git (synced 2025-04-22 17:13:31 +02:00)

Commit 05dde9d8f5: resolve conflicts on merge with upstream/master

84 changed files with 949 additions and 1902 deletions
assignment-client/src/avatars
examples
interface/src
libraries
animation/src
audio/src
avatars/src
entities-renderer/src
entities/src
fbx/src
gpu/src/gpu
networking/src
DataServerAccountInfo.cpp, DomainHandler.cpp, JSONBreakableMarshal.h, NetworkPacket.cpp, NetworkPacket.h, NetworkPeer.cpp, PacketHeaders.cpp, ResourceCache.cpp, ResourceCache.h
octree/src
render-utils/src
AmbientOcclusionEffect.cpp, AnimationHandle.cpp, AnimationHandle.h, DeferredLightingEffect.cpp, GeometryCache.cpp, GeometryCache.h, GlowEffect.cpp, GlowEffect.h, Model.cpp, Model.h, RenderDeferredTask.cpp, RenderUtil.h, TextureCache.cpp, TextureCache.h
shared/src
ui/src
tests/ui/src
tools/scribe/src
@ -53,7 +53,7 @@ AnimationDetails ScriptableAvatar::getAnimationDetails() {
void ScriptableAvatar::update(float deltatime) {
    // Run animation
    if (_animation != NULL && _animation->isValid() && _animation->getFrames().size() > 0) {
    if (_animation && _animation->isLoaded() && _animation->getFrames().size() > 0) {
        QStringList modelJoints = getJointNames();
        QStringList animationJoints = _animation->getJointNames();
@ -175,10 +175,12 @@ function positionStick(stickOrientation) {
    inHand = false;
    Entities.updateAction(stickID, actionID, {
        relativePosition: offset,
        relativeRotation: stickOrientation
        relativeRotation: stickOrientation,
        hand: "right"
    });
}
function resetToHand() { // Maybe coordinate with positionStick?
function resetToHand() { // For use with controllers, puts the sword in contact with the hand.
    // Maybe coordinate with positionStick?
    if (inHand) { // Optimization: bail if we're already inHand.
        return;
    }
@ -191,14 +193,14 @@ function resetToHand() { // Maybe coordinate with positionStick?
    });
    inHand = true;
}
function isControllerActive() {
    // I don't think the hydra API provides any reliable way to know whether a particular controller is active. Ask for both.
    controllerActive = (Vec3.length(Controller.getSpatialControlPosition(3)) > 0) || Vec3.length(Controller.getSpatialControlPosition(4)) > 0;
    return controllerActive;
}
function mouseMoveEvent(event) {
    if (event.deviceID) { // Not a MOUSE mouse event, but a (e.g., hydra) mouse event, with x/y that is not meaningful for us.
        resetToHand(); // Can only happen when controller is uncradled, so let's drive with that, resetting our attachement.
        return;
    }
    controllerActive = (Vec3.length(Controller.getSpatialControlPosition(controllerID)) > 0);
    //print("Mouse move with hand controller " + (controllerActive ? "active" : "inactive") + JSON.stringify(event));
    if (controllerActive || !isFighting()) {
    // When a controller like the hydra gives a mouse event, the x/y is not meaningful to us, but we can detect with a truty deviceID
    if (event.deviceID || !isFighting() || isControllerActive()) {
        print('Attempting attachment reset');
        resetToHand();
        return;
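The comments in the hunk above explain the workaround the script relies on: controller devices such as the hydra inject mouse events whose x/y are meaningless but which carry a truthy deviceID, and controller activity itself can only be inferred by polling both hand positions. A minimal sketch of those two checks, assuming the script-side API used above (Controller.getSpatialControlPosition, Vec3.length, event.deviceID); the helper names are illustrative and the controller indices 3 and 4 are taken only from the code shown here:

// Sketch: a controller-injected mouse event has a truthy deviceID, so its x/y
// should be ignored and the sword re-attached to the hand instead.
function isControllerMouseEvent(event) {
    return Boolean(event.deviceID); // 0/undefined for a real mouse
}

// Sketch: with no direct "is active" flag, ask both hands; a zero-length
// spatial position is treated as "controller not active".
function anyHandControllerActive() {
    return (Vec3.length(Controller.getSpatialControlPosition(3)) > 0) ||
           (Vec3.length(Controller.getSpatialControlPosition(4)) > 0);
}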
@ -244,12 +246,20 @@ function cleanUp(leaveButtons) {
}
function makeSword() {
    initControls();
    var swordPosition;
    if (!isControllerActive()) { // Dont' knock yourself with sword
        swordPosition = Vec3.sum(MyAvatar.position, Vec3.multiply(2, Quat.getFront(MyAvatar.orientation)));
    } else if (hand === 'right') {
        swordPosition = MyAvatar.getRightPalmPosition();
    } else {
        swordPosition = MyAvatar.getLeftPalmPosition();
    }
    stickID = Entities.addEntity({
        type: "Model",
        modelURL: swordModel,
        compoundShapeURL: swordCollisionShape,
        dimensions: dimensions,
        position: (hand === 'right') ? MyAvatar.getRightPalmPosition() : MyAvatar.getLeftPalmPosition(), // initial position doesn't matter, as long as it's close
        position: swordPosition,
        rotation: MyAvatar.orientation,
        damping: 0.1,
        collisionSoundURL: swordCollisionSoundURL,
@ -43,9 +43,10 @@ var gMaxGrabDistance;
// elevationAzimuth
var gGrabMode = "xzplane";

// gGrabOffset allows the user to grab an object off-center. It points from ray's intersection
// with the move-plane to object center (at the moment the grab is initiated). Future target positions
// are relative to the ray's intersection by the same offset.
// gGrabOffset allows the user to grab an object off-center. It points from the object's center
// to the point where the ray intersects the grab plane (at the moment the grab is initiated).
// Future target positions of the ray intersection are on the same plane, and the offset is subtracted
// to compute the target position of the object's center.
var gGrabOffset = { x: 0, y: 0, z: 0 };

var gTargetPosition;
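The rewritten comment above pins down the new sign convention for gGrabOffset: it points from the object's center to the grab point on the plane, so the object's target center is recovered by subtracting the offset from each new ray intersection. A short sketch of that convention, assuming the Vec3 helpers used throughout this script; the local variable names here are illustrative only:

// At grab time: offset from the object's center to the grabbed point on the plane.
var grabOffset = Vec3.subtract(pointOnPlane, objectStartPosition);

// Every subsequent frame: the ray's new plane intersection minus the same offset
// yields the target position for the object's center.
var targetPosition = Vec3.subtract(newIntersection, grabOffset);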
@ -152,7 +153,6 @@ function computeNewGrabPlane() {
        maybeResetMousePosition = true;
    }
    gGrabMode = "xzPlane";
    gPointOnPlane = gCurrentPosition;
    gPlaneNormal = { x: 0, y: 1, z: 0 };
    if (gLiftKey) {
        if (!gRotateKey) {
@ -163,7 +163,7 @@ function computeNewGrabPlane() {
        gGrabMode = "rotate";
    }

    gPointOnPlane = Vec3.subtract(gCurrentPosition, gGrabOffset);
    gPointOnPlane = Vec3.sum(gCurrentPosition, gGrabOffset);
    var xzOffset = Vec3.subtract(gPointOnPlane, Camera.getPosition());
    xzOffset.y = 0;
    gXzDistanceToGrab = Vec3.length(xzOffset);
@ -220,8 +220,8 @@ function mousePressEvent(event) {
        nearestPoint = Vec3.multiply(distanceToGrab, pickRay.direction);
        gPointOnPlane = Vec3.sum(cameraPosition, nearestPoint);

        // compute the grab offset
        gGrabOffset = Vec3.subtract(gStartPosition, gPointOnPlane);
        // compute the grab offset (points from object center to point of grab)
        gGrabOffset = Vec3.subtract(gPointOnPlane, gStartPosition);

        computeNewGrabPlane();
@ -258,6 +258,7 @@ function mouseMoveEvent(event) {
    if (Vec3.length(entityProperties.gravity) != 0) {
        gOriginalGravity = entityProperties.gravity;
    }
    gCurrentPosition = entityProperties.position;

    var actionArgs = {};
@ -287,6 +288,7 @@ function mouseMoveEvent(event) {
        var pointOnCylinder = Vec3.multiply(planeNormal, gXzDistanceToGrab);
        pointOnCylinder = Vec3.sum(Camera.getPosition(), pointOnCylinder);
        newTargetPosition = mouseIntersectionWithPlane(pointOnCylinder, planeNormal, event);
        gPointOnPlane = Vec3.sum(newTargetPosition, gGrabOffset);
    } else {
        var cameraPosition = Camera.getPosition();
        newTargetPosition = mouseIntersectionWithPlane(gPointOnPlane, gPlaneNormal, event);
@ -298,7 +300,7 @@ function mouseMoveEvent(event) {
                newTargetPosition = Vec3.sum(relativePosition, cameraPosition);
            }
        }
        gTargetPosition = Vec3.sum(newTargetPosition, gGrabOffset);
        gTargetPosition = Vec3.subtract(newTargetPosition, gGrabOffset);
        actionArgs = {targetPosition: gTargetPosition, linearTimeScale: 0.1};
    }
    gPreviousMouse = { x: event.x, y: event.y };
@ -65,7 +65,6 @@
#include <DependencyManager.h>
#include <EntityScriptingInterface.h>
#include <ErrorDialog.h>
#include <GlowEffect.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <gpu/GLBackend.h>
@ -112,7 +111,6 @@
|
|||
|
||||
#include "avatar/AvatarManager.h"
|
||||
|
||||
#include "audio/AudioIOStatsRenderer.h"
|
||||
#include "audio/AudioScope.h"
|
||||
|
||||
#include "devices/DdeFaceTracker.h"
|
||||
|
@ -270,11 +268,9 @@ bool setupEssentials(int& argc, char** argv) {
|
|||
auto geometryCache = DependencyManager::set<GeometryCache>();
|
||||
auto scriptCache = DependencyManager::set<ScriptCache>();
|
||||
auto soundCache = DependencyManager::set<SoundCache>();
|
||||
auto glowEffect = DependencyManager::set<GlowEffect>();
|
||||
auto faceshift = DependencyManager::set<Faceshift>();
|
||||
auto audio = DependencyManager::set<AudioClient>();
|
||||
auto audioScope = DependencyManager::set<AudioScope>();
|
||||
auto audioIOStatsRenderer = DependencyManager::set<AudioIOStatsRenderer>();
|
||||
auto deferredLightingEffect = DependencyManager::set<DeferredLightingEffect>();
|
||||
auto ambientOcclusionEffect = DependencyManager::set<AmbientOcclusionEffect>();
|
||||
auto textureCache = DependencyManager::set<TextureCache>();
|
||||
|
@ -336,7 +332,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
|
|||
_mousePressed(false),
|
||||
_enableProcessOctreeThread(true),
|
||||
_octreeProcessor(),
|
||||
_nodeBoundsDisplay(this),
|
||||
_runningScriptsWidget(NULL),
|
||||
_runningScriptsWidgetWasVisible(false),
|
||||
_trayIcon(new QSystemTrayIcon(_window)),
|
||||
|
@ -710,6 +705,14 @@ void Application::cleanupBeforeQuit() {
#endif
}

void Application::emptyLocalCache() {
    QNetworkDiskCache* cache = qobject_cast<QNetworkDiskCache*>(NetworkAccessManager::getInstance().cache());
    if (cache) {
        qDebug() << "DiskCacheEditor::clear(): Clearing disk cache.";
        cache->clear();
    }
}

Application::~Application() {
    EntityTree* tree = _entities.getTree();
    tree->lockForWrite();
@ -979,12 +982,16 @@ void Application::paintGL() {
|
|||
} else {
|
||||
PROFILE_RANGE(__FUNCTION__ "/mainRender");
|
||||
|
||||
DependencyManager::get<GlowEffect>()->prepare(&renderArgs);
|
||||
auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
|
||||
GLuint fbo = gpu::GLBackend::getFramebufferID(primaryFBO);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
|
||||
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
|
||||
// Viewport is assigned to the size of the framebuffer
|
||||
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
|
||||
glViewport(0, 0, size.width(), size.height());
|
||||
|
||||
renderArgs._viewport = glm::ivec4(0, 0, size.width(), size.height());
|
||||
|
||||
glMatrixMode(GL_MODELVIEW);
|
||||
glPushMatrix();
|
||||
glLoadIdentity();
|
||||
|
@ -998,8 +1005,7 @@ void Application::paintGL() {
|
|||
|
||||
renderArgs._renderMode = RenderArgs::NORMAL_RENDER_MODE;
|
||||
|
||||
auto finalFbo = DependencyManager::get<GlowEffect>()->render(&renderArgs);
|
||||
|
||||
auto finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
|
||||
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
|
||||
|
@ -1007,6 +1013,8 @@ void Application::paintGL() {
|
|||
0, 0, _glWidget->getDeviceSize().width(), _glWidget->getDeviceSize().height(),
|
||||
GL_COLOR_BUFFER_BIT, GL_LINEAR);
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, 0); // ???
|
||||
|
||||
_compositor.displayOverlayTexture(&renderArgs);
|
||||
}
|
||||
|
@ -1866,7 +1874,7 @@ void Application::idle() {
    }
    // After finishing all of the above work, ensure the idle timer is set to the proper interval,
    // depending on whether we're throttling or not
    idleTimer->start(_glWidget->isThrottleRendering() ? THROTTLED_IDLE_TIMER_DELAY : 0);
    idleTimer->start(_glWidget->isThrottleRendering() ? THROTTLED_IDLE_TIMER_DELAY : 1);
}

// check for any requested background downloads.
@ -2238,10 +2246,6 @@ void Application::init() {
|
|||
_entityClipboardRenderer.setViewFrustum(getViewFrustum());
|
||||
_entityClipboardRenderer.setTree(&_entityClipboard);
|
||||
|
||||
// initialize the GlowEffect with our widget
|
||||
bool glow = Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect);
|
||||
DependencyManager::get<GlowEffect>()->init(glow);
|
||||
|
||||
// Make sure any new sounds are loaded as soon as know about them.
|
||||
connect(tree, &EntityTree::newCollisionSoundURL, DependencyManager::get<SoundCache>().data(), &SoundCache::getSound);
|
||||
connect(_myAvatar, &MyAvatar::newCollisionSoundURL, DependencyManager::get<SoundCache>().data(), &SoundCache::getSound);
|
||||
|
@ -2420,6 +2424,15 @@ void Application::cameraMenuChanged() {
    }
}

void Application::reloadResourceCaches() {
    emptyLocalCache();

    DependencyManager::get<AnimationCache>()->refreshAll();
    DependencyManager::get<GeometryCache>()->refreshAll();
    DependencyManager::get<SoundCache>()->refreshAll();
    DependencyManager::get<TextureCache>()->refreshAll();
}

void Application::rotationModeChanged() {
    if (!Menu::getInstance()->isOptionChecked(MenuOption::CenterPlayerInView)) {
        _myAvatar->setHeadPitch(0);
@ -3168,9 +3181,6 @@ QImage Application::renderAvatarBillboard(RenderArgs* renderArgs) {
|
|||
glClear(GL_COLOR_BUFFER_BIT);
|
||||
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_FALSE);
|
||||
|
||||
// the "glow" here causes an alpha of one
|
||||
Glower glower(renderArgs);
|
||||
|
||||
const int BILLBOARD_SIZE = 64;
|
||||
// TODO: Pass a RenderArgs to renderAvatarBillboard
|
||||
renderRearViewMirror(renderArgs, QRect(0, _glWidget->getDeviceHeight() - BILLBOARD_SIZE,
|
||||
|
@ -3559,25 +3569,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
|
|||
}
|
||||
|
||||
if (!selfAvatarOnly) {
|
||||
_nodeBoundsDisplay.draw();
|
||||
|
||||
// render octree fades if they exist
|
||||
if (_octreeFades.size() > 0) {
|
||||
PerformanceTimer perfTimer("octreeFades");
|
||||
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
|
||||
"Application::displaySide() ... octree fades...");
|
||||
_octreeFadesLock.lockForWrite();
|
||||
for(std::vector<OctreeFade>::iterator fade = _octreeFades.begin(); fade != _octreeFades.end();) {
|
||||
fade->render(renderArgs);
|
||||
if(fade->isDone()) {
|
||||
fade = _octreeFades.erase(fade);
|
||||
} else {
|
||||
++fade;
|
||||
}
|
||||
}
|
||||
_octreeFadesLock.unlock();
|
||||
}
|
||||
|
||||
// give external parties a change to hook in
|
||||
{
|
||||
PerformanceTimer perfTimer("inWorldInterface");
|
||||
|
@ -3710,6 +3701,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
|
|||
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
|
||||
glViewport(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
|
||||
glScissor(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
|
||||
renderArgs->_viewport = glm::ivec4(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
|
||||
} else {
|
||||
// if not rendering the billboard, the region is in device independent coordinates; must convert to device
|
||||
QSize size = DependencyManager::get<TextureCache>()->getFrameBufferSize();
|
||||
|
@ -3717,6 +3709,8 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
|
|||
int x = region.x() * ratio, y = region.y() * ratio, width = region.width() * ratio, height = region.height() * ratio;
|
||||
glViewport(x, size.height() - y - height, width, height);
|
||||
glScissor(x, size.height() - y - height, width, height);
|
||||
|
||||
renderArgs->_viewport = glm::ivec4(x, size.height() - y - height, width, height);
|
||||
}
|
||||
bool updateViewFrustum = false;
|
||||
updateProjectionMatrix(_mirrorCamera, updateViewFrustum);
|
||||
|
@ -3729,6 +3723,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
|
|||
glPopMatrix();
|
||||
|
||||
// reset Viewport and projection matrix
|
||||
renderArgs->_viewport = glm::ivec4(viewport[0], viewport[1], viewport[2], viewport[3]);
|
||||
glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);
|
||||
glDisable(GL_SCISSOR_TEST);
|
||||
updateProjectionMatrix(_myCamera, updateViewFrustum);
|
||||
|
@ -3881,17 +3876,6 @@ void Application::nodeKilled(SharedNodePointer node) {
|
|||
qCDebug(interfaceapp, "model server going away...... v[%f, %f, %f, %f]",
|
||||
(double)rootDetails.x, (double)rootDetails.y, (double)rootDetails.z, (double)rootDetails.s);
|
||||
|
||||
// Add the jurisditionDetails object to the list of "fade outs"
|
||||
if (!Menu::getInstance()->isOptionChecked(MenuOption::DontFadeOnOctreeServerChanges)) {
|
||||
OctreeFade fade(OctreeFade::FADE_OUT, NODE_KILLED_RED, NODE_KILLED_GREEN, NODE_KILLED_BLUE);
|
||||
fade.voxelDetails = rootDetails;
|
||||
const float slightly_smaller = 0.99f;
|
||||
fade.voxelDetails.s = fade.voxelDetails.s * slightly_smaller;
|
||||
_octreeFadesLock.lockForWrite();
|
||||
_octreeFades.push_back(fade);
|
||||
_octreeFadesLock.unlock();
|
||||
}
|
||||
|
||||
// If the model server is going away, remove it from our jurisdiction map so we don't send voxels to a dead server
|
||||
_entityServerJurisdictions.lockForWrite();
|
||||
_entityServerJurisdictions.erase(_entityServerJurisdictions.find(nodeUUID));
|
||||
|
@ -3968,16 +3952,8 @@ int Application::processOctreeStats(NLPacket& packet, SharedNodePointer sendingN
|
|||
qCDebug(interfaceapp, "stats from new %s server... [%f, %f, %f, %f]",
|
||||
qPrintable(serverType),
|
||||
(double)rootDetails.x, (double)rootDetails.y, (double)rootDetails.z, (double)rootDetails.s);
|
||||
|
||||
// Add the jurisditionDetails object to the list of "fade outs"
|
||||
if (!Menu::getInstance()->isOptionChecked(MenuOption::DontFadeOnOctreeServerChanges)) {
|
||||
OctreeFade fade(OctreeFade::FADE_OUT, NODE_ADDED_RED, NODE_ADDED_GREEN, NODE_ADDED_BLUE);
|
||||
fade.voxelDetails = rootDetails;
|
||||
const float slightly_smaller = 0.99f;
|
||||
fade.voxelDetails.s = fade.voxelDetails.s * slightly_smaller;
|
||||
_octreeFadesLock.lockForWrite();
|
||||
_octreeFades.push_back(fade);
|
||||
_octreeFadesLock.unlock();
|
||||
} else {
|
||||
jurisdiction->unlock();
|
||||
}
|
||||
} else {
|
||||
jurisdiction->unlock();
|
||||
|
|
|
@ -59,7 +59,6 @@
|
|||
#include "ui/BandwidthDialog.h"
|
||||
#include "ui/HMDToolsDialog.h"
|
||||
#include "ui/ModelsBrowser.h"
|
||||
#include "ui/NodeBounds.h"
|
||||
#include "ui/OctreeStatsDialog.h"
|
||||
#include "ui/SnapshotShareDialog.h"
|
||||
#include "ui/LodToolsDialog.h"
|
||||
|
@ -71,7 +70,6 @@
|
|||
#include "ui/ToolWindow.h"
|
||||
#include "ui/UserInputMapper.h"
|
||||
#include "devices/KeyboardMouseDevice.h"
|
||||
#include "octree/OctreeFade.h"
|
||||
#include "octree/OctreePacketProcessor.h"
|
||||
#include "UndoStackScriptingInterface.h"
|
||||
|
||||
|
@ -90,13 +88,6 @@ class Node;
|
|||
class ProgramObject;
|
||||
class ScriptEngine;
|
||||
|
||||
static const float NODE_ADDED_RED = 0.0f;
|
||||
static const float NODE_ADDED_GREEN = 1.0f;
|
||||
static const float NODE_ADDED_BLUE = 0.0f;
|
||||
static const float NODE_KILLED_RED = 1.0f;
|
||||
static const float NODE_KILLED_GREEN = 0.0f;
|
||||
static const float NODE_KILLED_BLUE = 0.0f;
|
||||
|
||||
static const QString SNAPSHOT_EXTENSION = ".jpg";
|
||||
static const QString SVO_EXTENSION = ".svo";
|
||||
static const QString SVO_JSON_EXTENSION = ".svo.json";
|
||||
|
@ -313,8 +304,6 @@ public:
|
|||
virtual void endOverrideEnvironmentData() { _environment.endOverride(); }
|
||||
virtual qreal getDevicePixelRatio();
|
||||
|
||||
NodeBounds& getNodeBoundsDisplay() { return _nodeBoundsDisplay; }
|
||||
|
||||
FileLogger* getLogger() { return _logger; }
|
||||
|
||||
glm::vec2 getViewportDimensions() const;
|
||||
|
@ -452,6 +441,8 @@ public slots:
|
|||
void handleDomainConnectionDeniedPacket(QSharedPointer<NLPacket> packet);
|
||||
|
||||
void cameraMenuChanged();
|
||||
|
||||
void reloadResourceCaches();
|
||||
|
||||
private slots:
|
||||
void clearDomainOctreeDetails();
|
||||
|
@ -497,6 +488,8 @@ private:
|
|||
void init();
|
||||
|
||||
void cleanupBeforeQuit();
|
||||
|
||||
void emptyLocalCache();
|
||||
|
||||
void update(float deltaTime);
|
||||
|
||||
|
@ -625,10 +618,6 @@ private:
|
|||
NodeToOctreeSceneStats _octreeServerSceneStats;
|
||||
QReadWriteLock _octreeSceneStatsLock;
|
||||
|
||||
NodeBounds _nodeBoundsDisplay;
|
||||
|
||||
std::vector<OctreeFade> _octreeFades;
|
||||
QReadWriteLock _octreeFadesLock;
|
||||
ControllerScriptingInterface _controllerScriptingInterface;
|
||||
QPointer<LogDialog> _logDialog;
|
||||
QPointer<SnapshotShareDialog> _snapshotShareDialog;
|
||||
|
|
|
@ -17,8 +17,8 @@
#include <OctreeConstants.h>
#include <SimpleMovingAverage.h>

const float DEFAULT_DESKTOP_LOD_DOWN_FPS = 30.0;
const float DEFAULT_HMD_LOD_DOWN_FPS = 60.0;
const float DEFAULT_DESKTOP_LOD_DOWN_FPS = 15.0;
const float DEFAULT_HMD_LOD_DOWN_FPS = 30.0;
const float MAX_LIKELY_DESKTOP_FPS = 59.0; // this is essentially, V-synch - 1 fps
const float MAX_LIKELY_HMD_FPS = 74.0; // this is essentially, V-synch - 1 fps
const float INCREASE_LOD_GAP = 15.0f;
@ -9,6 +9,9 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// include this before QGLWidget, which includes an earlier version of OpenGL
|
||||
#include "InterfaceConfig.h"
|
||||
|
||||
#include <QFileDialog>
|
||||
#include <QMenuBar>
|
||||
#include <QShortcut>
|
||||
|
@ -16,7 +19,6 @@
|
|||
#include <AddressManager.h>
|
||||
#include <AudioClient.h>
|
||||
#include <DependencyManager.h>
|
||||
#include <GlowEffect.h>
|
||||
#include <PathUtils.h>
|
||||
#include <SettingHandle.h>
|
||||
#include <UserActivityLogger.h>
|
||||
|
@ -24,7 +26,6 @@
|
|||
|
||||
#include "Application.h"
|
||||
#include "AccountManager.h"
|
||||
#include "audio/AudioIOStatsRenderer.h"
|
||||
#include "audio/AudioScope.h"
|
||||
#include "avatar/AvatarManager.h"
|
||||
#include "devices/DdeFaceTracker.h"
|
||||
|
@ -37,7 +38,6 @@
|
|||
#include "SpeechRecognizer.h"
|
||||
#endif
|
||||
#include "ui/DialogsManager.h"
|
||||
#include "ui/NodeBounds.h"
|
||||
#include "ui/StandAloneJSConsole.h"
|
||||
#include "InterfaceLogging.h"
|
||||
|
||||
|
@ -248,7 +248,6 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ScriptedMotorControl, 0, true,
|
||||
avatar, SLOT(updateMotionBehavior()));
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::NamesAboveHeads, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::GlowWhenSpeaking, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true);
|
||||
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableCharacterController, 0, true,
|
||||
avatar, SLOT(updateMotionBehavior()));
|
||||
|
@ -256,6 +255,8 @@ Menu::Menu() {
|
|||
avatar, SLOT(updateMotionBehavior()));
|
||||
|
||||
MenuWrapper* viewMenu = addMenu("View");
|
||||
|
||||
addActionToQMenuAndActionHash(viewMenu, MenuOption::ReloadContent, 0, qApp, SLOT(reloadResourceCaches()));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu,
|
||||
MenuOption::Fullscreen,
|
||||
|
@ -314,13 +315,6 @@ Menu::Menu() {
|
|||
qApp,
|
||||
SLOT(setEnable3DTVMode(bool)));
|
||||
|
||||
|
||||
MenuWrapper* nodeBordersMenu = viewMenu->addMenu("Server Borders");
|
||||
NodeBounds& nodeBounds = qApp->getNodeBoundsDisplay();
|
||||
addCheckableActionToQMenuAndActionHash(nodeBordersMenu, MenuOption::ShowBordersEntityNodes,
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_1, false,
|
||||
&nodeBounds, SLOT(setShowEntityNodes(bool)));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::TurnWithHead, 0, false);
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Stats);
|
||||
|
@ -340,7 +334,6 @@ Menu::Menu() {
|
|||
0, // QML Qt::SHIFT | Qt::Key_A,
|
||||
true);
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::AmbientOcclusion);
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::DontFadeOnOctreeServerChanges);
|
||||
|
||||
MenuWrapper* ambientLightMenu = renderOptionsMenu->addMenu(MenuOption::RenderAmbientLight);
|
||||
QActionGroup* ambientLightGroup = new QActionGroup(ambientLightMenu);
|
||||
|
@ -393,8 +386,6 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Stars,
|
||||
0, // QML Qt::Key_Asterisk,
|
||||
true);
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::EnableGlowEffect, 0, true,
|
||||
DependencyManager::get<GlowEffect>().data(), SLOT(toggleGlowEffect(bool)));
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Wireframe, Qt::ALT | Qt::Key_W, false);
|
||||
addActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::LodTools,
|
||||
|
@ -597,18 +588,13 @@ Menu::Menu() {
|
|||
audioScopeFramesGroup->addAction(fiftyFrames);
|
||||
}
|
||||
|
||||
auto statsRenderer = DependencyManager::get<AudioIOStatsRenderer>();
|
||||
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioStats,
|
||||
Qt::CTRL | Qt::SHIFT | Qt::Key_A,
|
||||
false,
|
||||
statsRenderer.data(),
|
||||
SLOT(toggle()));
|
||||
false); //, statsRenderer.data(), SLOT(toggle())); // TODO: convert to dialogbox
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioStatsShowInjectedStreams,
|
||||
0,
|
||||
false,
|
||||
statsRenderer.data(),
|
||||
SLOT(toggleShowInjectedStreams()));
|
||||
false); //, statsRenderer.data(), SLOT(toggleShowInjectedStreams)); // TODO: convert to dialogbox
|
||||
|
||||
|
||||
MenuWrapper* physicsOptionsMenu = developerMenu->addMenu("Physics");
|
||||
|
|
|
@ -179,14 +179,12 @@ namespace MenuOption {
|
|||
const QString DisplayModelElementProxy = "Display Model Element Bounds";
|
||||
const QString DisplayDebugTimingDetails = "Display Timing Details";
|
||||
const QString DontDoPrecisionPicking = "Don't Do Precision Picking";
|
||||
const QString DontFadeOnOctreeServerChanges = "Don't Fade In/Out on Octree Server Changes";
|
||||
const QString DontRenderEntitiesAsScene = "Don't Render Entities as Scene";
|
||||
const QString EchoLocalAudio = "Echo Local Audio";
|
||||
const QString EchoServerAudio = "Echo Server Audio";
|
||||
const QString EditEntitiesHelp = "Edit Entities Help...";
|
||||
const QString Enable3DTVMode = "Enable 3DTV Mode";
|
||||
const QString EnableCharacterController = "Enable avatar collisions";
|
||||
const QString EnableGlowEffect = "Enable Glow Effect";
|
||||
const QString EnableVRMode = "Enable VR Mode";
|
||||
const QString ExpandMyAvatarSimulateTiming = "Expand /myAvatar/simulation";
|
||||
const QString ExpandMyAvatarTiming = "Expand /myAvatar";
|
||||
|
@ -200,7 +198,6 @@ namespace MenuOption {
|
|||
const QString FrameTimer = "Show Timer";
|
||||
const QString Fullscreen = "Fullscreen";
|
||||
const QString FullscreenMirror = "Fullscreen Mirror";
|
||||
const QString GlowWhenSpeaking = "Glow When Speaking";
|
||||
const QString HMDTools = "HMD Tools";
|
||||
const QString IncreaseAvatarSize = "Increase Avatar Size";
|
||||
const QString IndependentMode = "Independent Mode";
|
||||
|
@ -230,6 +227,7 @@ namespace MenuOption {
|
|||
const QString Preferences = "Preferences...";
|
||||
const QString Quit = "Quit";
|
||||
const QString ReloadAllScripts = "Reload All Scripts";
|
||||
const QString ReloadContent = "Reload Content (Clears all caches)";
|
||||
const QString RenderBoundingCollisionShapes = "Show Bounding Collision Shapes";
|
||||
const QString RenderFocusIndicator = "Show Eye Focus";
|
||||
const QString RenderHeadCollisionShapes = "Show Head Collision Shapes";
|
||||
|
|
|
@ -100,23 +100,6 @@ int widthText(float scale, int mono, char const* string) {
|
|||
return textRenderer(mono)->computeExtent(string).x; // computeWidth(string) * (scale / 0.10);
|
||||
}
|
||||
|
||||
void drawText(int x, int y, float scale, float radians, int mono,
|
||||
char const* string, const float* color) {
|
||||
//
|
||||
// Draws text on screen as stroked so it can be resized
|
||||
//
|
||||
glPushMatrix();
|
||||
glTranslatef(static_cast<float>(x), static_cast<float>(y), 0.0f);
|
||||
|
||||
|
||||
glRotated(double(radians * DEGREES_PER_RADIAN), 0.0, 0.0, 1.0);
|
||||
glScalef(scale / 0.1f, scale / 0.1f, 1.0f);
|
||||
|
||||
glm::vec4 colorV4 = {color[0], color[1], color[2], 1.0f };
|
||||
textRenderer(mono)->draw(0, 0, string, colorV4);
|
||||
glPopMatrix();
|
||||
}
|
||||
|
||||
void renderCollisionOverlay(int width, int height, float magnitude, float red, float blue, float green) {
|
||||
const float MIN_VISIBLE_COLLISION = 0.01f;
|
||||
if (magnitude > MIN_VISIBLE_COLLISION) {
|
||||
|
|
|
@ -24,9 +24,6 @@ const glm::vec3 randVector();
|
|||
void renderWorldBox(gpu::Batch& batch);
|
||||
int widthText(float scale, int mono, char const* string);
|
||||
|
||||
void drawText(int x, int y, float scale, float radians, int mono,
|
||||
char const* string, const float* color);
|
||||
|
||||
void renderCollisionOverlay(int width, int height, float magnitude, float red = 0, float blue = 0, float green = 0);
|
||||
|
||||
void runTimingTests();
|
||||
|
|
|
@ -1,242 +0,0 @@
|
|||
//
|
||||
// AudioIOStatsRenderer.cpp
|
||||
// interface/src/audio
|
||||
//
|
||||
// Created by Stephen Birarda on 2014-12-16.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "InterfaceConfig.h"
|
||||
|
||||
#include <AudioClient.h>
|
||||
#include <AudioConstants.h>
|
||||
#include <AudioIOStats.h>
|
||||
#include <DependencyManager.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <NodeList.h>
|
||||
#include <Util.h>
|
||||
|
||||
#include "AudioIOStatsRenderer.h"
|
||||
|
||||
AudioIOStatsRenderer::AudioIOStatsRenderer() :
|
||||
_stats(NULL),
|
||||
_isEnabled(false),
|
||||
_shouldShowInjectedStreams(false)
|
||||
{
|
||||
// grab the stats object from the audio I/O singleton
|
||||
_stats = &DependencyManager::get<AudioClient>()->getStats();
|
||||
}
|
||||
|
||||
#ifdef _WIN32
|
||||
const unsigned int STATS_WIDTH = 1500;
|
||||
#else
|
||||
const unsigned int STATS_WIDTH = 650;
|
||||
#endif
|
||||
const unsigned int STATS_HEIGHT_PER_LINE = 20;
|
||||
|
||||
void AudioIOStatsRenderer::render(const float* color, int width, int height) {
|
||||
if (!_isEnabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
const int linesWhenCentered = _shouldShowInjectedStreams ? 34 : 27;
|
||||
const int CENTERED_BACKGROUND_HEIGHT = STATS_HEIGHT_PER_LINE * linesWhenCentered;
|
||||
|
||||
int lines = _shouldShowInjectedStreams ? _stats->getMixerInjectedStreamStatsMap().size() * 7 + 27 : 27;
|
||||
int statsHeight = STATS_HEIGHT_PER_LINE * lines;
|
||||
|
||||
|
||||
static const glm::vec4 backgroundColor = { 0.2f, 0.2f, 0.2f, 0.6f };
|
||||
int x = std::max((width - (int)STATS_WIDTH) / 2, 0);
|
||||
int y = std::max((height - CENTERED_BACKGROUND_HEIGHT) / 2, 0);
|
||||
int w = STATS_WIDTH;
|
||||
int h = statsHeight;
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(x, y, w, h, backgroundColor);
|
||||
|
||||
int horizontalOffset = x + 5;
|
||||
int verticalOffset = y;
|
||||
|
||||
float scale = 0.10f;
|
||||
float rotation = 0.0f;
|
||||
int font = 2;
|
||||
|
||||
char latencyStatString[512];
|
||||
|
||||
float audioInputBufferLatency = 0.0f, inputRingBufferLatency = 0.0f, networkRoundtripLatency = 0.0f, mixerRingBufferLatency = 0.0f, outputRingBufferLatency = 0.0f, audioOutputBufferLatency = 0.0f;
|
||||
|
||||
AudioStreamStats downstreamAudioStreamStats = _stats->getMixerDownstreamStats();
|
||||
SharedNodePointer audioMixerNodePointer = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AudioMixer);
|
||||
if (!audioMixerNodePointer.isNull()) {
|
||||
audioInputBufferLatency = _stats->getAudioInputMsecsReadStats().getWindowAverage();
|
||||
inputRingBufferLatency = (float) _stats->getInputRungBufferMsecsAvailableStats().getWindowAverage();
|
||||
networkRoundtripLatency = audioMixerNodePointer->getPingMs();
|
||||
mixerRingBufferLatency = _stats->getMixerAvatarStreamStats()._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
|
||||
outputRingBufferLatency = downstreamAudioStreamStats._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
|
||||
audioOutputBufferLatency = _stats->getAudioOutputMsecsUnplayedStats().getWindowAverage();
|
||||
}
|
||||
float totalLatency = audioInputBufferLatency + inputRingBufferLatency + networkRoundtripLatency + mixerRingBufferLatency + outputRingBufferLatency + audioOutputBufferLatency;
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" Audio input buffer: %7.2fms - avg msecs of samples read to the input ring buffer in last 10s",
|
||||
(double)audioInputBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" Input ring buffer: %7.2fms - avg msecs of samples in input ring buffer in last 10s",
|
||||
(double)inputRingBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" Network to mixer: %7.2fms - half of last ping value calculated by the node list",
|
||||
(double)(networkRoundtripLatency / 2.0f));
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" AudioMixer ring buffer: %7.2fms - avg msecs of samples in audio mixer's ring buffer in last 10s",
|
||||
(double)mixerRingBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" Network to client: %7.2fms - half of last ping value calculated by the node list",
|
||||
(double)(networkRoundtripLatency / 2.0f));
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" Output ring buffer: %7.2fms - avg msecs of samples in output ring buffer in last 10s",
|
||||
(double)outputRingBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString,
|
||||
" Audio output buffer: %7.2fms - avg msecs of samples in audio output buffer in last 10s",
|
||||
(double)audioOutputBufferLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
sprintf(latencyStatString, " TOTAL: %7.2fms\n", (double)totalLatency);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, latencyStatString, color);
|
||||
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
|
||||
|
||||
char clientUpstreamMicLabelString[] = "Upstream Mic Audio Packets Sent Gaps (by client):";
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, clientUpstreamMicLabelString, color);
|
||||
|
||||
const MovingMinMaxAvg<quint64>& packetSentTimeGaps = _stats->getPacketSentTimeGaps();
|
||||
|
||||
char stringBuffer[512];
|
||||
sprintf(stringBuffer, " Inter-packet timegaps (overall) | min: %9s, max: %9s, avg: %9s",
|
||||
formatUsecTime(packetSentTimeGaps.getMin()).toLatin1().data(),
|
||||
formatUsecTime(packetSentTimeGaps.getMax()).toLatin1().data(),
|
||||
formatUsecTime(packetSentTimeGaps.getAverage()).toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
sprintf(stringBuffer, " Inter-packet timegaps (last 30s) | min: %9s, max: %9s, avg: %9s",
|
||||
formatUsecTime(packetSentTimeGaps.getWindowMin()).toLatin1().data(),
|
||||
formatUsecTime(packetSentTimeGaps.getWindowMax()).toLatin1().data(),
|
||||
formatUsecTime(packetSentTimeGaps.getWindowAverage()).toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
|
||||
|
||||
char upstreamMicLabelString[] = "Upstream mic audio stats (received and reported by audio-mixer):";
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, upstreamMicLabelString, color);
|
||||
|
||||
renderAudioStreamStats(&_stats->getMixerAvatarStreamStats(), horizontalOffset, verticalOffset,
|
||||
scale, rotation, font, color);
|
||||
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
|
||||
|
||||
char downstreamLabelString[] = "Downstream mixed audio stats:";
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, downstreamLabelString, color);
|
||||
|
||||
AudioStreamStats downstreamStats = _stats->getMixerDownstreamStats();
|
||||
renderAudioStreamStats(&downstreamStats, horizontalOffset, verticalOffset,
|
||||
scale, rotation, font, color, true);
|
||||
|
||||
|
||||
if (_shouldShowInjectedStreams) {
|
||||
|
||||
foreach(const AudioStreamStats& injectedStreamAudioStats, _stats->getMixerInjectedStreamStatsMap()) {
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE; // blank line
|
||||
|
||||
char upstreamInjectedLabelString[512];
|
||||
sprintf(upstreamInjectedLabelString, "Upstream injected audio stats: stream ID: %s",
|
||||
injectedStreamAudioStats._streamIdentifier.toString().toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, upstreamInjectedLabelString, color);
|
||||
|
||||
renderAudioStreamStats(&injectedStreamAudioStats, horizontalOffset, verticalOffset, scale, rotation, font, color);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AudioIOStatsRenderer::renderAudioStreamStats(const AudioStreamStats* streamStats, int horizontalOffset, int& verticalOffset,
|
||||
float scale, float rotation, int font, const float* color, bool isDownstreamStats) {
|
||||
|
||||
char stringBuffer[512];
|
||||
|
||||
sprintf(stringBuffer, " Packet loss | overall: %5.2f%% (%d lost), last_30s: %5.2f%% (%d lost)",
|
||||
(double)(streamStats->_packetStreamStats.getLostRate() * 100.0f),
|
||||
streamStats->_packetStreamStats._lost,
|
||||
(double)(streamStats->_packetStreamWindowStats.getLostRate() * 100.0f),
|
||||
streamStats->_packetStreamWindowStats._lost);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
if (isDownstreamStats) {
|
||||
sprintf(stringBuffer, " Ringbuffer frames | desired: %u, avg_available(10s): %u+%d, available: %u+%d",
|
||||
streamStats->_desiredJitterBufferFrames,
|
||||
streamStats->_framesAvailableAverage,
|
||||
(int)((float)_stats->getAudioInputMsecsReadStats().getWindowAverage() / AudioConstants::NETWORK_FRAME_MSECS),
|
||||
streamStats->_framesAvailable,
|
||||
(int)(_stats->getAudioOutputMsecsUnplayedStats().getCurrentIntervalLastSample()
|
||||
/ AudioConstants::NETWORK_FRAME_MSECS));
|
||||
} else {
|
||||
sprintf(stringBuffer, " Ringbuffer frames | desired: %u, avg_available(10s): %u, available: %u",
|
||||
streamStats->_desiredJitterBufferFrames,
|
||||
streamStats->_framesAvailableAverage,
|
||||
streamStats->_framesAvailable);
|
||||
}
|
||||
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
sprintf(stringBuffer, " Ringbuffer stats | starves: %u, prev_starve_lasted: %u, frames_dropped: %u, overflows: %u",
|
||||
streamStats->_starveCount,
|
||||
streamStats->_consecutiveNotMixedCount,
|
||||
streamStats->_framesDropped,
|
||||
streamStats->_overflowCount);
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
sprintf(stringBuffer, " Inter-packet timegaps (overall) | min: %9s, max: %9s, avg: %9s",
|
||||
formatUsecTime(streamStats->_timeGapMin).toLatin1().data(),
|
||||
formatUsecTime(streamStats->_timeGapMax).toLatin1().data(),
|
||||
formatUsecTime(streamStats->_timeGapAverage).toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
|
||||
sprintf(stringBuffer, " Inter-packet timegaps (last 30s) | min: %9s, max: %9s, avg: %9s",
|
||||
formatUsecTime(streamStats->_timeGapWindowMin).toLatin1().data(),
|
||||
formatUsecTime(streamStats->_timeGapWindowMax).toLatin1().data(),
|
||||
formatUsecTime(streamStats->_timeGapWindowAverage).toLatin1().data());
|
||||
verticalOffset += STATS_HEIGHT_PER_LINE;
|
||||
drawText(horizontalOffset, verticalOffset, scale, rotation, font, stringBuffer, color);
|
||||
}
|
|
@ -1,45 +0,0 @@
|
|||
//
|
||||
// AudioIOStatsRenderer.h
|
||||
// interface/src/audio
|
||||
//
|
||||
// Created by Stephen Birarda on 2014-12-16.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_AudioIOStatsRenderer_h
|
||||
#define hifi_AudioIOStatsRenderer_h
|
||||
|
||||
#include <QObject>
|
||||
|
||||
#include <DependencyManager.h>
|
||||
|
||||
class AudioIOStats;
|
||||
class AudioStreamStats;
|
||||
|
||||
class AudioIOStatsRenderer : public QObject, public Dependency {
|
||||
Q_OBJECT
|
||||
SINGLETON_DEPENDENCY
|
||||
public:
|
||||
void render(const float* color, int width, int height);
|
||||
|
||||
public slots:
|
||||
void toggle() { _isEnabled = !_isEnabled; }
|
||||
void toggleShowInjectedStreams() { _shouldShowInjectedStreams = !_shouldShowInjectedStreams; }
|
||||
protected:
|
||||
AudioIOStatsRenderer();
|
||||
private:
|
||||
// audio stats methods for rendering
|
||||
void renderAudioStreamStats(const AudioStreamStats* streamStats, int horizontalOffset, int& verticalOffset,
|
||||
float scale, float rotation, int font, const float* color, bool isDownstreamStats = false);
|
||||
|
||||
const AudioIOStats* _stats;
|
||||
|
||||
bool _isEnabled;
|
||||
bool _shouldShowInjectedStreams;
|
||||
};
|
||||
|
||||
|
||||
#endif // hifi_AudioIOStatsRenderer_h
|
|
@ -24,7 +24,6 @@
|
|||
|
||||
#include <DeferredLightingEffect.h>
|
||||
#include <GeometryUtil.h>
|
||||
#include <GlowEffect.h>
|
||||
#include <LODManager.h>
|
||||
#include <NodeList.h>
|
||||
#include <NumericalConstants.h>
|
||||
|
@ -410,9 +409,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, boo
|
|||
float GLOW_FROM_AVERAGE_LOUDNESS = ((this == DependencyManager::get<AvatarManager>()->getMyAvatar())
|
||||
? 0.0f
|
||||
: MAX_GLOW * getHeadData()->getAudioLoudness() / GLOW_MAX_LOUDNESS);
|
||||
if (!Menu::getInstance()->isOptionChecked(MenuOption::GlowWhenSpeaking)) {
|
||||
GLOW_FROM_AVERAGE_LOUDNESS = 0.0f;
|
||||
}
|
||||
GLOW_FROM_AVERAGE_LOUDNESS = 0.0f;
|
||||
|
||||
float glowLevel = _moving && distanceToTarget > GLOW_DISTANCE && renderArgs->_renderMode == RenderArgs::NORMAL_RENDER_MODE
|
||||
? 1.0f
|
||||
|
@ -579,8 +576,6 @@ void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool
|
|||
fixupModelsInScene();
|
||||
|
||||
{
|
||||
Glower glower(renderArgs, glowLevel);
|
||||
|
||||
if (_shouldRenderBillboard || !(_skeletonModel.isRenderable() && getHead()->getFaceModel().isRenderable())) {
|
||||
if (postLighting || renderArgs->_renderMode == RenderArgs::SHADOW_RENDER_MODE) {
|
||||
// render the billboard until both models are loaded
|
||||
|
@ -636,7 +631,7 @@ void Avatar::renderBillboard(RenderArgs* renderArgs) {
|
|||
_billboardTexture = DependencyManager::get<TextureCache>()->getTexture(
|
||||
uniqueUrl, DEFAULT_TEXTURE, false, _billboard);
|
||||
}
|
||||
if (!_billboardTexture->isLoaded()) {
|
||||
if (!_billboardTexture || !_billboardTexture->isLoaded()) {
|
||||
return;
|
||||
}
|
||||
// rotate about vertical to face the camera
|
||||
|
|
|
@ -25,7 +25,6 @@
|
|||
#endif
|
||||
|
||||
|
||||
#include <GlowEffect.h>
|
||||
#include <PerfStat.h>
|
||||
#include <RegisteredMetaTypes.h>
|
||||
#include <UUID.h>
|
||||
|
|
|
@ -56,18 +56,14 @@ void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBX
|
|||
glm::translate(state.getDefaultTranslationInConstrainedFrame()) *
|
||||
joint.preTransform * glm::mat4_cast(joint.preRotation)));
|
||||
glm::vec3 pitchYawRoll = safeEulerAngles(_owningHead->getFinalOrientationInLocalFrame());
|
||||
if (owningAvatar->isMyAvatar()) {
|
||||
glm::vec3 lean = glm::radians(glm::vec3(_owningHead->getFinalLeanForward(),
|
||||
_owningHead->getTorsoTwist(),
|
||||
_owningHead->getFinalLeanSideways()));
|
||||
pitchYawRoll -= lean;
|
||||
}
|
||||
|
||||
glm::vec3 lean = glm::radians(glm::vec3(_owningHead->getFinalLeanForward(),
|
||||
_owningHead->getTorsoTwist(),
|
||||
_owningHead->getFinalLeanSideways()));
|
||||
pitchYawRoll -= lean;
|
||||
state.setRotationInConstrainedFrame(glm::angleAxis(-pitchYawRoll.z, glm::normalize(inverse * axes[2]))
|
||||
* glm::angleAxis(pitchYawRoll.y, glm::normalize(inverse * axes[1]))
|
||||
* glm::angleAxis(-pitchYawRoll.x, glm::normalize(inverse * axes[0]))
|
||||
* joint.rotation, DEFAULT_PRIORITY);
|
||||
|
||||
}
|
||||
|
||||
void FaceModel::maybeUpdateEyeRotation(Model* model, const JointState& parentState, const FBXJoint& joint, JointState& state) {
|
||||
|
|
|
@ -323,28 +323,6 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
|
|||
}
|
||||
|
||||
|
||||
void MyAvatar::renderDebugBodyPoints() {
|
||||
glm::vec3 torsoPosition(getPosition());
|
||||
glm::vec3 headPosition(getHead()->getEyePosition());
|
||||
float torsoToHead = glm::length(headPosition - torsoPosition);
|
||||
glm::vec3 position;
|
||||
qCDebug(interfaceapp, "head-above-torso %.2f, scale = %0.2f", (double)torsoToHead, (double)getScale());
|
||||
|
||||
// Torso Sphere
|
||||
position = torsoPosition;
|
||||
glPushMatrix();
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
DependencyManager::get<GeometryCache>()->renderSphere(0.2f, 10.0f, 10.0f, glm::vec4(0, 1, 0, .5f));
|
||||
glPopMatrix();
|
||||
|
||||
// Head Sphere
|
||||
position = headPosition;
|
||||
glPushMatrix();
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
DependencyManager::get<GeometryCache>()->renderSphere(0.15f, 10.0f, 10.0f, glm::vec4(0, 1, 0, .5f));
|
||||
glPopMatrix();
|
||||
}
|
||||
|
||||
// virtual
|
||||
void MyAvatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, bool postLighting) {
|
||||
// don't render if we've been asked to disable local rendering
|
||||
|
@ -355,8 +333,9 @@ void MyAvatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, b
|
|||
Avatar::render(renderArgs, cameraPosition, postLighting);
|
||||
|
||||
// don't display IK constraints in shadow mode
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::ShowIKConstraints) && postLighting) {
|
||||
_skeletonModel.renderIKConstraints();
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::ShowIKConstraints) &&
|
||||
renderArgs && renderArgs->_batch) {
|
||||
_skeletonModel.renderIKConstraints(*renderArgs->_batch);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -48,7 +48,6 @@ public:
|
|||
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPosition, bool postLighting = false) override;
|
||||
virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, bool postLighting, float glowLevel = 0.0f) override;
|
||||
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const override;
|
||||
void renderDebugBodyPoints();
|
||||
|
||||
// setters
|
||||
void setLeanScale(float scale) { _leanScale = scale; }
|
||||
|
|
|
@ -186,9 +186,9 @@ void SkeletonModel::getHandShapes(int jointIndex, QVector<const Shape*>& shapes)
|
|||
}
|
||||
}
|
||||
|
||||
void SkeletonModel::renderIKConstraints() {
|
||||
renderJointConstraints(getRightHandJointIndex());
|
||||
renderJointConstraints(getLeftHandJointIndex());
|
||||
void SkeletonModel::renderIKConstraints(gpu::Batch& batch) {
|
||||
renderJointConstraints(batch, getRightHandJointIndex());
|
||||
renderJointConstraints(batch, getLeftHandJointIndex());
|
||||
}
|
||||
|
||||
class IndexValue {
|
||||
|
@ -312,26 +312,27 @@ void SkeletonModel::maybeUpdateEyeRotation(const JointState& parentState, const
|
|||
_owningAvatar->getHead()->getFaceModel().maybeUpdateEyeRotation(this, parentState, joint, state);
|
||||
}
|
||||
|
||||
void SkeletonModel::renderJointConstraints(int jointIndex) {
|
||||
void SkeletonModel::renderJointConstraints(gpu::Batch& batch, int jointIndex) {
|
||||
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
|
||||
return;
|
||||
}
|
||||
const FBXGeometry& geometry = _geometry->getFBXGeometry();
|
||||
const float BASE_DIRECTION_SIZE = 0.3f;
|
||||
float directionSize = BASE_DIRECTION_SIZE * extractUniformScale(_scale);
|
||||
glLineWidth(3.0f);
|
||||
batch._glLineWidth(3.0f);
|
||||
do {
|
||||
const FBXJoint& joint = geometry.joints.at(jointIndex);
|
||||
const JointState& jointState = _jointStates.at(jointIndex);
|
||||
glm::vec3 position = _rotation * jointState.getPosition() + _translation;
|
||||
|
||||
glPushMatrix();
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
glm::quat parentRotation = (joint.parentIndex == -1) ? _rotation : _rotation * _jointStates.at(joint.parentIndex).getRotation();
|
||||
glm::vec3 rotationAxis = glm::axis(parentRotation);
|
||||
glRotatef(glm::degrees(glm::angle(parentRotation)), rotationAxis.x, rotationAxis.y, rotationAxis.z);
|
||||
float fanScale = directionSize * 0.75f;
|
||||
glScalef(fanScale, fanScale, fanScale);
|
||||
|
||||
Transform transform = Transform();
|
||||
transform.setTranslation(position);
|
||||
transform.setRotation(parentRotation);
|
||||
transform.setScale(fanScale);
|
||||
batch.setModelTransform(transform);
|
||||
|
||||
const int AXIS_COUNT = 3;
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
@ -362,17 +363,14 @@ void SkeletonModel::renderJointConstraints(int jointIndex) {
|
|||
// TODO: this is really inefficient constantly recreating these vertices buffers. It would be
|
||||
// better if the skeleton model cached these buffers for each of the joints they are rendering
|
||||
geometryCache->updateVertices(_triangleFanID, points, color);
|
||||
geometryCache->renderVertices(gpu::TRIANGLE_FAN, _triangleFanID);
|
||||
geometryCache->renderVertices(batch, gpu::TRIANGLE_FAN, _triangleFanID);
|
||||
|
||||
}
|
||||
glPopMatrix();
|
||||
|
||||
renderOrientationDirections(jointIndex, position, _rotation * jointState.getRotation(), directionSize);
|
||||
jointIndex = joint.parentIndex;
|
||||
|
||||
} while (jointIndex != -1 && geometry.joints.at(jointIndex).isFree);
|
||||
|
||||
glLineWidth(1.0f);
|
||||
}
|
||||
|
||||
void SkeletonModel::renderOrientationDirections(int jointIndex, glm::vec3 position, const glm::quat& orientation, float size) {
|
||||
|
|
|
@ -36,7 +36,7 @@ public:
|
|||
/// \param shapes[out] list in which is stored pointers to hand shapes
|
||||
void getHandShapes(int jointIndex, QVector<const Shape*>& shapes) const;
|
||||
|
||||
void renderIKConstraints();
|
||||
void renderIKConstraints(gpu::Batch& batch);
|
||||
|
||||
/// Returns the index of the left hand joint, or -1 if not found.
|
||||
int getLeftHandJointIndex() const { return isActive() ? _geometry->getFBXGeometry().leftHandJointIndex : -1; }
|
||||
|
@ -144,7 +144,7 @@ protected:
|
|||
|
||||
private:
|
||||
|
||||
void renderJointConstraints(int jointIndex);
|
||||
void renderJointConstraints(gpu::Batch& batch, int jointIndex);
|
||||
void renderOrientationDirections(int jointIndex, glm::vec3 position, const glm::quat& orientation, float size);
|
||||
|
||||
struct OrientationLineIDs {
|
||||
|
|
|
@ -24,7 +24,6 @@
|
|||
|
||||
#include <avatar/AvatarManager.h>
|
||||
#include <avatar/MyAvatar.h>
|
||||
#include <GlowEffect.h>
|
||||
#include <GlWindow.h>
|
||||
#include <gpu/GLBackend.h>
|
||||
#include <OglplusHelpers.h>
|
||||
|
@ -644,15 +643,9 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
|
|||
return;
|
||||
}
|
||||
|
||||
//Bind our framebuffer object. If we are rendering the glow effect, we let the glow effect shader take care of it
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
|
||||
DependencyManager::get<GlowEffect>()->prepare(renderArgs);
|
||||
} else {
|
||||
auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
|
||||
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFBO));
|
||||
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
}
|
||||
|
||||
auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
|
||||
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFBO));
|
||||
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
glPushMatrix();
|
||||
|
@ -714,6 +707,7 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
|
|||
vp.Size.w = _recommendedTexSize.w * _offscreenRenderScale;
|
||||
glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
|
||||
|
||||
renderArgs->_viewport = glm::ivec4(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
|
||||
renderArgs->_renderSide = RenderArgs::MONO;
|
||||
qApp->displaySide(renderArgs, *_camera);
|
||||
qApp->getApplicationCompositor().displayOverlayTextureHmd(renderArgs, eye);
|
||||
|
@ -723,15 +717,8 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
|
|||
glPopMatrix();
|
||||
|
||||
gpu::FramebufferPointer finalFbo;
|
||||
//Bind the output texture from the glow shader. If glow effect is disabled, we just grab the texture
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
|
||||
//Full texture viewport for glow effect
|
||||
glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);
|
||||
finalFbo = DependencyManager::get<GlowEffect>()->render(renderArgs);
|
||||
} else {
|
||||
finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
|
||||
glBindFramebuffer(GL_FRAMEBUFFER, 0);
|
||||
}
|
||||
finalFbo = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
|
||||
glBindFramebuffer(GL_FRAMEBUFFER, 0);
|
||||
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
glPopMatrix();
|
||||
|
@ -824,7 +811,6 @@ glm::quat OculusManager::getOrientation() {
|
|||
return toGlm(trackingState.HeadPose.ThePose.Orientation);
|
||||
}
|
||||
|
||||
//Used to set the size of the glow framebuffers
|
||||
QSize OculusManager::getRenderTargetSize() {
|
||||
QSize rv;
|
||||
rv.setWidth(_renderTargetSize.w);
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
#include <glm/glm.hpp>
|
||||
#include <glm/gtc/type_ptr.hpp>
|
||||
|
||||
#include <GlowEffect.h>
|
||||
#include "gpu/GLBackend.h"
|
||||
#include "Application.h"
|
||||
|
||||
|
@ -82,6 +81,9 @@ void TV3DManager::configureCamera(Camera& whichCamera, int screenWidth, int scre
|
|||
}
|
||||
|
||||
void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
|
||||
|
||||
#ifdef THIS_CURRENTLY_BROKEN_WAITING_FOR_DISPLAY_PLUGINS
|
||||
|
||||
double nearZ = DEFAULT_NEAR_CLIP; // near clipping plane
|
||||
double farZ = DEFAULT_FAR_CLIP; // far clipping plane
|
||||
|
||||
|
@ -94,6 +96,7 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
|
|||
int portalH = deviceSize.height();
|
||||
|
||||
|
||||
// FIXME - glow effect is removed, 3D TV mode broken until we get display plugins working
|
||||
DependencyManager::get<GlowEffect>()->prepare(renderArgs);
|
||||
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
|
||||
|
@ -107,6 +110,7 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
|
|||
_activeEye = &eye;
|
||||
glViewport(portalX, portalY, portalW, portalH);
|
||||
glScissor(portalX, portalY, portalW, portalH);
|
||||
renderArgs->_viewport = glm::ivec4(portalX, portalY, portalW, portalH);
|
||||
|
||||
glm::mat4 projection = glm::frustum<float>(eye.left, eye.right, eye.bottom, eye.top, nearZ, farZ);
|
||||
projection = glm::translate(projection, vec3(eye.modelTranslation, 0, 0));
|
||||
|
@ -118,6 +122,7 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
|
|||
glMatrixMode(GL_MODELVIEW);
|
||||
glLoadIdentity();
|
||||
renderArgs->_renderSide = RenderArgs::MONO;
|
||||
|
||||
qApp->displaySide(renderArgs, eyeCamera, false);
|
||||
qApp->getApplicationCompositor().displayOverlayTexture(renderArgs);
|
||||
_activeEye = NULL;
|
||||
|
@ -128,6 +133,7 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
|
|||
glPopMatrix();
|
||||
glDisable(GL_SCISSOR_TEST);
|
||||
|
||||
// FIXME - glow effect is removed, 3D TV mode broken until we get display plugins working
|
||||
auto finalFbo = DependencyManager::get<GlowEffect>()->render(renderArgs);
|
||||
auto fboSize = finalFbo->getSize();
|
||||
// Get the ACTUAL device size for the BLIT
|
||||
|
@ -142,6 +148,8 @@ void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {
|
|||
|
||||
// reset the viewport to how we started
|
||||
glViewport(0, 0, deviceSize.width(), deviceSize.height());
|
||||
|
||||
#endif
|
||||
}
|
||||
|
||||
void TV3DManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
|
||||
|
|
|
@ -1,68 +0,0 @@
//
// OctreeFade.cpp
// interface/src/octree
//
// Created by Brad Hefta-Gaub on 8/6/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "InterfaceConfig.h"

#include <GlowEffect.h>
#include <GeometryCache.h>
#include <OctreeConstants.h>

#include "Application.h"
#include "OctreeFade.h"

const float OctreeFade::FADE_OUT_START = 0.5f;
const float OctreeFade::FADE_OUT_END = 0.05f;
const float OctreeFade::FADE_OUT_STEP = 0.9f;
const float OctreeFade::FADE_IN_START = 0.05f;
const float OctreeFade::FADE_IN_END = 0.5f;
const float OctreeFade::FADE_IN_STEP = 1.1f;
const float OctreeFade::DEFAULT_RED = 0.5f;
const float OctreeFade::DEFAULT_GREEN = 0.5f;
const float OctreeFade::DEFAULT_BLUE = 0.5f;

OctreeFade::OctreeFade(FadeDirection direction, float red, float green, float blue) :
direction(direction),
red(red),
green(green),
blue(blue)
{
opacity = (direction == FADE_OUT) ? FADE_OUT_START : FADE_IN_START;
}

void OctreeFade::render(RenderArgs* renderArgs) {
DependencyManager::get<GlowEffect>()->begin(renderArgs);

glDisable(GL_LIGHTING);
glPushMatrix();
glScalef(1.0f, 1.0f, 1.0f);
glTranslatef(voxelDetails.x + voxelDetails.s * 0.5f,
voxelDetails.y + voxelDetails.s * 0.5f,
voxelDetails.z + voxelDetails.s * 0.5f);
glLineWidth(1.0f);
DependencyManager::get<GeometryCache>()->renderSolidCube(voxelDetails.s, glm::vec4(red, green, blue, opacity));
glLineWidth(1.0f);
glPopMatrix();
glEnable(GL_LIGHTING);

DependencyManager::get<GlowEffect>()->end(renderArgs);

opacity *= (direction == FADE_OUT) ? FADE_OUT_STEP : FADE_IN_STEP;
}

bool OctreeFade::isDone() const {
if (direction == FADE_OUT) {
return opacity <= FADE_OUT_END;
} else {
return opacity >= FADE_IN_END;
}
return true; // unexpected case, assume we're done
}
@ -1,46 +0,0 @@
//
// OctreeFade.h
// interface/src/octree
//
// Created by Brad Hefta-Gaub on 8/6/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_OctreeFade_h
#define hifi_OctreeFade_h

#include <OctalCode.h> // for VoxelPositionSize

class OctreeFade {
public:

enum FadeDirection { FADE_OUT, FADE_IN};
static const float FADE_OUT_START;
static const float FADE_OUT_END;
static const float FADE_OUT_STEP;
static const float FADE_IN_START;
static const float FADE_IN_END;
static const float FADE_IN_STEP;
static const float DEFAULT_RED;
static const float DEFAULT_GREEN;
static const float DEFAULT_BLUE;

VoxelPositionSize voxelDetails;
FadeDirection direction;
float opacity;

float red;
float green;
float blue;

OctreeFade(FadeDirection direction = FADE_OUT, float red = DEFAULT_RED,
float green = DEFAULT_GREEN, float blue = DEFAULT_BLUE);

void render(RenderArgs* renderArgs);
bool isDone() const;
};

#endif // hifi_OctreeFade_h
@ -206,6 +206,7 @@ void ApplicationCompositor::displayOverlayTexture(RenderArgs* renderArgs) {
|
|||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
geometryCache->useSimpleDrawPipeline(batch);
|
||||
batch.setViewportTransform(glm::ivec4(0, 0, deviceSize.width(), deviceSize.height()));
|
||||
batch.setModelTransform(Transform());
|
||||
batch.setViewTransform(Transform());
|
||||
batch.setProjectionTransform(mat4());
|
||||
|
@ -535,75 +536,6 @@ void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
|
|||
}
|
||||
}
|
||||
|
||||
//Renders a small magnification of the currently bound texture at the coordinates
|
||||
void ApplicationCompositor::renderMagnifier(gpu::Batch& batch, const glm::vec2& magPos, float sizeMult, bool showBorder) {
|
||||
if (!_magnifier) {
|
||||
return;
|
||||
}
|
||||
auto canvasSize = qApp->getCanvasSize();
|
||||
|
||||
const int widgetWidth = canvasSize.x;
|
||||
const int widgetHeight = canvasSize.y;
|
||||
|
||||
const float halfWidth = (MAGNIFY_WIDTH / _textureAspectRatio) * sizeMult / 2.0f;
|
||||
const float halfHeight = MAGNIFY_HEIGHT * sizeMult / 2.0f;
|
||||
// Magnification Texture Coordinates
|
||||
const float magnifyULeft = (magPos.x - halfWidth) / (float)widgetWidth;
|
||||
const float magnifyURight = (magPos.x + halfWidth) / (float)widgetWidth;
|
||||
const float magnifyVTop = 1.0f - (magPos.y - halfHeight) / (float)widgetHeight;
|
||||
const float magnifyVBottom = 1.0f - (magPos.y + halfHeight) / (float)widgetHeight;
|
||||
|
||||
const float newHalfWidth = halfWidth * MAGNIFY_MULT;
|
||||
const float newHalfHeight = halfHeight * MAGNIFY_MULT;
|
||||
//Get yaw / pitch value for the corners
|
||||
const glm::vec2 topLeftYawPitch = overlayToSpherical(glm::vec2(magPos.x - newHalfWidth,
|
||||
magPos.y - newHalfHeight));
|
||||
const glm::vec2 bottomRightYawPitch = overlayToSpherical(glm::vec2(magPos.x + newHalfWidth,
|
||||
magPos.y + newHalfHeight));
|
||||
|
||||
const glm::vec3 bottomLeft = getPoint(topLeftYawPitch.x, bottomRightYawPitch.y);
|
||||
const glm::vec3 bottomRight = getPoint(bottomRightYawPitch.x, bottomRightYawPitch.y);
|
||||
const glm::vec3 topLeft = getPoint(topLeftYawPitch.x, topLeftYawPitch.y);
|
||||
const glm::vec3 topRight = getPoint(bottomRightYawPitch.x, topLeftYawPitch.y);
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
if (bottomLeft != _previousMagnifierBottomLeft || bottomRight != _previousMagnifierBottomRight
|
||||
|| topLeft != _previousMagnifierTopLeft || topRight != _previousMagnifierTopRight) {
|
||||
QVector<glm::vec3> border;
|
||||
border << topLeft;
|
||||
border << bottomLeft;
|
||||
border << bottomRight;
|
||||
border << topRight;
|
||||
border << topLeft;
|
||||
geometryCache->updateVertices(_magnifierBorder, border, glm::vec4(1.0f, 0.0f, 0.0f, _alpha));
|
||||
|
||||
_previousMagnifierBottomLeft = bottomLeft;
|
||||
_previousMagnifierBottomRight = bottomRight;
|
||||
_previousMagnifierTopLeft = topLeft;
|
||||
_previousMagnifierTopRight = topRight;
|
||||
}
|
||||
|
||||
glPushMatrix(); {
|
||||
if (showBorder) {
|
||||
glDisable(GL_TEXTURE_2D);
|
||||
glLineWidth(1.0f);
|
||||
//Outer Line
|
||||
geometryCache->renderVertices(gpu::LINE_STRIP, _magnifierBorder);
|
||||
glEnable(GL_TEXTURE_2D);
|
||||
}
|
||||
glm::vec4 magnifierColor = { 1.0f, 1.0f, 1.0f, _alpha };
|
||||
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(bottomLeft, bottomRight, topRight, topLeft,
|
||||
glm::vec2(magnifyULeft, magnifyVBottom),
|
||||
glm::vec2(magnifyURight, magnifyVBottom),
|
||||
glm::vec2(magnifyURight, magnifyVTop),
|
||||
glm::vec2(magnifyULeft, magnifyVTop),
|
||||
magnifierColor, _magnifierQuad);
|
||||
|
||||
} glPopMatrix();
|
||||
}
|
||||
|
||||
void ApplicationCompositor::buildHemiVertices(
|
||||
const float fov, const float aspectRatio, const int slices, const int stacks) {
|
||||
static float textureFOV = 0.0f, textureAspectRatio = 1.0f;
|
||||
|
|
|
@ -77,7 +77,6 @@ private:
|
|||
void updateTooltips();
|
||||
|
||||
void renderPointers(gpu::Batch& batch);
|
||||
void renderMagnifier(gpu::Batch& batch, const glm::vec2& magPos, float sizeMult, bool showBorder);
|
||||
void renderControllerPointers(gpu::Batch& batch);
|
||||
void renderPointersOculus(gpu::Batch& batch);
|
||||
|
||||
|
|
|
@ -27,7 +27,6 @@
|
|||
#include <PerfStat.h>
|
||||
|
||||
#include "AudioClient.h"
|
||||
#include "audio/AudioIOStatsRenderer.h"
|
||||
#include "audio/AudioScope.h"
|
||||
#include "Application.h"
|
||||
#include "ApplicationOverlay.h"
|
||||
|
@ -165,19 +164,6 @@ void ApplicationOverlay::renderStatsAndLogs(RenderArgs* renderArgs) {
|
|||
drawText(canvasSize.x - 100, canvasSize.y - timerBottom,
|
||||
0.30f, 0.0f, 0, frameTimer.toUtf8().constData(), WHITE_TEXT);
|
||||
}
|
||||
|
||||
glPointSize(1.0f);
|
||||
glDisable(GL_DEPTH_TEST);
|
||||
glDisable(GL_LIGHTING);
|
||||
glEnable(GL_BLEND);
|
||||
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
|
||||
NodeBounds& nodeBoundsDisplay = qApp->getNodeBoundsDisplay();
|
||||
nodeBoundsDisplay.drawOverlay();
|
||||
glEnable(GL_DEPTH_TEST);
|
||||
glEnable(GL_LIGHTING);
|
||||
glEnable(GL_BLEND);
|
||||
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
|
||||
fboViewport(_overlayFramebuffer);
|
||||
*/
|
||||
}
|
||||
|
||||
|
|
|
@ -1,183 +0,0 @@
|
|||
//
|
||||
// NodeBounds.cpp
|
||||
// interface/src/ui
|
||||
//
|
||||
// Created by Ryan Huffman on 05/14/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// This class draws a border around the different Entity nodes on the current domain,
|
||||
// and a semi-transparent cube around the currently mouse-overed node.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <DependencyManager.h>
|
||||
#include <GeometryCache.h>
|
||||
|
||||
#include "Application.h"
|
||||
#include "Util.h"
|
||||
|
||||
#include "NodeBounds.h"
|
||||
|
||||
NodeBounds::NodeBounds(QObject* parent) :
|
||||
QObject(parent),
|
||||
_showEntityNodes(false),
|
||||
_overlayText() {
|
||||
|
||||
}
|
||||
|
||||
void NodeBounds::draw() {
|
||||
if (!_showEntityNodes) {
|
||||
_overlayText[0] = '\0';
|
||||
return;
|
||||
}
|
||||
|
||||
NodeToJurisdictionMap& entityServerJurisdictions = Application::getInstance()->getEntityServerJurisdictions();
|
||||
NodeToJurisdictionMap* serverJurisdictions;
|
||||
|
||||
// Compute ray to find selected nodes later on. We can't use the pre-computed ray in Application because it centers
|
||||
// itself after the cursor disappears.
|
||||
PickRay pickRay = qApp->computePickRay();
|
||||
|
||||
// Variables to keep track of the selected node and properties to draw the cube later if needed
|
||||
Node* selectedNode = NULL;
|
||||
float selectedDistance = FLT_MAX;
|
||||
bool selectedIsInside = true;
|
||||
glm::vec3 selectedCenter;
|
||||
float selectedScale = 0;
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->eachNode([&](const SharedNodePointer& node){
|
||||
NodeType_t nodeType = node->getType();
|
||||
|
||||
if (nodeType == NodeType::EntityServer && _showEntityNodes) {
|
||||
serverJurisdictions = &entityServerJurisdictions;
|
||||
} else {
|
||||
return;
|
||||
}
|
||||
|
||||
QUuid nodeUUID = node->getUUID();
|
||||
serverJurisdictions->lockForRead();
|
||||
if (serverJurisdictions->find(nodeUUID) != serverJurisdictions->end()) {
|
||||
const JurisdictionMap& map = (*serverJurisdictions)[nodeUUID];
|
||||
|
||||
unsigned char* rootCode = map.getRootOctalCode();
|
||||
|
||||
if (rootCode) {
|
||||
VoxelPositionSize rootDetails;
|
||||
voxelDetailsForCode(rootCode, rootDetails);
|
||||
serverJurisdictions->unlock();
|
||||
glm::vec3 location(rootDetails.x, rootDetails.y, rootDetails.z);
|
||||
|
||||
AACube serverBounds(location, rootDetails.s);
|
||||
|
||||
glm::vec3 center = serverBounds.getVertex(BOTTOM_RIGHT_NEAR)
|
||||
+ ((serverBounds.getVertex(TOP_LEFT_FAR) - serverBounds.getVertex(BOTTOM_RIGHT_NEAR)) / 2.0f);
|
||||
|
||||
const float ENTITY_NODE_SCALE = 0.99f;
|
||||
|
||||
float scaleFactor = rootDetails.s;
|
||||
|
||||
// Scale by 0.92 - 1.00 depending on the scale of the node. This allows smaller nodes to scale in
|
||||
// a bit and not overlap larger nodes.
|
||||
scaleFactor *= 0.92f + (rootDetails.s * 0.08f);
|
||||
|
||||
// Scale different node types slightly differently because it's common for them to overlap.
|
||||
if (nodeType == NodeType::EntityServer) {
|
||||
scaleFactor *= ENTITY_NODE_SCALE;
|
||||
}
|
||||
|
||||
float red, green, blue;
|
||||
getColorForNodeType(nodeType, red, green, blue);
|
||||
drawNodeBorder(center, scaleFactor, red, green, blue);
|
||||
|
||||
float distance;
|
||||
BoxFace face;
|
||||
|
||||
bool inside = serverBounds.contains(pickRay.origin);
|
||||
bool colliding = serverBounds.findRayIntersection(pickRay.origin, pickRay.direction, distance, face);
|
||||
|
||||
// If the camera is inside a node it will be "selected" if you don't have your cursor over another node
|
||||
// that you aren't inside.
|
||||
if (colliding && (!selectedNode || (!inside && (distance < selectedDistance || selectedIsInside)))) {
|
||||
selectedNode = node.data();
|
||||
selectedDistance = distance;
|
||||
selectedIsInside = inside;
|
||||
selectedCenter = center;
|
||||
selectedScale = scaleFactor;
|
||||
}
|
||||
} else {
|
||||
serverJurisdictions->unlock();
|
||||
}
|
||||
} else {
|
||||
serverJurisdictions->unlock();
|
||||
}
|
||||
});
|
||||
|
||||
if (selectedNode) {
|
||||
glPushMatrix();
|
||||
|
||||
glTranslatef(selectedCenter.x, selectedCenter.y, selectedCenter.z);
|
||||
glScalef(selectedScale, selectedScale, selectedScale);
|
||||
|
||||
float red, green, blue;
|
||||
getColorForNodeType(selectedNode->getType(), red, green, blue);
|
||||
|
||||
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(red, green, blue, 0.2f));
|
||||
|
||||
glPopMatrix();
|
||||
|
||||
HifiSockAddr addr = selectedNode->getPublicSocket();
|
||||
QString overlay = QString("%1:%2 %3ms")
|
||||
.arg(addr.getAddress().toString())
|
||||
.arg(addr.getPort())
|
||||
.arg(selectedNode->getPingMs())
|
||||
.left(MAX_OVERLAY_TEXT_LENGTH);
|
||||
|
||||
// Ideally we'd just use a QString, but I ran into weird blinking issues using
|
||||
// constData() directly, as if the data was being overwritten.
|
||||
strcpy(_overlayText, overlay.toLocal8Bit().constData());
|
||||
} else {
|
||||
_overlayText[0] = '\0';
|
||||
}
|
||||
}
|
||||
|
||||
void NodeBounds::drawNodeBorder(const glm::vec3& center, float scale, float red, float green, float blue) {
|
||||
glPushMatrix();
|
||||
glTranslatef(center.x, center.y, center.z);
|
||||
glScalef(scale, scale, scale);
|
||||
glLineWidth(2.5);
|
||||
DependencyManager::get<GeometryCache>()->renderWireCube(1.0f, glm::vec4(red, green, blue, 1.0f));
|
||||
glPopMatrix();
|
||||
}
|
||||
|
||||
void NodeBounds::getColorForNodeType(NodeType_t nodeType, float& red, float& green, float& blue) {
|
||||
red = nodeType == 0.0;
|
||||
green = 0.0;
|
||||
blue = nodeType == NodeType::EntityServer ? 1.0 : 0.0;
|
||||
}
|
||||
|
||||
void NodeBounds::drawOverlay() {
|
||||
if (strlen(_overlayText) > 0) {
|
||||
Application* application = Application::getInstance();
|
||||
|
||||
const float TEXT_COLOR[] = { 0.90f, 0.90f, 0.90f };
|
||||
const float TEXT_SCALE = 0.1f;
|
||||
const int TEXT_HEIGHT = 10;
|
||||
const float ROTATION = 0.0f;
|
||||
const int FONT = 2;
|
||||
const int PADDING = 10;
|
||||
const int MOUSE_OFFSET = 10;
|
||||
const int BACKGROUND_BEVEL = 3;
|
||||
|
||||
int mouseX = application->getTrueMouseX(),
|
||||
mouseY = application->getTrueMouseY(),
|
||||
textWidth = widthText(TEXT_SCALE, 0, _overlayText);
|
||||
DependencyManager::get<GeometryCache>()->renderBevelCornersRect(
|
||||
mouseX + MOUSE_OFFSET, mouseY - TEXT_HEIGHT - PADDING,
|
||||
textWidth + (2 * PADDING), TEXT_HEIGHT + (2 * PADDING), BACKGROUND_BEVEL,
|
||||
glm::vec4(0.4f, 0.4f, 0.4f, 0.6f));
|
||||
drawText(mouseX + MOUSE_OFFSET + PADDING, mouseY, TEXT_SCALE, ROTATION, FONT, _overlayText, TEXT_COLOR);
|
||||
}
|
||||
}
|
|
@ -1,47 +0,0 @@
//
// NodeBounds.h
// interface/src/ui
//
// Created by Ryan Huffman on 05/14/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_NodeBounds_h
#define hifi_NodeBounds_h

#include <QObject>

#include <NodeList.h>

const int MAX_OVERLAY_TEXT_LENGTH = 64;

class NodeBounds : public QObject {
Q_OBJECT
public:
NodeBounds(QObject* parent = NULL);

bool getShowEntityNodes() { return _showEntityNodes; }
bool getShowParticleNodes() { return _showParticleNodes; }

void draw();
void drawOverlay();

public slots:
void setShowEntityNodes(bool value) { _showEntityNodes = value; }
void setShowParticleNodes(bool value) { _showParticleNodes = value; }

protected:
void drawNodeBorder(const glm::vec3& center, float scale, float red, float green, float blue);
void getColorForNodeType(NodeType_t nodeType, float& red, float& green, float& blue);

private:
bool _showEntityNodes;
bool _showParticleNodes;
char _overlayText[MAX_OVERLAY_TEXT_LENGTH + 1];

};

#endif // hifi_NodeBounds_h
@ -28,12 +28,12 @@ BillboardOverlay::BillboardOverlay(const BillboardOverlay* billboardOverlay) :
}

void BillboardOverlay::render(RenderArgs* args) {
if (!_isLoaded) {
if (!_texture) {
_isLoaded = true;
_texture = DependencyManager::get<TextureCache>()->getTexture(_url);
}

if (!_visible || !_texture->isLoaded()) {
if (!_visible || !_texture || !_texture->isLoaded()) {
return;
}

@ -170,7 +170,7 @@ void BillboardOverlay::setBillboardURL(const QString& url) {
bool BillboardOverlay::findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
float& distance, BoxFace& face) {

if (_texture) {
if (_texture && _texture->isLoaded()) {
glm::quat rotation = getRotation();
if (_isFacingAvatar) {
// rotate about vertical to face the camera
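The changes above add a null guard before dereferencing _texture, so a lazily requested texture that is still missing or not yet downloaded simply skips the draw. A minimal sketch of that null-safe lazy-load pattern follows; the helper name renderSketch and the NetworkTexturePointer type are illustrative assumptions, not taken from the diff.

// Sketch only: null-safe lazy texture fetch, assuming the engine's
// DependencyManager/TextureCache behave as used in the hunk above.
void renderSketch(bool visible, NetworkTexturePointer& texture, const QString& url) {
    if (!texture) {
        // Request once; the cache returns immediately and loads in the background.
        texture = DependencyManager::get<TextureCache>()->getTexture(url);
    }
    if (!visible || !texture || !texture->isLoaded()) {
        return; // nothing to draw until both the pointer and its data exist
    }
    // ... bind the texture and draw the billboard quad ...
}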
@ -12,7 +12,6 @@
|
|||
#include "InterfaceConfig.h"
|
||||
|
||||
#include <DeferredLightingEffect.h>
|
||||
#include <GlowEffect.h>
|
||||
#include <SharedUtil.h>
|
||||
#include <StreamUtils.h>
|
||||
|
||||
|
@ -104,93 +103,6 @@ void Cube3DOverlay::render(RenderArgs* args) {
|
|||
DependencyManager::get<DeferredLightingEffect>()->renderWireCube(*batch, 1.0f, cubeColor);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
float glowLevel = getGlowLevel();
|
||||
Glower* glower = NULL;
|
||||
if (glowLevel > 0.0f) {
|
||||
glower = new Glower(glowLevel);
|
||||
}
|
||||
|
||||
glPushMatrix();
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
glm::vec3 axis = glm::axis(rotation);
|
||||
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
|
||||
glPushMatrix();
|
||||
glm::vec3 positionToCenter = center - position;
|
||||
glTranslatef(positionToCenter.x, positionToCenter.y, positionToCenter.z);
|
||||
if (_isSolid) {
|
||||
if (_borderSize > 0) {
|
||||
// Draw a cube at a larger size behind the main cube, creating
|
||||
// a border effect.
|
||||
// Disable writing to the depth mask so that the "border" cube will not
|
||||
// occlude the main cube. This means the border could be covered by
|
||||
// overlays that are further back and drawn later, but this is good
|
||||
// enough for the use-case.
|
||||
glDepthMask(GL_FALSE);
|
||||
glPushMatrix();
|
||||
glScalef(dimensions.x * _borderSize, dimensions.y * _borderSize, dimensions.z * _borderSize);
|
||||
|
||||
if (_drawOnHUD) {
|
||||
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(1.0f, 1.0f, 1.0f, alpha));
|
||||
} else {
|
||||
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, glm::vec4(1.0f, 1.0f, 1.0f, alpha));
|
||||
}
|
||||
|
||||
glPopMatrix();
|
||||
glDepthMask(GL_TRUE);
|
||||
}
|
||||
|
||||
glPushMatrix();
|
||||
glScalef(dimensions.x, dimensions.y, dimensions.z);
|
||||
if (_drawOnHUD) {
|
||||
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, cubeColor);
|
||||
} else {
|
||||
DependencyManager::get<GeometryCache>()->renderSolidCube(1.0f, cubeColor);
|
||||
}
|
||||
glPopMatrix();
|
||||
} else {
|
||||
glLineWidth(_lineWidth);
|
||||
|
||||
if (getIsDashedLine()) {
|
||||
glm::vec3 halfDimensions = dimensions / 2.0f;
|
||||
glm::vec3 bottomLeftNear(-halfDimensions.x, -halfDimensions.y, -halfDimensions.z);
|
||||
glm::vec3 bottomRightNear(halfDimensions.x, -halfDimensions.y, -halfDimensions.z);
|
||||
glm::vec3 topLeftNear(-halfDimensions.x, halfDimensions.y, -halfDimensions.z);
|
||||
glm::vec3 topRightNear(halfDimensions.x, halfDimensions.y, -halfDimensions.z);
|
||||
|
||||
glm::vec3 bottomLeftFar(-halfDimensions.x, -halfDimensions.y, halfDimensions.z);
|
||||
glm::vec3 bottomRightFar(halfDimensions.x, -halfDimensions.y, halfDimensions.z);
|
||||
glm::vec3 topLeftFar(-halfDimensions.x, halfDimensions.y, halfDimensions.z);
|
||||
glm::vec3 topRightFar(halfDimensions.x, halfDimensions.y, halfDimensions.z);
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
geometryCache->renderDashedLine(bottomLeftNear, bottomRightNear, cubeColor);
|
||||
geometryCache->renderDashedLine(bottomRightNear, bottomRightFar, cubeColor);
|
||||
geometryCache->renderDashedLine(bottomRightFar, bottomLeftFar, cubeColor);
|
||||
geometryCache->renderDashedLine(bottomLeftFar, bottomLeftNear, cubeColor);
|
||||
|
||||
geometryCache->renderDashedLine(topLeftNear, topRightNear, cubeColor);
|
||||
geometryCache->renderDashedLine(topRightNear, topRightFar, cubeColor);
|
||||
geometryCache->renderDashedLine(topRightFar, topLeftFar, cubeColor);
|
||||
geometryCache->renderDashedLine(topLeftFar, topLeftNear, cubeColor);
|
||||
|
||||
geometryCache->renderDashedLine(bottomLeftNear, topLeftNear, cubeColor);
|
||||
geometryCache->renderDashedLine(bottomRightNear, topRightNear, cubeColor);
|
||||
geometryCache->renderDashedLine(bottomLeftFar, topLeftFar, cubeColor);
|
||||
geometryCache->renderDashedLine(bottomRightFar, topRightFar, cubeColor);
|
||||
|
||||
} else {
|
||||
glScalef(dimensions.x, dimensions.y, dimensions.z);
|
||||
DependencyManager::get<GeometryCache>()->renderWireCube(1.0f, cubeColor);
|
||||
}
|
||||
}
|
||||
glPopMatrix();
|
||||
glPopMatrix();
|
||||
|
||||
if (glower) {
|
||||
delete glower;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -62,42 +62,18 @@ void ImageOverlay::render(RenderArgs* args) {
|
|||
return;
|
||||
}
|
||||
|
||||
// TODO: I commented all the code needed to migrate this ImageOverlay rendering from naked gl to gpu::Batch
|
||||
/*gpu::Batch localBatch;
|
||||
gpu::Batch& batch = (args->_batch ? (*args->_batch) : localBatch);
|
||||
static gpu::PipelinePointer drawPipeline;
|
||||
static int texcoordRectLoc = -1;
|
||||
static int colorLoc = -1;
|
||||
if (!drawPipeline) {
|
||||
auto blitProgram = gpu::StandardShaderLib::getProgram(gpu::StandardShaderLib::getDrawTexcoordRectTransformUnitQuadVS, gpu::StandardShaderLib::getDrawColoredTexturePS);
|
||||
gpu::Shader::makeProgram(*blitProgram);
|
||||
texcoordRectLoc = blitProgram->getUniforms().findLocation("texcoordRect");
|
||||
colorLoc = blitProgram->getUniforms().findLocation("color");
|
||||
|
||||
gpu::StatePointer blitState = gpu::StatePointer(new gpu::State());
|
||||
blitState->setBlendFunction(false, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
|
||||
blitState->setColorWriteMask(true, true, true, true);
|
||||
drawPipeline = gpu::PipelinePointer(gpu::Pipeline::create(blitProgram, blitState));
|
||||
}
|
||||
*/
|
||||
// TODO: batch.setPipeline(drawPipeline);
|
||||
glUseProgram(0);
|
||||
gpu::Batch& batch = *args->_batch;
|
||||
|
||||
if (_renderImage) {
|
||||
glEnable(GL_TEXTURE_2D);
|
||||
glBindTexture(GL_TEXTURE_2D, _texture->getID());
|
||||
// TODO: batch.setResourceTexture(0, _texture->getGPUTexture());
|
||||
} // TODO: else {
|
||||
// TODO: batch.setResourceTexture(0, args->_whiteTexture);
|
||||
// TODO: }
|
||||
|
||||
// TODO: batch.setViewTransform(Transform());
|
||||
batch.setResourceTexture(0, _texture->getGPUTexture());
|
||||
} else {
|
||||
batch.setResourceTexture(0, args->_whiteTexture);
|
||||
}
|
||||
|
||||
const float MAX_COLOR = 255.0f;
|
||||
xColor color = getColor();
|
||||
float alpha = getAlpha();
|
||||
glm::vec4 quadColor(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
|
||||
// TODO: batch._glUniform4fv(colorLoc, 1, (const float*) &quadColor);
|
||||
|
||||
int left = _bounds.left();
|
||||
int right = _bounds.right() + 1;
|
||||
|
@ -107,11 +83,7 @@ void ImageOverlay::render(RenderArgs* args) {
|
|||
glm::vec2 topLeft(left, top);
|
||||
glm::vec2 bottomRight(right, bottom);
|
||||
|
||||
// TODO: Transform model;
|
||||
// TODO: model.setTranslation(glm::vec3(0.5f * (right + left), 0.5f * (top + bottom), 0.0f));
|
||||
// TODO: model.setScale(glm::vec3(0.5f * (right - left), 0.5f * (bottom - top), 1.0f));
|
||||
// TODO: batch.setModelTransform(model);
|
||||
|
||||
batch.setModelTransform(Transform());
|
||||
|
||||
// if for some reason our image is not over 0 width or height, don't attempt to render the image
|
||||
if (_renderImage) {
|
||||
|
@ -144,22 +116,13 @@ void ImageOverlay::render(RenderArgs* args) {
|
|||
glm::vec2 texCoordBottomRight(x + w, y + h);
|
||||
glm::vec4 texcoordRect(texCoordTopLeft, w, h);
|
||||
|
||||
// TODO: batch._glUniform4fv(texcoordRectLoc, 1, (const float*) &texcoordRect);
|
||||
// TODO: batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, texCoordTopLeft, texCoordBottomRight, quadColor);
|
||||
} else {
|
||||
// TODO: batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, quadColor);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, quadColor);
|
||||
}
|
||||
glDisable(GL_TEXTURE_2D);
|
||||
} else {
|
||||
// TODO: batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(topLeft, bottomRight, quadColor);
|
||||
DependencyManager::get<GeometryCache>()->renderQuad(batch, topLeft, bottomRight, quadColor);
|
||||
}
|
||||
|
||||
// TODO: if (!args->_batch) {
|
||||
// TODO: args->_context->render(batch);
|
||||
// TODO: }
|
||||
}
|
||||
|
||||
void ImageOverlay::setProperties(const QScriptValue& properties) {
|
||||
|
|
|
@ -11,7 +11,6 @@
|
|||
// include this before QGLWidget, which includes an earlier version of OpenGL
|
||||
#include "InterfaceConfig.h"
|
||||
|
||||
#include <GlowEffect.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <RegisteredMetaTypes.h>
|
||||
|
||||
|
@ -53,7 +52,6 @@ void Line3DOverlay::render(RenderArgs* args) {
|
|||
glm::vec4 colorv4(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
|
||||
|
||||
auto batch = args->_batch;
|
||||
|
||||
if (batch) {
|
||||
batch->setModelTransform(_transform);
|
||||
|
||||
|
@ -63,38 +61,6 @@ void Line3DOverlay::render(RenderArgs* args) {
|
|||
} else {
|
||||
DependencyManager::get<GeometryCache>()->renderLine(*batch, _start, _end, colorv4, _geometryCacheID);
|
||||
}
|
||||
} else {
|
||||
float glowLevel = getGlowLevel();
|
||||
Glower* glower = NULL;
|
||||
if (glowLevel > 0.0f) {
|
||||
glower = new Glower(glowLevel);
|
||||
}
|
||||
|
||||
glPushMatrix();
|
||||
|
||||
glDisable(GL_LIGHTING);
|
||||
glLineWidth(_lineWidth);
|
||||
|
||||
glm::vec3 position = getPosition();
|
||||
glm::quat rotation = getRotation();
|
||||
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
glm::vec3 axis = glm::axis(rotation);
|
||||
glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
|
||||
|
||||
if (getIsDashedLine()) {
|
||||
// TODO: add support for color to renderDashedLine()
|
||||
DependencyManager::get<GeometryCache>()->renderDashedLine(_start, _end, colorv4, _geometryCacheID);
|
||||
} else {
|
||||
DependencyManager::get<GeometryCache>()->renderLine(_start, _end, colorv4, _geometryCacheID);
|
||||
}
|
||||
glEnable(GL_LIGHTING);
|
||||
|
||||
glPopMatrix();
|
||||
|
||||
if (glower) {
|
||||
delete glower;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -9,8 +9,6 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <GlowEffect.h>
|
||||
|
||||
#include "Application.h"
|
||||
|
||||
#include "LocalModelsOverlay.h"
|
||||
|
@ -32,11 +30,7 @@ void LocalModelsOverlay::update(float deltatime) {
|
|||
|
||||
void LocalModelsOverlay::render(RenderArgs* args) {
|
||||
if (_visible) {
|
||||
float glowLevel = getGlowLevel();
|
||||
Glower* glower = NULL;
|
||||
if (glowLevel > 0.0f) {
|
||||
glower = new Glower(glowLevel);
|
||||
}
|
||||
float glowLevel = getGlowLevel(); // FIXME, glowing removed for now
|
||||
|
||||
auto batch = args ->_batch;
|
||||
Application* app = Application::getInstance();
|
||||
|
@ -47,10 +41,6 @@ void LocalModelsOverlay::render(RenderArgs* args) {
|
|||
_entityTreeRenderer->render(args);
|
||||
transform.setTranslation(oldTranslation);
|
||||
batch->setViewTransform(transform);
|
||||
|
||||
if (glower) {
|
||||
delete glower;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -12,8 +12,6 @@
|
|||
#include "ModelOverlay.h"
|
||||
|
||||
#include <Application.h>
|
||||
#include <GlowEffect.h>
|
||||
|
||||
|
||||
ModelOverlay::ModelOverlay()
|
||||
: _model(),
|
||||
|
|
|
@ -103,17 +103,7 @@ void Overlays::renderHUD(RenderArgs* renderArgs) {
|
|||
|
||||
|
||||
foreach(Overlay::Pointer thisOverlay, _overlaysHUD) {
|
||||
if (thisOverlay->is3D()) {
|
||||
glEnable(GL_DEPTH_TEST);
|
||||
glEnable(GL_LIGHTING);
|
||||
|
||||
thisOverlay->render(renderArgs);
|
||||
|
||||
glDisable(GL_LIGHTING);
|
||||
glDisable(GL_DEPTH_TEST);
|
||||
} else {
|
||||
thisOverlay->render(renderArgs);
|
||||
}
|
||||
thisOverlay->render(renderArgs);
|
||||
}
|
||||
|
||||
renderArgs->_context->syncCache();
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
#include "Rectangle3DOverlay.h"
|
||||
|
||||
#include <GeometryCache.h>
|
||||
#include <GlowEffect.h>
|
||||
#include <SharedUtil.h>
|
||||
|
||||
Rectangle3DOverlay::Rectangle3DOverlay() :
|
||||
|
|
|
@ -11,7 +11,6 @@
|
|||
// include this before QGLWidget, which includes an earlier version of OpenGL
|
||||
#include "InterfaceConfig.h"
|
||||
|
||||
#include <GlowEffect.h>
|
||||
#include <SharedUtil.h>
|
||||
|
||||
#include "Sphere3DOverlay.h"
|
||||
|
|
|
@ -38,10 +38,7 @@ QSharedPointer<Resource> AnimationCache::createResource(const QUrl& url, const Q
return QSharedPointer<Resource>(new Animation(url), &Resource::allReferencesCleared);
}

Animation::Animation(const QUrl& url) :
Resource(url),
_isValid(false) {
}
Animation::Animation(const QUrl& url) : Resource(url) {}

class AnimationReader : public QRunnable {
public:

@ -97,7 +94,6 @@ QVector<FBXAnimationFrame> Animation::getFrames() const {
void Animation::setGeometry(const FBXGeometry& geometry) {
_geometry = geometry;
finishedLoading(true);
_isValid = true;
}

void Animation::downloadFinished(QNetworkReply* reply) {

@ -57,8 +57,6 @@ public:
Q_INVOKABLE QStringList getJointNames() const;

Q_INVOKABLE QVector<FBXAnimationFrame> getFrames() const;

bool isValid() const { return _isValid; }

protected:

@ -69,7 +67,6 @@ protected:
private:

FBXGeometry _geometry;
bool _isValid;
};
@ -294,7 +294,7 @@ AudioInjector* AudioInjector::playSound(const QString& soundUrl, const float vol
if (soundCache.isNull()) {
return NULL;
}
SharedSoundPointer sound = soundCache.data()->getSound(QUrl(soundUrl));
SharedSoundPointer sound = soundCache->getSound(QUrl(soundUrl));
if (sound.isNull() || !sound->isReady()) {
return NULL;
}
@ -161,19 +161,15 @@ QByteArray AvatarData::toByteArray() {
|
|||
// Body scale
|
||||
destinationBuffer += packFloatRatioToTwoByte(destinationBuffer, _targetScale);
|
||||
|
||||
// Head rotation (NOTE: This needs to become a quaternion to save two bytes)
|
||||
glm::vec3 pitchYawRoll = glm::vec3(_headData->getFinalPitch(),
|
||||
_headData->getFinalYaw(),
|
||||
_headData->getFinalRoll());
|
||||
if (this->isMyAvatar()) {
|
||||
glm::vec3 lean = glm::vec3(_headData->getFinalLeanForward(),
|
||||
_headData->getTorsoTwist(),
|
||||
_headData->getFinalLeanSideways());
|
||||
pitchYawRoll -= lean;
|
||||
}
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, pitchYawRoll.x);
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, pitchYawRoll.y);
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, pitchYawRoll.z);
|
||||
// Head rotation
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getFinalPitch());
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getFinalYaw());
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getFinalRoll());
|
||||
|
||||
// Body lean
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->_leanForward);
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->_leanSideways);
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->_torsoTwist);
|
||||
|
||||
// Lookat Position
|
||||
memcpy(destinationBuffer, &_headData->_lookAtPosition, sizeof(_headData->_lookAtPosition));
|
||||
|
@ -291,14 +287,17 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
|||
// headPitch = 2 (compressed float)
|
||||
// headYaw = 2 (compressed float)
|
||||
// headRoll = 2 (compressed float)
|
||||
// leanForward = 2 (compressed float)
|
||||
// leanSideways = 2 (compressed float)
|
||||
// torsoTwist = 2 (compressed float)
|
||||
// lookAt = 12
|
||||
// audioLoudness = 4
|
||||
// }
|
||||
// + 1 byte for pupilSize
|
||||
// + 1 byte for numJoints (0)
|
||||
// = 45 bytes
|
||||
int minPossibleSize = 45;
|
||||
|
||||
// = 51 bytes
|
||||
int minPossibleSize = 51;
|
||||
|
||||
int maxAvailableSize = buffer.size();
|
||||
if (minPossibleSize > maxAvailableSize) {
|
||||
if (shouldLogError(now)) {
|
||||
|
|
|
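The bump from 45 to 51 bytes in the minimum packet size above follows from the three body-lean angles that are now always packed, each through packFloatAngleToTwoByte. A quick arithmetic check; the constant names below are illustrative, not from the source.

// Sketch only: verifies the new minimum size quoted in the hunk above.
constexpr int OLD_MIN_POSSIBLE_SIZE = 45;       // previous minimum, per the old comment block
constexpr int BYTES_PER_COMPRESSED_ANGLE = 2;   // packFloatAngleToTwoByte output size
constexpr int NUM_LEAN_ANGLES = 3;              // leanForward, leanSideways, torsoTwist
constexpr int NEW_MIN_POSSIBLE_SIZE =
    OLD_MIN_POSSIBLE_SIZE + NUM_LEAN_ANGLES * BYTES_PER_COMPRESSED_ANGLE;
static_assert(NEW_MIN_POSSIBLE_SIZE == 51, "matches minPossibleSize in the new code");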
@ -9,6 +9,8 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <QtCore/QDataStream>

#include <NodeList.h>
#include <udt/PacketHeaders.h>
#include <SharedUtil.h>
@ -20,7 +20,6 @@
|
|||
#include <AbstractScriptingServicesInterface.h>
|
||||
#include <AbstractViewStateInterface.h>
|
||||
#include <DeferredLightingEffect.h>
|
||||
#include <GlowEffect.h>
|
||||
#include <Model.h>
|
||||
#include <NetworkAccessManager.h>
|
||||
#include <PerfStat.h>
|
||||
|
@ -546,7 +545,7 @@ const FBXGeometry* EntityTreeRenderer::getCollisionGeometryForEntity(EntityItemP
|
|||
Model* model = modelEntityItem->getModel(this);
|
||||
if (model) {
|
||||
const QSharedPointer<NetworkGeometry> collisionNetworkGeometry = model->getCollisionGeometry();
|
||||
if (!collisionNetworkGeometry.isNull()) {
|
||||
if (collisionNetworkGeometry && collisionNetworkGeometry->isLoaded()) {
|
||||
result = &collisionNetworkGeometry->getFBXGeometry();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -63,11 +63,11 @@ void RenderableModelEntityItem::remapTextures() {
|
|||
return; // nothing to do if we don't have a model
|
||||
}
|
||||
|
||||
if (!_model->isLoadedWithTextures()) {
|
||||
return; // nothing to do if the model has not yet loaded its default textures
|
||||
if (!_model->isLoaded()) {
|
||||
return; // nothing to do if the model has not yet loaded
|
||||
}
|
||||
|
||||
if (!_originalTexturesRead && _model->isLoadedWithTextures()) {
|
||||
if (!_originalTexturesRead) {
|
||||
const QSharedPointer<NetworkGeometry>& networkGeometry = _model->getGeometry();
|
||||
if (networkGeometry) {
|
||||
_originalTextures = networkGeometry->getTextureNames();
|
||||
|
@ -119,7 +119,7 @@ bool RenderableModelEntityItem::readyToAddToScene(RenderArgs* renderArgs) {
|
|||
EntityTreeRenderer* renderer = static_cast<EntityTreeRenderer*>(renderArgs->_renderer);
|
||||
getModel(renderer);
|
||||
}
|
||||
if (renderArgs && _model && _needsInitialSimulation && _model->isActive() && _model->isLoadedWithTextures()) {
|
||||
if (renderArgs && _model && _needsInitialSimulation && _model->isActive() && _model->isLoaded()) {
|
||||
_model->setScaleToFit(true, getDimensions());
|
||||
_model->setSnapModelToRegistrationPoint(true, getRegistrationPoint());
|
||||
_model->setRotation(getRotation());
|
||||
|
@ -401,8 +401,8 @@ bool RenderableModelEntityItem::isReadyToComputeShape() {
|
|||
const QSharedPointer<NetworkGeometry> collisionNetworkGeometry = _model->getCollisionGeometry();
|
||||
const QSharedPointer<NetworkGeometry> renderNetworkGeometry = _model->getGeometry();
|
||||
|
||||
if ((! collisionNetworkGeometry.isNull() && collisionNetworkGeometry->isLoadedWithTextures()) &&
|
||||
(! renderNetworkGeometry.isNull() && renderNetworkGeometry->isLoadedWithTextures())) {
|
||||
if ((collisionNetworkGeometry && collisionNetworkGeometry->isLoaded()) &&
|
||||
(renderNetworkGeometry && renderNetworkGeometry->isLoaded())) {
|
||||
// we have both URLs AND both geometries AND they are both fully loaded.
|
||||
return true;
|
||||
}
|
||||
|
@ -423,7 +423,7 @@ void RenderableModelEntityItem::computeShapeInfo(ShapeInfo& info) {
|
|||
|
||||
// should never fall in here when collision model not fully loaded
|
||||
// hence we assert collisionNetworkGeometry is not NULL
|
||||
assert(!collisionNetworkGeometry.isNull());
|
||||
assert(collisionNetworkGeometry);
|
||||
|
||||
const FBXGeometry& collisionGeometry = collisionNetworkGeometry->getFBXGeometry();
|
||||
const QSharedPointer<NetworkGeometry> renderNetworkGeometry = _model->getGeometry();
|
||||
|
|
|
@ -14,7 +14,6 @@
|
|||
|
||||
#include <gpu/GPUConfig.h>
|
||||
|
||||
#include <GlowEffect.h>
|
||||
#include <DeferredLightingEffect.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <PerfStat.h>
|
||||
|
@ -169,7 +168,6 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
|
|||
_webSurface->resize(QSize(dims.x, dims.y));
|
||||
currentContext->makeCurrent(currentSurface);
|
||||
|
||||
Glower glow(0.0f);
|
||||
PerformanceTimer perfTimer("RenderableWebEntityItem::render");
|
||||
Q_ASSERT(getType() == EntityTypes::Web);
|
||||
static const glm::vec2 texMin(0.0f), texMax(1.0f), topLeft(-0.5f), bottomRight(0.5f);
|
||||
|
|
|
@ -610,6 +610,9 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
int bytesRead = parser.offset();
|
||||
#endif
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
const QUuid& myNodeID = nodeList->getSessionUUID();
|
||||
bool weOwnSimulation = _simulationOwner.matchesValidID(myNodeID);
|
||||
|
||||
if (args.bitstreamVersion >= VERSION_ENTITIES_HAVE_SIMULATION_OWNER_AND_ACTIONS_OVER_WIRE) {
|
||||
// pack SimulationOwner and terse update properties near each other
|
||||
|
@ -632,10 +635,8 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
}
|
||||
{ // When we own the simulation we don't accept updates to the entity's transform/velocities
|
||||
// but since we're using macros below we have to temporarily modify overwriteLocalData.
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
bool weOwnIt = _simulationOwner.matchesValidID(nodeList->getSessionUUID());
|
||||
bool oldOverwrite = overwriteLocalData;
|
||||
overwriteLocalData = overwriteLocalData && !weOwnIt;
|
||||
overwriteLocalData = overwriteLocalData && !weOwnSimulation;
|
||||
READ_ENTITY_PROPERTY(PROP_POSITION, glm::vec3, updatePosition);
|
||||
READ_ENTITY_PROPERTY(PROP_ROTATION, glm::quat, updateRotation);
|
||||
READ_ENTITY_PROPERTY(PROP_VELOCITY, glm::vec3, updateVelocity);
|
||||
|
@ -657,6 +658,7 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
READ_ENTITY_PROPERTY(PROP_REGISTRATION_POINT, glm::vec3, setRegistrationPoint);
|
||||
} else {
|
||||
// legacy order of packing here
|
||||
// TODO: purge this logic in a few months from now (2015.07)
|
||||
READ_ENTITY_PROPERTY(PROP_POSITION, glm::vec3, updatePosition);
|
||||
READ_ENTITY_PROPERTY(PROP_DIMENSIONS, glm::vec3, updateDimensions);
|
||||
READ_ENTITY_PROPERTY(PROP_ROTATION, glm::quat, updateRotation);
|
||||
|
@ -702,7 +704,16 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
READ_ENTITY_PROPERTY(PROP_HREF, QString, setHref);
|
||||
READ_ENTITY_PROPERTY(PROP_DESCRIPTION, QString, setDescription);
|
||||
|
||||
READ_ENTITY_PROPERTY(PROP_ACTION_DATA, QByteArray, setActionData);
|
||||
{ // When we own the simulation we don't accept updates to the entity's actions
|
||||
// but since we're using macros below we have to temporarily modify overwriteLocalData.
|
||||
// NOTE: this prevents userB from adding an action to an object1 when UserA
|
||||
// has simulation ownership of it.
|
||||
// TODO: figure out how to allow multiple users to update actions simultaneously
|
||||
bool oldOverwrite = overwriteLocalData;
|
||||
overwriteLocalData = overwriteLocalData && !weOwnSimulation;
|
||||
READ_ENTITY_PROPERTY(PROP_ACTION_DATA, QByteArray, setActionData);
|
||||
overwriteLocalData = oldOverwrite;
|
||||
}
|
||||
|
||||
bytesRead += readEntitySubclassDataFromBuffer(dataAt, (bytesLeftToRead - bytesRead), args,
|
||||
propertyFlags, overwriteLocalData);
|
||||
|
@ -713,7 +724,7 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
// NOTE: we had a bad version of the stream that we added stream data after the subclass. We can attempt to recover
|
||||
// by doing this parsing here... but it's not likely going to fully recover the content.
|
||||
//
|
||||
// TODO: Remove this conde once we've sufficiently migrated content past this damaged version
|
||||
// TODO: Remove this code once we've sufficiently migrated content past this damaged version
|
||||
if (args.bitstreamVersion == VERSION_ENTITIES_HAS_MARKETPLACE_ID_DAMAGED) {
|
||||
READ_ENTITY_PROPERTY(PROP_MARKETPLACE_ID, QString, setMarketplaceID);
|
||||
}
|
||||
|
@ -738,8 +749,6 @@ int EntityItem::readEntityDataFromBuffer(const unsigned char* data, int bytesLef
|
|||
}
|
||||
}
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
const QUuid& myNodeID = nodeList->getSessionUUID();
|
||||
if (overwriteLocalData) {
|
||||
if (!_simulationOwner.matchesValidID(myNodeID)) {
|
||||
|
||||
|
|
|
@ -184,7 +184,7 @@ void ModelEntityItem::cleanupLoadedAnimations() {
|
|||
_loadedAnimations.clear();
|
||||
}
|
||||
|
||||
Animation* ModelEntityItem::getAnimation(const QString& url) {
|
||||
AnimationPointer ModelEntityItem::getAnimation(const QString& url) {
|
||||
AnimationPointer animation;
|
||||
|
||||
// if we don't already have this model then create it and initialize it
|
||||
|
@ -194,7 +194,7 @@ Animation* ModelEntityItem::getAnimation(const QString& url) {
|
|||
} else {
|
||||
animation = _loadedAnimations[url];
|
||||
}
|
||||
return animation.data();
|
||||
return animation;
|
||||
}
|
||||
|
||||
void ModelEntityItem::mapJoints(const QStringList& modelJointNames) {
|
||||
|
@ -203,9 +203,8 @@ void ModelEntityItem::mapJoints(const QStringList& modelJointNames) {
|
|||
return;
|
||||
}
|
||||
|
||||
Animation* myAnimation = getAnimation(_animationURL);
|
||||
|
||||
if (!_jointMappingCompleted) {
|
||||
AnimationPointer myAnimation = getAnimation(_animationURL);
|
||||
if (myAnimation && myAnimation->isLoaded()) {
|
||||
QStringList animationJointNames = myAnimation->getJointNames();
|
||||
|
||||
if (modelJointNames.size() > 0 && animationJointNames.size() > 0) {
|
||||
|
@ -220,8 +219,12 @@ void ModelEntityItem::mapJoints(const QStringList& modelJointNames) {
|
|||
|
||||
QVector<glm::quat> ModelEntityItem::getAnimationFrame() {
|
||||
QVector<glm::quat> frameData;
|
||||
if (hasAnimation() && _jointMappingCompleted) {
|
||||
Animation* myAnimation = getAnimation(_animationURL);
|
||||
if (!hasAnimation() || !_jointMappingCompleted) {
|
||||
return frameData;
|
||||
}
|
||||
|
||||
AnimationPointer myAnimation = getAnimation(_animationURL);
|
||||
if (myAnimation && myAnimation->isLoaded()) {
|
||||
QVector<FBXAnimationFrame> frames = myAnimation->getFrames();
|
||||
int frameCount = frames.size();
|
||||
if (frameCount > 0) {
|
||||
|
|
|
@ -141,7 +141,7 @@ protected:
|
|||
bool _jointMappingCompleted;
|
||||
QVector<int> _jointMapping;
|
||||
|
||||
static Animation* getAnimation(const QString& url);
|
||||
static AnimationPointer getAnimation(const QString& url);
|
||||
static QMap<QString, AnimationPointer> _loadedAnimations;
|
||||
static AnimationCache _animationCache;
|
||||
|
||||
|
|
|
@ -93,7 +93,16 @@ QByteArray FSTReader::writeMapping(const QVariantHash& mapping) {
for (auto key : PREFERED_ORDER) {
auto it = mapping.find(key);
if (it != mapping.constEnd()) {
writeVariant(buffer, it);
if (key == FREE_JOINT_FIELD) { // writeVariant does not handle strings added using insertMulti.
for (auto multi : mapping.values(key)) {
buffer.write(key.toUtf8());
buffer.write(" = ");
buffer.write(multi.toByteArray());
buffer.write("\n");
}
} else {
writeVariant(buffer, it);
}
}
}
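The freeJoint branch above exists because writeVariant() emits only one value per key, while free joints are stored with insertMulti(). A minimal sketch of the same per-value emission, assuming Qt's QVariantHash/QBuffer API and an already-open buffer; the helper name is illustrative, not from the source.

#include <QBuffer>
#include <QString>
#include <QVariant>

// Writes every value stored under `key` as its own "key = value" line,
// mirroring what the hunk above does for FREE_JOINT_FIELD.
void writeMultiValuedKey(QBuffer& buffer, const QString& key, const QVariantHash& mapping) {
    for (const QVariant& value : mapping.values(key)) {
        buffer.write(key.toUtf8());
        buffer.write(" = ");
        buffer.write(value.toByteArray());
        buffer.write("\n");
    }
}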
@ -40,11 +40,8 @@
namespace gpu {

enum ReservedSlot {
/* TRANSFORM_OBJECT_SLOT = 6,
TRANSFORM_OBJECT_SLOT = 6,
TRANSFORM_CAMERA_SLOT = 7,
*/
TRANSFORM_OBJECT_SLOT = 1,
TRANSFORM_CAMERA_SLOT = 2,
};

class Batch {
@ -87,10 +87,12 @@ GLBackend::GLBackend() :
_pipeline(),
_output()
{
initInput();
initTransform();
}

GLBackend::~GLBackend() {
killInput();
killTransform();
}
@ -228,7 +228,21 @@ public:
|
|||
|
||||
void do_setStateColorWriteMask(uint32 mask);
|
||||
|
||||
// Repporting stats of the context
|
||||
class Stats {
|
||||
public:
|
||||
int _ISNumFormatChanges = 0;
|
||||
int _ISNumInputBufferChanges = 0;
|
||||
int _ISNumIndexBufferChanges = 0;
|
||||
|
||||
Stats() {}
|
||||
Stats(const Stats& stats) = default;
|
||||
};
|
||||
|
||||
void getStats(Stats& stats) const { stats = _stats; }
|
||||
|
||||
protected:
|
||||
Stats _stats;
|
||||
|
||||
// Draw Stage
|
||||
void do_draw(Batch& batch, uint32 paramOffset);
|
||||
|
@ -242,12 +256,13 @@ protected:
|
|||
void do_setInputFormat(Batch& batch, uint32 paramOffset);
|
||||
void do_setInputBuffer(Batch& batch, uint32 paramOffset);
|
||||
void do_setIndexBuffer(Batch& batch, uint32 paramOffset);
|
||||
|
||||
// Synchronize the state cache of this Backend with the actual real state of the GL Context
|
||||
|
||||
void initInput();
|
||||
void killInput();
|
||||
void syncInputStateCache();
|
||||
void updateInput();
|
||||
struct InputStageState {
|
||||
bool _invalidFormat;
|
||||
bool _invalidFormat = true;
|
||||
Stream::FormatPointer _format;
|
||||
|
||||
typedef std::bitset<MAX_NUM_INPUT_BUFFERS> BuffersState;
|
||||
|
@ -256,6 +271,7 @@ protected:
|
|||
Buffers _buffers;
|
||||
Offsets _bufferOffsets;
|
||||
Offsets _bufferStrides;
|
||||
std::vector<GLuint> _bufferVBOs;
|
||||
|
||||
BufferPointer _indexBuffer;
|
||||
Offset _indexBufferOffset;
|
||||
|
@ -264,6 +280,8 @@ protected:
|
|||
typedef std::bitset<MAX_NUM_ATTRIBUTES> ActivationCache;
|
||||
ActivationCache _attributeActivation;
|
||||
|
||||
GLuint _defaultVAO;
|
||||
|
||||
InputStageState() :
|
||||
_invalidFormat(true),
|
||||
_format(0),
|
||||
|
@ -271,10 +289,12 @@ protected:
|
|||
_buffers(_buffersState.size(), BufferPointer(0)),
|
||||
_bufferOffsets(_buffersState.size(), 0),
|
||||
_bufferStrides(_buffersState.size(), 0),
|
||||
_bufferVBOs(_buffersState.size(), 0),
|
||||
_indexBuffer(0),
|
||||
_indexBufferOffset(0),
|
||||
_indexBufferType(UINT32),
|
||||
_attributeActivation(0)
|
||||
_attributeActivation(0),
|
||||
_defaultVAO(0)
|
||||
{}
|
||||
} _input;
|
||||
|
||||
|
|
|
@ -28,13 +28,39 @@ void GLBackend::do_setInputBuffer(Batch& batch, uint32 paramOffset) {
uint32 channel = batch._params[paramOffset + 3]._uint;

if (channel < getNumInputBuffers()) {
_input._buffers[channel] = buffer;
_input._bufferOffsets[channel] = offset;
_input._bufferStrides[channel] = stride;
_input._buffersState.set(channel);
bool isModified = false;
if (_input._buffers[channel] != buffer) {
_input._buffers[channel] = buffer;

GLuint vbo = 0;
if (buffer) {
vbo = getBufferID((*buffer));
}
_input._bufferVBOs[channel] = vbo;

isModified = true;
}

if (_input._bufferOffsets[channel] != offset) {
_input._bufferOffsets[channel] = offset;
isModified = true;
}

if (_input._bufferStrides[channel] != stride) {
_input._bufferStrides[channel] = stride;
isModified = true;
}

if (isModified) {
_input._buffersState.set(channel);
}
}
}
#define NOT_SUPPORT_VAO
|
||||
#if defined(SUPPORT_VAO)
|
||||
#else
|
||||
|
||||
#define SUPPORT_LEGACY_OPENGL
|
||||
#if defined(SUPPORT_LEGACY_OPENGL)
|
||||
static const int NUM_CLASSIC_ATTRIBS = Stream::TANGENT;
|
||||
|
@ -45,24 +71,120 @@ static const GLenum attributeSlotToClassicAttribName[NUM_CLASSIC_ATTRIBS] = {
|
|||
GL_TEXTURE_COORD_ARRAY
|
||||
};
|
||||
#endif
|
||||
#endif
|
||||
|
||||
void GLBackend::initInput() {
|
||||
#if defined(SUPPORT_VAO)
|
||||
if(!_input._defaultVAO) {
|
||||
glGenVertexArrays(1, &_input._defaultVAO);
|
||||
}
|
||||
glBindVertexArray(_input._defaultVAO);
|
||||
(void) CHECK_GL_ERROR();
|
||||
#endif
|
||||
}
|
||||
|
||||
void GLBackend::killInput() {
|
||||
#if defined(SUPPORT_VAO)
|
||||
glBindVertexArray(0);
|
||||
if(_input._defaultVAO) {
|
||||
glDeleteVertexArrays(1, &_input._defaultVAO);
|
||||
}
|
||||
(void) CHECK_GL_ERROR();
|
||||
#endif
|
||||
}
|
||||
|
||||
void GLBackend::syncInputStateCache() {
|
||||
#if defined(SUPPORT_VAO)
|
||||
for (int i = 0; i < NUM_CLASSIC_ATTRIBS; i++) {
|
||||
_input._attributeActivation[i] = glIsEnabled(attributeSlotToClassicAttribName[i]);
|
||||
}
|
||||
//_input._defaultVAO
|
||||
glBindVertexArray(_input._defaultVAO);
|
||||
#else
|
||||
int i = 0;
|
||||
#if defined(SUPPORT_LEGACY_OPENGL)
|
||||
for (; i < NUM_CLASSIC_ATTRIBS; i++) {
|
||||
_input._attributeActivation[i] = glIsEnabled(attributeSlotToClassicAttribName[i]);
|
||||
}
|
||||
#endif
|
||||
for (; i < _input._attributeActivation.size(); i++) {
|
||||
GLint active = 0;
|
||||
glGetVertexAttribiv(i, GL_VERTEX_ATTRIB_ARRAY_ENABLED, &active);
|
||||
_input._attributeActivation[i] = active;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void GLBackend::updateInput() {
|
||||
#if defined(SUPPORT_VAO)
|
||||
if (_input._invalidFormat) {
|
||||
|
||||
InputStageState::ActivationCache newActivation;
|
||||
|
||||
// Assign the vertex format required
|
||||
if (_input._format) {
|
||||
for (auto& it : _input._format->getAttributes()) {
|
||||
const Stream::Attribute& attrib = (it).second;
|
||||
newActivation.set(attrib._slot);
|
||||
glVertexAttribFormat(
|
||||
attrib._slot,
|
||||
attrib._element.getDimensionCount(),
|
||||
_elementTypeToGLType[attrib._element.getType()],
|
||||
attrib._element.isNormalized(),
|
||||
attrib._offset);
|
||||
}
|
||||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
// Manage Activation what was and what is expected now
|
||||
for (int i = 0; i < newActivation.size(); i++) {
|
||||
bool newState = newActivation[i];
|
||||
if (newState != _input._attributeActivation[i]) {
|
||||
if (newState) {
|
||||
glEnableVertexAttribArray(i);
|
||||
} else {
|
||||
glDisableVertexAttribArray(i);
|
||||
}
|
||||
_input._attributeActivation.flip(i);
|
||||
}
|
||||
}
|
||||
(void) CHECK_GL_ERROR();
|
||||
|
||||
_input._invalidFormat = false;
|
||||
_stats._ISNumFormatChanges++;
|
||||
}
|
||||
|
||||
if (_input._buffersState.any()) {
|
||||
int numBuffers = _input._buffers.size();
|
||||
auto buffer = _input._buffers.data();
|
||||
auto vbo = _input._bufferVBOs.data();
|
||||
auto offset = _input._bufferOffsets.data();
|
||||
auto stride = _input._bufferStrides.data();
|
||||
|
||||
for (int bufferNum = 0; bufferNum < numBuffers; bufferNum++) {
|
||||
if (_input._buffersState.test(bufferNum)) {
|
||||
glBindVertexBuffer(bufferNum, (*vbo), (*offset), (*stride));
|
||||
}
|
||||
buffer++;
|
||||
vbo++;
|
||||
offset++;
|
||||
stride++;
|
||||
}
|
||||
_input._buffersState.reset();
|
||||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
#else
|
||||
if (_input._invalidFormat || _input._buffersState.any()) {
|
||||
|
||||
if (_input._invalidFormat) {
|
||||
InputStageState::ActivationCache newActivation;
|
||||
|
||||
_stats._ISNumFormatChanges++;
|
||||
|
||||
// Check expected activation
|
||||
if (_input._format) {
|
||||
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
|
||||
for (Stream::Format::AttributeMap::const_iterator it = attributes.begin(); it != attributes.end(); it++) {
|
||||
const Stream::Attribute& attrib = (*it).second;
|
||||
for (auto& it : _input._format->getAttributes()) {
|
||||
const Stream::Attribute& attrib = (it).second;
|
||||
newActivation.set(attrib._slot);
|
||||
}
|
||||
}
|
||||
|
@ -72,17 +194,15 @@ void GLBackend::updateInput() {
|
|||
bool newState = newActivation[i];
|
||||
if (newState != _input._attributeActivation[i]) {
|
||||
#if defined(SUPPORT_LEGACY_OPENGL)
|
||||
const bool useClientState = i < NUM_CLASSIC_ATTRIBS;
|
||||
#else
|
||||
const bool useClientState = false;
|
||||
#endif
|
||||
if (useClientState) {
|
||||
if (i < NUM_CLASSIC_ATTRIBS) {
|
||||
if (newState) {
|
||||
glEnableClientState(attributeSlotToClassicAttribName[i]);
|
||||
} else {
|
||||
glDisableClientState(attributeSlotToClassicAttribName[i]);
|
||||
}
|
||||
} else {
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
if (newState) {
|
||||
glEnableVertexAttribArray(i);
|
||||
} else {
|
||||
|
@ -103,18 +223,23 @@ void GLBackend::updateInput() {
|
|||
const Offsets& strides = _input._bufferStrides;
|
||||
|
||||
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
|
||||
auto& inputChannels = _input._format->getChannels();
|
||||
_stats._ISNumInputBufferChanges++;
|
||||
|
||||
for (Stream::Format::ChannelMap::const_iterator channelIt = _input._format->getChannels().begin();
|
||||
channelIt != _input._format->getChannels().end();
|
||||
channelIt++) {
|
||||
const Stream::Format::ChannelMap::value_type::second_type& channel = (*channelIt).second;
|
||||
if ((*channelIt).first < buffers.size()) {
|
||||
int bufferNum = (*channelIt).first;
|
||||
GLuint boundVBO = 0;
|
||||
for (auto& channelIt : inputChannels) {
|
||||
const Stream::Format::ChannelMap::value_type::second_type& channel = (channelIt).second;
|
||||
if ((channelIt).first < buffers.size()) {
|
||||
int bufferNum = (channelIt).first;
|
||||
|
||||
if (_input._buffersState.test(bufferNum) || _input._invalidFormat) {
|
||||
GLuint vbo = gpu::GLBackend::getBufferID((*buffers[bufferNum]));
|
||||
glBindBuffer(GL_ARRAY_BUFFER, vbo);
|
||||
(void) CHECK_GL_ERROR();
|
||||
// GLuint vbo = gpu::GLBackend::getBufferID((*buffers[bufferNum]));
|
||||
GLuint vbo = _input._bufferVBOs[bufferNum];
|
||||
if (boundVBO != vbo) {
|
||||
glBindBuffer(GL_ARRAY_BUFFER, vbo);
|
||||
(void) CHECK_GL_ERROR();
|
||||
boundVBO = vbo;
|
||||
}
|
||||
_input._buffersState[bufferNum] = false;
|
||||
|
||||
for (unsigned int i = 0; i < channel._slots.size(); i++) {
|
||||
|
@ -126,9 +251,6 @@ void GLBackend::updateInput() {
|
|||
GLuint pointer = attrib._offset + offsets[bufferNum];
|
||||
#if defined(SUPPORT_LEGACY_OPENGL)
|
||||
const bool useClientState = slot < NUM_CLASSIC_ATTRIBS;
|
||||
#else
|
||||
const bool useClientState = false;
|
||||
#endif
|
||||
if (useClientState) {
|
||||
switch (slot) {
|
||||
case Stream::POSITION:
|
||||
|
@ -144,7 +266,9 @@ void GLBackend::updateInput() {
|
|||
glTexCoordPointer(count, type, stride, reinterpret_cast<GLvoid*>(pointer));
|
||||
break;
|
||||
};
|
||||
} else {
|
||||
} else
|
||||
#endif
|
||||
{
|
||||
GLboolean isNormalized = attrib._element.isNormalized();
|
||||
glVertexAttribPointer(slot, count, type, isNormalized, stride,
|
||||
reinterpret_cast<GLvoid*>(pointer));
|
||||
|
@ -158,61 +282,7 @@ void GLBackend::updateInput() {
|
|||
// everything format related should be in sync now
|
||||
_input._invalidFormat = false;
|
||||
}
|
||||
|
||||
/* TODO: Fancy version GL4.4
|
||||
if (_needInputFormatUpdate) {
|
||||
|
||||
InputActivationCache newActivation;
|
||||
|
||||
// Assign the vertex format required
|
||||
if (_inputFormat) {
|
||||
const StreamFormat::AttributeMap& attributes = _inputFormat->getAttributes();
|
||||
for (StreamFormat::AttributeMap::const_iterator it = attributes.begin(); it != attributes.end(); it++) {
|
||||
const StreamFormat::Attribute& attrib = (*it).second;
|
||||
newActivation.set(attrib._slot);
|
||||
glVertexAttribFormat(
|
||||
attrib._slot,
|
||||
attrib._element.getDimensionCount(),
|
||||
_elementTypeToGLType[attrib._element.getType()],
|
||||
attrib._element.isNormalized(),
|
||||
attrib._stride);
|
||||
}
|
||||
CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
// Manage Activation what was and what is expected now
|
||||
for (int i = 0; i < newActivation.size(); i++) {
|
||||
bool newState = newActivation[i];
|
||||
if (newState != _inputAttributeActivation[i]) {
|
||||
if (newState) {
|
||||
glEnableVertexAttribArray(i);
|
||||
} else {
|
||||
glDisableVertexAttribArray(i);
|
||||
}
|
||||
_inputAttributeActivation.flip(i);
|
||||
}
|
||||
}
|
||||
CHECK_GL_ERROR();
|
||||
|
||||
_needInputFormatUpdate = false;
|
||||
}
|
||||
|
||||
if (_needInputStreamUpdate) {
|
||||
if (_inputStream) {
|
||||
const Stream::Buffers& buffers = _inputStream->getBuffers();
|
||||
const Stream::Offsets& offsets = _inputStream->getOffsets();
|
||||
const Stream::Strides& strides = _inputStream->getStrides();
|
||||
|
||||
for (int i = 0; i < buffers.size(); i++) {
|
||||
GLuint vbo = gpu::GLBackend::getBufferID((*buffers[i]));
|
||||
glBindVertexBuffer(i, vbo, offsets[i], strides[i]);
|
||||
}
|
||||
|
||||
CHECK_GL_ERROR();
|
||||
}
|
||||
_needInputStreamUpdate = false;
|
||||
}
|
||||
*/
|
||||
#endif
|
||||
}
|
||||
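The activation bookkeeping in updateInput() boils down to diffing two bitsets: the slots the new vertex format needs versus the slots currently enabled on the GL context, toggling only the ones that differ. A minimal standalone sketch of that pattern, using std::bitset in place of InputStageState::ActivationCache and print stubs in place of the real GL calls (both stand-ins are assumptions, not engine types):

    #include <bitset>
    #include <cstdio>

    constexpr int MAX_ATTRIBUTES = 16;

    // Stand-ins for glEnableVertexAttribArray / glDisableVertexAttribArray.
    void enableSlot(int i)  { std::printf("enable  %d\n", i); }
    void disableSlot(int i) { std::printf("disable %d\n", i); }

    void reconcileActivation(const std::bitset<MAX_ATTRIBUTES>& wanted,
                             std::bitset<MAX_ATTRIBUTES>& current) {
        for (size_t i = 0; i < wanted.size(); i++) {
            if (wanted[i] != current[i]) {
                wanted[i] ? enableSlot((int)i) : disableSlot((int)i);
                current.flip(i);   // mirror the GL state we just changed
            }
        }
    }

    int main() {
        std::bitset<MAX_ATTRIBUTES> current("0011");  // slots 0 and 1 enabled
        std::bitset<MAX_ATTRIBUTES> wanted("0101");   // format wants slots 0 and 2
        reconcileActivation(wanted, current);         // disables 1, enables 2
    }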
|
||||
|
||||
|
|
|
@ -41,21 +41,34 @@ void makeBindings(GLBackend::GLShader* shader) {
|
|||
glBindAttribLocation(glprogram, gpu::Stream::POSITION, "position");
|
||||
}
|
||||
|
||||
loc = glGetAttribLocation(glprogram, "attribPosition");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::POSITION, "attribPosition");
|
||||
}
|
||||
|
||||
//Check for gpu specific attribute slotBindings
|
||||
loc = glGetAttribLocation(glprogram, "gl_Vertex");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::POSITION, "position");
|
||||
glBindAttribLocation(glprogram, gpu::Stream::POSITION, "gl_Vertex");
|
||||
}
|
||||
|
||||
loc = glGetAttribLocation(glprogram, "normal");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::NORMAL, "normal");
|
||||
}
|
||||
loc = glGetAttribLocation(glprogram, "attribNormal");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::NORMAL, "attribNormal");
|
||||
}
|
||||
|
||||
loc = glGetAttribLocation(glprogram, "color");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::COLOR, "color");
|
||||
}
|
||||
loc = glGetAttribLocation(glprogram, "attribColor");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::COLOR, "attribColor");
|
||||
}
|
||||
|
||||
loc = glGetAttribLocation(glprogram, "texcoord");
|
||||
if (loc >= 0) {
|
||||
|
@ -75,6 +88,10 @@ void makeBindings(GLBackend::GLShader* shader) {
|
|||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::TEXCOORD1, "texcoord1");
|
||||
}
|
||||
loc = glGetAttribLocation(glprogram, "attribTexcoord1");
|
||||
if (loc >= 0) {
|
||||
glBindAttribLocation(glprogram, gpu::Stream::TEXCOORD1, "texcoord1");
|
||||
}
|
||||
|
||||
loc = glGetAttribLocation(glprogram, "clusterIndices");
|
||||
if (loc >= 0) {
|
||||
|
|
|
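The binding code above repeats the same probe-then-bind step for every attribute name. A table-driven variant (a hypothetical helper, not part of this change) makes the slot/name pairs explicit; note that glBindAttribLocation only takes effect at the next glLinkProgram, so the program has to be relinked after these calls in any case.

    // Hypothetical helper: bind each conventional attribute name the shader
    // actually declares to its gpu::Stream slot, using only pairs seen above.
    struct SlotBinding { GLuint slot; const char* name; };

    void bindKnownAttributes(GLuint glprogram) {
        static const SlotBinding kBindings[] = {
            { gpu::Stream::POSITION,  "position"       },
            { gpu::Stream::POSITION,  "attribPosition" },
            { gpu::Stream::NORMAL,    "normal"         },
            { gpu::Stream::NORMAL,    "attribNormal"   },
            { gpu::Stream::COLOR,     "color"          },
            { gpu::Stream::COLOR,     "attribColor"    },
            { gpu::Stream::TEXCOORD1, "texcoord1"      },
        };
        for (const auto& binding : kBindings) {
            if (glGetAttribLocation(glprogram, binding.name) >= 0) {
                glBindAttribLocation(glprogram, binding.slot, binding.name);
            }
        }
        // Caller relinks the program afterwards so the bindings take effect.
    }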
@ -82,8 +82,6 @@ void GLBackend::syncTransformStateCache() {
|
|||
}
|
||||
|
||||
void GLBackend::updateTransform() {
|
||||
GLint originalMatrixMode;
|
||||
glGetIntegerv(GL_MATRIX_MODE, &originalMatrixMode);
|
||||
// Check all the dirty flags and update the state accordingly
|
||||
if (_transform._invalidViewport) {
|
||||
_transform._transformCamera._viewport = glm::vec4(_transform._viewport);
|
||||
|
@ -138,6 +136,9 @@ void GLBackend::updateTransform() {
|
|||
|
||||
#if (GPU_TRANSFORM_PROFILE == GPU_LEGACY)
|
||||
// Do it again for fixed pipeline until we can get rid of it
|
||||
GLint originalMatrixMode;
|
||||
glGetIntegerv(GL_MATRIX_MODE, &originalMatrixMode);
|
||||
|
||||
if (_transform._invalidProj) {
|
||||
if (_transform._lastMode != GL_PROJECTION) {
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
|
@ -173,12 +174,12 @@ void GLBackend::updateTransform() {
|
|||
}
|
||||
(void) CHECK_GL_ERROR();
|
||||
}
|
||||
|
||||
glMatrixMode(originalMatrixMode);
|
||||
#endif
|
||||
|
||||
// Flags are clean
|
||||
_transform._invalidView = _transform._invalidProj = _transform._invalidModel = _transform._invalidViewport = false;
|
||||
|
||||
glMatrixMode(originalMatrixMode);
|
||||
}
|
||||
|
||||
|
||||
|
|
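The change above moves both the glGetIntegerv(GL_MATRIX_MODE, ...) query and the matching glMatrixMode restore inside the legacy-pipeline block, so the query is no longer issued on code paths that never touch the matrix mode. A small RAII guard is one way to keep such save/restore pairs from drifting apart; this is only an illustrative sketch (assumes the backend's GL headers), not code from the change:

    // Sketch: restore GL_MATRIX_MODE automatically when the scope ends.
    class MatrixModeSaver {
    public:
        MatrixModeSaver() { glGetIntegerv(GL_MATRIX_MODE, &_saved); }
        ~MatrixModeSaver() { glMatrixMode((GLenum)_saved); }
    private:
        GLint _saved = GL_MODELVIEW;
    };

    // Usage inside the legacy-pipeline block:
    // {
    //     MatrixModeSaver saver;
    //     glMatrixMode(GL_PROJECTION);
    //     // ... load projection ...
    //     glMatrixMode(GL_MODELVIEW);
    //     // ... load modelview ...
    // } // original matrix mode restored here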
|
@ -13,6 +13,7 @@
|
|||
|
||||
#include <qjsondocument.h>
|
||||
#include <QtCore/QDebug>
|
||||
#include <QtCore/QDataStream>
|
||||
|
||||
#include "NetworkLogging.h"
|
||||
#include "DataServerAccountInfo.h"
|
||||
|
|
|
@ -12,6 +12,7 @@
|
|||
#include <math.h>
|
||||
|
||||
#include <QtCore/QJsonDocument>
|
||||
#include <QtCore/QDataStream>
|
||||
|
||||
#include "Assignment.h"
|
||||
#include "HifiSockAddr.h"
|
||||
|
|
|
@ -17,6 +17,7 @@
|
|||
#include <QtCore/QJsonValue>
|
||||
#include <QtCore/QString>
|
||||
#include <QtCore/QStringList>
|
||||
#include <QtCore/QVariantMap>
|
||||
|
||||
class JSONBreakableMarshal {
|
||||
public:
|
||||
|
|
42
libraries/networking/src/NetworkPacket.cpp
Normal file
|
@ -0,0 +1,42 @@
|
|||
//
|
||||
// NetworkPacket.cpp
|
||||
// libraries/networking/src
|
||||
//
|
||||
// Created by Brad Hefta-Gaub on 8/9/13.
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <cassert>
|
||||
#include <cstring>
|
||||
#include <QtDebug>
|
||||
|
||||
#include "SharedUtil.h"
|
||||
#include "NetworkLogging.h"
|
||||
|
||||
#include "NetworkPacket.h"
|
||||
|
||||
void NetworkPacket::copyContents(const SharedNodePointer& node, const QByteArray& packet) {
|
||||
if (packet.size() && packet.size() <= MAX_PACKET_SIZE) {
|
||||
_node = node;
|
||||
_byteArray = packet;
|
||||
} else {
|
||||
qCDebug(networking, ">>> NetworkPacket::copyContents() unexpected length = %d", packet.size());
|
||||
}
|
||||
}
|
||||
|
||||
NetworkPacket::NetworkPacket(const NetworkPacket& packet) {
|
||||
copyContents(packet.getNode(), packet.getByteArray());
|
||||
}
|
||||
|
||||
NetworkPacket::NetworkPacket(const SharedNodePointer& node, const QByteArray& packet) {
|
||||
copyContents(node, packet);
|
||||
};
|
||||
|
||||
// copy assignment
|
||||
NetworkPacket& NetworkPacket::operator=(NetworkPacket const& other) {
|
||||
copyContents(other.getNode(), other.getByteArray());
|
||||
return *this;
|
||||
}
|
37
libraries/networking/src/NetworkPacket.h
Normal file
|
@ -0,0 +1,37 @@
|
|||
//
|
||||
// NetworkPacket.h
|
||||
// libraries/networking/src
|
||||
//
|
||||
// Created by Brad Hefta-Gaub on 8/9/13.
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// A really simple class that stores a network packet between being received and being processed
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_NetworkPacket_h
|
||||
#define hifi_NetworkPacket_h
|
||||
|
||||
#include "NodeList.h"
|
||||
|
||||
/// Storage of not-yet processed inbound, or not yet sent outbound generic UDP network packet
|
||||
class NetworkPacket {
|
||||
public:
|
||||
NetworkPacket() { }
|
||||
NetworkPacket(const NetworkPacket& packet); // copy constructor
|
||||
NetworkPacket& operator= (const NetworkPacket& other); // copy assignment
|
||||
NetworkPacket(const SharedNodePointer& node, const QByteArray& byteArray);
|
||||
|
||||
const SharedNodePointer& getNode() const { return _node; }
|
||||
const QByteArray& getByteArray() const { return _byteArray; }
|
||||
|
||||
private:
|
||||
void copyContents(const SharedNodePointer& node, const QByteArray& byteArray);
|
||||
|
||||
SharedNodePointer _node;
|
||||
QByteArray _byteArray;
|
||||
};
|
||||
|
||||
#endif // hifi_NetworkPacket_h
|
|
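The class is value-copyable on purpose: copyContents() duplicates the node pointer and the byte array, and rejects anything larger than MAX_PACKET_SIZE, so a NetworkPacket can be queued by value between receipt and processing. A hedged usage sketch (the queue and sender variables here are made up for illustration):

    #include <QByteArray>
    #include <QQueue>
    // #include "NetworkPacket.h"

    // Assumed context: 'sender' is a SharedNodePointer obtained from the NodeList
    // and 'incoming' is the raw datagram payload.
    void enqueueForProcessing(QQueue<NetworkPacket>& pendingPackets,
                              const SharedNodePointer& sender,
                              const QByteArray& incoming) {
        // Copy semantics: the queue owns its own copy of the datagram,
        // so 'incoming' can be reused by the caller immediately.
        pendingPackets.enqueue(NetworkPacket(sender, incoming));
    }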
@ -13,6 +13,7 @@
|
|||
|
||||
#include <QtCore/QDateTime>
|
||||
#include <QtCore/QDebug>
|
||||
#include <QtCore/QDataStream>
|
||||
|
||||
#include <SharedUtil.h>
|
||||
#include <UUID.h>
|
||||
|
|
286
libraries/networking/src/PacketHeaders.cpp
Normal file
|
@ -0,0 +1,286 @@
|
|||
//
|
||||
// PacketHeaders.cpp
|
||||
// libraries/networking/src
|
||||
//
|
||||
// Created by Stephen Birarda on 6/28/13.
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "PacketHeaders.h"
|
||||
|
||||
#include <math.h>
|
||||
|
||||
#include <QtCore/QDebug>
|
||||
|
||||
int arithmeticCodingValueFromBuffer(const char* checkValue) {
|
||||
if (((uchar) *checkValue) < 255) {
|
||||
return *checkValue;
|
||||
} else {
|
||||
return 255 + arithmeticCodingValueFromBuffer(checkValue + 1);
|
||||
}
|
||||
}
|
||||
|
||||
int numBytesArithmeticCodingFromBuffer(const char* checkValue) {
|
||||
if (((uchar) *checkValue) < 255) {
|
||||
return 1;
|
||||
} else {
|
||||
return 1 + numBytesArithmeticCodingFromBuffer(checkValue + 1);
|
||||
}
|
||||
}
|
||||
|
||||
int packArithmeticallyCodedValue(int value, char* destination) {
|
||||
if (value < 255) {
|
||||
// less than 255, just pack our value
|
||||
destination[0] = (uchar) value;
|
||||
return 1;
|
||||
} else {
|
||||
// pack 255 and then recursively pack on
|
||||
((unsigned char*)destination)[0] = 255;
|
||||
return 1 + packArithmeticallyCodedValue(value - 255, destination + 1);
|
||||
}
|
||||
}
|
||||
|
||||
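The three helpers above implement a simple extension code: every full 255 byte means "add 255 and keep reading", and the first byte below 255 terminates the value. For example, a packet type of 300 packs into two bytes, 255 followed by 45, and decodes back to 300. A worked sketch using the functions above (assumes they are visible in the translation unit):

    #include <cassert>

    void arithmeticCodingExample() {
        char buffer[8] = {};

        int written = packArithmeticallyCodedValue(300, buffer);   // 300 = 255 + 45
        assert(written == 2);
        assert((unsigned char)buffer[0] == 255);
        assert((unsigned char)buffer[1] == 45);

        assert(numBytesArithmeticCodingFromBuffer(buffer) == 2);
        assert(arithmeticCodingValueFromBuffer(buffer) == 300);

        // Small values fit in a single byte.
        written = packArithmeticallyCodedValue(7, buffer);
        assert(written == 1 && arithmeticCodingValueFromBuffer(buffer) == 7);
    }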
PacketVersion versionForPacketType(PacketType packetType) {
|
||||
switch (packetType) {
|
||||
case PacketTypeMicrophoneAudioNoEcho:
|
||||
case PacketTypeMicrophoneAudioWithEcho:
|
||||
return 2;
|
||||
case PacketTypeSilentAudioFrame:
|
||||
return 4;
|
||||
case PacketTypeMixedAudio:
|
||||
return 1;
|
||||
case PacketTypeInjectAudio:
|
||||
return 1;
|
||||
case PacketTypeAvatarData:
|
||||
return 7;
|
||||
case PacketTypeAvatarIdentity:
|
||||
return 1;
|
||||
case PacketTypeEnvironmentData:
|
||||
return 2;
|
||||
case PacketTypeDomainList:
|
||||
case PacketTypeDomainListRequest:
|
||||
return 5;
|
||||
case PacketTypeCreateAssignment:
|
||||
case PacketTypeRequestAssignment:
|
||||
return 2;
|
||||
case PacketTypeOctreeStats:
|
||||
return 1;
|
||||
case PacketTypeStopNode:
|
||||
return 1;
|
||||
case PacketTypeEntityAdd:
|
||||
case PacketTypeEntityEdit:
|
||||
case PacketTypeEntityData:
|
||||
return VERSION_ENTITIES_HAVE_SIMULATION_OWNER_AND_ACTIONS_OVER_WIRE;
|
||||
case PacketTypeEntityErase:
|
||||
return 2;
|
||||
case PacketTypeAudioStreamStats:
|
||||
return 1;
|
||||
case PacketTypeIceServerHeartbeat:
|
||||
case PacketTypeIceServerQuery:
|
||||
return 1;
|
||||
default:
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
#define PACKET_TYPE_NAME_LOOKUP(x) case x: return QString(#x);
|
||||
|
||||
QString nameForPacketType(PacketType packetType) {
|
||||
switch (packetType) {
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeUnknown);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeStunResponse);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeDomainList);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypePing);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypePingReply);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeKillAvatar);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeAvatarData);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeInjectAudio);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeMixedAudio);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeMicrophoneAudioNoEcho);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeMicrophoneAudioWithEcho);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeBulkAvatarData);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeSilentAudioFrame);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeEnvironmentData);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeDomainListRequest);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeRequestAssignment);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeCreateAssignment);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeDomainConnectionDenied);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeMuteEnvironment);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeAudioStreamStats);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeDataServerConfirm);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeOctreeStats);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeJurisdiction);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeJurisdictionRequest);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeAvatarIdentity);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeAvatarBillboard);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeDomainConnectRequest);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeDomainServerRequireDTLS);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeNodeJsonStats);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeEntityQuery);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeEntityData);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeEntityErase);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeOctreeDataNack);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeStopNode);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeAudioEnvironment);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeEntityEditNack);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeSignedTransactionPayment);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeIceServerHeartbeat);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeDomainServerAddedNode);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeIceServerQuery);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeIceServerPeerInformation);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeUnverifiedPing);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeUnverifiedPingReply);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeEntityAdd);
|
||||
PACKET_TYPE_NAME_LOOKUP(PacketTypeEntityEdit);
|
||||
default:
|
||||
return QString("Type: ") + QString::number((int)packetType);
|
||||
}
|
||||
return QString("unexpected");
|
||||
}
|
||||
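PACKET_TYPE_NAME_LOOKUP leans on preprocessor stringification: each use expands into a case label plus a QString built from the enumerator's literal spelling, which is how the switch above returns human-readable names without a hand-maintained table. For instance:

    // PACKET_TYPE_NAME_LOOKUP(PacketTypeAvatarData); expands to:
    case PacketTypeAvatarData: return QString("PacketTypeAvatarData");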
|
||||
|
||||
|
||||
QByteArray byteArrayWithUUIDPopulatedHeader(PacketType packetType, const QUuid& connectionUUID) {
|
||||
QByteArray freshByteArray(MAX_PACKET_HEADER_BYTES, 0);
|
||||
freshByteArray.resize(populatePacketHeaderWithUUID(freshByteArray, packetType, connectionUUID));
|
||||
return freshByteArray;
|
||||
}
|
||||
|
||||
int populatePacketHeaderWithUUID(QByteArray& packet, PacketType packetType, const QUuid& connectionUUID) {
|
||||
if (packet.size() < numBytesForPacketHeaderGivenPacketType(packetType)) {
|
||||
packet.resize(numBytesForPacketHeaderGivenPacketType(packetType));
|
||||
}
|
||||
|
||||
return populatePacketHeaderWithUUID(packet.data(), packetType, connectionUUID);
|
||||
}
|
||||
|
||||
int populatePacketHeaderWithUUID(char* packet, PacketType packetType, const QUuid& connectionUUID) {
|
||||
int numTypeBytes = packArithmeticallyCodedValue(packetType, packet);
|
||||
packet[numTypeBytes] = versionForPacketType(packetType);
|
||||
|
||||
char* position = packet + numTypeBytes + sizeof(PacketVersion);
|
||||
|
||||
QByteArray rfcUUID = connectionUUID.toRfc4122();
|
||||
memcpy(position, rfcUUID.constData(), NUM_BYTES_RFC4122_UUID);
|
||||
position += NUM_BYTES_RFC4122_UUID;
|
||||
|
||||
if (!NON_VERIFIED_PACKETS.contains(packetType)) {
|
||||
// pack 16 bytes of zeros where the md5 hash will be placed once data is packed
|
||||
memset(position, 0, NUM_BYTES_MD5_HASH);
|
||||
position += NUM_BYTES_MD5_HASH;
|
||||
}
|
||||
|
||||
if (SEQUENCE_NUMBERED_PACKETS.contains(packetType)) {
|
||||
// Pack zeros for the number of bytes that the sequence number requires.
|
||||
// The LimitedNodeList will handle packing in the sequence number when sending out the packet.
|
||||
memset(position, 0, sizeof(PacketSequenceNumber));
|
||||
position += sizeof(PacketSequenceNumber);
|
||||
}
|
||||
|
||||
// return the number of bytes written for pointer pushing
|
||||
return position - packet;
|
||||
}
|
||||
|
||||
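Taken together, populatePacketHeaderWithUUID() lays the header out as: the arithmetic-coded packet type, one PacketVersion byte, the 16-byte RFC 4122 connection UUID, then (for verified types) a zeroed 16-byte MD5 slot and (for sequence-numbered types) a zeroed PacketSequenceNumber. A sketch of building a fresh packet and appending a payload on top of that header; the packet type used is just an example:

    #include <QByteArray>
    #include <QUuid>
    // #include "PacketHeaders.h"

    // Assumed context: 'connectionUUID' identifies this client's session.
    QByteArray buildExamplePacket(const QUuid& connectionUUID) {
        // Header: type | version | UUID | [md5 placeholder] | [sequence placeholder]
        QByteArray packet = byteArrayWithUUIDPopulatedHeader(PacketTypeAvatarData, connectionUUID);

        // Payload starts immediately after the header.
        packet.append("example payload");
        return packet;
    }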
int numBytesForPacketHeader(const QByteArray& packet) {
|
||||
PacketType packetType = packetTypeForPacket(packet);
|
||||
return numBytesForPacketHeaderGivenPacketType(packetType);
|
||||
}
|
||||
|
||||
int numBytesForPacketHeader(const char* packet) {
|
||||
PacketType packetType = packetTypeForPacket(packet);
|
||||
return numBytesForPacketHeaderGivenPacketType(packetType);
|
||||
}
|
||||
|
||||
int numBytesForArithmeticCodedPacketType(PacketType packetType) {
|
||||
return (int) ceilf((float) packetType / 255);
|
||||
}
|
||||
|
||||
int numBytesForPacketHeaderGivenPacketType(PacketType packetType) {
|
||||
return numBytesForArithmeticCodedPacketType(packetType)
|
||||
+ numHashBytesForType(packetType)
|
||||
+ numSequenceNumberBytesForType(packetType)
|
||||
+ NUM_STATIC_HEADER_BYTES;
|
||||
}
|
||||
|
||||
int numHashBytesForType(PacketType packetType) {
|
||||
return (NON_VERIFIED_PACKETS.contains(packetType) ? 0 : NUM_BYTES_MD5_HASH);
|
||||
}
|
||||
|
||||
int numSequenceNumberBytesForType(PacketType packetType) {
|
||||
return (SEQUENCE_NUMBERED_PACKETS.contains(packetType) ? sizeof(PacketSequenceNumber) : 0);
|
||||
}
|
||||
|
||||
QUuid uuidFromPacketHeader(const QByteArray& packet) {
|
||||
return QUuid::fromRfc4122(packet.mid(numBytesArithmeticCodingFromBuffer(packet.data()) + sizeof(PacketVersion),
|
||||
NUM_BYTES_RFC4122_UUID));
|
||||
}
|
||||
|
||||
int hashOffsetForPacketType(PacketType packetType) {
|
||||
return numBytesForArithmeticCodedPacketType(packetType) + NUM_STATIC_HEADER_BYTES;
|
||||
}
|
||||
|
||||
int sequenceNumberOffsetForPacketType(PacketType packetType) {
|
||||
return numBytesForPacketHeaderGivenPacketType(packetType) - sizeof(PacketSequenceNumber);
|
||||
}
|
||||
|
||||
QByteArray hashFromPacketHeader(const QByteArray& packet) {
|
||||
return packet.mid(hashOffsetForPacketType(packetTypeForPacket(packet)), NUM_BYTES_MD5_HASH);
|
||||
}
|
||||
|
||||
QByteArray hashForPacketAndConnectionUUID(const QByteArray& packet, const QUuid& connectionUUID) {
|
||||
return QCryptographicHash::hash(packet.mid(numBytesForPacketHeader(packet)) + connectionUUID.toRfc4122(),
|
||||
QCryptographicHash::Md5);
|
||||
}
|
||||
|
||||
PacketSequenceNumber sequenceNumberFromHeader(const QByteArray& packet, PacketType packetType) {
|
||||
if (packetType == PacketTypeUnknown) {
|
||||
packetType = packetTypeForPacket(packet);
|
||||
}
|
||||
|
||||
PacketSequenceNumber result = DEFAULT_SEQUENCE_NUMBER;
|
||||
|
||||
if (SEQUENCE_NUMBERED_PACKETS.contains(packetType)) {
|
||||
memcpy(&result, packet.data() + sequenceNumberOffsetForPacketType(packetType), sizeof(PacketSequenceNumber));
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
void replaceHashInPacket(QByteArray& packet, const QUuid& connectionUUID, PacketType packetType) {
|
||||
if (packetType == PacketTypeUnknown) {
|
||||
packetType = packetTypeForPacket(packet);
|
||||
}
|
||||
|
||||
packet.replace(hashOffsetForPacketType(packetType), NUM_BYTES_MD5_HASH,
|
||||
hashForPacketAndConnectionUUID(packet, connectionUUID));
|
||||
}
|
||||
|
||||
void replaceSequenceNumberInPacket(QByteArray& packet, PacketSequenceNumber sequenceNumber, PacketType packetType) {
|
||||
if (packetType == PacketTypeUnknown) {
|
||||
packetType = packetTypeForPacket(packet);
|
||||
}
|
||||
|
||||
packet.replace(sequenceNumberOffsetForPacketType(packetType),
|
||||
sizeof(PacketSequenceNumber), reinterpret_cast<char*>(&sequenceNumber), sizeof(PacketSequenceNumber));
|
||||
}
|
||||
|
||||
void replaceHashAndSequenceNumberInPacket(QByteArray& packet, const QUuid& connectionUUID, PacketSequenceNumber sequenceNumber,
|
||||
PacketType packetType) {
|
||||
if (packetType == PacketTypeUnknown) {
|
||||
packetType = packetTypeForPacket(packet);
|
||||
}
|
||||
|
||||
replaceHashInPacket(packet, connectionUUID, packetType);
|
||||
replaceSequenceNumberInPacket(packet, sequenceNumber, packetType);
|
||||
}
|
||||
|
||||
PacketType packetTypeForPacket(const QByteArray& packet) {
|
||||
return (PacketType) arithmeticCodingValueFromBuffer(packet.data());
|
||||
}
|
||||
|
||||
PacketType packetTypeForPacket(const char* packet) {
|
||||
return (PacketType) arithmeticCodingValueFromBuffer(packet);
|
||||
}
|
|
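The MD5 and sequence-number slots are written as zeros when the header is built and only filled in just before the datagram goes out, via the replace* helpers above. A hedged sketch of that final step for a packet type that carries both fields (the sequence counter argument is illustrative; in the real code the LimitedNodeList owns it):

    // Assumed context: 'packet' already holds header + payload, 'connectionUUID'
    // is the sender's session UUID and 'sequenceNumber' is a per-destination counter.
    void finalizeBeforeSend(QByteArray& packet, const QUuid& connectionUUID,
                            PacketSequenceNumber sequenceNumber) {
        PacketType packetType = packetTypeForPacket(packet);
        // Fills the zeroed md5 and sequence-number slots that were reserved
        // when the header was built.
        replaceHashAndSequenceNumberInPacket(packet, connectionUUID, sequenceNumber, packetType);
    }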
@ -34,13 +34,18 @@ ResourceCache::ResourceCache(QObject* parent) :
|
|||
}
|
||||
|
||||
ResourceCache::~ResourceCache() {
|
||||
// the unused resources may themselves reference resources that will be added to the unused
|
||||
// list on destruction, so keep clearing until there are no references left
|
||||
while (!_unusedResources.isEmpty()) {
|
||||
foreach (const QSharedPointer<Resource>& resource, _unusedResources) {
|
||||
resource->setCache(nullptr);
|
||||
clearUnusedResource();
|
||||
}
|
||||
|
||||
void ResourceCache::refreshAll() {
|
||||
// Clear all unused resources so we don't have to reload them
|
||||
clearUnusedResource();
|
||||
|
||||
// Refresh all remaining resources in use
|
||||
foreach (auto resource, _resources) {
|
||||
if (!resource.isNull()) {
|
||||
resource.data()->refresh();
|
||||
}
|
||||
_unusedResources.clear();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -48,6 +53,8 @@ void ResourceCache::refresh(const QUrl& url) {
|
|||
QSharedPointer<Resource> resource = _resources.value(url);
|
||||
if (!resource.isNull()) {
|
||||
resource->refresh();
|
||||
} else {
|
||||
_resources.remove(url);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -134,6 +141,17 @@ void ResourceCache::reserveUnusedResource(qint64 resourceSize) {
|
|||
}
|
||||
}
|
||||
|
||||
void ResourceCache::clearUnusedResource() {
|
||||
// the unused resources may themselves reference resources that will be added to the unused
|
||||
// list on destruction, so keep clearing until there are no references left
|
||||
while (!_unusedResources.isEmpty()) {
|
||||
foreach (const QSharedPointer<Resource>& resource, _unusedResources) {
|
||||
resource->setCache(nullptr);
|
||||
}
|
||||
_unusedResources.clear();
|
||||
}
|
||||
}
|
||||
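The outer while in clearUnusedResource() matters because clearing the map drops the last references to those resources, and their destructors can push further resources onto _unusedResources mid-sweep; a single clear() is therefore not enough. The same drain-until-stable idiom in miniature (illustrative only, not the cache code):

    #include <functional>
    #include <vector>

    std::vector<std::function<void()>> pending;

    void drainPending() {
        // Each task may push more work onto 'pending' while it runs,
        // so keep sweeping until the container stays empty.
        while (!pending.empty()) {
            auto batch = std::move(pending);   // take ownership of the current batch
            pending.clear();
            for (auto& task : batch) {
                task();                        // may repopulate 'pending'
            }
        }
    }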
|
||||
void ResourceCache::attemptRequest(Resource* resource) {
|
||||
auto sharedItems = DependencyManager::get<ResourceCacheSharedItems>();
|
||||
if (_requestLimit <= 0) {
|
||||
|
@ -253,19 +271,20 @@ void Resource::refresh() {
|
|||
_replyTimer->deleteLater();
|
||||
_replyTimer = nullptr;
|
||||
}
|
||||
|
||||
init();
|
||||
_request.setAttribute(QNetworkRequest::CacheLoadControlAttribute, QNetworkRequest::AlwaysNetwork);
|
||||
if (!_startedLoading) {
|
||||
attemptRequest();
|
||||
}
|
||||
ensureLoading();
|
||||
emit onRefresh();
|
||||
}
|
||||
|
||||
void Resource::allReferencesCleared() {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "allReferencesCleared");
|
||||
return;
|
||||
}
|
||||
if (_cache) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "allReferencesCleared");
|
||||
return;
|
||||
}
|
||||
|
||||
// create and reinsert new shared pointer
|
||||
QSharedPointer<Resource> self(this, &Resource::allReferencesCleared);
|
||||
setSelf(self);
|
||||
|
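The "create and reinsert new shared pointer" step relies on QSharedPointer's custom-deleter constructor: instead of deleting the Resource when the last external reference drops, Qt calls Resource::allReferencesCleared again, which lets the cache park the object on its unused list rather than destroy it outright. A stripped-down sketch of that ownership trick (names here are placeholders):

    #include <QDebug>
    #include <QSharedPointer>

    class Parkable {
    public:
        // Called by QSharedPointer instead of 'delete' when the last reference drops.
        static void onLastReferenceDropped(Parkable* object) {
            qDebug() << "parking object instead of deleting it";
            // A cache could now re-wrap 'object' in a fresh QSharedPointer
            // (roughly what Resource::allReferencesCleared() does) or delete it.
            delete object;  // fallback for this sketch
        }
    };

    int main() {
        auto ptr = QSharedPointer<Parkable>(new Parkable(), &Parkable::onLastReferenceDropped);
        ptr.reset();   // triggers onLastReferenceDropped instead of plain delete
    }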
@ -312,8 +331,7 @@ void Resource::reinsert() {
|
|||
_cache->_resources.insert(_url, _self);
|
||||
}
|
||||
|
||||
const int REPLY_TIMEOUT_MS = 5000;
|
||||
|
||||
static const int REPLY_TIMEOUT_MS = 5000;
|
||||
void Resource::handleDownloadProgress(qint64 bytesReceived, qint64 bytesTotal) {
|
||||
if (!_reply->isFinished()) {
|
||||
_bytesReceived = bytesReceived;
|
||||
|
|
|
@ -51,7 +51,7 @@ static const qint64 MAX_UNUSED_MAX_SIZE = 10 * BYTES_PER_GIGABYTES;
|
|||
class ResourceCacheSharedItems : public Dependency {
|
||||
SINGLETON_DEPENDENCY
|
||||
public:
|
||||
QList<QPointer<Resource> > _pendingRequests;
|
||||
QList<QPointer<Resource>> _pendingRequests;
|
||||
QList<Resource*> _loadingRequests;
|
||||
private:
|
||||
ResourceCacheSharedItems() { }
|
||||
|
@ -78,17 +78,14 @@ public:
|
|||
|
||||
ResourceCache(QObject* parent = NULL);
|
||||
virtual ~ResourceCache();
|
||||
|
||||
|
||||
void refreshAll();
|
||||
void refresh(const QUrl& url);
|
||||
|
||||
public slots:
|
||||
void checkAsynchronousGets();
|
||||
|
||||
protected:
|
||||
qint64 _unusedResourcesMaxSize = DEFAULT_UNUSED_MAX_SIZE;
|
||||
qint64 _unusedResourcesSize = 0;
|
||||
QMap<int, QSharedPointer<Resource> > _unusedResources;
|
||||
|
||||
/// Loads a resource from the specified URL.
|
||||
/// \param fallback a fallback URL to load if the desired one is unavailable
|
||||
/// \param delayLoad if true, don't load the resource immediately; wait until load is first requested
|
||||
|
@ -103,6 +100,7 @@ protected:
|
|||
void addUnusedResource(const QSharedPointer<Resource>& resource);
|
||||
void removeUnusedResource(const QSharedPointer<Resource>& resource);
|
||||
void reserveUnusedResource(qint64 resourceSize);
|
||||
void clearUnusedResource();
|
||||
|
||||
static void attemptRequest(Resource* resource);
|
||||
static void requestCompleted(Resource* resource);
|
||||
|
@ -110,7 +108,7 @@ protected:
|
|||
private:
|
||||
friend class Resource;
|
||||
|
||||
QHash<QUrl, QWeakPointer<Resource> > _resources;
|
||||
QHash<QUrl, QWeakPointer<Resource>> _resources;
|
||||
int _lastLRUKey = 0;
|
||||
|
||||
static int _requestLimit;
|
||||
|
@ -118,7 +116,10 @@ private:
|
|||
void getResourceAsynchronously(const QUrl& url);
|
||||
QReadWriteLock _resourcesToBeGottenLock;
|
||||
QQueue<QUrl> _resourcesToBeGotten;
|
||||
|
||||
|
||||
qint64 _unusedResourcesMaxSize = DEFAULT_UNUSED_MAX_SIZE;
|
||||
qint64 _unusedResourcesSize = 0;
|
||||
QMap<int, QSharedPointer<Resource>> _unusedResources;
|
||||
};
|
||||
|
||||
/// Base class for resources.
|
||||
|
@ -172,12 +173,11 @@ public:
|
|||
const QUrl& getURL() const { return _url; }
|
||||
|
||||
signals:
|
||||
|
||||
/// Fired when the resource has been loaded.
|
||||
void loaded();
|
||||
void onRefresh();
|
||||
|
||||
protected slots:
|
||||
|
||||
void attemptRequest();
|
||||
|
||||
/// Refreshes the resource if the last modified date on the network
|
||||
|
@ -185,7 +185,6 @@ protected slots:
|
|||
void maybeRefresh();
|
||||
|
||||
protected:
|
||||
|
||||
virtual void init();
|
||||
|
||||
/// Called when the download has finished. The recipient should delete the reply when done with it.
|
||||
|
@ -207,14 +206,12 @@ protected:
|
|||
QPointer<ResourceCache> _cache;
|
||||
|
||||
private slots:
|
||||
|
||||
void handleDownloadProgress(qint64 bytesReceived, qint64 bytesTotal);
|
||||
void handleReplyError();
|
||||
void handleReplyFinished();
|
||||
void handleReplyTimeout();
|
||||
|
||||
private:
|
||||
|
||||
void setLRUKey(int lruKey) { _lruKey = lruKey; }
|
||||
|
||||
void makeRequest();
|
||||
|
|
|
@ -30,23 +30,6 @@ JurisdictionMap& JurisdictionMap::operator=(const JurisdictionMap& other) {
|
|||
return *this;
|
||||
}
|
||||
|
||||
#ifdef HAS_MOVE_SEMANTICS
|
||||
// Move constructor
|
||||
JurisdictionMap::JurisdictionMap(JurisdictionMap&& other) : _rootOctalCode(NULL) {
|
||||
init(other._rootOctalCode, other._endNodes);
|
||||
other._rootOctalCode = NULL;
|
||||
other._endNodes.clear();
|
||||
}
|
||||
|
||||
// move assignment
|
||||
JurisdictionMap& JurisdictionMap::operator=(JurisdictionMap&& other) {
|
||||
init(other._rootOctalCode, other._endNodes);
|
||||
other._rootOctalCode = NULL;
|
||||
other._endNodes.clear();
|
||||
return *this;
|
||||
}
|
||||
#endif
|
||||
|
||||
// Copy constructor
|
||||
JurisdictionMap::JurisdictionMap(const JurisdictionMap& other) : _rootOctalCode(NULL) {
|
||||
copyContents(other);
|
||||
|
|
|
@ -38,13 +38,6 @@ public:
|
|||
// standard assignment
|
||||
JurisdictionMap& operator=(const JurisdictionMap& other); // copy assignment
|
||||
|
||||
#ifdef HAS_MOVE_SEMANTICS
|
||||
// move constructor and assignment
|
||||
JurisdictionMap(JurisdictionMap&& other); // move constructor
|
||||
JurisdictionMap& operator= (JurisdictionMap&& other); // move assignment
|
||||
#endif
|
||||
|
||||
// application constructors
|
||||
JurisdictionMap(const char* filename);
|
||||
JurisdictionMap(unsigned char* rootOctalCode, const std::vector<unsigned char*>& endNodes);
|
||||
JurisdictionMap(const char* rootHextString, const char* endNodesHextString);
|
||||
|
|
|
@ -21,7 +21,6 @@
|
|||
|
||||
#include "AbstractViewStateInterface.h"
|
||||
#include "AmbientOcclusionEffect.h"
|
||||
#include "GlowEffect.h"
|
||||
#include "ProgramObject.h"
|
||||
#include "RenderUtil.h"
|
||||
#include "TextureCache.h"
|
||||
|
@ -107,7 +106,7 @@ void AmbientOcclusionEffect::render() {
|
|||
glBindTexture(GL_TEXTURE_2D, _rotationTextureID);
|
||||
|
||||
// render with the occlusion shader to the secondary/tertiary buffer
|
||||
auto freeFramebuffer = DependencyManager::get<GlowEffect>()->getFreeFramebuffer();
|
||||
auto freeFramebuffer = nullptr; // DependencyManager::get<GlowEffect>()->getFreeFramebuffer(); // FIXME
|
||||
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(freeFramebuffer));
|
||||
|
||||
float left, right, bottom, top, nearVal, farVal;
|
||||
|
@ -150,7 +149,7 @@ void AmbientOcclusionEffect::render() {
|
|||
glEnable(GL_BLEND);
|
||||
glBlendFuncSeparate(GL_ZERO, GL_SRC_COLOR, GL_ZERO, GL_ONE);
|
||||
|
||||
auto freeFramebufferTexture = freeFramebuffer->getRenderBuffer(0);
|
||||
auto freeFramebufferTexture = nullptr; // freeFramebuffer->getRenderBuffer(0); // FIXME
|
||||
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(freeFramebufferTexture));
|
||||
|
||||
_blurProgram->bind();
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
void AnimationHandle::setURL(const QUrl& url) {
|
||||
if (_url != url) {
|
||||
_animation = DependencyManager::get<AnimationCache>()->getAnimation(_url = url);
|
||||
QObject::connect(_animation.data(), &Resource::onRefresh, this, &AnimationHandle::clearJoints);
|
||||
_jointMappings.clear();
|
||||
}
|
||||
}
|
||||
|
@ -110,11 +111,15 @@ void AnimationHandle::setAnimationDetails(const AnimationDetails& details) {
|
|||
|
||||
|
||||
void AnimationHandle::simulate(float deltaTime) {
|
||||
if (!_animation || !_animation->isLoaded()) {
|
||||
return;
|
||||
}
|
||||
|
||||
_animationLoop.simulate(deltaTime);
|
||||
|
||||
// update the joint mappings if necessary/possible
|
||||
if (_jointMappings.isEmpty()) {
|
||||
if (_model->isActive()) {
|
||||
if (_model && _model->isActive()) {
|
||||
_jointMappings = _model->getGeometry()->getJointMappings(_animation);
|
||||
}
|
||||
if (_jointMappings.isEmpty()) {
|
||||
|
@ -146,6 +151,10 @@ void AnimationHandle::simulate(float deltaTime) {
|
|||
}
|
||||
|
||||
void AnimationHandle::applyFrame(float frameIndex) {
|
||||
if (!_animation || !_animation->isLoaded()) {
|
||||
return;
|
||||
}
|
||||
|
||||
const FBXGeometry& animationGeometry = _animation->getGeometry();
|
||||
int frameCount = animationGeometry.animationFrames.size();
|
||||
const FBXAnimationFrame& floorFrame = animationGeometry.animationFrames.at((int)glm::floor(frameIndex) % frameCount);
|
||||
|
|
|
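applyFrame() indexes the animation with glm::floor(frameIndex) % frameCount, i.e. a fractional frame index that wraps around the clip; typically the fractional part then blends between that frame and the next. The arithmetic in isolation, with made-up numbers (an illustrative sketch, not the engine code):

    #include <cassert>
    #include <cmath>

    // Wrap a fractional frame index into [0, frameCount) and derive the two
    // frames plus the blend factor between them.
    void frameIndices(float frameIndex, int frameCount,
                      int& floorFrame, int& ceilFrame, float& fraction) {
        floorFrame = (int)std::floor(frameIndex) % frameCount;
        ceilFrame = (floorFrame + 1) % frameCount;          // wraps back to frame 0
        fraction = frameIndex - std::floor(frameIndex);     // blend weight toward ceilFrame
    }

    int main() {
        int lo, hi; float t;
        frameIndices(29.25f, 30, lo, hi, t);
        assert(lo == 29 && hi == 0);        // wraps around the clip
        assert(t > 0.24f && t < 0.26f);
    }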
@ -94,6 +94,8 @@ private:
|
|||
void replaceMatchingPriorities(float newPriority);
|
||||
void restoreJoints();
|
||||
|
||||
void clearJoints() { _jointMappings.clear(); }
|
||||
|
||||
Model* _model;
|
||||
WeakAnimationHandlePointer _self;
|
||||
AnimationPointer _animation;
|
||||
|
|
|
@ -18,7 +18,6 @@
|
|||
#include "AbstractViewStateInterface.h"
|
||||
#include "DeferredLightingEffect.h"
|
||||
#include "GeometryCache.h"
|
||||
#include "GlowEffect.h"
|
||||
#include "RenderUtil.h"
|
||||
#include "TextureCache.h"
|
||||
|
||||
|
@ -238,8 +237,10 @@ void DeferredLightingEffect::render(RenderArgs* args) {
|
|||
QSize framebufferSize = textureCache->getFrameBufferSize();
|
||||
|
||||
// binding the first framebuffer
|
||||
auto freeFBO = DependencyManager::get<GlowEffect>()->getFreeFramebuffer();
|
||||
auto freeFBO = DependencyManager::get<TextureCache>()->getSecondaryFramebuffer();
|
||||
batch.setFramebuffer(freeFBO);
|
||||
|
||||
batch.setViewportTransform(args->_viewport);
|
||||
|
||||
batch.clearColorFramebuffer(freeFBO->getBufferMask(), glm::vec4(0.0f, 0.0f, 0.0f, 0.0f));
|
||||
|
||||
|
@ -251,18 +252,10 @@ void DeferredLightingEffect::render(RenderArgs* args) {
|
|||
|
||||
batch.setResourceTexture(3, textureCache->getPrimaryDepthTexture());
|
||||
|
||||
// get the viewport side (left, right, both)
|
||||
int viewport[4];
|
||||
glGetIntegerv(GL_VIEWPORT, viewport);
|
||||
const int VIEWPORT_X_INDEX = 0;
|
||||
const int VIEWPORT_Y_INDEX = 1;
|
||||
const int VIEWPORT_WIDTH_INDEX = 2;
|
||||
const int VIEWPORT_HEIGHT_INDEX = 3;
|
||||
|
||||
float sMin = viewport[VIEWPORT_X_INDEX] / (float)framebufferSize.width();
|
||||
float sWidth = viewport[VIEWPORT_WIDTH_INDEX] / (float)framebufferSize.width();
|
||||
float tMin = viewport[VIEWPORT_Y_INDEX] / (float)framebufferSize.height();
|
||||
float tHeight = viewport[VIEWPORT_HEIGHT_INDEX] / (float)framebufferSize.height();
|
||||
float sMin = args->_viewport.x / (float)framebufferSize.width();
|
||||
float sWidth = args->_viewport.z / (float)framebufferSize.width();
|
||||
float tMin = args->_viewport.y / (float)framebufferSize.height();
|
||||
float tHeight = args->_viewport.w / (float)framebufferSize.height();
|
||||
|
||||
bool useSkyboxCubemap = (_skybox) && (_skybox->getCubemap());
|
||||
|
||||
|
@ -547,7 +540,7 @@ void DeferredLightingEffect::copyBack(RenderArgs* args) {
|
|||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
QSize framebufferSize = textureCache->getFrameBufferSize();
|
||||
|
||||
auto freeFBO = DependencyManager::get<GlowEffect>()->getFreeFramebuffer();
|
||||
auto freeFBO = DependencyManager::get<TextureCache>()->getSecondaryFramebuffer();
|
||||
|
||||
batch.setFramebuffer(textureCache->getPrimaryFramebuffer());
|
||||
batch.setPipeline(_blitLightBuffer);
|
||||
|
@ -556,27 +549,19 @@ void DeferredLightingEffect::copyBack(RenderArgs* args) {
|
|||
|
||||
batch.setProjectionTransform(glm::mat4());
|
||||
batch.setViewTransform(Transform());
|
||||
|
||||
float sMin = args->_viewport.x / (float)framebufferSize.width();
|
||||
float sWidth = args->_viewport.z / (float)framebufferSize.width();
|
||||
float tMin = args->_viewport.y / (float)framebufferSize.height();
|
||||
float tHeight = args->_viewport.w / (float)framebufferSize.height();
|
||||
|
||||
int viewport[4];
|
||||
glGetIntegerv(GL_VIEWPORT, viewport);
|
||||
const int VIEWPORT_X_INDEX = 0;
|
||||
const int VIEWPORT_Y_INDEX = 1;
|
||||
const int VIEWPORT_WIDTH_INDEX = 2;
|
||||
const int VIEWPORT_HEIGHT_INDEX = 3;
|
||||
|
||||
float sMin = viewport[VIEWPORT_X_INDEX] / (float)framebufferSize.width();
|
||||
float sWidth = viewport[VIEWPORT_WIDTH_INDEX] / (float)framebufferSize.width();
|
||||
float tMin = viewport[VIEWPORT_Y_INDEX] / (float)framebufferSize.height();
|
||||
float tHeight = viewport[VIEWPORT_HEIGHT_INDEX] / (float)framebufferSize.height();
|
||||
batch.setViewportTransform(args->_viewport);
|
||||
|
||||
Transform model;
|
||||
model.setTranslation(glm::vec3(sMin, tMin, 0.0));
|
||||
model.setScale(glm::vec3(sWidth, tHeight, 1.0));
|
||||
batch.setModelTransform(model);
|
||||
|
||||
|
||||
batch.setViewportTransform(glm::ivec4(viewport[0], viewport[1], viewport[2], viewport[3]));
|
||||
|
||||
batch.draw(gpu::TRIANGLE_STRIP, 4);
|
||||
|
||||
|
||||
|
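Both render() and copyBack() now derive the quad's texture rectangle directly from args->_viewport instead of querying GL_VIEWPORT: sMin/tMin are the viewport origin divided by the framebuffer size, and sWidth/tHeight its extent, so a 960x540 viewport at the origin of a 1920x1080 framebuffer maps to the (0, 0)-(0.5, 0.5) corner of the texture. The arithmetic on its own (a sketch, assuming a glm-style ivec4 viewport of x, y, width, height):

    #include <glm/glm.hpp>

    struct TexRect { float sMin, tMin, sWidth, tHeight; };

    // viewport = (x, y, width, height) in pixels, as carried by RenderArgs::_viewport.
    TexRect viewportToTexCoords(const glm::ivec4& viewport, int fbWidth, int fbHeight) {
        TexRect rect;
        rect.sMin    = viewport.x / (float)fbWidth;
        rect.tMin    = viewport.y / (float)fbHeight;
        rect.sWidth  = viewport.z / (float)fbWidth;
        rect.tHeight = viewport.w / (float)fbHeight;
        return rect;   // e.g. {0, 0, 0.5, 0.5} for a 960x540 viewport in a 1920x1080 target
    }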
|
|
@ -55,8 +55,6 @@ const int NUM_VERTICES_PER_TRIANGLE = 3;
|
|||
const int NUM_TRIANGLES_PER_QUAD = 2;
|
||||
const int NUM_VERTICES_PER_TRIANGULATED_QUAD = NUM_VERTICES_PER_TRIANGLE * NUM_TRIANGLES_PER_QUAD;
|
||||
const int NUM_COORDS_PER_VERTEX = 3;
|
||||
const int NUM_BYTES_PER_VERTEX = NUM_COORDS_PER_VERTEX * sizeof(GLfloat);
|
||||
const int NUM_BYTES_PER_INDEX = sizeof(GLushort);
|
||||
|
||||
void GeometryCache::renderSphere(float radius, int slices, int stacks, const glm::vec4& color, bool solid, int id) {
|
||||
gpu::Batch batch;
|
||||
|
@ -308,106 +306,6 @@ void GeometryCache::renderSphere(gpu::Batch& batch, float radius, int slices, in
|
|||
}
|
||||
}
|
||||
|
||||
void GeometryCache::renderCone(float base, float height, int slices, int stacks) {
|
||||
VerticesIndices& vbo = _coneVBOs[IntPair(slices, stacks)];
|
||||
int vertices = (stacks + 2) * slices;
|
||||
int baseTriangles = slices - 2;
|
||||
int indices = NUM_VERTICES_PER_TRIANGULATED_QUAD * slices * stacks + NUM_VERTICES_PER_TRIANGLE * baseTriangles;
|
||||
if (vbo.first == 0) {
|
||||
GLfloat* vertexData = new GLfloat[vertices * NUM_COORDS_PER_VERTEX * 2];
|
||||
GLfloat* vertex = vertexData;
|
||||
// cap
|
||||
for (int i = 0; i < slices; i++) {
|
||||
float theta = TWO_PI * i / slices;
|
||||
|
||||
//normals
|
||||
*(vertex++) = 0.0f;
|
||||
*(vertex++) = 0.0f;
|
||||
*(vertex++) = -1.0f;
|
||||
|
||||
// vertices
|
||||
*(vertex++) = cosf(theta);
|
||||
*(vertex++) = sinf(theta);
|
||||
*(vertex++) = 0.0f;
|
||||
}
|
||||
// body
|
||||
for (int i = 0; i <= stacks; i++) {
|
||||
float z = (float)i / stacks;
|
||||
float radius = 1.0f - z;
|
||||
|
||||
for (int j = 0; j < slices; j++) {
|
||||
float theta = TWO_PI * j / slices;
|
||||
|
||||
//normals
|
||||
*(vertex++) = cosf(theta) / SQUARE_ROOT_OF_2;
|
||||
*(vertex++) = sinf(theta) / SQUARE_ROOT_OF_2;
|
||||
*(vertex++) = 1.0f / SQUARE_ROOT_OF_2;
|
||||
|
||||
// vertices
|
||||
*(vertex++) = radius * cosf(theta);
|
||||
*(vertex++) = radius * sinf(theta);
|
||||
*(vertex++) = z;
|
||||
}
|
||||
}
|
||||
|
||||
glGenBuffers(1, &vbo.first);
|
||||
glBindBuffer(GL_ARRAY_BUFFER, vbo.first);
|
||||
glBufferData(GL_ARRAY_BUFFER, 2 * vertices * NUM_BYTES_PER_VERTEX, vertexData, GL_STATIC_DRAW);
|
||||
delete[] vertexData;
|
||||
|
||||
GLushort* indexData = new GLushort[indices];
|
||||
GLushort* index = indexData;
|
||||
for (int i = 0; i < baseTriangles; i++) {
|
||||
*(index++) = 0;
|
||||
*(index++) = i + 2;
|
||||
*(index++) = i + 1;
|
||||
}
|
||||
for (int i = 1; i <= stacks; i++) {
|
||||
GLushort bottom = i * slices;
|
||||
GLushort top = bottom + slices;
|
||||
for (int j = 0; j < slices; j++) {
|
||||
int next = (j + 1) % slices;
|
||||
|
||||
*(index++) = bottom + j;
|
||||
*(index++) = top + next;
|
||||
*(index++) = top + j;
|
||||
|
||||
*(index++) = bottom + j;
|
||||
*(index++) = bottom + next;
|
||||
*(index++) = top + next;
|
||||
}
|
||||
}
|
||||
|
||||
glGenBuffers(1, &vbo.second);
|
||||
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo.second);
|
||||
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices * NUM_BYTES_PER_INDEX, indexData, GL_STATIC_DRAW);
|
||||
delete[] indexData;
|
||||
|
||||
} else {
|
||||
glBindBuffer(GL_ARRAY_BUFFER, vbo.first);
|
||||
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, vbo.second);
|
||||
}
|
||||
glEnableClientState(GL_VERTEX_ARRAY);
|
||||
glEnableClientState(GL_NORMAL_ARRAY);
|
||||
|
||||
int stride = NUM_VERTICES_PER_TRIANGULATED_QUAD * sizeof(float);
|
||||
glNormalPointer(GL_FLOAT, stride, 0);
|
||||
glVertexPointer(NUM_COORDS_PER_VERTEX, GL_FLOAT, stride, (const void *)(NUM_COORDS_PER_VERTEX * sizeof(float)));
|
||||
|
||||
glPushMatrix();
|
||||
glScalef(base, base, height);
|
||||
|
||||
glDrawRangeElementsEXT(GL_TRIANGLES, 0, vertices - 1, indices, GL_UNSIGNED_SHORT, 0);
|
||||
|
||||
glPopMatrix();
|
||||
|
||||
glDisableClientState(GL_VERTEX_ARRAY);
|
||||
glDisableClientState(GL_NORMAL_ARRAY);
|
||||
|
||||
glBindBuffer(GL_ARRAY_BUFFER, 0);
|
||||
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
|
||||
}
|
||||
|
||||
void GeometryCache::renderGrid(int xDivisions, int yDivisions, const glm::vec4& color) {
|
||||
gpu::Batch batch;
|
||||
renderGrid(batch, xDivisions, yDivisions, color);
|
||||
|
@ -1942,7 +1840,7 @@ QSharedPointer<NetworkGeometry> NetworkGeometry::getLODOrFallback(float distance
|
|||
}
|
||||
}
|
||||
}
|
||||
if (lod->isLoaded()) {
|
||||
if (lod && lod->isLoaded()) {
|
||||
hysteresis = lodDistance;
|
||||
return lod;
|
||||
}
|
||||
|
@ -2061,21 +1959,16 @@ void NetworkGeometry::setTextureWithNameToURL(const QString& name, const QUrl& u
|
|||
|
||||
QSharedPointer<NetworkTexture> matchingTexture = QSharedPointer<NetworkTexture>();
|
||||
if (part.diffuseTextureName == name) {
|
||||
part.diffuseTexture =
|
||||
textureCache->getTexture(url, DEFAULT_TEXTURE,
|
||||
_geometry.meshes[i].isEye, QByteArray());
|
||||
part.diffuseTexture = textureCache->getTexture(url, DEFAULT_TEXTURE, _geometry.meshes[i].isEye);
|
||||
part.diffuseTexture->setLoadPriorities(_loadPriorities);
|
||||
} else if (part.normalTextureName == name) {
|
||||
part.normalTexture = textureCache->getTexture(url, DEFAULT_TEXTURE,
|
||||
false, QByteArray());
|
||||
part.normalTexture = textureCache->getTexture(url);
|
||||
part.normalTexture->setLoadPriorities(_loadPriorities);
|
||||
} else if (part.specularTextureName == name) {
|
||||
part.specularTexture = textureCache->getTexture(url, DEFAULT_TEXTURE,
|
||||
false, QByteArray());
|
||||
part.specularTexture = textureCache->getTexture(url);
|
||||
part.specularTexture->setLoadPriorities(_loadPriorities);
|
||||
} else if (part.emissiveTextureName == name) {
|
||||
part.emissiveTexture = textureCache->getTexture(url, DEFAULT_TEXTURE,
|
||||
false, QByteArray());
|
||||
part.emissiveTexture = textureCache->getTexture(url);
|
||||
part.emissiveTexture->setLoadPriorities(_loadPriorities);
|
||||
}
|
||||
}
|
||||
|
@ -2095,22 +1988,22 @@ QStringList NetworkGeometry::getTextureNames() const {
|
|||
for (int j = 0; j < mesh.parts.size(); j++) {
|
||||
const NetworkMeshPart& part = mesh.parts[j];
|
||||
|
||||
if (!part.diffuseTextureName.isEmpty()) {
|
||||
if (!part.diffuseTextureName.isEmpty() && part.diffuseTexture) {
|
||||
QString textureURL = part.diffuseTexture->getURL().toString();
|
||||
result << part.diffuseTextureName + ":" + textureURL;
|
||||
}
|
||||
|
||||
if (!part.normalTextureName.isEmpty()) {
|
||||
if (!part.normalTextureName.isEmpty() && part.normalTexture) {
|
||||
QString textureURL = part.normalTexture->getURL().toString();
|
||||
result << part.normalTextureName + ":" + textureURL;
|
||||
}
|
||||
|
||||
if (!part.specularTextureName.isEmpty()) {
|
||||
if (!part.specularTextureName.isEmpty() && part.specularTexture) {
|
||||
QString textureURL = part.specularTexture->getURL().toString();
|
||||
result << part.specularTextureName + ":" + textureURL;
|
||||
}
|
||||
|
||||
if (!part.emissiveTextureName.isEmpty()) {
|
||||
if (!part.emissiveTextureName.isEmpty() && part.emissiveTexture) {
|
||||
QString textureURL = part.emissiveTexture->getURL().toString();
|
||||
result << part.emissiveTextureName + ":" + textureURL;
|
||||
}
|
||||
|
|
|
@ -133,8 +133,6 @@ public:
|
|||
int allocateID() { return _nextID++; }
|
||||
static const int UNKNOWN_ID;
|
||||
|
||||
void renderCone(float base, float height, int slices, int stacks);
|
||||
|
||||
void renderSphere(float radius, int slices, int stacks, const glm::vec3& color, bool solid = true, int id = UNKNOWN_ID)
|
||||
{ renderSphere(radius, slices, stacks, glm::vec4(color, 1.0f), solid, id); }
|
||||
void renderSphere(gpu::Batch& batch, float radius, int slices, int stacks, const glm::vec3& color, bool solid = true, int id = UNKNOWN_ID)
|
||||
|
|
|
@ -1,225 +0,0 @@
|
|||
//
|
||||
// GlowEffect.cpp
|
||||
// interface/src/renderer
|
||||
//
|
||||
// Created by Andrzej Kapolka on 8/7/13.
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// include this before QOpenGLFramebufferObject, which includes an earlier version of OpenGL
|
||||
#include <gpu/GPUConfig.h>
|
||||
|
||||
#include <QOpenGLFramebufferObject>
|
||||
#include <QWindow>
|
||||
|
||||
#include <PathUtils.h>
|
||||
#include <PerfStat.h>
|
||||
|
||||
#include "GlowEffect.h"
|
||||
#include "ProgramObject.h"
|
||||
#include "RenderUtil.h"
|
||||
#include "TextureCache.h"
|
||||
#include "RenderUtilsLogging.h"
|
||||
|
||||
#include "gpu/GLBackend.h"
|
||||
|
||||
GlowEffect::GlowEffect()
|
||||
: _initialized(false),
|
||||
_isOddFrame(false),
|
||||
_isFirstFrame(true),
|
||||
_intensity(0.0f),
|
||||
_enabled(false) {
|
||||
}
|
||||
|
||||
GlowEffect::~GlowEffect() {
|
||||
if (_initialized) {
|
||||
delete _addProgram;
|
||||
delete _horizontalBlurProgram;
|
||||
delete _verticalBlurAddProgram;
|
||||
delete _verticalBlurProgram;
|
||||
delete _addSeparateProgram;
|
||||
delete _diffuseProgram;
|
||||
}
|
||||
}
|
||||
|
||||
gpu::FramebufferPointer GlowEffect::getFreeFramebuffer() const {
|
||||
return (_isOddFrame ?
|
||||
DependencyManager::get<TextureCache>()->getSecondaryFramebuffer():
|
||||
DependencyManager::get<TextureCache>()->getTertiaryFramebuffer());
|
||||
}
|
||||
|
||||
static ProgramObject* createProgram(const QString& name) {
|
||||
ProgramObject* program = new ProgramObject();
|
||||
program->addShaderFromSourceFile(QGLShader::Fragment, PathUtils::resourcesPath() + "shaders/" + name + ".frag");
|
||||
program->link();
|
||||
|
||||
program->bind();
|
||||
program->setUniformValue("originalTexture", 0);
|
||||
program->release();
|
||||
|
||||
return program;
|
||||
}
|
||||
|
||||
void GlowEffect::init(bool enabled) {
|
||||
if (_initialized) {
|
||||
qCDebug(renderutils, "[ERROR] GlowEffeect is already initialized.");
|
||||
return;
|
||||
}
|
||||
|
||||
_addProgram = createProgram("glow_add");
|
||||
_horizontalBlurProgram = createProgram("horizontal_blur");
|
||||
_verticalBlurAddProgram = createProgram("vertical_blur_add");
|
||||
_verticalBlurProgram = createProgram("vertical_blur");
|
||||
_addSeparateProgram = createProgram("glow_add_separate");
|
||||
_diffuseProgram = createProgram("diffuse");
|
||||
|
||||
_verticalBlurAddProgram->bind();
|
||||
_verticalBlurAddProgram->setUniformValue("horizontallyBlurredTexture", 1);
|
||||
_verticalBlurAddProgram->release();
|
||||
|
||||
_addSeparateProgram->bind();
|
||||
_addSeparateProgram->setUniformValue("blurredTexture", 1);
|
||||
_addSeparateProgram->release();
|
||||
|
||||
_diffuseProgram->bind();
|
||||
_diffuseProgram->setUniformValue("diffusedTexture", 1);
|
||||
_diffuseProgram->release();
|
||||
|
||||
_diffusionScaleLocation = _diffuseProgram->uniformLocation("diffusionScale");
|
||||
|
||||
_initialized = true;
|
||||
_enabled = enabled;
|
||||
}
|
||||
|
||||
void GlowEffect::prepare(RenderArgs* renderArgs) {
|
||||
auto primaryFBO = DependencyManager::get<TextureCache>()->getPrimaryFramebuffer();
|
||||
GLuint fbo = gpu::GLBackend::getFramebufferID(primaryFBO);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, fbo);
|
||||
|
||||
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
|
||||
_isEmpty = true;
|
||||
_isOddFrame = !_isOddFrame;
|
||||
}
|
||||
|
||||
void GlowEffect::begin(RenderArgs* renderArgs, float intensity) {
|
||||
// store the current intensity and add the new amount
|
||||
_intensityStack.push(_intensity);
|
||||
glBlendColor(0.0f, 0.0f, 0.0f, _intensity += intensity);
|
||||
_isEmpty &= (_intensity == 0.0f);
|
||||
}
|
||||
|
||||
void GlowEffect::end(RenderArgs* renderArgs) {
|
||||
// restore the saved intensity
|
||||
glBlendColor(0.0f, 0.0f, 0.0f, _intensity = _intensityStack.pop());
|
||||
}
|
||||
|
||||
gpu::FramebufferPointer GlowEffect::render(RenderArgs* renderArgs) {
|
||||
PerformanceTimer perfTimer("glowEffect");
|
||||
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
|
||||
auto primaryFBO = gpu::GLBackend::getFramebufferID(textureCache->getPrimaryFramebuffer());
|
||||
glBindFramebuffer(GL_FRAMEBUFFER, 0);
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, textureCache->getPrimaryColorTextureID());
|
||||
auto framebufferSize = textureCache->getFrameBufferSize();
|
||||
|
||||
glPushMatrix();
|
||||
glLoadIdentity();
|
||||
|
||||
glMatrixMode(GL_PROJECTION);
|
||||
glPushMatrix();
|
||||
glLoadIdentity();
|
||||
|
||||
glDisable(GL_BLEND);
|
||||
glDisable(GL_DEPTH_TEST);
|
||||
glDepthMask(GL_FALSE);
|
||||
|
||||
gpu::FramebufferPointer destFBO = textureCache->getSecondaryFramebuffer();
|
||||
if (!_enabled || _isEmpty) {
|
||||
// copy the primary to the screen
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(destFBO));
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, primaryFBO);
|
||||
glBlitFramebuffer(
|
||||
0, 0, framebufferSize.width(), framebufferSize.height(),
|
||||
0, 0, framebufferSize.width(), framebufferSize.height(),
|
||||
GL_COLOR_BUFFER_BIT, GL_NEAREST);
|
||||
} else {
|
||||
// diffuse into the secondary/tertiary (alternating between frames)
|
||||
auto oldDiffusedFBO =
|
||||
textureCache->getSecondaryFramebuffer();
|
||||
auto newDiffusedFBO =
|
||||
textureCache->getTertiaryFramebuffer();
|
||||
if (_isOddFrame) {
|
||||
qSwap(oldDiffusedFBO, newDiffusedFBO);
|
||||
}
|
||||
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(newDiffusedFBO));
|
||||
|
||||
if (_isFirstFrame) {
|
||||
glClear(GL_COLOR_BUFFER_BIT);
|
||||
|
||||
} else {
|
||||
glActiveTexture(GL_TEXTURE1);
|
||||
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(oldDiffusedFBO->getRenderBuffer(0)));
|
||||
|
||||
_diffuseProgram->bind();
|
||||
|
||||
_diffuseProgram->setUniformValue(_diffusionScaleLocation, 1.0f / framebufferSize.width(), 1.0f / framebufferSize.height());
|
||||
|
||||
renderFullscreenQuad();
|
||||
|
||||
_diffuseProgram->release();
|
||||
}
|
||||
|
||||
destFBO = oldDiffusedFBO;
|
||||
|
||||
glBindFramebuffer(GL_FRAMEBUFFER, 0);
|
||||
// add diffused texture to the primary
|
||||
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(newDiffusedFBO->getRenderBuffer(0)));
|
||||
|
||||
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(destFBO));
|
||||
glViewport(0, 0, framebufferSize.width(), framebufferSize.height());
|
||||
_addSeparateProgram->bind();
|
||||
renderFullscreenQuad();
|
||||
_addSeparateProgram->release();
|
||||
glBindFramebuffer(GL_FRAMEBUFFER, 0);
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
glActiveTexture(GL_TEXTURE0);
|
||||
}
|
||||
|
||||
glPopMatrix();
|
||||
|
||||
glMatrixMode(GL_MODELVIEW);
|
||||
glPopMatrix();
|
||||
|
||||
glEnable(GL_BLEND);
|
||||
glEnable(GL_DEPTH_TEST);
|
||||
glDepthMask(GL_TRUE);
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
|
||||
_isFirstFrame = false;
|
||||
|
||||
return destFBO;
|
||||
}
|
||||
|
||||
void GlowEffect::toggleGlowEffect(bool enabled) {
|
||||
_enabled = enabled;
|
||||
}
|
||||
|
||||
Glower::Glower(float amount) {
|
||||
RenderArgs renderArgs;
|
||||
DependencyManager::get<GlowEffect>()->begin(&renderArgs, amount);
|
||||
}
|
||||
Glower::Glower(RenderArgs* renderArgs, float amount) : _renderArgs(renderArgs) {
|
||||
DependencyManager::get<GlowEffect>()->begin(_renderArgs, amount);
|
||||
}
|
||||
|
||||
Glower::~Glower() {
|
||||
DependencyManager::get<GlowEffect>()->end(_renderArgs);
|
||||
}
|
||||
|
|
@@ -1,97 +0,0 @@
//
// GlowEffect.h
// interface/src/renderer
//
// Created by Andrzej Kapolka on 8/7/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_GlowEffect_h
#define hifi_GlowEffect_h

#include <gpu/GPUConfig.h>
#include <gpu/Framebuffer.h>

#include "RenderArgs.h"

#include <QObject>
#include <QGLWidget>
#include <QStack>

#include <DependencyManager.h>

class ProgramObject;

/// A generic full screen glow effect.
class GlowEffect : public QObject, public Dependency {
    Q_OBJECT
    SINGLETON_DEPENDENCY

public:

    /// Returns a pointer to the framebuffer object that the glow effect is *not* using for persistent state
    /// (either the secondary or the tertiary).
    gpu::FramebufferPointer getFreeFramebuffer() const;

    void init(bool enabled);

    /// Prepares the glow effect for rendering the current frame. To be called before rendering the scene.
    void prepare(RenderArgs* renderArgs);

    /// Starts using the glow effect.
    /// \param intensity the desired glow intensity, from zero to one
    void begin(RenderArgs* renderArgs, float intensity = 1.0f);

    /// Stops using the glow effect.
    void end(RenderArgs* renderArgs);

    /// Returns the current glow intensity.
    float getIntensity() const { return _intensity; }

    /// Renders the glow effect. To be called after rendering the scene.
    /// \param toTexture whether to render to a texture, rather than to the frame buffer
    /// \return the framebuffer object to which we rendered, or NULL if to the frame buffer
    gpu::FramebufferPointer render(RenderArgs* renderArgs);

public slots:
    void toggleGlowEffect(bool enabled);

private:
    GlowEffect();
    virtual ~GlowEffect();

    bool _initialized;

    ProgramObject* _addProgram;
    ProgramObject* _horizontalBlurProgram;
    ProgramObject* _verticalBlurAddProgram;
    ProgramObject* _verticalBlurProgram;
    ProgramObject* _addSeparateProgram;
    ProgramObject* _diffuseProgram;
    int _diffusionScaleLocation;

    bool _isEmpty; ///< set when nothing in the scene is currently glowing
    bool _isOddFrame; ///< controls the alternation between texture targets in diffuse add mode
    bool _isFirstFrame; ///< for persistent modes, notes whether this is the first frame rendered

    float _intensity;
    QStack<float> _intensityStack;
    bool _enabled;
};

/// RAII-style glow handler. Applies glow when in scope.
class Glower {
public:

    Glower(float amount = 1.0f);
    Glower(RenderArgs* renderArgs, float amount = 1.0f);
    ~Glower();

private:
    RenderArgs* _renderArgs;
};

#endif // hifi_GlowEffect_h

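The deleted header above declared Glower as an RAII helper: the constructor called GlowEffect::begin() and the destructor called GlowEffect::end(), so declaring a scoped local applied the effect for exactly that scope. A minimal, self-contained sketch of the same pattern, with hypothetical names that are not from the repository:

// Illustrative sketch only; ScopedEffect stands in for the removed Glower class.
#include <functional>
#include <iostream>

class ScopedEffect {
public:
    ScopedEffect(std::function<void()> begin, std::function<void()> end)
        : _end(std::move(end)) { begin(); }          // effect starts on construction
    ~ScopedEffect() { _end(); }                      // and ends when the scope exits
private:
    std::function<void()> _end;
};

int main() {
    std::cout << "before\n";
    {
        ScopedEffect glow([] { std::cout << "glow on\n"; },
                          [] { std::cout << "glow off\n"; });
        std::cout << "draw calls while glowing\n";
    } // glow switched off here automatically
    std::cout << "after\n";
}
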
@@ -32,7 +32,6 @@
#include "AbstractViewStateInterface.h"
#include "AnimationHandle.h"
#include "DeferredLightingEffect.h"
#include "GlowEffect.h"
#include "Model.h"
#include "RenderUtilsLogging.h"

@@ -457,7 +456,8 @@ bool Model::updateGeometry() {
    }
    deleteGeometry();
    _dilatedTextures.clear();
    _geometry = geometry;
    setGeometry(geometry);

    _meshGroupsKnown = false;
    _readyWhenAdded = false; // in case any of our users are using scenes
    invalidCalculatedMeshBoxes(); // if we have to reload, we need to assume our mesh boxes are all invalid

@@ -824,7 +824,7 @@ void Model::renderSetup(RenderArgs* args) {
        }
    }

    if (!_meshGroupsKnown && isLoadedWithTextures()) {
    if (!_meshGroupsKnown && isLoaded()) {
        segregateMeshGroups();
    }
}

@@ -883,7 +883,7 @@ void Model::setVisibleInScene(bool newValue, std::shared_ptr<render::Scene> scen


bool Model::addToScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges) {
    if (!_meshGroupsKnown && isLoadedWithTextures()) {
    if (!_meshGroupsKnown && isLoaded()) {
        segregateMeshGroups();
    }

@@ -913,7 +913,7 @@ bool Model::addToScene(std::shared_ptr<render::Scene> scene, render::PendingChan
}

bool Model::addToScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges, render::Item::Status::Getters& statusGetters) {
    if (!_meshGroupsKnown && isLoadedWithTextures()) {
    if (!_meshGroupsKnown && isLoaded()) {
        segregateMeshGroups();
    }

@@ -1142,13 +1142,32 @@ void Model::setURL(const QUrl& url, const QUrl& fallback, bool retainCurrent, bo
    onInvalidate();

    // if so instructed, keep the current geometry until the new one is loaded
    _nextBaseGeometry = _nextGeometry = DependencyManager::get<GeometryCache>()->getGeometry(url, fallback, delayLoad);
    _nextGeometry = DependencyManager::get<GeometryCache>()->getGeometry(url, fallback, delayLoad);
    _nextLODHysteresis = NetworkGeometry::NO_HYSTERESIS;
    if (!retainCurrent || !isActive() || (_nextGeometry && _nextGeometry->isLoaded())) {
        applyNextGeometry();
    }
}

void Model::geometryRefreshed() {
    QObject* sender = QObject::sender();

    if (sender == _geometry) {
        _readyWhenAdded = false; // reset out render items.
        _needsReload = true;
        invalidCalculatedMeshBoxes();

        onInvalidate();

        // if so instructed, keep the current geometry until the new one is loaded
        _nextGeometry = DependencyManager::get<GeometryCache>()->getGeometry(_url);
        _nextLODHysteresis = NetworkGeometry::NO_HYSTERESIS;
        applyNextGeometry();
    } else {
        sender->disconnect(this, SLOT(geometryRefreshed()));
    }
}


const QSharedPointer<NetworkGeometry> Model::getCollisionGeometry(bool delayLoad)
{

@@ -1156,7 +1175,11 @@ const QSharedPointer<NetworkGeometry> Model::getCollisionGeometry(bool delayLoad
        _collisionGeometry = DependencyManager::get<GeometryCache>()->getGeometry(_collisionUrl, QUrl(), delayLoad);
    }

    return _collisionGeometry;
    if (_collisionGeometry && _collisionGeometry->isLoaded()) {
        return _collisionGeometry;
    }

    return QSharedPointer<NetworkGeometry>();
}

void Model::setCollisionModelURL(const QUrl& url) {

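The getCollisionGeometry change above makes the getter hand back an empty pointer until the resource has actually finished loading, so callers can test the returned pointer instead of touching a partially loaded resource. The same shape in plain C++, with hypothetical names that are not from the repository:

// Illustrative sketch only.
#include <iostream>
#include <memory>

struct Resource {
    bool loaded = false;
    bool isLoaded() const { return loaded; }
};

std::shared_ptr<Resource> getIfLoaded(const std::shared_ptr<Resource>& r) {
    if (r && r->isLoaded()) {
        return r;                            // safe to use
    }
    return std::shared_ptr<Resource>();      // "not ready yet" sentinel
}

int main() {
    auto r = std::make_shared<Resource>();
    std::cout << (getIfLoaded(r) ? "ready\n" : "still loading\n");  // still loading
    r->loaded = true;
    std::cout << (getIfLoaded(r) ? "ready\n" : "still loading\n");  // ready
}
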
@@ -1776,6 +1799,18 @@ void Model::setBlendedVertices(int blendNumber, const QWeakPointer<NetworkGeomet
    }
}

void Model::setGeometry(const QSharedPointer<NetworkGeometry>& newGeometry) {
    if (_geometry == newGeometry) {
        return;
    }

    if (_geometry) {
        _geometry->disconnect(_geometry.data(), &Resource::onRefresh, this, &Model::geometryRefreshed);
    }
    _geometry = newGeometry;
    QObject::connect(_geometry.data(), &Resource::onRefresh, this, &Model::geometryRefreshed);
}

void Model::applyNextGeometry() {
    // delete our local geometry and custom textures
    deleteGeometry();

@@ -1783,13 +1818,12 @@ void Model::applyNextGeometry() {
    _lodHysteresis = _nextLODHysteresis;

    // we retain a reference to the base geometry so that its reference count doesn't fall to zero
    _baseGeometry = _nextBaseGeometry;
    _geometry = _nextGeometry;
    setGeometry(_nextGeometry);

    _meshGroupsKnown = false;
    _readyWhenAdded = false; // in case any of our users are using scenes
    _needsReload = false; // we are loaded now!
    invalidCalculatedMeshBoxes();
    _nextBaseGeometry.reset();
    _nextGeometry.reset();
}

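The new setGeometry above swaps the geometry pointer and re-points the refresh notification at the new resource, so a stale resource can no longer trigger a reload of this model. Without Qt signals, the same idea with a plain callback looks roughly like this sketch; the names are hypothetical and not from the repository:

// Illustrative sketch only.
#include <functional>
#include <iostream>
#include <memory>

struct GeometrySketch {
    std::function<void()> onRefresh;                 // notification hook
    void refresh() { if (onRefresh) onRefresh(); }
};

class ModelSketch {
public:
    void setGeometry(std::shared_ptr<GeometrySketch> g) {
        if (_geometry == g) { return; }
        if (_geometry) { _geometry->onRefresh = nullptr; }             // unsubscribe the old resource
        _geometry = std::move(g);
        if (_geometry) {
            _geometry->onRefresh = [this] { _needsReload = true; };    // subscribe the new one
        }
    }
    bool needsReload() const { return _needsReload; }
private:
    std::shared_ptr<GeometrySketch> _geometry;
    bool _needsReload = false;
};

int main() {
    ModelSketch m;
    auto g = std::make_shared<GeometrySketch>();
    m.setGeometry(g);
    g->refresh();
    std::cout << std::boolalpha << m.needsReload() << "\n";  // true
}
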
@@ -2029,12 +2063,10 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
            }
        }
    }
    static bool showDiffuse = true;
    if (showDiffuse && diffuseMap) {
    if (diffuseMap && static_cast<NetworkTexture*>(diffuseMap)->isLoaded()) {
        batch.setResourceTexture(0, diffuseMap->getGPUTexture());

    } else {
        batch.setResourceTexture(0, textureCache->getWhiteTexture());
        batch.setResourceTexture(0, textureCache->getGrayTexture());
    }

    if (locations->texcoordMatrices >= 0) {

@@ -2049,16 +2081,15 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
    }

    if (!mesh.tangents.isEmpty()) {
        Texture* normalMap = networkPart.normalTexture.data();
        batch.setResourceTexture(1, !normalMap ?
            textureCache->getBlueTexture() : normalMap->getGPUTexture());

        NetworkTexture* normalMap = networkPart.normalTexture.data();
        batch.setResourceTexture(1, (!normalMap || !normalMap->isLoaded()) ?
            textureCache->getBlueTexture() : normalMap->getGPUTexture());
    }

    if (locations->specularTextureUnit >= 0) {
        Texture* specularMap = networkPart.specularTexture.data();
        batch.setResourceTexture(locations->specularTextureUnit, !specularMap ?
            textureCache->getWhiteTexture() : specularMap->getGPUTexture());
        NetworkTexture* specularMap = networkPart.specularTexture.data();
        batch.setResourceTexture(locations->specularTextureUnit, (!specularMap || !specularMap->isLoaded()) ?
            textureCache->getBlackTexture() : specularMap->getGPUTexture());
    }

    if (args) {

@@ -2073,9 +2104,9 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
    float emissiveScale = part.emissiveParams.y;
    GLBATCH(glUniform2f)(locations->emissiveParams, emissiveOffset, emissiveScale);

    Texture* emissiveMap = networkPart.emissiveTexture.data();
    batch.setResourceTexture(locations->emissiveTextureUnit, !emissiveMap ?
        textureCache->getWhiteTexture() : emissiveMap->getGPUTexture());
    NetworkTexture* emissiveMap = networkPart.emissiveTexture.data();
    batch.setResourceTexture(locations->emissiveTextureUnit, (!emissiveMap || !emissiveMap->isLoaded()) ?
        textureCache->getGrayTexture() : emissiveMap->getGPUTexture());
}

    if (translucent && locations->lightBufferUnit >= 0) {

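The renderPart hunks above bind a neutral built-in texture (gray, blue, or black) whenever the real map is missing or has not finished loading, so partially loaded models render with sane defaults instead of garbage. The selection logic, reduced to a small standalone helper with hypothetical names not taken from the repository:

// Illustrative sketch only.
#include <iostream>
#include <string>

struct TextureSketch {
    std::string name;
    bool loaded;
    bool isLoaded() const { return loaded; }
};

const TextureSketch& pickTexture(const TextureSketch* wanted, const TextureSketch& fallback) {
    return (wanted && wanted->isLoaded()) ? *wanted : fallback;   // fall back until ready
}

int main() {
    TextureSketch gray{"gray", true};                 // built-in defaults are always "loaded"
    TextureSketch diffuse{"brick_diffuse", false};
    std::cout << pickTexture(&diffuse, gray).name << "\n";  // gray (still loading)
    diffuse.loaded = true;
    std::cout << pickTexture(&diffuse, gray).name << "\n";  // brick_diffuse
}
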
@@ -2183,12 +2214,13 @@ void Model::pickPrograms(gpu::Batch& batch, RenderMode mode, bool translucent, f
    }

    if ((locations->glowIntensity > -1) && (mode != RenderArgs::SHADOW_RENDER_MODE)) {
        GLBATCH(glUniform1f)(locations->glowIntensity, DependencyManager::get<GlowEffect>()->getIntensity());
        const float DEFAULT_GLOW_INTENSITY = 1.0f; // FIXME - glow is removed
        GLBATCH(glUniform1f)(locations->glowIntensity, DEFAULT_GLOW_INTENSITY);
    }
}

bool Model::initWhenReady(render::ScenePointer scene) {
    if (isActive() && isRenderable() && !_meshGroupsKnown && isLoadedWithTextures()) {
    if (isActive() && isRenderable() && !_meshGroupsKnown && isLoaded()) {
        segregateMeshGroups();

        render::PendingChanges pendingChanges;

@@ -106,6 +106,7 @@
    void setVisibleInScene(bool newValue, std::shared_ptr<render::Scene> scene);
    bool isVisible() const { return _isVisible; }

    bool isLoaded() const { return _geometry && _geometry->isLoaded(); }
    bool isLoadedWithTextures() const { return _geometry && _geometry->isLoadedWithTextures(); }

    void init();

@@ -116,7 +117,7 @@

    // new Scene/Engine rendering support
    bool needsFixupInScene() { return !_readyWhenAdded && readyToAddToScene(); }
    bool readyToAddToScene(RenderArgs* renderArgs = nullptr) { return !_needsReload && isRenderable() && isActive() && isLoadedWithTextures(); }
    bool readyToAddToScene(RenderArgs* renderArgs = nullptr) { return !_needsReload && isRenderable() && isActive() && isLoaded(); }
    bool addToScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);
    bool addToScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges, render::Item::Status::Getters& statusGetters);
    void removeFromScene(std::shared_ptr<render::Scene> scene, render::PendingChanges& pendingChanges);

@@ -245,6 +246,7 @@

protected:
    QSharedPointer<NetworkGeometry> _geometry;
    void setGeometry(const QSharedPointer<NetworkGeometry>& newGeometry);

    glm::vec3 _scale;
    glm::vec3 _offset;

@@ -321,6 +323,9 @@
    // hook for derived classes to be notified when setUrl invalidates the current model.
    virtual void onInvalidate() {};

protected slots:
    void geometryRefreshed();

private:

    friend class AnimationHandle;

@@ -330,15 +335,12 @@
    QVector<JointState> createJointStates(const FBXGeometry& geometry);
    void initJointTransforms();

    QSharedPointer<NetworkGeometry> _baseGeometry; ///< reference required to prevent collection of base
    QSharedPointer<NetworkGeometry> _nextBaseGeometry;
    QSharedPointer<NetworkGeometry> _nextGeometry;
    float _lodDistance;
    float _lodHysteresis;
    float _nextLODHysteresis;

    QSharedPointer<NetworkGeometry> _collisionGeometry;
    QSharedPointer<NetworkGeometry> _saveNonCollisionGeometry;

    float _pupilDilation;
    QVector<float> _blendshapeCoefficients;

@@ -524,7 +526,6 @@
    QMap<render::ItemID, render::PayloadPointer> _renderItems;
    bool _readyWhenAdded = false;
    bool _needsReload = true;

};

Q_DECLARE_METATYPE(QPointer<Model>)

@@ -187,9 +187,6 @@ void DrawTransparentDeferred::run(const SceneContextPointer& sceneContext, const
    args->_context->syncCache();
    args->_context->render((*args->_batch));
    args->_batch = nullptr;

    // reset blend function to standard...
    glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
}

gpu::PipelinePointer DrawOverlay3D::_opaquePipeline;

@@ -15,54 +15,4 @@
/// Renders a quad from (-1, -1, 0) to (1, 1, 0) with texture coordinates from (sMin, tMin) to (sMax, tMax).
void renderFullscreenQuad(float sMin = 0.0f, float sMax = 1.0f, float tMin = 0.0f, float tMax = 1.0f);

template <typename F, GLenum matrix>
void withMatrixPush(F f) {
    glMatrixMode(matrix);
    glPushMatrix();
    f();
    glPopMatrix();
}

template <typename F>
void withProjectionPush(F f) {
    withMatrixPush<GL_PROJECTION>(f);
}

template <typename F>
void withProjectionIdentity(F f) {
    withProjectionPush([&] {
        glLoadIdentity();
        f();
    });
}

template <typename F>
void withProjectionMatrix(GLfloat* matrix, F f) {
    withProjectionPush([&] {
        glLoadMatrixf(matrix);
        f();
    });
}

template <typename F>
void withModelviewPush(F f) {
    withMatrixPush<GL_MODELVIEW>(f);
}

template <typename F>
void withModelviewIdentity(F f) {
    withModelviewPush([&] {
        glLoadIdentity();
        f();
    });
}

template <typename F>
void withModelviewMatrix(GLfloat* matrix, F f) {
    withModelviewPush([&] {
        glLoadMatrixf(matrix);
        f();
    });
}

#endif // hifi_RenderUtil_h

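The helpers removed above wrapped a callable in a matching push/pop of the fixed-function matrix stack, so callers could not forget the pop. The same "with" pattern, stripped of OpenGL so it stands alone; the names here are hypothetical and not from the repository:

// Illustrative sketch only.
#include <iostream>
#include <vector>

std::vector<int> g_stack{0};   // stand-in for a matrix stack

template <typename F>
void withPushedState(F&& body) {
    g_stack.push_back(g_stack.back());   // "glPushMatrix"
    body();                              // caller's drawing code
    g_stack.pop_back();                  // "glPopMatrix", always paired with the push
}

int main() {
    withPushedState([] {
        g_stack.back() += 42;            // scribble on the pushed copy
        std::cout << "inside: " << g_stack.back() << "\n";   // inside: 42
    });
    std::cout << "outside: " << g_stack.back() << "\n";      // outside: 0
}
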
@@ -118,9 +118,9 @@ const gpu::TexturePointer& TextureCache::getPermutationNormalTexture() {
}

const unsigned char OPAQUE_WHITE[] = { 0xFF, 0xFF, 0xFF, 0xFF };
//const unsigned char TRANSPARENT_WHITE[] = { 0xFF, 0xFF, 0xFF, 0x0 };
//const unsigned char OPAQUE_BLACK[] = { 0x0, 0x0, 0x0, 0xFF };
const unsigned char OPAQUE_GRAY[] = { 0x80, 0x80, 0x80, 0xFF };
const unsigned char OPAQUE_BLUE[] = { 0x80, 0x80, 0xFF, 0xFF };
const unsigned char OPAQUE_BLACK[] = { 0x00, 0x00, 0x00, 0xFF };

/*
static void loadSingleColorTexture(const unsigned char* color) {

@@ -137,6 +137,14 @@ const gpu::TexturePointer& TextureCache::getWhiteTexture() {
    return _whiteTexture;
}

const gpu::TexturePointer& TextureCache::getGrayTexture() {
    if (!_grayTexture) {
        _grayTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::VEC4, gpu::UINT8, gpu::RGBA), 1, 1));
        _grayTexture->assignStoredMip(0, _whiteTexture->getTexelFormat(), sizeof(OPAQUE_WHITE), OPAQUE_GRAY);
    }
    return _grayTexture;
}

const gpu::TexturePointer& TextureCache::getBlueTexture() {
    if (!_blueTexture) {
        _blueTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::VEC4, gpu::UINT8, gpu::RGBA), 1, 1));

@@ -145,6 +153,14 @@ const gpu::TexturePointer& TextureCache::getBlueTexture() {
    return _blueTexture;
}

const gpu::TexturePointer& TextureCache::getBlackTexture() {
    if (!_blackTexture) {
        _blackTexture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element(gpu::VEC4, gpu::UINT8, gpu::RGBA), 1, 1));
        _blackTexture->assignStoredMip(0, _whiteTexture->getTexelFormat(), sizeof(OPAQUE_BLACK), OPAQUE_BLACK);
    }
    return _blackTexture;
}

/// Extra data for creating textures.
class TextureExtra {
public:

@@ -52,9 +52,15 @@
    /// Returns an opaque white texture (useful for a default).
    const gpu::TexturePointer& getWhiteTexture();

    /// Returns an opaque gray texture (useful for a default).
    const gpu::TexturePointer& getGrayTexture();

    /// Returns the a pale blue texture (useful for a normal map).
    const gpu::TexturePointer& getBlueTexture();

    /// Returns the a black texture (useful for a default).
    const gpu::TexturePointer& getBlackTexture();

    /// Returns a texture version of an image file
    static gpu::TexturePointer getImageTexture(const QString& path);

@@ -112,7 +118,9 @@
    gpu::TexturePointer _permutationNormalTexture;
    gpu::TexturePointer _whiteTexture;
    gpu::TexturePointer _grayTexture;
    gpu::TexturePointer _blueTexture;
    gpu::TexturePointer _blackTexture;


    QHash<QUrl, QWeakPointer<NetworkTexture> > _dilatableNetworkTextures;

@@ -104,6 +104,7 @@
    gpu::Context* _context = nullptr;
    OctreeRenderer* _renderer = nullptr;
    ViewFrustum* _viewFrustum = nullptr;
    glm::ivec4 _viewport{ 0, 0, 1, 1 };
    float _sizeScale = 1.0f;
    int _boundaryLevelAdjust = 0;
    RenderMode _renderMode = DEFAULT_RENDER_MODE;

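The _viewport member added above relies on C++11 in-class member initializers, so every constructor of the struct gets a sane default viewport without touching an initializer list. A minimal standalone version of the idiom, with hypothetical names not taken from the repository:

// Illustrative sketch only.
#include <iostream>

struct ViewportSketch { int x, y, w, h; };

struct ArgsSketch {
    ViewportSketch viewport{0, 0, 1, 1};   // default applies to every constructor
    float sizeScale = 1.0f;
};

int main() {
    ArgsSketch a;                                              // no explicit constructor needed
    std::cout << a.viewport.w << " " << a.sizeScale << "\n";   // 1 1
}
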
@@ -102,13 +102,15 @@ class QQuickMenuItem;
QObject* addItem(QObject* parent, const QString& text) {
    // FIXME add more checking here to ensure no name conflicts
    QQuickMenuItem* returnedValue{ nullptr };
#ifndef QT_NO_DEBUG
    bool invokeResult =
#endif
        QMetaObject::invokeMethod(parent, "addItem", Qt::DirectConnection, Q_RETURN_ARG(QQuickMenuItem*, returnedValue),
            Q_ARG(QString, text));

#ifndef QT_NO_DEBUG
    Q_ASSERT(invokeResult);
#else
    Q_UNUSED(invokeResult);
#endif
    QObject* result = reinterpret_cast<QObject*>(returnedValue);
    return result;
}

@@ -206,12 +208,14 @@ void VrMenu::insertAction(QAction* before, QAction* action) {
        result = ::addItem(menu, action->text());
    } else {
        QQuickMenuItem* returnedValue{ nullptr };
#ifndef QT_NO_DEBUG
        bool invokeResult =
#endif
            QMetaObject::invokeMethod(menu, "insertItem", Qt::DirectConnection, Q_RETURN_ARG(QQuickMenuItem*, returnedValue),
                Q_ARG(int, index), Q_ARG(QString, action->text()));
#ifndef QT_NO_DEBUG
        Q_ASSERT(invokeResult);
#else
        Q_UNUSED(invokeResult);
#endif
        result = reinterpret_cast<QObject*>(returnedValue);
    }
    Q_ASSERT(result);

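Both hunks above capture invokeMethod's return value only in debug builds, assert on it there, and mark it unused otherwise, which avoids an unused-variable warning in release builds where Q_ASSERT compiles away. The same structure with the standard assert/NDEBUG pair, as a standalone sketch with hypothetical names:

// Illustrative sketch only.
#include <cassert>

bool doWork() { return true; }   // stands in for the QMetaObject::invokeMethod call

int main() {
#ifdef NDEBUG
    doWork();                    // release: result intentionally ignored
#else
    bool ok = doWork();          // debug: keep the result for the assertion
    assert(ok);
#endif
    return 0;
}
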
@@ -125,14 +125,12 @@
    DisplayModelElementProxy,
    DisplayDebugTimingDetails,
    DontDoPrecisionPicking,
    DontFadeOnOctreeServerChanges,
    DontRenderEntitiesAsScene,
    EchoLocalAudio,
    EchoServerAudio,
    EditEntitiesHelp,
    Enable3DTVMode,
    EnableCharacterController,
    EnableGlowEffect,
    EnableVRMode,
    ExpandMyAvatarSimulateTiming,
    ExpandMyAvatarTiming,

@@ -192,17 +192,10 @@ int main (int argc, char** argv) {
        targetStringStream << "#ifndef scribe_" << targetName << "_h" << std::endl;
        targetStringStream << "#define scribe_" << targetName << "_h" << std::endl << std::endl;

        // targetStringStream << "const char " << targetName << "[] = R\"XXXX(" << destStringStream.str() << ")XXXX\";";
        std::istringstream destStringStreamAgain(destStringStream.str());
        targetStringStream << "const char " << targetName << "[] = \n";
        while (!destStringStreamAgain.eof()) {
            std::string lineToken;
            std::getline(destStringStreamAgain, lineToken);
            // targetStringStream << "\"" << lineToken << "\"\n";
            targetStringStream << "R\"X(" << lineToken << ")X\"\"\\n\"\n";
        }

        targetStringStream << ";\n" << std::endl << std::endl;
        // targetStringStream << "const char " << targetName << "[] = R\"XXXX(" << destStringStream.str() << ")XXXX\";";
        targetStringStream << "const char " << targetName << "[] = R\"SCRIBE(";
        targetStringStream << destStringStream.str();
        targetStringStream << "\n)SCRIBE\";\n\n";
        targetStringStream << "#endif" << std::endl;
    } else {
        targetStringStream << destStringStream.str();

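The new scribe code above emits the shader body into the generated header as a single C++11 raw string literal with a custom delimiter (R"SCRIBE( ... )SCRIBE") instead of re-quoting and escaping every line. A standalone generator using the same idea; the helper name is hypothetical and, as in the original, the body must not itself contain the closing delimiter:

// Illustrative sketch only.
#include <iostream>
#include <sstream>
#include <string>

std::string makeHeader(const std::string& name, const std::string& body) {
    std::ostringstream out;
    out << "#ifndef scribe_" << name << "_h\n";
    out << "#define scribe_" << name << "_h\n\n";
    out << "const char " << name << "[] = R\"SCRIBE(\n";   // no per-line escaping needed
    out << body;
    out << "\n)SCRIBE\";\n\n#endif\n";
    return out.str();
}

int main() {
    std::cout << makeHeader("MyShader_frag", "void main() {\n    // ...\n}");
}
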