Merge remote-tracking branch 'upstream/master' into plugins

Conflicts:
	interface/CMakeLists.txt
	interface/src/Application.cpp
	interface/src/Menu.cpp
	interface/src/Menu.h
	interface/src/avatar/MyAvatar.h
	tests/ui/src/main.cpp
Brad Davis 2015-07-30 12:55:22 -07:00
commit f7a2513f26
51 changed files with 3473 additions and 1237 deletions

View file

@ -927,9 +927,9 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
const QString USE_STDEV_FOR_DESIRED_CALC_JSON_KEY = "use_stdev_for_desired_calc";
_streamSettings._useStDevForJitterCalc = audioBufferGroupObject[USE_STDEV_FOR_DESIRED_CALC_JSON_KEY].toBool();
if (_streamSettings._useStDevForJitterCalc) {
qDebug() << "Using Philip's stdev method for jitter calc if dynamic jitter buffers enabled";
qDebug() << "Using stdev method for jitter calc if dynamic jitter buffers enabled";
} else {
qDebug() << "Using Fred's max-gap method for jitter calc if dynamic jitter buffers enabled";
qDebug() << "Using max-gap method for jitter calc if dynamic jitter buffers enabled";
}
const QString WINDOW_STARVE_THRESHOLD_JSON_KEY = "window_starve_threshold";

View file

@ -0,0 +1,38 @@
#
# FindconnexionClient.cmake
#
# Once done this will define
#
# 3DCONNEXIONCLIENT_INCLUDE_DIRS
#
# Created on 10/06/2015 by Marcel Verhagen
# Copyright 2015 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
# setup hints for 3DCONNEXIONCLIENT search
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("connexionclient")
if (APPLE)
find_library(3DconnexionClient 3DconnexionClient)
if(EXISTS ${3DconnexionClient})
set(CONNEXIONCLIENT_FOUND true)
set(CONNEXIONCLIENT_INCLUDE_DIR ${3DconnexionClient})
set(CONNEXIONCLIENT_LIBRARY ${3DconnexionClient})
set_target_properties(${TARGET_NAME} PROPERTIES LINK_FLAGS "-weak_framework 3DconnexionClient")
message(STATUS "Found 3Dconnexion")
mark_as_advanced(CONNEXIONCLIENT_INCLUDE_DIR CONNEXIONCLIENT_LIBRARY)
endif()
endif()
if (WIN32)
find_path(CONNEXIONCLIENT_INCLUDE_DIRS I3dMouseParams.h PATH_SUFFIXES Inc HINTS ${CONNEXIONCLIENT_SEARCH_DIRS})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(connexionClient DEFAULT_MSG CONNEXIONCLIENT_INCLUDE_DIRS)
mark_as_advanced(CONNEXIONCLIENT_INCLUDE_DIRS CONNEXIONCLIENT_SEARCH_DIRS)
endif()

View file

@ -291,22 +291,18 @@ var toolBar = (function () {
var RESIZE_TIMEOUT = 120000; // 2 minutes
var RESIZE_MAX_CHECKS = RESIZE_TIMEOUT / RESIZE_INTERVAL;
function addModel(url) {
var position;
var entityID = createNewEntity({
type: "Model",
dimensions: DEFAULT_DIMENSIONS,
modelURL: url
}, false);
position = Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE));
if (position.x > 0 && position.y > 0 && position.z > 0) {
var entityId = Entities.addEntity({
type: "Model",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: DEFAULT_DIMENSIONS,
modelURL: url
});
if (entityID) {
print("Model added: " + url);
var checkCount = 0;
function resize() {
var entityProperties = Entities.getEntityProperties(entityId);
var entityProperties = Entities.getEntityProperties(entityID);
var naturalDimensions = entityProperties.naturalDimensions;
checkCount++;
@ -318,21 +314,41 @@ var toolBar = (function () {
print("Resize failed: timed out waiting for model (" + url + ") to load");
}
} else {
Entities.editEntity(entityId, { dimensions: naturalDimensions });
Entities.editEntity(entityID, { dimensions: naturalDimensions });
// Reset selection so that the selection overlays will be updated
selectionManager.setSelections([entityId]);
selectionManager.setSelections([entityID]);
}
}
selectionManager.setSelections([entityId]);
selectionManager.setSelections([entityID]);
Script.setTimeout(resize, RESIZE_INTERVAL);
} else {
Window.alert("Can't add model: Model would be out of bounds.");
}
}
function createNewEntity(properties, dragOnCreate) {
// Default to true if not passed in
dragOnCreate = dragOnCreate == undefined ? true : dragOnCreate;
var dimensions = properties.dimensions ? properties.dimensions : DEFAULT_DIMENSIONS;
var position = getPositionToCreateEntity();
var entityID = null;
if (position != null) {
position = grid.snapToSurface(grid.snapToGrid(position, false, dimensions), dimensions),
properties.position = position;
entityID = Entities.addEntity(properties);
if (dragOnCreate) {
placingEntityID = entityID;
}
} else {
Window.alert("Can't create " + properties.type + ": " + properties.type + " would be out of bounds.");
}
return entityID;
}
var newModelButtonDown = false;
var browseMarketplaceButtonDown = false;
that.mousePressEvent = function (event) {
@ -363,127 +379,82 @@ var toolBar = (function () {
}
if (newCubeButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Box",
dimensions: DEFAULT_DIMENSIONS,
color: { red: 255, green: 0, blue: 0 }
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Box",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: DEFAULT_DIMENSIONS,
color: { red: 255, green: 0, blue: 0 }
});
} else {
Window.alert("Can't create box: Box would be out of bounds.");
}
return true;
}
if (newSphereButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Sphere",
dimensions: DEFAULT_DIMENSIONS,
color: { red: 255, green: 0, blue: 0 }
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Sphere",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: DEFAULT_DIMENSIONS,
color: { red: 255, green: 0, blue: 0 }
});
} else {
Window.alert("Can't create sphere: Sphere would be out of bounds.");
}
return true;
}
if (newLightButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Light",
dimensions: DEFAULT_LIGHT_DIMENSIONS,
isSpotlight: false,
color: { red: 150, green: 150, blue: 150 },
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Light",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_LIGHT_DIMENSIONS), DEFAULT_LIGHT_DIMENSIONS),
dimensions: DEFAULT_LIGHT_DIMENSIONS,
isSpotlight: false,
color: { red: 150, green: 150, blue: 150 },
constantAttenuation: 1,
linearAttenuation: 0,
quadraticAttenuation: 0,
exponent: 0,
cutoff: 180, // in degrees
});
constantAttenuation: 1,
linearAttenuation: 0,
quadraticAttenuation: 0,
exponent: 0,
cutoff: 180, // in degrees
});
} else {
Window.alert("Can't create Light: Light would be out of bounds.");
}
return true;
}
if (newTextButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Text",
dimensions: { x: 0.65, y: 0.3, z: 0.01 },
backgroundColor: { red: 64, green: 64, blue: 64 },
textColor: { red: 255, green: 255, blue: 255 },
text: "some text",
lineHeight: 0.06
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Text",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: { x: 0.65, y: 0.3, z: 0.01 },
backgroundColor: { red: 64, green: 64, blue: 64 },
textColor: { red: 255, green: 255, blue: 255 },
text: "some text",
lineHeight: 0.06
});
} else {
Window.alert("Can't create box: Text would be out of bounds.");
}
return true;
}
if (newWebButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Web",
dimensions: { x: 1.6, y: 0.9, z: 0.01 },
sourceUrl: "https://highfidelity.com/",
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Web",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: { x: 1.6, y: 0.9, z: 0.01 },
sourceUrl: "https://highfidelity.com/",
});
} else {
Window.alert("Can't create Web Entity: would be out of bounds.");
}
return true;
}
if (newZoneButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Zone",
dimensions: { x: 10, y: 10, z: 10 },
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Zone",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: { x: 10, y: 10, z: 10 },
});
} else {
Window.alert("Can't create box: Text would be out of bounds.");
}
return true;
}
if (newPolyVoxButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "PolyVox",
dimensions: { x: 10, y: 10, z: 10 },
voxelVolumeSize: {x:16, y:16, z:16},
voxelSurfaceStyle: 1
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "PolyVox",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS),
DEFAULT_DIMENSIONS),
dimensions: { x: 10, y: 10, z: 10 },
voxelVolumeSize: {x:16, y:16, z:16},
voxelSurfaceStyle: 1
});
} else {
Window.alert("Can't create PolyVox: would be out of bounds.");
}
return true;
}
@ -666,7 +637,7 @@ function handleIdleMouse() {
idleMouseTimerId = null;
if (isActive) {
highlightEntityUnderCursor(lastMousePosition, true);
}
}
}
function highlightEntityUnderCursor(position, accurateRay) {
@ -837,15 +808,15 @@ function setupModelMenus() {
}
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Entity List...", shortcutKey: "CTRL+META+L", afterItem: "Models" });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Large Models", shortcutKey: "CTRL+META+L",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Large Models", shortcutKey: "CTRL+META+L",
afterItem: "Entity List...", isCheckable: true, isChecked: true });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Small Models", shortcutKey: "CTRL+META+S",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Small Models", shortcutKey: "CTRL+META+S",
afterItem: "Allow Selecting of Large Models", isCheckable: true, isChecked: true });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Lights", shortcutKey: "CTRL+SHIFT+META+L",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Lights", shortcutKey: "CTRL+SHIFT+META+L",
afterItem: "Allow Selecting of Small Models", isCheckable: true });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Select All Entities In Box", shortcutKey: "CTRL+SHIFT+META+A",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Select All Entities In Box", shortcutKey: "CTRL+SHIFT+META+A",
afterItem: "Allow Selecting of Lights" });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Select All Entities Touching Box", shortcutKey: "CTRL+SHIFT+META+T",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Select All Entities Touching Box", shortcutKey: "CTRL+SHIFT+META+T",
afterItem: "Select All Entities In Box" });
Menu.addMenuItem({ menuName: "File", menuItemName: "Models", isSeparator: true, beforeItem: "Settings" });
@ -962,7 +933,7 @@ function selectAllEtitiesInCurrentSelectionBox(keepIfTouching) {
entities.splice(i, 1);
--i;
}
}
}
}
selectionManager.setSelections(entities);
}
@ -1038,17 +1009,29 @@ function handeMenuEvent(menuItem) {
tooltip.show(false);
}
// This function tries to find a reasonable position to place a new entity based on the camera
// position. If a reasonable position within the world bounds can't be found, `null` will
// be returned. The returned position will also take into account grid snapping settings.
function getPositionToCreateEntity() {
var distance = cameraManager.enabled ? cameraManager.zoomDistance : DEFAULT_ENTITY_DRAG_DROP_DISTANCE;
var direction = Quat.getFront(Camera.orientation);
var offset = Vec3.multiply(distance, direction);
var position = Vec3.sum(Camera.position, offset);
var placementPosition = Vec3.sum(Camera.position, offset);
position.x = Math.max(0, position.x);
position.y = Math.max(0, position.y);
position.z = Math.max(0, position.z);
var cameraPosition = Camera.position;
return position;
var cameraOutOfBounds = cameraPosition.x < 0 || cameraPosition.y < 0 || cameraPosition.z < 0;
var placementOutOfBounds = placementPosition.x < 0 || placementPosition.y < 0 || placementPosition.z < 0;
if (cameraOutOfBounds && placementOutOfBounds) {
return null;
}
placementPosition.x = Math.max(0, placementPosition.x);
placementPosition.y = Math.max(0, placementPosition.y);
placementPosition.z = Math.max(0, placementPosition.z);
return placementPosition;
}
function importSVO(importURL) {
@ -1064,17 +1047,18 @@ function importSVO(importURL) {
if (success) {
var VERY_LARGE = 10000;
var position = { x: 0, y: 0, z: 0};
var position = { x: 0, y: 0, z: 0 };
if (Clipboard.getClipboardContentsLargestDimension() < VERY_LARGE) {
position = getPositionToCreateEntity();
}
if (position.x > 0 && position.y > 0 && position.z > 0) {
if (position != null) {
var pastedEntityIDs = Clipboard.pasteEntities(position);
if (isActive) {
selectionManager.setSelections(pastedEntityIDs);
}
Window.raiseMainWindow();
Window.raiseMainWindow();
} else {
Window.alert("Can't import objects: objects would be out of bounds.");
}
@ -1264,7 +1248,7 @@ PropertiesTool = function(opts) {
if (data.properties.keyLightDirection !== undefined) {
data.properties.keyLightDirection = Vec3.fromPolar(
data.properties.keyLightDirection.x * DEGREES_TO_RADIANS, data.properties.keyLightDirection.y * DEGREES_TO_RADIANS);
}
}
Entities.editEntity(selectionManager.selections[0], data.properties);
if (data.properties.name != undefined) {
entityListTool.sendUpdate();
@ -1360,8 +1344,8 @@ PropertiesTool = function(opts) {
var properties = selectionManager.savedProperties[selectionManager.selections[i]];
if (properties.type == "Zone") {
var centerOfZone = properties.boundingBox.center;
var atmosphereCenter = { x: centerOfZone.x,
y: centerOfZone.y - properties.atmosphere.innerRadius,
var atmosphereCenter = { x: centerOfZone.x,
y: centerOfZone.y - properties.atmosphere.innerRadius,
z: centerOfZone.z };
Entities.editEntity(selectionManager.selections[i], {

View file

@ -0,0 +1,28 @@
//
// hitEffect.js
// examples
//
// Created by Eric Levin on July 20, 2015
// Copyright 2015 High Fidelity, Inc.
//
// An example of how to toggle a screen-space hit effect using the Scene global object.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
var hitEffectEnabled = false;
toggleHitEffect();
function toggleHitEffect() {
Script.setTimeout(function() {
hitEffectEnabled = !hitEffectEnabled;
Scene.setEngineDisplayHitEffect(hitEffectEnabled);
toggleHitEffect();
}, 1000);
}

File diff suppressed because it is too large.

View file

@ -0,0 +1,79 @@
//
// 3DConnexion.cpp
// hifi
//
// Created by MarcelEdward Verhagen on 09-06-15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef I3D_MOUSE_PARAMS_H
#define I3D_MOUSE_PARAMS_H
// Parameters for the 3D mouse based on the SDK from 3Dconnexion
class I3dMouseSensor {
public:
enum Speed {
SPEED_LOW = 0,
SPEED_MID,
SPEED_HIGH
};
virtual bool IsPanZoom() const = 0;
virtual bool IsRotate() const = 0;
virtual Speed GetSpeed() const = 0;
virtual void SetPanZoom(bool isPanZoom) = 0;
virtual void SetRotate(bool isRotate) = 0;
virtual void SetSpeed(Speed speed) = 0;
protected:
virtual ~I3dMouseSensor() {}
};
class I3dMouseNavigation {
public:
enum Pivot {
PIVOT_MANUAL = 0,
PIVOT_AUTO,
PIVOT_AUTO_OVERRIDE
};
enum Navigation {
NAVIGATION_OBJECT_MODE = 0,
NAVIGATION_CAMERA_MODE,
NAVIGATION_FLY_MODE,
NAVIGATION_WALK_MODE,
NAVIGATION_HELICOPTER_MODE
};
enum PivotVisibility {
PIVOT_HIDE = 0,
PIVOT_SHOW,
PIVOT_SHOW_MOVING
};
virtual Navigation GetNavigationMode() const = 0;
virtual Pivot GetPivotMode() const = 0;
virtual PivotVisibility GetPivotVisibility() const = 0;
virtual bool IsLockHorizon() const = 0;
virtual void SetLockHorizon(bool bOn) = 0;
virtual void SetNavigationMode(Navigation navigation) = 0;
virtual void SetPivotMode(Pivot pivot) = 0;
virtual void SetPivotVisibility(PivotVisibility visibility) = 0;
protected:
virtual ~I3dMouseNavigation(){}
};
class I3dMouseParam : public I3dMouseSensor, public I3dMouseNavigation {
public:
virtual ~I3dMouseParam() {}
};
#endif

View file

@ -0,0 +1,4 @@
The Mac version does not require any files here; the 3Dconnexion driver should be installed from
http://www.3dconnexion.eu/service/drivers.html
For Windows, a header file is required: Inc/I3dMouseParams.h

View file

@ -117,6 +117,7 @@
#include "devices/Leapmotion.h"
#include "devices/RealSense.h"
#include "devices/MIDIManager.h"
#include "devices/3Dconnexion.h"
#include "scripting/AccountScriptingInterface.h"
#include "scripting/AudioDeviceScriptingInterface.h"
@ -658,6 +659,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_myAvatar->updateMotionBehaviorFromMenu();
_myAvatar->updateStandingHMDModeFromMenu();
// the 3Dconnexion device wants to be initialized after a window is displayed.
ConnexionClient::init();
auto& packetReceiver = nodeList->getPacketReceiver();
packetReceiver.registerListener(PacketType::DomainConnectionDenied, this, "handleDomainConnectionDeniedPacket");
}
@ -775,6 +779,7 @@ Application::~Application() {
Leapmotion::destroy();
RealSense::destroy();
ConnexionClient::destroy();
qInstallMessageHandler(NULL); // NOTE: Do this as late as possible so we continue to get our log messages
}
@ -1131,6 +1136,13 @@ void Application::paintGL() {
_offscreenContext->makeCurrent();
_frameCount++;
Stats::getInstance()->setRenderDetails(renderArgs._details);
// Reset the gpu::Context Stages
// Back to the default framebuffer;
gpu::Batch batch;
batch.resetStages();
renderArgs._context->render(batch);
}
void Application::runTests() {
@ -1595,6 +1607,7 @@ void Application::focusOutEvent(QFocusEvent* event) {
inputPlugin->pluginFocusOutEvent();
}
}
ConnexionData::getInstance().focusOutEvent();
// synthesize events for keys currently pressed, since we may not get their release events
foreach (int key, _keysPressed) {
@ -3481,6 +3494,7 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
renderContext._maxDrawnOverlay3DItems = sceneInterface->getEngineMaxDrawnOverlay3DItems();
renderContext._drawItemStatus = sceneInterface->doEngineDisplayItemStatus();
renderContext._drawHitEffect = sceneInterface->doEngineDisplayHitEffect();
renderContext._occlusionStatus = Menu::getInstance()->isOptionChecked(MenuOption::DebugAmbientOcclusion);

View file

@ -28,6 +28,7 @@
#include "devices/DdeFaceTracker.h"
#include "devices/Faceshift.h"
#include "devices/RealSense.h"
#include "devices/3Dconnexion.h"
#include "MainWindow.h"
#include "scripting/MenuScriptingInterface.h"
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
@ -257,9 +258,7 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::NamesAboveHeads, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableCharacterController, 0, true,
avatar, SLOT(updateMotionBehaviorFromMenu()));
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ShiftHipsForIdleAnimations, 0, false,
avatar, SLOT(updateMotionBehaviorFromMenu()));
avatar, SLOT(updateMotionBehavior()));
MenuWrapper* viewMenu = addMenu("View");
addActionToQMenuAndActionHash(viewMenu, MenuOption::ReloadContent, 0, qApp, SLOT(reloadResourceCaches()));
@ -440,6 +439,11 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtVectors, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderFocusIndicator, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowWhosLookingAtMe, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu,
MenuOption::Connexion,
0, false,
&ConnexionClient::getInstance(),
SLOT(toggleConnexion(bool)));
MenuWrapper* handOptionsMenu = developerMenu->addMenu("Hands");
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlignForearmsWithWrists, 0, false);

View file

@ -159,6 +159,7 @@ namespace MenuOption {
const QString CenterPlayerInView = "Center Player In View";
const QString Chat = "Chat...";
const QString Collisions = "Collisions";
const QString Connexion = "Activate 3D Connexion Devices";
const QString Console = "Console...";
const QString ControlWithSpeech = "Control With Speech";
const QString CopyAddress = "Copy Address to Clipboard";
@ -270,7 +271,6 @@ namespace MenuOption {
const QString ShowRealtimeEntityStats = "Show Realtime Entity Stats";
const QString ShowWhosLookingAtMe = "Show Who's Looking at Me";
const QString SimpleShadows = "Simple";
const QString ShiftHipsForIdleAnimations = "Shift hips for idle animations";
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode";
const QString Stars = "Stars";
const QString Stats = "Stats";

View file

@ -106,7 +106,7 @@ bool ModelPackager::loadModel() {
}
qCDebug(interfaceapp) << "Reading FBX file : " << _fbxInfo.filePath();
QByteArray fbxContents = fbx.readAll();
_geometry = readFBX(fbxContents, QVariantHash());
_geometry = readFBX(fbxContents, QVariantHash(), _fbxInfo.filePath());
// make sure we have some basic mappings
populateBasicMapping(_mapping, _fbxInfo.filePath(), _geometry);

View file

@ -154,6 +154,7 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
auto state = gpu::StatePointer(new gpu::State());
// enable decal blend
state->setDepthTest(gpu::State::DepthTest(false));
state->setAntialiasedLineEnable(true); // line smoothing also smooths points
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
_starsPipeline.reset(gpu::Pipeline::create(program, state));
@ -217,10 +218,6 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
// Render the stars
batch.setPipeline(_starsPipeline);
batch._glEnable(GL_PROGRAM_POINT_SIZE_EXT);
batch._glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);
batch._glEnable(GL_POINT_SMOOTH);
batch.setInputFormat(streamFormat);
batch.setInputBuffer(VERTICES_SLOT, posView);
batch.setInputBuffer(COLOR_SLOT, colView);

View file

@ -97,7 +97,6 @@ MyAvatar::MyAvatar() :
_lookAtTargetAvatar(),
_shouldRender(true),
_billboardValid(false),
_feetTouchFloor(true),
_eyeContactTarget(LEFT_EYE),
_realWorldFieldOfView("realWorldFieldOfView",
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
@ -182,9 +181,6 @@ void MyAvatar::update(float deltaTime) {
head->setAudioAverageLoudness(audio->getAudioAverageInputLoudness());
simulate(deltaTime);
if (_feetTouchFloor) {
_skeletonModel.updateStandingFoot();
}
}
void MyAvatar::simulate(float deltaTime) {
@ -1194,11 +1190,7 @@ glm::vec3 MyAvatar::getSkeletonPosition() const {
// The avatar is rotated PI about the yAxis, so we have to correct for it
// to get the skeleton offset contribution in the world-frame.
const glm::quat FLIP = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
glm::vec3 skeletonOffset = _skeletonOffset;
if (_feetTouchFloor) {
skeletonOffset += _skeletonModel.getStandingOffset();
}
return _position + getOrientation() * FLIP * skeletonOffset;
return _position + getOrientation() * FLIP * _skeletonOffset;
}
return Avatar::getPosition();
}
@ -1679,7 +1671,6 @@ void MyAvatar::updateMotionBehaviorFromMenu() {
_motionBehaviors &= ~AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED;
}
_characterController.setEnabled(menu->isOptionChecked(MenuOption::EnableCharacterController));
_feetTouchFloor = menu->isOptionChecked(MenuOption::ShiftHipsForIdleAnimations);
}
void MyAvatar::updateStandingHMDModeFromMenu() {

View file

@ -35,7 +35,7 @@ class MyAvatar : public Avatar {
//TODO: make gravity feature work Q_PROPERTY(glm::vec3 gravity READ getGravity WRITE setGravity)
public:
MyAvatar();
MyAvatar();
~MyAvatar();
QByteArray toByteArray();
@ -286,6 +286,7 @@ private:
QList<AnimationHandlePointer> _animationHandles;
bool _feetTouchFloor;
eyeContactTarget _eyeContactTarget;
RecorderPointer _recorder;
@ -294,7 +295,7 @@ private:
Setting::Handle<float> _realWorldFieldOfView;
// private methods
// private methods
void updateOrientation(float deltaTime);
glm::vec3 applyKeyboardMotor(float deltaTime, const glm::vec3& velocity, bool isHovering);
glm::vec3 applyScriptedMotor(float deltaTime, const glm::vec3& velocity);

View file

@ -24,12 +24,6 @@
#include "Util.h"
#include "InterfaceLogging.h"
enum StandingFootState {
LEFT_FOOT,
RIGHT_FOOT,
NO_FOOT
};
SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent) :
Model(parent),
_triangleFanID(DependencyManager::get<GeometryCache>()->allocateID()),
@ -37,9 +31,6 @@ SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent) :
_boundingShape(),
_boundingShapeLocalOffset(0.0f),
_defaultEyeModelPosition(glm::vec3(0.0f, 0.0f, 0.0f)),
_standingFoot(NO_FOOT),
_standingOffset(0.0f),
_clampedFootPosition(0.0f),
_headClipDistance(DEFAULT_NEAR_CLIP),
_isFirstPerson(false)
{
@ -577,65 +568,6 @@ glm::vec3 SkeletonModel::getDefaultEyeModelPosition() const {
return _owningAvatar->getScale() * _defaultEyeModelPosition;
}
/// \return offset of hips after foot animation
void SkeletonModel::updateStandingFoot() {
if (_geometry == NULL) {
return;
}
glm::vec3 offset(0.0f);
int leftFootIndex = _geometry->getFBXGeometry().leftToeJointIndex;
int rightFootIndex = _geometry->getFBXGeometry().rightToeJointIndex;
if (leftFootIndex != -1 && rightFootIndex != -1) {
glm::vec3 leftPosition, rightPosition;
getJointPosition(leftFootIndex, leftPosition);
getJointPosition(rightFootIndex, rightPosition);
int lowestFoot = (leftPosition.y < rightPosition.y) ? LEFT_FOOT : RIGHT_FOOT;
const float MIN_STEP_HEIGHT_THRESHOLD = 0.05f;
bool oneFoot = fabsf(leftPosition.y - rightPosition.y) > MIN_STEP_HEIGHT_THRESHOLD;
int currentFoot = oneFoot ? lowestFoot : _standingFoot;
if (_standingFoot == NO_FOOT) {
currentFoot = lowestFoot;
}
if (currentFoot != _standingFoot) {
if (_standingFoot == NO_FOOT) {
// pick the lowest foot
glm::vec3 lowestPosition = (currentFoot == LEFT_FOOT) ? leftPosition : rightPosition;
// we ignore zero length positions which can happen for a few frames until skeleton is fully loaded
if (glm::length(lowestPosition) > 0.0f) {
_standingFoot = currentFoot;
_clampedFootPosition = lowestPosition;
}
} else {
// swap feet
_standingFoot = currentFoot;
glm::vec3 nextPosition = leftPosition;
glm::vec3 prevPosition = rightPosition;
if (_standingFoot == RIGHT_FOOT) {
nextPosition = rightPosition;
prevPosition = leftPosition;
}
glm::vec3 oldOffset = _clampedFootPosition - prevPosition;
_clampedFootPosition = oldOffset + nextPosition;
offset = _clampedFootPosition - nextPosition;
}
} else {
glm::vec3 nextPosition = (_standingFoot == LEFT_FOOT) ? leftPosition : rightPosition;
offset = _clampedFootPosition - nextPosition;
}
// clamp the offset to not exceed some max distance
const float MAX_STEP_OFFSET = 1.0f;
float stepDistance = glm::length(offset);
if (stepDistance > MAX_STEP_OFFSET) {
offset *= (MAX_STEP_OFFSET / stepDistance);
}
}
_standingOffset = offset;
}
float DENSITY_OF_WATER = 1000.0f; // kg/m^3
float MIN_JOINT_MASS = 1.0f;
float VERY_BIG_MASS = 1.0e6f;

View file

@ -97,10 +97,6 @@ public:
/// \return whether or not the head was found.
glm::vec3 getDefaultEyeModelPosition() const;
/// skeleton offset caused by moving feet
void updateStandingFoot();
const glm::vec3& getStandingOffset() const { return _standingOffset; }
void computeBoundingShape(const FBXGeometry& geometry);
void renderBoundingCollisionShapes(gpu::Batch& batch, float alpha);
float getBoundingShapeRadius() const { return _boundingShape.getRadius(); }
@ -170,9 +166,6 @@ private:
glm::vec3 _boundingShapeLocalOffset;
glm::vec3 _defaultEyeModelPosition;
int _standingFoot;
glm::vec3 _standingOffset;
glm::vec3 _clampedFootPosition;
float _headClipDistance; // Near clip distance to use if no separate head model

File diff suppressed because it is too large.

View file

@ -0,0 +1,244 @@
// 3DConnexion.h
// hifi
//
// Created by Marcel Verhagen on 09-06-15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ConnexionClient_h
#define hifi_ConnexionClient_h
#include <qobject.h>
#include <qlibrary.h>
#include "InterfaceLogging.h"
#include "Application.h"
#include "ui/UserInputMapper.h"
#ifndef HAVE_CONNEXIONCLIENT
class ConnexionClient : public QObject {
Q_OBJECT
public:
static ConnexionClient& getInstance();
static void init() {};
static void destroy() {};
static bool Is3dmouseAttached() { return false; };
public slots:
void toggleConnexion(bool shouldEnable) {};
};
#endif // NOT_HAVE_CONNEXIONCLIENT
#ifdef HAVE_CONNEXIONCLIENT
// the windows connexion rawinput
#ifdef _WIN32
#include "I3dMouseParams.h"
#include <QAbstractNativeEventFilter>
#include <QAbstractEventDispatcher>
#include <Winsock2.h>
#include <windows.h>
// windows rawinput parameters
class MouseParameters : public I3dMouseParam {
public:
MouseParameters();
~MouseParameters();
// I3dmouseSensor interface
bool IsPanZoom() const;
bool IsRotate() const;
Speed GetSpeed() const;
void SetPanZoom(bool isPanZoom);
void SetRotate(bool isRotate);
void SetSpeed(Speed speed);
// I3dmouseNavigation interface
Navigation GetNavigationMode() const;
Pivot GetPivotMode() const;
PivotVisibility GetPivotVisibility() const;
bool IsLockHorizon() const;
void SetLockHorizon(bool bOn);
void SetNavigationMode(Navigation navigation);
void SetPivotMode(Pivot pivot);
void SetPivotVisibility(PivotVisibility visibility);
static bool Is3dmouseAttached();
private:
MouseParameters(const MouseParameters&);
const MouseParameters& operator = (const MouseParameters&);
Navigation fNavigation;
Pivot fPivot;
PivotVisibility fPivotVisibility;
bool fIsLockHorizon;
bool fIsPanZoom;
bool fIsRotate;
Speed fSpeed;
};
class ConnexionClient : public QObject, public QAbstractNativeEventFilter {
Q_OBJECT
public:
ConnexionClient();
~ConnexionClient();
static ConnexionClient& getInstance();
ConnexionClient* client;
static void init();
static void destroy();
static bool Is3dmouseAttached();
I3dMouseParam& MouseParams();
const I3dMouseParam& MouseParams() const;
virtual void Move3d(HANDLE device, std::vector<float>& motionData);
virtual void On3dmouseKeyDown(HANDLE device, int virtualKeyCode);
virtual void On3dmouseKeyUp(HANDLE device, int virtualKeyCode);
virtual bool nativeEventFilter(const QByteArray& eventType, void* message, long* result) Q_DECL_OVERRIDE
{
MSG* msg = static_cast< MSG * >(message);
return ConnexionClient::RawInputEventFilter(message, result);
}
public slots:
void toggleConnexion(bool shouldEnable);
signals:
void Move3d(std::vector<float>& motionData);
void On3dmouseKeyDown(int virtualKeyCode);
void On3dmouseKeyUp(int virtualKeyCode);
private:
bool InitializeRawInput(HWND hwndTarget);
static bool RawInputEventFilter(void* msg, long* result);
void OnRawInput(UINT nInputCode, HRAWINPUT hRawInput);
UINT GetRawInputBuffer(PRAWINPUT pData, PUINT pcbSize, UINT cbSizeHeader);
bool TranslateRawInputData(UINT nInputCode, PRAWINPUT pRawInput);
void On3dmouseInput();
class TInputData {
public:
TInputData() : fAxes(6) {}
bool IsZero() {
return (0.0f == fAxes[0] && 0.0f == fAxes[1] && 0.0f == fAxes[2] &&
0.0f == fAxes[3] && 0.0f == fAxes[4] && 0.0f == fAxes[5]);
}
int fTimeToLive; // For telling if the device was unplugged while sending data
bool fIsDirty;
std::vector<float> fAxes;
};
HWND fWindow;
// Data cache to handle multiple rawinput devices
std::map< HANDLE, TInputData> fDevice2Data;
std::map< HANDLE, unsigned long> fDevice2Keystate;
// 3dmouse parameters
MouseParameters f3dMouseParams; // Rotate, Pan Zoom etc.
// used to calculate distance traveled since last event
DWORD fLast3dmouseInputTime;
};
// the osx connexion api
#else
#include <glm/glm.hpp>
#include "3DconnexionClient/ConnexionClientAPI.h"
class ConnexionClient : public QObject {
Q_OBJECT
public:
static ConnexionClient& getInstance();
static bool Is3dmouseAttached();
static void init();
static void destroy();
public slots:
void toggleConnexion(bool shouldEnable);
};
#endif // __APPLE__
#endif // HAVE_CONNEXIONCLIENT
// connects to the UserInputMapper
class ConnexionData : public QObject {
Q_OBJECT
public:
static ConnexionData& getInstance();
ConnexionData();
enum PositionChannel {
POSITION_AXIS_X_POS = 1,
POSITION_AXIS_X_NEG = 2,
POSITION_AXIS_Y_POS = 3,
POSITION_AXIS_Y_NEG = 4,
POSITION_AXIS_Z_POS = 5,
POSITION_AXIS_Z_NEG = 6,
ROTATION_AXIS_X_POS = 7,
ROTATION_AXIS_X_NEG = 8,
ROTATION_AXIS_Y_POS = 9,
ROTATION_AXIS_Y_NEG = 10,
ROTATION_AXIS_Z_POS = 11,
ROTATION_AXIS_Z_NEG = 12
};
enum ButtonChannel {
BUTTON_1 = 1,
BUTTON_2 = 2,
BUTTON_3 = 3
};
typedef std::unordered_set<int> ButtonPressedMap;
typedef std::map<int, float> AxisStateMap;
float getButton(int channel) const;
float getAxis(int channel) const;
UserInputMapper::Input makeInput(ConnexionData::PositionChannel axis);
UserInputMapper::Input makeInput(ConnexionData::ButtonChannel button);
void registerToUserInputMapper(UserInputMapper& mapper);
void assignDefaultInputMapping(UserInputMapper& mapper);
void update();
void focusOutEvent();
int getDeviceID() { return _deviceID; }
void setDeviceID(int deviceID) { _deviceID = deviceID; }
QString _name;
glm::vec3 cc_position;
glm::vec3 cc_rotation;
int clientId;
void setButton(int lastButtonState);
void handleAxisEvent();
protected:
int _deviceID = 0;
ButtonPressedMap _buttonPressedMap;
AxisStateMap _axisStateMap;
};
#endif // defined(hifi_ConnexionClient_h)
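
For context, a minimal sketch (not part of this commit) of how the ConnexionData interface declared above might be wired into an input mapper; the wireConnexionDevice() helper, the include paths, and the UserInputMapper instance passed in are illustrative assumptions, and only methods declared in 3DConnexion.h are called.

#include "devices/3Dconnexion.h"
#include "ui/UserInputMapper.h"

// Hypothetical wiring helper.
void wireConnexionDevice(UserInputMapper& userInputMapper) {
    ConnexionData& connexion = ConnexionData::getInstance();
    // Expose the device's axes and buttons to the mapper, then install the
    // default bindings supplied by ConnexionData.
    connexion.registerToUserInputMapper(userInputMapper);
    connexion.assignDefaultInputMapping(userInputMapper);
}

// Per frame: pull fresh axis/button state from the device.
//     ConnexionData::getInstance().update();
// On focus loss: clear pressed state so buttons don't stick, as Application::focusOutEvent() now does.
//     ConnexionData::getInstance().focusOutEvent();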

View file

@ -117,7 +117,7 @@ void ApplicationOverlay::renderQmlUi(RenderArgs* renderArgs) {
batch.setProjectionTransform(mat4());
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
batch._glBindTexture(GL_TEXTURE_2D, _uiTexture);
batch._glActiveBindTexture(GL_TEXTURE0, GL_TEXTURE_2D, _uiTexture);
geometryCache->renderUnitQuad(batch, glm::vec4(1));
}

View file

@ -62,7 +62,7 @@ void AnimationReader::run() {
QSharedPointer<Resource> animation = _animation.toStrongRef();
if (!animation.isNull()) {
QMetaObject::invokeMethod(animation.data(), "setGeometry",
Q_ARG(const FBXGeometry&, readFBX(_reply->readAll(), QVariantHash())));
Q_ARG(const FBXGeometry&, readFBX(_reply->readAll(), QVariantHash(), _reply->property("url").toString())));
}
_reply->deleteLater();
}

View file

@ -178,8 +178,7 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
batch.setModelTransform(getTransformToCenter());
bool textured = false, culled = false, emissive = false;
if (_texture) {
batch._glActiveTexture(GL_TEXTURE0);
batch._glBindTexture(GL_TEXTURE_2D, _texture);
batch._glActiveBindTexture(GL_TEXTURE0, GL_TEXTURE_2D, _texture);
textured = emissive = true;
}

View file

@ -17,6 +17,7 @@
#include <QTextStream>
#include <QtDebug>
#include <QtEndian>
#include <QFileInfo>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/quaternion.hpp>
@ -1454,9 +1455,21 @@ void buildModelMesh(ExtractedMesh& extracted) {
}
#endif // USE_MODEL_MESH
QByteArray fileOnUrl(const QByteArray& filenameString, const QString& url) {
QString path = QFileInfo(url).path();
QByteArray filename = filenameString;
QFileInfo checkFile(path + "/" + filename.replace('\\', '/'));
// check if the file exists at the RelativeFilename path
if (checkFile.exists() && checkFile.isFile()) {
filename = filename.replace('\\', '/');
} else {
// no texture exists at that relative path, so strip the path and assume the bare filename is in the fbx dir.
filename = filename.mid(qMax(filename.lastIndexOf('\\'), filename.lastIndexOf('/')) + 1);
}
return filename;
}
FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping, bool loadLightmaps, float lightmapLevel) {
FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
QHash<QString, ExtractedMesh> meshes;
QHash<QString, QString> modelIDsToNames;
QHash<QString, int> meshIDsToMeshIndices;
@ -1781,9 +1794,8 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
TextureParam tex;
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == "RelativeFilename") {
// trim off any path information
QByteArray filename = subobject.properties.at(0).toByteArray();
filename = filename.mid(qMax(filename.lastIndexOf('\\'), filename.lastIndexOf('/')) + 1);
filename = fileOnUrl(filename, url);
textureFilenames.insert(getID(object.properties), filename);
} else if (subobject.name == "TextureName") {
// trim the name from the timestamp
@ -1857,7 +1869,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == "RelativeFilename") {
filename = subobject.properties.at(0).toByteArray();
filename = filename.mid(qMax(filename.lastIndexOf('\\'), filename.lastIndexOf('/')) + 1);
filename = fileOnUrl(filename, url);
} else if (subobject.name == "Content" && !subobject.properties.isEmpty()) {
content = subobject.properties.at(0).toByteArray();
@ -2717,12 +2729,12 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
return geometry;
}
FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping, bool loadLightmaps, float lightmapLevel) {
FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
QBuffer buffer(const_cast<QByteArray*>(&model));
buffer.open(QIODevice::ReadOnly);
return readFBX(&buffer, mapping, loadLightmaps, lightmapLevel);
return readFBX(&buffer, mapping, url, loadLightmaps, lightmapLevel);
}
FBXGeometry readFBX(QIODevice* device, const QVariantHash& mapping, bool loadLightmaps, float lightmapLevel) {
return extractFBXGeometry(parseFBX(device), mapping, loadLightmaps, lightmapLevel);
FBXGeometry readFBX(QIODevice* device, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
return extractFBXGeometry(parseFBX(device), mapping, url, loadLightmaps, lightmapLevel);
}

View file

@ -271,10 +271,10 @@ Q_DECLARE_METATYPE(FBXGeometry)
/// Reads FBX geometry from the supplied model and mapping data.
/// \exception QString if an error occurs in parsing
FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping, bool loadLightmaps = true, float lightmapLevel = 1.0f);
FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping, const QString& url = "", bool loadLightmaps = true, float lightmapLevel = 1.0f);
/// Reads FBX geometry from the supplied model and mapping data.
/// \exception QString if an error occurs in parsing
FBXGeometry readFBX(QIODevice* device, const QVariantHash& mapping, bool loadLightmaps = true, float lightmapLevel = 1.0f);
FBXGeometry readFBX(QIODevice* device, const QVariantHash& mapping, const QString& url = "", bool loadLightmaps = true, float lightmapLevel = 1.0f);
#endif // hifi_FBXReader_h
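
For context, a minimal sketch (not part of this commit) of calling the new readFBX() overload with the model's own path as the url argument, so that fileOnUrl() can resolve textures referenced by RelativeFilename next to the .fbx file; the loadGeometryWithTextures() helper and its error handling are illustrative assumptions.

#include <QFile>
#include "FBXReader.h"

// Hypothetical helper: read an FBX from disk and pass its path through so
// relative texture filenames are checked against that location first.
FBXGeometry loadGeometryWithTextures(const QString& fbxPath) {
    QFile fbxFile(fbxPath);
    if (!fbxFile.open(QIODevice::ReadOnly)) {
        return FBXGeometry();
    }
    // mapping left empty here; readFBX may throw a QString on parse errors
    return readFBX(fbxFile.readAll(), QVariantHash(), fbxPath);
}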

View file

@ -306,7 +306,8 @@ bool OBJReader::parseOBJGroup(OBJTokenizer& tokenizer, const QVariantHash& mappi
}
QByteArray token = tokenizer.getDatum();
//qCDebug(modelformat) << token;
if (token == "g") {
// we don't support separate objects in the same file, so treat "o" the same as "g".
if (token == "g" || token == "o") {
if (sawG) {
// we've encountered the beginning of the next group.
tokenizer.pushBackToken(OBJTokenizer::DATUM_TOKEN);

View file

@ -288,6 +288,10 @@ void Batch::getQuery(const QueryPointer& query) {
_params.push_back(_queries.cache(query));
}
void Batch::resetStages() {
ADD_COMMAND(resetStages);
}
void push_back(Batch::Params& params, const vec3& v) {
params.push_back(v.x);
params.push_back(v.y);

View file

@ -94,7 +94,7 @@ public:
void setResourceTexture(uint32 slot, const TexturePointer& view);
void setResourceTexture(uint32 slot, const TextureView& view); // not a command, just a shortcut from a TextureView
// Framebuffer Stage
// Output Stage
void setFramebuffer(const FramebufferPointer& framebuffer);
// Clear framebuffer layers
@ -113,34 +113,17 @@ public:
void endQuery(const QueryPointer& query);
void getQuery(const QueryPointer& query);
// Reset the stage caches and states
void resetStages();
// TODO: As long as we have GL calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long-
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API.
// For now, instead of calling the raw GL call, use the equivalent call on the batch so the call is being recorded.
// The implementation of these functions is in GLBackend.cpp.
void _glEnable(unsigned int cap);
void _glDisable(unsigned int cap);
void _glActiveBindTexture(unsigned int unit, unsigned int target, unsigned int texture);
void _glEnableClientState(unsigned int array);
void _glDisableClientState(unsigned int array);
void _glCullFace(unsigned int mode);
void _glAlphaFunc(unsigned int func, float ref);
void _glDepthFunc(unsigned int func);
void _glDepthMask(unsigned char flag);
void _glDepthRange(float zNear, float zFar);
void _glBindBuffer(unsigned int target, unsigned int buffer);
void _glBindTexture(unsigned int target, unsigned int texture);
void _glActiveTexture(unsigned int texture);
void _glTexParameteri(unsigned int target, unsigned int pname, int param);
void _glDrawBuffers(int n, const unsigned int* bufs);
void _glUseProgram(unsigned int program);
void _glUniform1i(int location, int v0);
void _glUniform1f(int location, float v0);
void _glUniform2f(int location, float v0, float v1);
@ -150,9 +133,6 @@ public:
void _glUniform4iv(int location, int count, const int* value);
void _glUniformMatrix4fv(int location, int count, unsigned char transpose, const float* value);
void _glEnableVertexAttribArray(int location);
void _glDisableVertexAttribArray(int location);
void _glColor4f(float red, float green, float blue, float alpha);
void _glLineWidth(float width);
@ -186,31 +166,13 @@ public:
COMMAND_endQuery,
COMMAND_getQuery,
COMMAND_resetStages,
// TODO: As long as we have GL calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long-
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API.
COMMAND_glEnable,
COMMAND_glDisable,
COMMAND_glActiveBindTexture,
COMMAND_glEnableClientState,
COMMAND_glDisableClientState,
COMMAND_glCullFace,
COMMAND_glAlphaFunc,
COMMAND_glDepthFunc,
COMMAND_glDepthMask,
COMMAND_glDepthRange,
COMMAND_glBindBuffer,
COMMAND_glBindTexture,
COMMAND_glActiveTexture,
COMMAND_glTexParameteri,
COMMAND_glDrawBuffers,
COMMAND_glUseProgram,
COMMAND_glUniform1i,
COMMAND_glUniform1f,
COMMAND_glUniform2f,
@ -220,9 +182,6 @@ public:
COMMAND_glUniform4iv,
COMMAND_glUniformMatrix4fv,
COMMAND_glEnableVertexAttribArray,
COMMAND_glDisableVertexAttribArray,
COMMAND_glColor4f,
COMMAND_glLineWidth,
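
For context, a minimal sketch (not part of this commit) of how the surviving _gl* escape hatch and the new resetStages() command are meant to be used from interface code: record the raw GL call on the batch instead of issuing it directly, then reset the backend caches once the batch is built. The include paths, the drawLegacyTexturedQuad() helper, and the texture id are illustrative assumptions, and the GL_TEXTURE0/GL_TEXTURE_2D enums assume the GL headers are in scope.

#include "gpu/Batch.h"
#include "gpu/Context.h"

// Hypothetical helper mirroring the ApplicationOverlay/RenderableWebEntityItem change.
void drawLegacyTexturedQuad(gpu::Context* context, unsigned int textureId) {
    gpu::Batch batch;
    // Old code called glActiveTexture + glBindTexture directly; the combined
    // command keeps the raw GL call inside the recorded batch.
    batch._glActiveBindTexture(GL_TEXTURE0, GL_TEXTURE_2D, textureId);
    // ... record the rest of the draw commands here ...
    // Leave the stage caches in a known state, back at the default framebuffer.
    batch.resetStages();
    context->render(batch);
}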

View file

@ -45,3 +45,4 @@ void Context::syncCache() {
void Context::downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage) {
_backend->downloadFramebuffer(srcFramebuffer, region, destImage);
}

View file

@ -46,28 +46,10 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::GLBackend::do_endQuery),
(&::gpu::GLBackend::do_getQuery),
(&::gpu::GLBackend::do_glEnable),
(&::gpu::GLBackend::do_glDisable),
(&::gpu::GLBackend::do_resetStages),
(&::gpu::GLBackend::do_glEnableClientState),
(&::gpu::GLBackend::do_glDisableClientState),
(&::gpu::GLBackend::do_glActiveBindTexture),
(&::gpu::GLBackend::do_glCullFace),
(&::gpu::GLBackend::do_glAlphaFunc),
(&::gpu::GLBackend::do_glDepthFunc),
(&::gpu::GLBackend::do_glDepthMask),
(&::gpu::GLBackend::do_glDepthRange),
(&::gpu::GLBackend::do_glBindBuffer),
(&::gpu::GLBackend::do_glBindTexture),
(&::gpu::GLBackend::do_glActiveTexture),
(&::gpu::GLBackend::do_glTexParameteri),
(&::gpu::GLBackend::do_glDrawBuffers),
(&::gpu::GLBackend::do_glUseProgram),
(&::gpu::GLBackend::do_glUniform1i),
(&::gpu::GLBackend::do_glUniform1f),
(&::gpu::GLBackend::do_glUniform2f),
@ -77,9 +59,6 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::GLBackend::do_glUniform4iv),
(&::gpu::GLBackend::do_glUniformMatrix4fv),
(&::gpu::GLBackend::do_glEnableVertexAttribArray),
(&::gpu::GLBackend::do_glDisableVertexAttribArray),
(&::gpu::GLBackend::do_glColor4f),
(&::gpu::GLBackend::do_glLineWidth),
};
@ -128,6 +107,8 @@ GLBackend::GLBackend() :
}
GLBackend::~GLBackend() {
resetStages();
killInput();
killTransform();
}
@ -246,6 +227,22 @@ void GLBackend::do_drawIndexedInstanced(Batch& batch, uint32 paramOffset) {
(void) CHECK_GL_ERROR();
}
void GLBackend::do_resetStages(Batch& batch, uint32 paramOffset) {
resetStages();
}
void GLBackend::resetStages() {
resetInputStage();
resetPipelineStage();
resetTransformStage();
resetUniformStage();
resetResourceStage();
resetOutputStage();
resetQueryStage();
(void) CHECK_GL_ERROR();
}
// TODO: As long as we have GL calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long-
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API.
@ -255,211 +252,24 @@ void GLBackend::do_drawIndexedInstanced(Batch& batch, uint32 paramOffset) {
//#define DO_IT_NOW(call, offset) runLastCommand();
#define DO_IT_NOW(call, offset)
void Batch::_glEnable(GLenum cap) {
ADD_COMMAND_GL(glEnable);
_params.push_back(cap);
DO_IT_NOW(_glEnable, 1);
}
void GLBackend::do_glEnable(Batch& batch, uint32 paramOffset) {
glEnable(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDisable(GLenum cap) {
ADD_COMMAND_GL(glDisable);
_params.push_back(cap);
DO_IT_NOW(_glDisable, 1);
}
void GLBackend::do_glDisable(Batch& batch, uint32 paramOffset) {
glDisable(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glEnableClientState(GLenum array) {
ADD_COMMAND_GL(glEnableClientState);
_params.push_back(array);
DO_IT_NOW(_glEnableClientState, 1);
}
void GLBackend::do_glEnableClientState(Batch& batch, uint32 paramOffset) {
glEnableClientState(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDisableClientState(GLenum array) {
ADD_COMMAND_GL(glDisableClientState);
_params.push_back(array);
DO_IT_NOW(_glDisableClientState, 1);
}
void GLBackend::do_glDisableClientState(Batch& batch, uint32 paramOffset) {
glDisableClientState(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glCullFace(GLenum mode) {
ADD_COMMAND_GL(glCullFace);
_params.push_back(mode);
DO_IT_NOW(_glCullFace, 1);
}
void GLBackend::do_glCullFace(Batch& batch, uint32 paramOffset) {
glCullFace(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glAlphaFunc(GLenum func, GLclampf ref) {
ADD_COMMAND_GL(glAlphaFunc);
_params.push_back(ref);
_params.push_back(func);
DO_IT_NOW(_glAlphaFunc, 2);
}
void GLBackend::do_glAlphaFunc(Batch& batch, uint32 paramOffset) {
glAlphaFunc(
batch._params[paramOffset + 1]._uint,
batch._params[paramOffset + 0]._float);
(void) CHECK_GL_ERROR();
}
void Batch::_glDepthFunc(GLenum func) {
ADD_COMMAND_GL(glDepthFunc);
_params.push_back(func);
DO_IT_NOW(_glDepthFunc, 1);
}
void GLBackend::do_glDepthFunc(Batch& batch, uint32 paramOffset) {
glDepthFunc(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDepthMask(GLboolean flag) {
ADD_COMMAND_GL(glDepthMask);
_params.push_back(flag);
DO_IT_NOW(_glDepthMask, 1);
}
void GLBackend::do_glDepthMask(Batch& batch, uint32 paramOffset) {
glDepthMask(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDepthRange(GLfloat zNear, GLfloat zFar) {
ADD_COMMAND_GL(glDepthRange);
_params.push_back(zFar);
_params.push_back(zNear);
DO_IT_NOW(_glDepthRange, 2);
}
void GLBackend::do_glDepthRange(Batch& batch, uint32 paramOffset) {
glDepthRange(
batch._params[paramOffset + 1]._float,
batch._params[paramOffset + 0]._float);
(void) CHECK_GL_ERROR();
}
void Batch::_glBindBuffer(GLenum target, GLuint buffer) {
ADD_COMMAND_GL(glBindBuffer);
_params.push_back(buffer);
_params.push_back(target);
DO_IT_NOW(_glBindBuffer, 2);
}
void GLBackend::do_glBindBuffer(Batch& batch, uint32 paramOffset) {
glBindBuffer(
batch._params[paramOffset + 1]._uint,
batch._params[paramOffset + 0]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glBindTexture(GLenum target, GLuint texture) {
ADD_COMMAND_GL(glBindTexture);
void Batch::_glActiveBindTexture(GLenum unit, GLenum target, GLuint texture) {
ADD_COMMAND_GL(glActiveBindTexture);
_params.push_back(texture);
_params.push_back(target);
_params.push_back(unit);
DO_IT_NOW(_glBindTexture, 2);
DO_IT_NOW(_glActiveBindTexture, 3);
}
void GLBackend::do_glBindTexture(Batch& batch, uint32 paramOffset) {
void GLBackend::do_glActiveBindTexture(Batch& batch, uint32 paramOffset) {
glActiveTexture(batch._params[paramOffset + 2]._uint);
glBindTexture(
batch._params[paramOffset + 1]._uint,
batch._params[paramOffset + 0]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glActiveTexture(GLenum texture) {
ADD_COMMAND_GL(glActiveTexture);
_params.push_back(texture);
DO_IT_NOW(_glActiveTexture, 1);
}
void GLBackend::do_glActiveTexture(Batch& batch, uint32 paramOffset) {
glActiveTexture(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glTexParameteri(GLenum target, GLenum pname, GLint param) {
ADD_COMMAND_GL(glTexParameteri);
_params.push_back(param);
_params.push_back(pname);
_params.push_back(target);
DO_IT_NOW(glTexParameteri, 3);
}
void GLBackend::do_glTexParameteri(Batch& batch, uint32 paramOffset) {
glTexParameteri(batch._params[paramOffset + 2]._uint,
batch._params[paramOffset + 1]._uint,
batch._params[paramOffset + 0]._int);
(void) CHECK_GL_ERROR();
}
void Batch::_glDrawBuffers(GLsizei n, const GLenum* bufs) {
ADD_COMMAND_GL(glDrawBuffers);
_params.push_back(cacheData(n * sizeof(GLenum), bufs));
_params.push_back(n);
DO_IT_NOW(_glDrawBuffers, 2);
}
void GLBackend::do_glDrawBuffers(Batch& batch, uint32 paramOffset) {
glDrawBuffers(
batch._params[paramOffset + 1]._uint,
(const GLenum*)batch.editData(batch._params[paramOffset + 0]._uint));
(void) CHECK_GL_ERROR();
}
void Batch::_glUseProgram(GLuint program) {
ADD_COMMAND_GL(glUseProgram);
_params.push_back(program);
DO_IT_NOW(_glUseProgram, 1);
}
void GLBackend::do_glUseProgram(Batch& batch, uint32 paramOffset) {
_pipeline._program = batch._params[paramOffset]._uint;
// for this call we still want to execute the glUseProgram in the order of the glCommand to avoid any issue
_pipeline._invalidProgram = false;
glUseProgram(_pipeline._program);
(void) CHECK_GL_ERROR();
}
void Batch::_glUniform1i(GLint location, GLint v0) {
if (location < 0) {
return;
@ -660,30 +470,6 @@ void GLBackend::do_glUniformMatrix4fv(Batch& batch, uint32 paramOffset) {
(void) CHECK_GL_ERROR();
}
void Batch::_glEnableVertexAttribArray(GLint location) {
ADD_COMMAND_GL(glEnableVertexAttribArray);
_params.push_back(location);
DO_IT_NOW(_glEnableVertexAttribArray, 1);
}
void GLBackend::do_glEnableVertexAttribArray(Batch& batch, uint32 paramOffset) {
glEnableVertexAttribArray(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDisableVertexAttribArray(GLint location) {
ADD_COMMAND_GL(glDisableVertexAttribArray);
_params.push_back(location);
DO_IT_NOW(_glDisableVertexAttribArray, 1);
}
void GLBackend::do_glDisableVertexAttribArray(Batch& batch, uint32 paramOffset) {
glDisableVertexAttribArray(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glColor4f(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha) {
ADD_COMMAND_GL(glColor4f);

View file

@ -195,8 +195,17 @@ public:
static const int MAX_NUM_ATTRIBUTES = Stream::NUM_INPUT_SLOTS;
static const int MAX_NUM_INPUT_BUFFERS = 16;
uint32 getNumInputBuffers() const { return _input._buffersState.size(); }
uint32 getNumInputBuffers() const { return _input._invalidBuffers.size(); }
// this is the maximum per shader stage on low-end Apple hardware
// TODO: make it platform dependent at init time
static const int MAX_NUM_UNIFORM_BUFFERS = 12;
uint32 getMaxNumUniformBuffers() const { return MAX_NUM_UNIFORM_BUFFERS; }
// this is the maximum per shader stage on low-end Apple hardware
// TODO: make it platform dependent at init time
static const int MAX_NUM_RESOURCE_TEXTURES = 16;
uint32 getMaxNumResourceTextures() const { return MAX_NUM_RESOURCE_TEXTURES; }
// The State setters called by the GLState::Commands when a new state is assigned
void do_setStateFillMode(int32 mode);
@ -250,12 +259,16 @@ protected:
void killInput();
void syncInputStateCache();
void updateInput();
void resetInputStage();
struct InputStageState {
bool _invalidFormat = true;
Stream::FormatPointer _format;
typedef std::bitset<MAX_NUM_ATTRIBUTES> ActivationCache;
ActivationCache _attributeActivation;
typedef std::bitset<MAX_NUM_INPUT_BUFFERS> BuffersState;
BuffersState _buffersState;
BuffersState _invalidBuffers;
Buffers _buffers;
Offsets _bufferOffsets;
@ -266,23 +279,20 @@ protected:
Offset _indexBufferOffset;
Type _indexBufferType;
typedef std::bitset<MAX_NUM_ATTRIBUTES> ActivationCache;
ActivationCache _attributeActivation;
GLuint _defaultVAO;
InputStageState() :
_invalidFormat(true),
_format(0),
_buffersState(0),
_buffers(_buffersState.size(), BufferPointer(0)),
_bufferOffsets(_buffersState.size(), 0),
_bufferStrides(_buffersState.size(), 0),
_bufferVBOs(_buffersState.size(), 0),
_attributeActivation(0),
_invalidBuffers(0),
_buffers(_invalidBuffers.size(), BufferPointer(0)),
_bufferOffsets(_invalidBuffers.size(), 0),
_bufferStrides(_invalidBuffers.size(), 0),
_bufferVBOs(_invalidBuffers.size(), 0),
_indexBuffer(0),
_indexBufferOffset(0),
_indexBufferType(UINT32),
_attributeActivation(0),
_defaultVAO(0)
{}
} _input;
@ -298,6 +308,7 @@ protected:
// Synchronize the state cache of this Backend with the actual real state of the GL Context
void syncTransformStateCache();
void updateTransform();
void resetTransformStage();
struct TransformStageState {
TransformObject _transformObject;
TransformCamera _transformCamera;
@ -330,12 +341,31 @@ protected:
// Uniform Stage
void do_setUniformBuffer(Batch& batch, uint32 paramOffset);
void releaseUniformBuffer(int slot);
void resetUniformStage();
struct UniformStageState {
Buffers _buffers;
UniformStageState():
_buffers(MAX_NUM_UNIFORM_BUFFERS, nullptr)
{}
} _uniform;
// Resource Stage
void do_setResourceTexture(Batch& batch, uint32 paramOffset);
struct UniformStageState {
};
void releaseResourceTexture(int slot);
void resetResourceStage();
struct ResourceStageState {
Textures _textures;
ResourceStageState():
_textures(MAX_NUM_RESOURCE_TEXTURES, nullptr)
{}
} _resource;
// Pipeline Stage
void do_setPipeline(Batch& batch, uint32 paramOffset);
void do_setStateBlendFactor(Batch& batch, uint32 paramOffset);
@ -349,6 +379,7 @@ protected:
void syncPipelineStateCache();
// Grab the actual GL state into its gpu::State equivalent. This is used by the above call syncPipelineStateCache()
void getCurrentGLState(State::Data& state);
void resetPipelineStage();
struct PipelineStageState {
@ -388,6 +419,7 @@ protected:
// Synchronize the state cache of this Backend with the actual real state of the GL Context
void syncOutputStateCache();
void resetOutputStage();
struct OutputStageState {
@ -402,31 +434,20 @@ protected:
void do_endQuery(Batch& batch, uint32 paramOffset);
void do_getQuery(Batch& batch, uint32 paramOffset);
void resetQueryStage();
struct QueryStageState {
};
// Reset stages
void do_resetStages(Batch& batch, uint32 paramOffset);
void resetStages();
// TODO: As long as we have GL calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long-
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API.
void do_glEnable(Batch& batch, uint32 paramOffset);
void do_glDisable(Batch& batch, uint32 paramOffset);
void do_glActiveBindTexture(Batch& batch, uint32 paramOffset);
void do_glEnableClientState(Batch& batch, uint32 paramOffset);
void do_glDisableClientState(Batch& batch, uint32 paramOffset);
void do_glCullFace(Batch& batch, uint32 paramOffset);
void do_glAlphaFunc(Batch& batch, uint32 paramOffset);
void do_glDepthFunc(Batch& batch, uint32 paramOffset);
void do_glDepthMask(Batch& batch, uint32 paramOffset);
void do_glDepthRange(Batch& batch, uint32 paramOffset);
void do_glBindBuffer(Batch& batch, uint32 paramOffset);
void do_glBindTexture(Batch& batch, uint32 paramOffset);
void do_glActiveTexture(Batch& batch, uint32 paramOffset);
void do_glTexParameteri(Batch& batch, uint32 paramOffset);
void do_glDrawBuffers(Batch& batch, uint32 paramOffset);
void do_glUseProgram(Batch& batch, uint32 paramOffset);
void do_glUniform1i(Batch& batch, uint32 paramOffset);
void do_glUniform1f(Batch& batch, uint32 paramOffset);
void do_glUniform2f(Batch& batch, uint32 paramOffset);
@ -436,9 +457,6 @@ protected:
void do_glUniform4iv(Batch& batch, uint32 paramOffset);
void do_glUniformMatrix4fv(Batch& batch, uint32 paramOffset);
void do_glEnableVertexAttribArray(Batch& batch, uint32 paramOffset);
void do_glDisableVertexAttribArray(Batch& batch, uint32 paramOffset);
void do_glColor4f(Batch& batch, uint32 paramOffset);
void do_glLineWidth(Batch& batch, uint32 paramOffset);
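Taken together, these declarations add a full "reset" path to the backend: one reset per stage plus a resetStages() entry point driven by the new do_resetStages batch command. The bodies are not part of this hunk, so the following is only a sketch of how they presumably chain together, using the declared signatures:

void GLBackend::do_resetStages(Batch& batch, uint32 paramOffset) {
    // The batch parameters are unused here; the command simply triggers a full reset.
    resetStages();
}

void GLBackend::resetStages() {
    // Walk every stage cache so the GL context and the backend's shadow state agree again.
    resetInputStage();
    resetTransformStage();
    resetPipelineStage();
    resetUniformStage();
    resetResourceStage();
    resetOutputStage();
    resetQueryStage();
    (void) CHECK_GL_ERROR();
}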

View file

@ -52,7 +52,7 @@ void GLBackend::do_setInputBuffer(Batch& batch, uint32 paramOffset) {
}
if (isModified) {
_input._buffersState.set(channel);
_input._invalidBuffers.set(channel);
}
}
}
@ -154,7 +154,7 @@ void GLBackend::updateInput() {
_stats._ISNumFormatChanges++;
}
if (_input._buffersState.any()) {
if (_input._invalidBuffers.any()) {
int numBuffers = _input._buffers.size();
auto buffer = _input._buffers.data();
auto vbo = _input._bufferVBOs.data();
@ -162,7 +162,7 @@ void GLBackend::updateInput() {
auto stride = _input._bufferStrides.data();
for (int bufferNum = 0; bufferNum < numBuffers; bufferNum++) {
if (_input._buffersState.test(bufferNum)) {
if (_input._invalidBuffers.test(bufferNum)) {
glBindVertexBuffer(bufferNum, (*vbo), (*offset), (*stride));
}
buffer++;
@ -170,11 +170,11 @@ void GLBackend::updateInput() {
offset++;
stride++;
}
_input._buffersState.reset();
_input._invalidBuffers.reset();
(void) CHECK_GL_ERROR();
}
#else
if (_input._invalidFormat || _input._buffersState.any()) {
if (_input._invalidFormat || _input._invalidBuffers.any()) {
if (_input._invalidFormat) {
InputStageState::ActivationCache newActivation;
@ -232,7 +232,7 @@ void GLBackend::updateInput() {
if ((channelIt).first < buffers.size()) {
int bufferNum = (channelIt).first;
if (_input._buffersState.test(bufferNum) || _input._invalidFormat) {
if (_input._invalidBuffers.test(bufferNum) || _input._invalidFormat) {
// GLuint vbo = gpu::GLBackend::getBufferID((*buffers[bufferNum]));
GLuint vbo = _input._bufferVBOs[bufferNum];
if (boundVBO != vbo) {
@ -240,7 +240,7 @@ void GLBackend::updateInput() {
(void) CHECK_GL_ERROR();
boundVBO = vbo;
}
_input._buffersState[bufferNum] = false;
_input._invalidBuffers[bufferNum] = false;
for (unsigned int i = 0; i < channel._slots.size(); i++) {
const Stream::Attribute& attrib = attributes.at(channel._slots[i]);
@ -285,6 +285,51 @@ void GLBackend::updateInput() {
#endif
}
void GLBackend::resetInputStage() {
// Reset index buffer
_input._indexBufferType = UINT32;
_input._indexBufferOffset = 0;
_input._indexBuffer.reset();
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
(void) CHECK_GL_ERROR();
#if defined(SUPPORT_VAO)
// TODO
#else
glBindBuffer(GL_ARRAY_BUFFER, 0);
size_t i = 0;
#if defined(SUPPORT_LEGACY_OPENGL)
for (; i < NUM_CLASSIC_ATTRIBS; i++) {
glDisableClientState(attributeSlotToClassicAttribName[i]);
}
glVertexPointer(4, GL_FLOAT, 0, 0);
glNormalPointer(GL_FLOAT, 0, 0);
glColorPointer(4, GL_FLOAT, 0, 0);
glTexCoordPointer(4, GL_FLOAT, 0, 0);
#endif
for (; i < _input._attributeActivation.size(); i++) {
glDisableVertexAttribArray(i);
glVertexAttribPointer(i, 4, GL_FLOAT, GL_FALSE, 0, 0);
}
#endif
// Reset vertex buffer and format
_input._format.reset();
_input._invalidFormat = false;
_input._attributeActivation.reset();
for (int i = 0; i < _input._buffers.size(); i++) {
_input._buffers[i].reset();
_input._bufferOffsets[i] = 0;
_input._bufferStrides[i] = 0;
_input._bufferVBOs[i] = 0;
}
_input._invalidBuffers.reset();
}
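The rename from _buffersState to _invalidBuffers makes the intent of the bitset explicit: it is a dirty mask. do_setInputBuffer sets the bit for a channel whose binding changed, updateInput rebinds only the flagged channels, and resetInputStage clears everything. A minimal, self-contained sketch of that pattern (illustrative types, not the engine's own):

#include <bitset>
#include <cstddef>

constexpr std::size_t MAX_NUM_INPUT_BUFFERS = 16;

struct InputDirtyTracking {
    std::bitset<MAX_NUM_INPUT_BUFFERS> invalidBuffers;

    // Called whenever a vertex buffer binding changes on a channel.
    void markChannelDirty(std::size_t channel) {
        invalidBuffers.set(channel);
    }

    // Called once before drawing: rebind only what changed, then clear the mask.
    void update() {
        if (!invalidBuffers.any()) {
            return; // nothing to do, no GL traffic
        }
        for (std::size_t i = 0; i < invalidBuffers.size(); ++i) {
            if (invalidBuffers.test(i)) {
                // the real backend calls glBindVertexBuffer(i, vbo, offset, stride) here
            }
        }
        invalidBuffers.reset();
    }
};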
void GLBackend::do_setIndexBuffer(Batch& batch, uint32 paramOffset) {
_input._indexBufferType = (Type) batch._params[paramOffset + 2]._uint;

View file

@ -177,6 +177,13 @@ void GLBackend::syncOutputStateCache() {
_output._framebuffer.reset();
}
void GLBackend::resetOutputStage() {
if (_output._framebuffer) {
_output._framebuffer.reset();
_output._drawFBO = 0;
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}
}
void GLBackend::do_setFramebuffer(Batch& batch, uint32 paramOffset) {
auto framebuffer = batch._framebuffers.get(batch._params[paramOffset]._uint);
@ -206,12 +213,21 @@ void GLBackend::do_clearFramebuffer(Batch& batch, uint32 paramOffset) {
if (masks & Framebuffer::BUFFER_STENCIL) {
glClearStencil(stencil);
glmask |= GL_STENCIL_BUFFER_BIT;
// TODO: we will probably need to also check the stencil write mask like we do
// for the depth buffer, but as a famous Fez owner would say, "We'll cross that bridge when we come to it"
}
bool restoreDepthMask = false;
if (masks & Framebuffer::BUFFER_DEPTH) {
glClearDepth(depth);
glmask |= GL_DEPTH_BUFFER_BIT;
}
bool cacheDepthMask = _pipeline._stateCache.depthTest.getWriteMask();
if (!cacheDepthMask) {
restoreDepthMask = true;
glDepthMask(GL_TRUE);
}
}
std::vector<GLenum> drawBuffers;
if (masks & Framebuffer::BUFFER_COLORS) {
@ -245,6 +261,11 @@ void GLBackend::do_clearFramebuffer(Batch& batch, uint32 paramOffset) {
glDisable(GL_SCISSOR_TEST);
}
// Restore the depth write mask, i.e. turn it back off
if (restoreDepthMask) {
glDepthMask(GL_FALSE);
}
// Restore the color draw buffers only if a framebuffer is bound
if (_output._framebuffer && !drawBuffers.empty()) {
auto glFramebuffer = syncGPUObject(*_output._framebuffer);

View file

@ -164,15 +164,74 @@ void GLBackend::updatePipeline() {
#endif
}
void GLBackend::resetPipelineStage() {
// First reset State to default
State::Signature resetSignature(0);
resetPipelineState(resetSignature);
_pipeline._state = nullptr;
_pipeline._invalidState = false;
// Second, reset the shader side
_pipeline._invalidProgram = false;
_pipeline._program = 0;
_pipeline._pipeline.reset();
glUseProgram(0);
}
void GLBackend::releaseUniformBuffer(int slot) {
#if (GPU_FEATURE_PROFILE == GPU_CORE)
auto& buf = _uniform._buffers[slot];
if (buf) {
auto* object = Backend::getGPUObject<GLBackend::GLBuffer>(*buf);
if (object) {
GLuint bo = object->_buffer;
glBindBufferBase(GL_UNIFORM_BUFFER, slot, 0); // RELEASE
(void) CHECK_GL_ERROR();
}
buf.reset();
}
#endif
}
void GLBackend::resetUniformStage() {
for (int i = 0; i < _uniform._buffers.size(); i++) {
releaseUniformBuffer(i);
}
}
void GLBackend::do_setUniformBuffer(Batch& batch, uint32 paramOffset) {
GLuint slot = batch._params[paramOffset + 3]._uint;
BufferPointer uniformBuffer = batch._buffers.get(batch._params[paramOffset + 2]._uint);
GLintptr rangeStart = batch._params[paramOffset + 1]._uint;
GLsizeiptr rangeSize = batch._params[paramOffset + 0]._uint;
#if (GPU_FEATURE_PROFILE == GPU_CORE)
GLuint bo = getBufferID(*uniformBuffer);
glBindBufferRange(GL_UNIFORM_BUFFER, slot, bo, rangeStart, rangeSize);
if (!uniformBuffer) {
releaseUniformBuffer(slot);
return;
}
// check cache before thinking
if (_uniform._buffers[slot] == uniformBuffer) {
return;
}
// Sync BufferObject
auto* object = GLBackend::syncGPUObject(*uniformBuffer);
if (object) {
glBindBufferRange(GL_UNIFORM_BUFFER, slot, object->_buffer, rangeStart, rangeSize);
_uniform._buffers[slot] = uniformBuffer;
(void) CHECK_GL_ERROR();
} else {
releaseUniformBuffer(slot);
return;
}
#else
// because we rely on the program uniform mechanism we need to have
// the program bound, thank you MacOSX Legacy profile.
@ -184,19 +243,49 @@ void GLBackend::do_setUniformBuffer(Batch& batch, uint32 paramOffset) {
// NOT working, so we'll stick to the uniform float array until we move to core profile
// GLuint bo = getBufferID(*uniformBuffer);
//glUniformBufferEXT(_shader._program, slot, bo);
#endif
(void) CHECK_GL_ERROR();
#endif
}
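The rewritten do_setUniformBuffer follows the same per-slot caching discipline as the texture path: a null buffer releases the slot, an identical buffer short-circuits, and only a genuinely new binding touches GL. A simplified sketch of that discipline (it assumes GL headers and a shared_ptr-style BufferPointer comparable to the engine's; names are illustrative):

#include <vector>

// slots mirrors _uniform._buffers: one cached pointer per uniform-buffer slot.
void bindUniformSlot(std::vector<BufferPointer>& slots, int slot,
                     const BufferPointer& buffer, GLuint glBuffer,
                     GLintptr rangeStart, GLsizeiptr rangeSize) {
    if (!buffer) {
        // Null means "release": unbind the slot and forget the cached pointer.
        if (slots[slot]) {
            glBindBufferBase(GL_UNIFORM_BUFFER, slot, 0);
            slots[slot].reset();
        }
        return;
    }
    if (slots[slot] == buffer) {
        return; // already bound, skip the redundant GL call
    }
    glBindBufferRange(GL_UNIFORM_BUFFER, slot, glBuffer, rangeStart, rangeSize);
    slots[slot] = buffer;
}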
void GLBackend::releaseResourceTexture(int slot) {
auto& tex = _resource._textures[slot];
if (tex) {
auto* object = Backend::getGPUObject<GLBackend::GLTexture>(*tex);
if (object) {
GLuint to = object->_texture;
GLuint target = object->_target;
glActiveTexture(GL_TEXTURE0 + slot);
glBindTexture(target, 0); // RELEASE
(void) CHECK_GL_ERROR();
}
tex.reset();
}
}
void GLBackend::resetResourceStage() {
for (int i = 0; i < _resource._textures.size(); i++) {
releaseResourceTexture(i);
}
}
void GLBackend::do_setResourceTexture(Batch& batch, uint32 paramOffset) {
GLuint slot = batch._params[paramOffset + 1]._uint;
TexturePointer uniformTexture = batch._textures.get(batch._params[paramOffset + 0]._uint);
TexturePointer resourceTexture = batch._textures.get(batch._params[paramOffset + 0]._uint);
if (!uniformTexture) {
if (!resourceTexture) {
releaseResourceTexture(slot);
return;
}
// check cache before thinking
if (_resource._textures[slot] == resourceTexture) {
return;
}
GLTexture* object = GLBackend::syncGPUObject(*uniformTexture);
// Always make sure the GLObject is in sync
GLTexture* object = GLBackend::syncGPUObject(*resourceTexture);
if (object) {
GLuint to = object->_texture;
GLuint target = object->_target;
@ -205,7 +294,10 @@ void GLBackend::do_setResourceTexture(Batch& batch, uint32 paramOffset) {
(void) CHECK_GL_ERROR();
_resource._textures[slot] = resourceTexture;
} else {
releaseResourceTexture(slot);
return;
}
}

View file

@ -104,3 +104,7 @@ void GLBackend::do_getQuery(Batch& batch, uint32 paramOffset) {
(void)CHECK_GL_ERROR();
}
}
void GLBackend::resetQueryStage() {
}

View file

@ -482,9 +482,14 @@ void GLBackend::syncPipelineStateCache() {
State::Data state;
glEnable(GL_TEXTURE_CUBE_MAP_SEAMLESS);
// Point size is always on
glEnable(GL_PROGRAM_POINT_SIZE_EXT);
glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);
getCurrentGLState(state);
State::Signature signature = State::evalSignature(state);
_pipeline._stateCache = state;
_pipeline._stateSignatureCache = signature;
}

View file

@ -184,4 +184,6 @@ void GLBackend::updateTransform() {
_transform._invalidView = _transform._invalidProj = _transform._invalidModel = _transform._invalidViewport = false;
}
void GLBackend::resetTransformStage() {
}

View file

@ -164,7 +164,7 @@ const gpu::PipelinePointer& AmbientOcclusion::getBlendPipeline() {
// Blend on transparent
state->setBlendFunction(true,
gpu::State::SRC_COLOR, gpu::State::BLEND_OP_ADD, gpu::State::DEST_COLOR);
gpu::State::INV_SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::SRC_ALPHA);
// Good to go, add the brand new pipeline
_blendPipeline.reset(gpu::Pipeline::create(program, state));

View file

@ -1998,7 +1998,7 @@ void GeometryReader::run() {
} else if (_url.path().toLower().endsWith("palaceoforinthilian4.fbx")) {
lightmapLevel = 3.5f;
}
fbxgeo = readFBX(_reply, _mapping, grabLightmaps, lightmapLevel);
fbxgeo = readFBX(_reply, _mapping, _url.path(), grabLightmaps, lightmapLevel);
} else if (_url.path().toLower().endsWith(".obj")) {
fbxgeo = OBJReader().readOBJ(_reply, _mapping, &_url);
}

View file

@ -0,0 +1,86 @@
//
// HitEffect.cpp
// interface/src/renderer
//
// Created by Andrzej Kapolka on 7/14/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QOpenGLFramebufferObject, which includes an earlier version of OpenGL
#include <glm/gtc/random.hpp>
#include <PathUtils.h>
#include <SharedUtil.h>
#include "AbstractViewStateInterface.h"
#include "HitEffect.h"
#include "TextureCache.h"
#include "DependencyManager.h"
#include "ViewFrustum.h"
#include "GeometryCache.h"
#include <gpu/Context.h>
#include "hit_effect_vert.h"
#include "hit_effect_frag.h"
HitEffect::HitEffect() {
}
const gpu::PipelinePointer& HitEffect::getHitEffectPipeline() {
if (!_hitEffectPipeline) {
auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(hit_effect_vert)));
auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(hit_effect_frag)));
gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));
gpu::Shader::BindingSet slotBindings;
gpu::Shader::makeProgram(*program, slotBindings);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(false, false, gpu::LESS_EQUAL);
// Blend on transparent
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
// Good to go, add the brand new pipeline
_hitEffectPipeline.reset(gpu::Pipeline::create(program, state));
}
return _hitEffectPipeline;
}
void HitEffect::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext) {
assert(renderContext->args);
assert(renderContext->args->_viewFrustum);
RenderArgs* args = renderContext->args;
gpu::Batch batch;
glm::mat4 projMat;
Transform viewMat;
args->_viewFrustum->evalProjectionMatrix(projMat);
args->_viewFrustum->evalViewTransform(viewMat);
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
batch.setModelTransform(Transform());
batch.setPipeline(getHitEffectPipeline());
glm::vec4 color(0.0f, 0.0f, 0.0f, 1.0f);
glm::vec2 bottomLeft(-1.0f, -1.0f);
glm::vec2 topRight(1.0f, 1.0f);
DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, color);
// Ready to render
args->_context->render((batch));
}

View file

@ -0,0 +1,33 @@
//
// hitEffect.h
// hifi
//
// Created by Eric Levin on 7/17/15.
//
//
#ifndef hifi_hitEffect_h
#define hifi_hitEffect_h
#include <DependencyManager.h>
#include "render/DrawTask.h"
class AbstractViewStateInterface;
class ProgramObject;
class HitEffect {
public:
HitEffect();
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
typedef render::Job::Model<HitEffect> JobModel;
const gpu::PipelinePointer& getHitEffectPipeline();
private:
gpu::PipelinePointer _hitEffectPipeline;
};
#endif

View file

@ -1,3 +1,4 @@
//
// RenderDeferredTask.cpp
// render-utils/src/
@ -20,6 +21,7 @@
#include "FramebufferCache.h"
#include "DeferredLightingEffect.h"
#include "TextureCache.h"
#include "HitEffect.h"
#include "render/DrawStatus.h"
#include "AmbientOcclusionEffect.h"
@ -78,6 +80,7 @@ RenderDeferredTask::RenderDeferredTask() : Task() {
_jobs.push_back(Job(new DepthSortItems::JobModel("DepthSortOpaque", _jobs.back().getOutput())));
auto& renderedOpaques = _jobs.back().getOutput();
_jobs.push_back(Job(new DrawOpaqueDeferred::JobModel("DrawOpaqueDeferred", _jobs.back().getOutput())));
_jobs.push_back(Job(new DrawLight::JobModel("DrawLight")));
_jobs.push_back(Job(new RenderDeferred::JobModel("RenderDeferred")));
_jobs.push_back(Job(new ResolveDeferred::JobModel("ResolveDeferred")));
@ -105,8 +108,10 @@ RenderDeferredTask::RenderDeferredTask() : Task() {
_drawStatusJobIndex = _jobs.size() - 1;
_jobs.push_back(Job(new DrawOverlay3D::JobModel("DrawOverlay3D")));
_jobs.push_back(Job(new HitEffect::JobModel("HitEffect")));
_jobs.back().setEnabled(false);
_drawHitEffectJobIndex = _jobs.size() -1;
_jobs.push_back(Job(new ResetGLState::JobModel()));
// Give ourselves 3 frames of timer queries
_timerQueries.push_back(std::make_shared<gpu::Query>());
@ -133,6 +138,10 @@ void RenderDeferredTask::run(const SceneContextPointer& sceneContext, const Rend
// Make sure we turn the displayItemStatus on/off
setDrawItemStatus(renderContext->_drawItemStatus);
// Make sure we display the hit effect on screen, as requested from a script
setDrawHitEffect(renderContext->_drawHitEffect);
// TODO: turn on/off AO through menu item
setOcclusionStatus(renderContext->_occlusionStatus);

View file

@ -78,9 +78,13 @@ public:
render::Jobs _jobs;
int _drawStatusJobIndex = -1;
int _drawHitEffectJobIndex = -1;
void setDrawItemStatus(bool draw) { if (_drawStatusJobIndex >= 0) { _jobs[_drawStatusJobIndex].setEnabled(draw); } }
bool doDrawItemStatus() const { if (_drawStatusJobIndex >= 0) { return _jobs[_drawStatusJobIndex].isEnabled(); } else { return false; } }
void setDrawHitEffect(bool draw) { if (_drawHitEffectJobIndex >= 0) { _jobs[_drawHitEffectJobIndex].setEnabled(draw); } }
bool doDrawHitEffect() const { if (_drawHitEffectJobIndex >=0) { return _jobs[_drawHitEffectJobIndex].isEnabled(); } else { return false; } }
int _occlusionJobIndex = -1;

View file

@ -18,6 +18,9 @@
<$declareStandardTransform()$>
// Based on NVidia HBAO implementation in D3D11
// http://www.nvidia.co.uk/object/siggraph-2008-HBAO.html
varying vec2 varTexcoord;
uniform sampler2D depthTexture;
@ -30,80 +33,213 @@ uniform float g_intensity;
uniform float bufferWidth;
uniform float bufferHeight;
#define SAMPLE_COUNT 4
const float PI = 3.14159265;
float getRandom(vec2 uv) {
const vec2 FocalLen = vec2(1.0, 1.0);
const vec2 LinMAD = vec2(0.1-10.0, 0.1+10.0) / (2.0*0.1*10.0);
const vec2 AORes = vec2(1024.0, 768.0);
const vec2 InvAORes = vec2(1.0/1024.0, 1.0/768.0);
const vec2 NoiseScale = vec2(1024.0, 768.0) / 4.0;
const float AOStrength = 1.9;
const float R = 0.3;
const float R2 = 0.3*0.3;
const float NegInvR2 = - 1.0 / (0.3*0.3);
const float TanBias = tan(30.0 * PI / 180.0);
const float MaxRadiusPixels = 50.0;
const int NumDirections = 6;
const int NumSamples = 4;
float ViewSpaceZFromDepth(float d){
// [0,1] -> [-1,1] clip space
d = d * 2.0 - 1.0;
// Get view space Z
return -1.0 / (LinMAD.x * d + LinMAD.y);
}
vec3 UVToViewSpace(vec2 uv, float z){
//uv = UVToViewA * uv + UVToViewB;
return vec3(uv * z, z);
}
vec3 GetViewPos(vec2 uv){
float z = ViewSpaceZFromDepth(texture2D(depthTexture, uv).r);
return UVToViewSpace(uv, z);
}
vec3 GetViewPosPoint(ivec2 uv){
vec2 coord = vec2(gl_FragCoord.xy) + uv;
//float z = texelFetch(texture0, coord, 0).r;
float z = texture2D(depthTexture, uv).r;
return UVToViewSpace(uv, z);
}
float TanToSin(float x){
return x * inversesqrt(x*x + 1.0);
}
float InvLength(vec2 V){
return inversesqrt(dot(V,V));
}
float Tangent(vec3 V){
return V.z * InvLength(V.xy);
}
float BiasedTangent(vec3 V){
return V.z * InvLength(V.xy) + TanBias;
}
float Tangent(vec3 P, vec3 S){
return -(P.z - S.z) * InvLength(S.xy - P.xy);
}
float Length2(vec3 V){
return dot(V,V);
}
vec3 MinDiff(vec3 P, vec3 Pr, vec3 Pl){
vec3 V1 = Pr - P;
vec3 V2 = P - Pl;
return (Length2(V1) < Length2(V2)) ? V1 : V2;
}
vec2 SnapUVOffset(vec2 uv){
return round(uv * AORes) * InvAORes;
}
float Falloff(float d2){
return d2 * NegInvR2 + 1.0f;
}
float HorizonOcclusion( vec2 deltaUV, vec3 P, vec3 dPdu, vec3 dPdv, float randstep, float numSamples){
float ao = 0;
// Offset the first coord with some noise
vec2 uv = varTexcoord + SnapUVOffset(randstep*deltaUV);
deltaUV = SnapUVOffset( deltaUV );
// Calculate the tangent vector
vec3 T = deltaUV.x * dPdu + deltaUV.y * dPdv;
// Get the angle of the tangent vector from the viewspace axis
float tanH = BiasedTangent(T);
float sinH = TanToSin(tanH);
float tanS;
float d2;
vec3 S;
// Sample to find the maximum angle
for(float s = 1; s <= numSamples; ++s){
uv += deltaUV;
S = GetViewPos(uv);
tanS = Tangent(P, S);
d2 = Length2(S - P);
// Is the sample within the radius and the angle greater?
if(d2 < R2 && tanS > tanH)
{
float sinS = TanToSin(tanS);
// Apply falloff based on the distance
ao += Falloff(d2) * (sinS - sinH);
tanH = tanS;
sinH = sinS;
}
}
return ao;
}
vec2 RotateDirections(vec2 Dir, vec2 CosSin){
return vec2(Dir.x*CosSin.x - Dir.y*CosSin.y, Dir.x*CosSin.y + Dir.y*CosSin.x);
}
void ComputeSteps(inout vec2 stepSizeUv, inout float numSteps, float rayRadiusPix, float rand){
// Avoid oversampling if numSteps is greater than the kernel radius in pixels
numSteps = min(NumSamples, rayRadiusPix);
// Divide by Ns+1 so that the farthest samples are not fully attenuated
float stepSizePix = rayRadiusPix / (numSteps + 1);
// Clamp numSteps if it is greater than the max kernel footprint
float maxNumSteps = MaxRadiusPixels / stepSizePix;
if (maxNumSteps < numSteps)
{
// Use dithering to avoid AO discontinuities
numSteps = floor(maxNumSteps + rand);
numSteps = max(numSteps, 1);
stepSizePix = MaxRadiusPixels / numSteps;
}
// Step size in uv space
stepSizeUv = stepSizePix * InvAORes;
}
float getRandom(vec2 uv){
return fract(sin(dot(uv.xy ,vec2(12.9898,78.233))) * 43758.5453);
}
void main(void) {
vec3 sampleKernel[4] = { vec3(0.2, 0.0, 0.0),
vec3(0.0, 0.2, 0.0),
vec3(0.0, 0.0, 0.2),
vec3(0.2, 0.2, 0.2) };
void main(void){
float numDirections = NumDirections;
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
vec3 P, Pr, Pl, Pt, Pb;
P = GetViewPos(varTexcoord);
vec3 eyeDir = vec3(0.0, 0.0, -3.0);
vec3 cameraPositionWorldSpace;
<$transformEyeToWorldDir(cam, eyeDir, cameraPositionWorldSpace)$>
// Sample neighboring pixels
Pr = GetViewPos(varTexcoord + vec2( InvAORes.x, 0));
Pl = GetViewPos(varTexcoord + vec2(-InvAORes.x, 0));
Pt = GetViewPos(varTexcoord + vec2( 0, InvAORes.y));
Pb = GetViewPos(varTexcoord + vec2( 0,-InvAORes.y));
vec4 depthColor = texture2D(depthTexture, varTexcoord);
// Calculate tangent basis vectors using the minimum difference
vec3 dPdu = MinDiff(P, Pr, Pl);
vec3 dPdv = MinDiff(P, Pt, Pb) * (AORes.y * InvAORes.x);
// z in non linear range [0,1]
float depthVal = depthColor.r;
// conversion into NDC [-1,1]
float zNDC = depthVal * 2.0 - 1.0;
float n = 1.0; // the near plane
float f = 30.0; // the far plane
float l = -1.0; // left
float r = 1.0; // right
float b = -1.0; // bottom
float t = 1.0; // top
// conversion into eye space
float zEye = 2*f*n / (zNDC*(f-n)-(f+n));
// Converting from pixel coordinates to NDC
float xNDC = gl_FragCoord.x/bufferWidth * 2.0 - 1.0;
float yNDC = gl_FragCoord.y/bufferHeight * 2.0 - 1.0;
// Unprojecting X and Y from NDC to eye space
float xEye = -zEye*(xNDC*(r-l)+(r+l))/(2.0*n);
float yEye = -zEye*(yNDC*(t-b)+(t+b))/(2.0*n);
vec3 currentFragEyeSpace = vec3(xEye, yEye, zEye);
vec3 currentFragWorldSpace;
<$transformEyeToWorldDir(cam, currentFragEyeSpace, currentFragWorldSpace)$>
vec3 cameraToPositionRay = normalize(currentFragWorldSpace - cameraPositionWorldSpace);
vec3 origin = cameraToPositionRay * depthVal + cameraPositionWorldSpace;
vec3 normal = normalize(texture2D(normalTexture, varTexcoord).xyz);
//normal = normalize(normal * normalMatrix);
vec3 rvec = vec3(getRandom(varTexcoord.xy), getRandom(varTexcoord.yx), getRandom(varTexcoord.xx)) * 2.0 - 1.0;
vec3 tangent = normalize(rvec - normal * dot(rvec, normal));
vec3 bitangent = cross(normal, tangent);
mat3 tbn = mat3(tangent, bitangent, normal);
float occlusion = 0.0;
for (int i = 0; i < SAMPLE_COUNT; ++i) {
vec3 samplePos = origin + (tbn * sampleKernel[i]) * g_sample_rad;
vec4 offset = cam._projectionViewUntranslated * vec4(samplePos, 1.0);
offset.xy = (offset.xy / offset.w) * 0.5 + 0.5;
float depth = length(samplePos - cameraPositionWorldSpace);
float sampleDepthVal = texture2D(depthTexture, offset.xy).r;
float rangeDelta = abs(depthVal - sampleDepthVal);
float rangeCheck = smoothstep(0.0, 1.0, g_sample_rad / rangeDelta);
occlusion += rangeCheck * step(sampleDepthVal, depth);
}
occlusion = 1.0 - occlusion / float(SAMPLE_COUNT);
occlusion = clamp(pow(occlusion, g_intensity), 0.0, 1.0);
gl_FragColor = vec4(vec3(occlusion), 1.0);
}
// Get the random samples from the noise function
vec3 random = vec3(getRandom(varTexcoord.xy), getRandom(varTexcoord.yx), getRandom(varTexcoord.xx));
// Calculate the projected size of the hemisphere
vec2 rayRadiusUV = 0.5 * R * FocalLen / -P.z;
float rayRadiusPix = rayRadiusUV.x * AORes.x;
float ao = 1.0;
// Make sure the radius of the evaluated hemisphere is more than a pixel
if(rayRadiusPix > 1.0){
ao = 0.0;
float numSteps;
vec2 stepSizeUV;
// Compute the number of steps
ComputeSteps(stepSizeUV, numSteps, rayRadiusPix, random.z);
float alpha = 2.0 * PI / numDirections;
// Calculate the horizon occlusion of each direction
for(float d = 0; d < numDirections; ++d){
float theta = alpha * d;
// Apply noise to the direction
vec2 dir = RotateDirections(vec2(cos(theta), sin(theta)), random.xy);
vec2 deltaUV = dir * stepSizeUV;
// Sample the pixels along the direction
ao += HorizonOcclusion( deltaUV,
P,
dPdu,
dPdv,
random.z,
numSteps);
}
// Average the results and produce the final AO
ao = 1.0 - ao / numDirections * AOStrength;
}
gl_FragColor = vec4(vec3(ao), 1.0);
}
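The new shader linearizes depth through ViewSpaceZFromDepth, with the near and far planes (0.1 and 10.0) baked into the LinMAD constant; this is the same conversion the removed code spelled out inline as zEye = 2*f*n / (zNDC*(f-n)-(f+n)). A small C++ helper, useful as a cross-check against those constants (a sketch, not part of the shader):

// Eye-space Z from a [0,1] depth-buffer value for a standard perspective projection.
// Mirrors ViewSpaceZFromDepth with LinMAD = vec2(n - f, n + f) / (2 * n * f).
float viewSpaceZFromDepth(float d, float n = 0.1f, float f = 10.0f) {
    float zNDC = d * 2.0f - 1.0f;                       // [0,1] -> [-1,1] clip space
    return (2.0f * f * n) / (zNDC * (f - n) - (f + n)); // negative in front of the camera
}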

View file

@ -0,0 +1,24 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// hit_effect.frag
// fragment shader
//
// Created by Eric Levin on 7/20/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredBufferWrite.slh@>
varying vec2 varQuadPosition;
void main(void) {
vec2 center = vec2(0.0, 0.0);
float distFromCenter = distance( vec2(0.0, 0.0), varQuadPosition);
float alpha = mix(0.0, 0.5, pow(distFromCenter,5.));
gl_FragColor = vec4(1.0, 0.0, 0.0, alpha);
}

View file

@ -0,0 +1,24 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// hit_effect.vert
// vertex shader
//
// Created by Eric Levin on 7/20/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
varying vec2 varQuadPosition;
void main(void) {
varQuadPosition = gl_Vertex.xy;
gl_Position = gl_Vertex;
}

View file

@ -21,9 +21,6 @@ uniform sampler2D blurredOcclusionTexture;
void main(void) {
vec4 occlusionColor = texture2D(blurredOcclusionTexture, varTexcoord);
if(occlusionColor.r > 0.8 && occlusionColor.r <= 1.0) {
gl_FragColor = vec4(vec3(0.0), 0.0);
} else {
gl_FragColor = vec4(vec3(occlusionColor.r), 1.0);
}
gl_FragColor = vec4(vec3(0.0), occlusionColor.r);
}

View file

@ -216,46 +216,6 @@ void render::renderItems(const SceneContextPointer& sceneContext, const RenderCo
}
}
void addClearStateCommands(gpu::Batch& batch) {
batch._glDepthMask(true);
batch._glDepthFunc(GL_LESS);
batch._glDisable(GL_CULL_FACE);
batch._glActiveTexture(GL_TEXTURE0 + 1);
batch._glBindTexture(GL_TEXTURE_2D, 0);
batch._glActiveTexture(GL_TEXTURE0 + 2);
batch._glBindTexture(GL_TEXTURE_2D, 0);
batch._glActiveTexture(GL_TEXTURE0 + 3);
batch._glBindTexture(GL_TEXTURE_2D, 0);
batch._glActiveTexture(GL_TEXTURE0);
batch._glBindTexture(GL_TEXTURE_2D, 0);
// deactivate vertex arrays after drawing
batch._glDisableClientState(GL_NORMAL_ARRAY);
batch._glDisableClientState(GL_VERTEX_ARRAY);
batch._glDisableClientState(GL_TEXTURE_COORD_ARRAY);
batch._glDisableClientState(GL_COLOR_ARRAY);
batch._glDisableVertexAttribArray(gpu::Stream::TANGENT);
batch._glDisableVertexAttribArray(gpu::Stream::SKIN_CLUSTER_INDEX);
batch._glDisableVertexAttribArray(gpu::Stream::SKIN_CLUSTER_WEIGHT);
// bind with 0 to switch back to normal operation
batch._glBindBuffer(GL_ARRAY_BUFFER, 0);
batch._glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
batch._glBindTexture(GL_TEXTURE_2D, 0);
// Back to no program
batch._glUseProgram(0);
}
void ResetGLState::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
gpu::Batch theBatch;
addClearStateCommands(theBatch);
assert(renderContext->args);
renderContext->args->_context->render(theBatch);
}
void DrawLight::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
assert(renderContext->args);
assert(renderContext->args->_viewFrustum);

View file

@ -260,14 +260,6 @@ public:
typedef Job::Model<DrawBackground> JobModel;
};
class ResetGLState {
public:
void run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext);
typedef Job::Model<ResetGLState> JobModel;
};
class DrawSceneTask : public Task {
public:

View file

@ -50,6 +50,7 @@ public:
int _maxDrawnOverlay3DItems = -1;
bool _drawItemStatus = false;
bool _drawHitEffect = false;
bool _occlusionStatus = false;

View file

@ -109,6 +109,9 @@ public:
Q_INVOKABLE void setEngineDisplayItemStatus(bool display) { _drawItemStatus = display; }
Q_INVOKABLE bool doEngineDisplayItemStatus() { return _drawItemStatus; }
Q_INVOKABLE void setEngineDisplayHitEffect(bool display) { _drawHitEffect = display; }
Q_INVOKABLE bool doEngineDisplayHitEffect() { return _drawHitEffect; }
signals:
void shouldRenderAvatarsChanged(bool shouldRenderAvatars);
@ -141,6 +144,8 @@ protected:
int _maxDrawnOverlay3DItems = -1;
bool _drawItemStatus = false;
bool _drawHitEffect = false;
};

View file

@ -81,154 +81,6 @@ class MenuConstants : public QObject{
public:
enum Item {
AboutApp,
AddRemoveFriends,
AddressBar,
AlignForearmsWithWrists,
AlternateIK,
Animations,
Atmosphere,
Attachments,
AudioNoiseReduction,
AudioScope,
AudioScopeFiftyFrames,
AudioScopeFiveFrames,
AudioScopeFrames,
AudioScopePause,
AudioScopeTwentyFrames,
AudioStats,
AudioStatsShowInjectedStreams,
BandwidthDetails,
BlueSpeechSphere,
BookmarkLocation,
Bookmarks,
CascadedShadows,
CachesSize,
Chat,
Collisions,
Console,
ControlWithSpeech,
CopyAddress,
CopyPath,
DebugAmbientOcclusion,
DecreaseAvatarSize,
DeleteBookmark,
DisableActivityLogger,
DisableLightEntities,
DisableNackPackets,
DiskCacheEditor,
DisplayHands,
DisplayHandTargets,
DisplayModelBounds,
DisplayModelTriangles,
DisplayModelElementChildProxies,
DisplayModelElementProxy,
DisplayDebugTimingDetails,
DontDoPrecisionPicking,
DontRenderEntitiesAsScene,
EchoLocalAudio,
EchoServerAudio,
EditEntitiesHelp,
Enable3DTVMode,
EnableCharacterController,
EnableVRMode,
ExpandMyAvatarSimulateTiming,
ExpandMyAvatarTiming,
ExpandOtherAvatarTiming,
ExpandPaintGLTiming,
ExpandUpdateTiming,
Faceshift,
FilterSixense,
FirstPerson,
FrameTimer,
Fullscreen,
FullscreenMirror,
GlowWhenSpeaking,
NamesAboveHeads,
GoToUser,
HandMouseInput,
HMDTools,
IncreaseAvatarSize,
KeyboardMotorControl,
LeapMotionOnHMD,
LoadScript,
LoadScriptURL,
LoadRSSDKFile,
LodTools,
Login,
Log,
LowVelocityFilter,
Mirror,
MuteAudio,
MuteEnvironment,
MuteFaceTracking,
NoFaceTracking,
NoShadows,
OctreeStats,
OffAxisProjection,
OnlyDisplayTopTen,
PackageModel,
Pair,
PipelineWarnings,
Preferences,
Quit,
ReloadAllScripts,
RenderBoundingCollisionShapes,
RenderFocusIndicator,
RenderHeadCollisionShapes,
RenderLookAtVectors,
RenderSkeletonCollisionShapes,
RenderTargetFramerate,
RenderTargetFramerateUnlimited,
RenderTargetFramerate60,
RenderTargetFramerate50,
RenderTargetFramerate40,
RenderTargetFramerate30,
RenderTargetFramerateVSyncOn,
RenderResolution,
RenderResolutionOne,
RenderResolutionTwoThird,
RenderResolutionHalf,
RenderResolutionThird,
RenderResolutionQuarter,
RenderAmbientLight,
RenderAmbientLightGlobal,
RenderAmbientLight0,
RenderAmbientLight1,
RenderAmbientLight2,
RenderAmbientLight3,
RenderAmbientLight4,
RenderAmbientLight5,
RenderAmbientLight6,
RenderAmbientLight7,
RenderAmbientLight8,
RenderAmbientLight9,
ResetAvatarSize,
ResetSensors,
RunningScripts,
RunTimingTests,
ScriptEditor,
ScriptedMotorControl,
ShowBordersEntityNodes,
ShowIKConstraints,
SimpleShadows,
ShiftHipsForIdleAnimations,
Stars,
Stats,
StereoAudio,
StopAllScripts,
SuppressShortTimings,
TestPing,
ToolWindow,
TransmitterDrive,
TurnWithHead,
UseAudioForMouth,
UseCamera,
VelocityFilter,
VisibleToEveryone,
VisibleToFriends,
VisibleToNoOne,
Wireframe,
};
public:
@ -329,7 +181,6 @@ public:
MessageDialog::registerType();
VrMenu::registerType();
InfoView::registerType();
qmlRegisterType<MenuConstants>("Hifi", 1, 0, "MenuConstants");
auto offscreenUi = DependencyManager::get<OffscreenUi>();

View file

@ -38,7 +38,7 @@ bool vhacd::VHACDUtil::loadFBX(const QString filename, FBXGeometry& result) {
if (filename.toLower().endsWith(".obj")) {
result = OBJReader().readOBJ(fbxContents, QVariantHash());
} else if (filename.toLower().endsWith(".fbx")) {
result = readFBX(fbxContents, QVariantHash());
result = readFBX(fbxContents, QVariantHash(), filename);
} else {
qDebug() << "unknown file extension";
return false;