resolve conflicts on merge with upstream/master

This commit is contained in:
Stephen Birarda 2015-07-31 17:27:23 -07:00
commit 41c6220731
140 changed files with 7573 additions and 1787 deletions

View file

@ -70,7 +70,7 @@ void Agent::handleOctreePacket(QSharedPointer<NLPacket> packet, SharedNodePointe
// pull out the piggybacked packet and create a new QSharedPointer<NLPacket> for it
int piggyBackedSizeWithHeader = packet->getPayloadSize() - statsMessageLength;
std::unique_ptr<char> buffer = std::unique_ptr<char>(new char[piggyBackedSizeWithHeader]);
auto buffer = std::unique_ptr<char[]>(new char[piggyBackedSizeWithHeader]);
memcpy(buffer.get(), packet->getPayload() + statsMessageLength, piggyBackedSizeWithHeader);
auto newPacket = NLPacket::fromReceivedPacket(std::move(buffer), piggyBackedSizeWithHeader, packet->getSenderSockAddr());
@ -107,6 +107,7 @@ void Agent::handleAudioPacket(QSharedPointer<NLPacket> packet) {
}
const QString AGENT_LOGGING_NAME = "agent";
const int PING_INTERVAL = 1000;
void Agent::run() {
ThreadedAssignment::commonInit(AGENT_LOGGING_NAME, NodeType::Agent);
@ -118,6 +119,10 @@ void Agent::run() {
<< NodeType::EntityServer
);
_pingTimer = new QTimer(this);
connect(_pingTimer, SIGNAL(timeout()), SLOT(sendPingRequests()));
_pingTimer->start(PING_INTERVAL);
// figure out the URL for the script for this agent assignment
QUrl scriptURL;
if (_payload.isEmpty()) {
@ -193,7 +198,27 @@ void Agent::run() {
// Shutdown hook for this assignment: stop script execution and the ping
// timer, then detach the entity tree before it is destroyed.
void Agent::aboutToFinish() {
    _scriptEngine.stop();

    // Stop the periodic pings (timer is created in Agent::run) and free it.
    _pingTimer->stop();
    delete _pingTimer;

    // our entity tree is going to go away so tell that to the EntityScriptingInterface
    DependencyManager::get<EntityScriptingInterface>()->setEntityTree(NULL);
}
void Agent::sendPingRequests() {
auto nodeList = DependencyManager::get<NodeList>();
nodeList->eachMatchingNode([](const SharedNodePointer& node)->bool {
switch (node->getType()) {
case NodeType::AvatarMixer:
case NodeType::AudioMixer:
case NodeType::EntityServer:
return true;
default:
return false;
}
}, [nodeList](const SharedNodePointer& node) {
nodeList->sendPacket(nodeList->constructPingPacket(), *node);
});
}

View file

@ -58,11 +58,13 @@ private slots:
void handleAudioPacket(QSharedPointer<NLPacket> packet);
void handleOctreePacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleJurisdictionPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void sendPingRequests();
private:
ScriptEngine _scriptEngine;
EntityEditPacketSender _entityEditSender;
EntityTreeHeadlessViewer _entityViewer;
QTimer* _pingTimer;
MixedAudioStream _receivedAudioStream;
float _lastReceivedAudioLoudness;

View file

@ -924,9 +924,9 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
const QString USE_STDEV_FOR_DESIRED_CALC_JSON_KEY = "use_stdev_for_desired_calc";
_streamSettings._useStDevForJitterCalc = audioBufferGroupObject[USE_STDEV_FOR_DESIRED_CALC_JSON_KEY].toBool();
if (_streamSettings._useStDevForJitterCalc) {
qDebug() << "Using Philip's stdev method for jitter calc if dynamic jitter buffers enabled";
qDebug() << "Using stdev method for jitter calc if dynamic jitter buffers enabled";
} else {
qDebug() << "Using Fred's max-gap method for jitter calc if dynamic jitter buffers enabled";
qDebug() << "Using max-gap method for jitter calc if dynamic jitter buffers enabled";
}
const QString WINDOW_STARVE_THRESHOLD_JSON_KEY = "window_starve_threshold";

View file

@ -38,5 +38,19 @@ if (WIN32)
find_package_handle_standard_args(GLEW DEFAULT_MSG GLEW_INCLUDE_DIRS GLEW_LIBRARIES GLEW_DLL_PATH)
add_paths_to_fixup_libs(${GLEW_DLL_PATH})
elseif (APPLE)
else ()
find_path(GLEW_INCLUDE_DIR GL/glew.h)
find_library(GLEW_LIBRARY NAMES GLEW glew32 glew glew32s PATH_SUFFIXES lib64)
set(GLEW_INCLUDE_DIRS ${GLEW_INCLUDE_DIR})
set(GLEW_LIBRARIES ${GLEW_LIBRARY})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GLEW
REQUIRED_VARS GLEW_INCLUDE_DIR GLEW_LIBRARY)
mark_as_advanced(GLEW_INCLUDE_DIR GLEW_LIBRARY)
endif ()

View file

@ -0,0 +1,38 @@
#
# FindconnexionClient.cmake
#
# Once done this will define
#
# 3DCONNEXIONCLIENT_INCLUDE_DIRS
#
# Created on 10/06/2015 by Marcel Verhagen
# Copyright 2015 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
# setup hints for 3DCONNEXIONCLIENT search
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("connexionclient")
if (APPLE)
    # On OS X the 3DconnexionClient framework carries both the headers and the
    # library, so one find_library result serves for both variables. The
    # framework is linked with -weak_framework so it is weakly linked.
    find_library(3DconnexionClient 3DconnexionClient)
    if(EXISTS ${3DconnexionClient})
        set(CONNEXIONCLIENT_FOUND true)
        set(CONNEXIONCLIENT_INCLUDE_DIR ${3DconnexionClient})
        set(CONNEXIONCLIENT_LIBRARY ${3DconnexionClient})
        set_target_properties(${TARGET_NAME} PROPERTIES LINK_FLAGS "-weak_framework 3DconnexionClient")
        message(STATUS "Found 3Dconnexion")
        mark_as_advanced(CONNEXIONCLIENT_INCLUDE_DIR CONNEXIONCLIENT_LIBRARY)
    endif()
endif()

if (WIN32)
    # On Windows only the SDK headers are located, using the search dirs set
    # up by hifi_library_search_hints above.
    find_path(CONNEXIONCLIENT_INCLUDE_DIRS I3dMouseParams.h PATH_SUFFIXES Inc HINTS ${CONNEXIONCLIENT_SEARCH_DIRS})
    include(FindPackageHandleStandardArgs)
    find_package_handle_standard_args(connexionClient DEFAULT_MSG CONNEXIONCLIENT_INCLUDE_DIRS)
    mark_as_advanced(CONNEXIONCLIENT_INCLUDE_DIRS CONNEXIONCLIENT_SEARCH_DIRS)
endif()

100
examples/afk.js Normal file
View file

@ -0,0 +1,100 @@
//
// #20485: AFK - Away From Keyboard Setting
// *****************************************
//
// Created by Kevin M. Thomas and Thoys 07/16/15.
// Copyright 2015 High Fidelity, Inc.
// kevintown.net
//
// JavaScript for the High Fidelity interface that adds away-from-keyboard (AFK) functionality: a UI button and a key-press handler that toggle microphone mute and DDE face tracking, and put the avatar into a hand-raised pose.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var originalOutputDevice;
var originalName;
var muted = false;
var wasAudioEnabled;
var afkText = "AFK - I Will Return!\n";
// Set up toggleMuteButton text overlay.
var toggleMuteButton = Overlays.addOverlay("text", {
x: 10,
y: 275,
width: 60,
height: 28,
backgroundColor: { red: 0, green: 0, blue: 0},
color: { red: 255, green: 255, blue: 0},
font: {size: 15},
topMargin: 8
});
// Refresh the toggle button's label to match the current AFK state.
function onMuteStateChanged() {
    var label = muted ? "Go Live" : "Go AFK";
    Overlays.editOverlay(toggleMuteButton, {text: label, leftMargin: 5});
}
// Toggle between "live" and AFK states.
//
// Entering AFK: mute the microphone if needed, remember the current output
// device and display name, check the "Mute Face Tracking" menu option,
// silence audio output, prefix the display name with the AFK banner, and
// rotate both shoulder joints into a hand-raise pose.
//
// Leaving AFK: unmute, restore the saved output device and display name,
// uncheck face-tracking mute, and clear the shoulder joint overrides.
//
// NOTE(review): statement order matters — the original output device and
// display name are captured before they are overwritten.
function toggleMute() {
    if (!muted) {
        if (!AudioDevice.getMuted()) {
            AudioDevice.toggleMute();
        }
        // Save current state so it can be restored when leaving AFK.
        originalOutputDevice = AudioDevice.getOutputDevice();
        Menu.setIsOptionChecked("Mute Face Tracking", true);
        originalName = MyAvatar.displayName;
        AudioDevice.setOutputDevice("none");
        MyAvatar.displayName = afkText + MyAvatar.displayName;
        MyAvatar.setJointData("LeftShoulder", Quat.fromPitchYawRollDegrees(0, 180, 0));
        MyAvatar.setJointData("RightShoulder", Quat.fromPitchYawRollDegrees(0, 180, 0));
    } else {
        if (AudioDevice.getMuted()) {
            AudioDevice.toggleMute();
        }
        AudioDevice.setOutputDevice(originalOutputDevice);
        Menu.setIsOptionChecked("Mute Face Tracking", false);
        // Reset the pose, then clear the joint overrides entirely.
        MyAvatar.setJointData("LeftShoulder", Quat.fromPitchYawRollDegrees(0, 0, 0));
        MyAvatar.setJointData("RightShoulder", Quat.fromPitchYawRollDegrees(0, 0, 0));
        MyAvatar.clearJointData("LeftShoulder");
        MyAvatar.clearJointData("RightShoulder");
        MyAvatar.displayName = originalName;
    }
    muted = !muted;
    onMuteStateChanged();
}
// Toggle the AFK state when the on-screen button is clicked.
function mousePressEvent(event) {
    var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
    if (clickedOverlay == toggleMuteButton) {
        toggleMute();
    }
}
// Call functions.
onMuteStateChanged();
//AudioDevice.muteToggled.connect(onMuteStateChanged);
Controller.mousePressEvent.connect(mousePressEvent);
// Function that adds keyPressEvent functionality to toggle mic mute, AFK message above display name and toggle avatar arms upward.
Controller.keyPressEvent.connect(function(event) {
if (event.text == "y") {
toggleMute();
}
});
// Function that sets a timeout value of 1 second so that the display name does not get overwritten in the event of a crash.
Script.setTimeout(function() {
MyAvatar.displayName = MyAvatar.displayName.replace(afkText, "");
}, 1000);
// On script exit: if still AFK, restore the original audio output device and
// display name; always remove the UI overlay exactly once. (Previously the
// overlay was deleted a second, redundant time inside the if-branch.)
Script.scriptEnding.connect(function() {
    if (muted) {
        AudioDevice.setOutputDevice(originalOutputDevice);
        MyAvatar.displayName = originalName;
    }
    Overlays.deleteOverlay(toggleMuteButton);
});

View file

@ -291,22 +291,18 @@ var toolBar = (function () {
var RESIZE_TIMEOUT = 120000; // 2 minutes
var RESIZE_MAX_CHECKS = RESIZE_TIMEOUT / RESIZE_INTERVAL;
function addModel(url) {
var position;
var entityID = createNewEntity({
type: "Model",
dimensions: DEFAULT_DIMENSIONS,
modelURL: url
}, false);
position = Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE));
if (position.x > 0 && position.y > 0 && position.z > 0) {
var entityId = Entities.addEntity({
type: "Model",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: DEFAULT_DIMENSIONS,
modelURL: url
});
if (entityID) {
print("Model added: " + url);
var checkCount = 0;
function resize() {
var entityProperties = Entities.getEntityProperties(entityId);
var entityProperties = Entities.getEntityProperties(entityID);
var naturalDimensions = entityProperties.naturalDimensions;
checkCount++;
@ -318,21 +314,41 @@ var toolBar = (function () {
print("Resize failed: timed out waiting for model (" + url + ") to load");
}
} else {
Entities.editEntity(entityId, { dimensions: naturalDimensions });
Entities.editEntity(entityID, { dimensions: naturalDimensions });
// Reset selection so that the selection overlays will be updated
selectionManager.setSelections([entityId]);
selectionManager.setSelections([entityID]);
}
}
selectionManager.setSelections([entityId]);
selectionManager.setSelections([entityID]);
Script.setTimeout(resize, RESIZE_INTERVAL);
} else {
print("Can't add model: Model would be out of bounds.");
}
}
function createNewEntity(properties, dragOnCreate) {
// Default to true if not passed in
dragOnCreate = dragOnCreate == undefined ? true : dragOnCreate;
var dimensions = properties.dimensions ? properties.dimensions : DEFAULT_DIMENSIONS;
var position = getPositionToCreateEntity();
var entityID = null;
if (position != null) {
position = grid.snapToSurface(grid.snapToGrid(position, false, dimensions), dimensions),
properties.position = position;
entityID = Entities.addEntity(properties);
if (dragOnCreate) {
placingEntityID = entityID;
}
} else {
Window.alert("Can't create " + properties.type + ": " + properties.type + " would be out of bounds.");
}
return entityID;
}
var newModelButtonDown = false;
var browseMarketplaceButtonDown = false;
that.mousePressEvent = function (event) {
@ -363,127 +379,82 @@ var toolBar = (function () {
}
if (newCubeButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Box",
dimensions: DEFAULT_DIMENSIONS,
color: { red: 255, green: 0, blue: 0 }
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Box",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: DEFAULT_DIMENSIONS,
color: { red: 255, green: 0, blue: 0 }
});
} else {
print("Can't create box: Box would be out of bounds.");
}
return true;
}
if (newSphereButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Sphere",
dimensions: DEFAULT_DIMENSIONS,
color: { red: 255, green: 0, blue: 0 }
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Sphere",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: DEFAULT_DIMENSIONS,
color: { red: 255, green: 0, blue: 0 }
});
} else {
print("Can't create sphere: Sphere would be out of bounds.");
}
return true;
}
if (newLightButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Light",
dimensions: DEFAULT_LIGHT_DIMENSIONS,
isSpotlight: false,
color: { red: 150, green: 150, blue: 150 },
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Light",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_LIGHT_DIMENSIONS), DEFAULT_LIGHT_DIMENSIONS),
dimensions: DEFAULT_LIGHT_DIMENSIONS,
isSpotlight: false,
color: { red: 150, green: 150, blue: 150 },
constantAttenuation: 1,
linearAttenuation: 0,
quadraticAttenuation: 0,
exponent: 0,
cutoff: 180, // in degrees
});
constantAttenuation: 1,
linearAttenuation: 0,
quadraticAttenuation: 0,
exponent: 0,
cutoff: 180, // in degrees
});
} else {
print("Can't create Light: Light would be out of bounds.");
}
return true;
}
if (newTextButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Text",
dimensions: { x: 0.65, y: 0.3, z: 0.01 },
backgroundColor: { red: 64, green: 64, blue: 64 },
textColor: { red: 255, green: 255, blue: 255 },
text: "some text",
lineHeight: 0.06
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Text",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: { x: 0.65, y: 0.3, z: 0.01 },
backgroundColor: { red: 64, green: 64, blue: 64 },
textColor: { red: 255, green: 255, blue: 255 },
text: "some text",
lineHeight: 0.06
});
} else {
print("Can't create box: Text would be out of bounds.");
}
return true;
}
if (newWebButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Web",
dimensions: { x: 1.6, y: 0.9, z: 0.01 },
sourceUrl: "https://highfidelity.com/",
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Web",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: { x: 1.6, y: 0.9, z: 0.01 },
sourceUrl: "https://highfidelity.com/",
});
} else {
print("Can't create Web Entity: would be out of bounds.");
}
return true;
}
if (newZoneButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Zone",
dimensions: { x: 10, y: 10, z: 10 },
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Zone",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: { x: 10, y: 10, z: 10 },
});
} else {
print("Can't create box: Text would be out of bounds.");
}
return true;
}
if (newPolyVoxButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "PolyVox",
dimensions: { x: 10, y: 10, z: 10 },
voxelVolumeSize: {x:16, y:16, z:16},
voxelSurfaceStyle: 1
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "PolyVox",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS),
DEFAULT_DIMENSIONS),
dimensions: { x: 10, y: 10, z: 10 },
voxelVolumeSize: {x:16, y:16, z:16},
voxelSurfaceStyle: 1
});
} else {
print("Can't create PolyVox: would be out of bounds.");
}
return true;
}
@ -666,7 +637,7 @@ function handleIdleMouse() {
idleMouseTimerId = null;
if (isActive) {
highlightEntityUnderCursor(lastMousePosition, true);
}
}
}
function highlightEntityUnderCursor(position, accurateRay) {
@ -837,15 +808,15 @@ function setupModelMenus() {
}
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Entity List...", shortcutKey: "CTRL+META+L", afterItem: "Models" });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Large Models", shortcutKey: "CTRL+META+L",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Large Models", shortcutKey: "CTRL+META+L",
afterItem: "Entity List...", isCheckable: true, isChecked: true });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Small Models", shortcutKey: "CTRL+META+S",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Small Models", shortcutKey: "CTRL+META+S",
afterItem: "Allow Selecting of Large Models", isCheckable: true, isChecked: true });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Lights", shortcutKey: "CTRL+SHIFT+META+L",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Lights", shortcutKey: "CTRL+SHIFT+META+L",
afterItem: "Allow Selecting of Small Models", isCheckable: true });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Select All Entities In Box", shortcutKey: "CTRL+SHIFT+META+A",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Select All Entities In Box", shortcutKey: "CTRL+SHIFT+META+A",
afterItem: "Allow Selecting of Lights" });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Select All Entities Touching Box", shortcutKey: "CTRL+SHIFT+META+T",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Select All Entities Touching Box", shortcutKey: "CTRL+SHIFT+META+T",
afterItem: "Select All Entities In Box" });
Menu.addMenuItem({ menuName: "File", menuItemName: "Models", isSeparator: true, beforeItem: "Settings" });
@ -962,7 +933,7 @@ function selectAllEtitiesInCurrentSelectionBox(keepIfTouching) {
entities.splice(i, 1);
--i;
}
}
}
}
selectionManager.setSelections(entities);
}
@ -1038,17 +1009,29 @@ function handeMenuEvent(menuItem) {
tooltip.show(false);
}
// This function tries to find a reasonable position to place a new entity based on the camera
// position. If a reasonable position within the world bounds can't be found, `null` will
// be returned. The returned position will also take into account grid snapping settings.
function getPositionToCreateEntity() {
var distance = cameraManager.enabled ? cameraManager.zoomDistance : DEFAULT_ENTITY_DRAG_DROP_DISTANCE;
var direction = Quat.getFront(Camera.orientation);
var offset = Vec3.multiply(distance, direction);
var position = Vec3.sum(Camera.position, offset);
var placementPosition = Vec3.sum(Camera.position, offset);
position.x = Math.max(0, position.x);
position.y = Math.max(0, position.y);
position.z = Math.max(0, position.z);
var cameraPosition = Camera.position;
return position;
var cameraOutOfBounds = cameraPosition.x < 0 || cameraPosition.y < 0 || cameraPosition.z < 0;
var placementOutOfBounds = placementPosition.x < 0 || placementPosition.y < 0 || placementPosition.z < 0;
if (cameraOutOfBounds && placementOutOfBounds) {
return null;
}
placementPosition.x = Math.max(0, placementPosition.x);
placementPosition.y = Math.max(0, placementPosition.y);
placementPosition.z = Math.max(0, placementPosition.z);
return placementPosition;
}
function importSVO(importURL) {
@ -1064,17 +1047,21 @@ function importSVO(importURL) {
if (success) {
var VERY_LARGE = 10000;
var position = { x: 0, y: 0, z: 0};
var position = { x: 0, y: 0, z: 0 };
if (Clipboard.getClipboardContentsLargestDimension() < VERY_LARGE) {
position = getPositionToCreateEntity();
}
var pastedEntityIDs = Clipboard.pasteEntities(position);
if (position != null) {
var pastedEntityIDs = Clipboard.pasteEntities(position);
if (isActive) {
selectionManager.setSelections(pastedEntityIDs);
if (isActive) {
selectionManager.setSelections(pastedEntityIDs);
}
Window.raiseMainWindow();
} else {
Window.alert("Can't import objects: objects would be out of bounds.");
}
Window.raiseMainWindow();
} else {
Window.alert("There was an error importing the entity file.");
}
@ -1261,7 +1248,7 @@ PropertiesTool = function(opts) {
if (data.properties.keyLightDirection !== undefined) {
data.properties.keyLightDirection = Vec3.fromPolar(
data.properties.keyLightDirection.x * DEGREES_TO_RADIANS, data.properties.keyLightDirection.y * DEGREES_TO_RADIANS);
}
}
Entities.editEntity(selectionManager.selections[0], data.properties);
if (data.properties.name != undefined) {
entityListTool.sendUpdate();
@ -1357,8 +1344,8 @@ PropertiesTool = function(opts) {
var properties = selectionManager.savedProperties[selectionManager.selections[i]];
if (properties.type == "Zone") {
var centerOfZone = properties.boundingBox.center;
var atmosphereCenter = { x: centerOfZone.x,
y: centerOfZone.y - properties.atmosphere.innerRadius,
var atmosphereCenter = { x: centerOfZone.x,
y: centerOfZone.y - properties.atmosphere.innerRadius,
z: centerOfZone.z };
Entities.editEntity(selectionManager.selections[i], {

View file

@ -0,0 +1,145 @@
//
// #20622: JS Stream Player
// *************************
//
// Created by Kevin M. Thomas and Thoys 07/17/15.
// Copyright 2015 High Fidelity, Inc.
// kevintown.net
//
// JavaScript for the High Fidelity interface that creates a stream player with a UI and keyPressEvents for adding a stream URL in addition to play, stop and volume functionality.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Declare HiFi public bucket.
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
// Declare variables and set up new WebWindow.
var stream;
var volume = 1;
var streamWindow = new WebWindow('Stream', HIFI_PUBLIC_BUCKET + "examples/html/jsstreamplayer.html", 0, 0, false);
// Set up toggleStreamURLButton overlay.
var toggleStreamURLButton = Overlays.addOverlay("text", {
x: 76,
y: 275,
width: 40,
height: 28,
backgroundColor: {red: 0, green: 0, blue: 0},
color: {red: 255, green: 255, blue: 0},
font: {size: 15},
topMargin: 8,
text: " URL"
});
// Set up toggleStreamPlayButton overlay.
var toggleStreamPlayButton = Overlays.addOverlay("text", {
x: 122,
y: 275,
width: 38,
height: 28,
backgroundColor: { red: 0, green: 0, blue: 0},
color: { red: 255, green: 255, blue: 0},
font: {size: 15},
topMargin: 8,
text: " Play"
});
// Set up toggleStreamStopButton overlay.
var toggleStreamStopButton = Overlays.addOverlay("text", {
x: 166,
y: 275,
width: 40,
height: 28,
backgroundColor: { red: 0, green: 0, blue: 0},
color: { red: 255, green: 255, blue: 0},
font: {size: 15},
topMargin: 8,
text: " Stop"
});
// Set up increaseVolumeButton overlay.
var toggleIncreaseVolumeButton = Overlays.addOverlay("text", {
x: 211,
y: 275,
width: 18,
height: 28,
backgroundColor: { red: 0, green: 0, blue: 0},
color: { red: 255, green: 255, blue: 0},
font: {size: 15},
topMargin: 8,
text: " +"
});
// Set up decreaseVolumeButton overlay.
var toggleDecreaseVolumeButton = Overlays.addOverlay("text", {
x: 234,
y: 275,
width: 15,
height: 28,
backgroundColor: { red: 0, green: 0, blue: 0},
color: { red: 255, green: 255, blue: 0},
font: {size: 15},
topMargin: 8,
text: " -"
});
// Route clicks on the stream-player buttons: set/play/stop the stream URL
// and raise or lower the playback volume via the WebWindow event bridge.
function mousePressEvent(event) {
    var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});

    // Tell the web page to switch to the given stream URL ("" stops playback).
    function emitStreamChange(streamURL) {
        streamWindow.eventBridge.emitScriptEvent(JSON.stringify({
            action: "changeStream",
            stream: streamURL
        }));
    }

    // Tell the web page the current volume value.
    function emitVolumeChange() {
        streamWindow.eventBridge.emitScriptEvent(JSON.stringify({
            action: "changeVolume",
            volume: volume
        }));
    }

    if (clickedOverlay == toggleStreamURLButton) {
        stream = Window.prompt("Enter Stream: ");
        emitStreamChange(stream);
    }
    if (clickedOverlay == toggleStreamPlayButton) {
        emitStreamChange(stream);
    }
    if (clickedOverlay == toggleStreamStopButton) {
        emitStreamChange("");
    }
    if (clickedOverlay == toggleIncreaseVolumeButton) {
        volume += 0.2;
        emitVolumeChange();
    }
    if (clickedOverlay == toggleDecreaseVolumeButton) {
        volume -= 0.2;
        emitVolumeChange();
    }
}
// Call function.
Controller.mousePressEvent.connect(mousePressEvent);
streamWindow.setVisible(false);
// Remove every overlay this script created, in creation order.
function onScriptEnding() {
    [
        toggleStreamURLButton,
        toggleStreamPlayButton,
        toggleStreamStopButton,
        toggleIncreaseVolumeButton,
        toggleDecreaseVolumeButton
    ].forEach(function(overlayID) {
        Overlays.deleteOverlay(overlayID);
    });
}
// Call function.
Script.scriptEnding.connect(onScriptEnding);

View file

@ -0,0 +1,33 @@
//
// #20628: JS Stream Player Domain-Zone-Entity
// ********************************************
//
// Created by Kevin M. Thomas and Thoys 07/20/15.
// Copyright 2015 High Fidelity, Inc.
// kevintown.net
//
// JavaScript for the High Fidelity interface that is an entity script to be placed in a chosen entity inside a domain-zone.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Entity script body, evaluated when a user approaches the host entity:
// loads the stream-player script unless it is already running.
(function() {
    const SCRIPT_NAME = "https://dl.dropboxusercontent.com/u/17344741/jsstreamplayer/jsstreamplayerdomain-zone.js";

    // Returns true when `script` already appears in the running-scripts list
    // (compared case-insensitively with surrounding whitespace trimmed).
    function isScriptRunning(script) {
        script = script.toLowerCase().trim();
        var runningScripts = ScriptDiscoveryService.getRunning();
        // `var` keeps the loop variable local; the original leaked a global `i`.
        for (var i in runningScripts) {
            if (runningScripts[i].url.toLowerCase().trim() == script) {
                return true;
            }
        }
        return false;
    }

    if (!isScriptRunning(SCRIPT_NAME)) {
        Script.load(SCRIPT_NAME);
    }
})

View file

@ -0,0 +1,28 @@
//
// hitEffect.js
// examples
//
// Created by Eric Levin on July 20, 2015
// Copyright 2015 High Fidelity, Inc.
//
// An example of how to toggle a screen-space hit effect using the Scene global object.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
var hitEffectEnabled = false;
toggleHitEffect();
// Flip the screen-space hit effect on/off once per second, forever, by
// re-arming a one-shot timer that calls itself.
function toggleHitEffect() {
    var TOGGLE_INTERVAL_MS = 1000;
    Script.setTimeout(function() {
        hitEffectEnabled = !hitEffectEnabled;
        Scene.setEngineDisplayHitEffect(hitEffectEnabled);
        toggleHitEffect();
    }, TOGGLE_INTERVAL_MS);
}

View file

@ -12,29 +12,297 @@
//
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";
const NUM_LAYERS = 16;
const BASE_DIMENSION = { x: 7, y: 2, z: 7 };
const BLOCKS_PER_LAYER = 3;
const BLOCK_SIZE = {x: 0.2, y: 0.1, z: 0.8};
const BLOCK_SPACING = BLOCK_SIZE.x / 3;
Script.include("../../libraries/toolBars.js");
const DEFAULT_NUM_LAYERS = 16;
const DEFAULT_BASE_DIMENSION = { x: 7, y: 2, z: 7 };
const DEFAULT_BLOCKS_PER_LAYER = 3;
const DEFAULT_BLOCK_SIZE = {x: 0.2, y: 0.1, z: 0.8};
const DEFAULT_BLOCK_SPACING = DEFAULT_BLOCK_SIZE.x / DEFAULT_BLOCKS_PER_LAYER;
// BLOCK_HEIGHT_VARIATION removes a random percentages of the default block height per block. (for example 0.001 %)
const BLOCK_HEIGHT_VARIATION = 0.001;
const GRAVITY = {x: 0, y: -2.8, z: 0};
const DENSITY = 2000;
const DAMPING_FACTOR = 0.98;
const ANGULAR_DAMPING_FACTOR = 0.8;
const FRICTION = 0.99;
const RESTITUTION = 0.0;
const SPAWN_DISTANCE = 3;
const BLOCK_YAW_OFFSET = 45;
const DEFAULT_BLOCK_HEIGHT_VARIATION = 0.001;
const DEFAULT_GRAVITY = {x: 0, y: -2.8, z: 0};
const DEFAULT_DENSITY = 2000;
const DEFAULT_DAMPING_FACTOR = 0.98;
const DEFAULT_ANGULAR_DAMPING_FACTOR = 0.8;
const DEFAULT_FRICTION = 0.99;
const DEFAULT_RESTITUTION = 0.0;
const DEFAULT_SPAWN_DISTANCE = 3;
const DEFAULT_BLOCK_YAW_OFFSET = 45;
var editMode = false;
const BUTTON_DIMENSIONS = {width: 49, height: 49};
const MAXIMUM_PERCENTAGE = 100.0;
const NO_ANGLE = 0;
const RIGHT_ANGLE = 90;
var windowWidth = Window.innerWidth;
var size;
var pieces = [];
var ground = false;
var layerRotated = false;
var button;
var cogButton;
var toolBar;
// Wraps the Planky settings WebWindow and bridges its UI events to a
// PlankyStack instance.
SettingsWindow = function() {
    var _this = this;

    this.plankyStack = null;
    this.webWindow = null;

    // Create the (initially hidden) settings window, wire up the event
    // bridge, and remember the stack the UI controls.
    this.init = function(plankyStack) {
        _this.webWindow = new WebWindow('Planky', Script.resolvePath('../../html/plankySettings.html'), 255, 500, true);
        _this.webWindow.setVisible(false);
        _this.webWindow.eventBridge.webEventReceived.connect(_this.onWebEventReceived);
        _this.plankyStack = plankyStack;
    };

    // Serialize `data` and push it to the web page.
    this.sendData = function(data) {
        _this.webWindow.eventBridge.emitScriptEvent(JSON.stringify(data));
    };

    // Dispatch messages arriving from the web page by their `action` field.
    this.onWebEventReceived = function(data) {
        data = JSON.parse(data);
        switch (data.action) {
            case 'loaded':
                // Page is ready: send it the current option values.
                _this.sendData({action: 'load', options: _this.plankyStack.options.getJSON()})
                break;
            case 'value-change':
                _this.plankyStack.onValueChanged(data.option, data.value);
                break;
            case 'factory-reset':
                // Reset options, then refresh the page with the defaults.
                _this.plankyStack.options.factoryReset();
                _this.sendData({action: 'load', options: _this.plankyStack.options.getJSON()})
                break;
            case 'save-default':
                _this.plankyStack.options.save();
                break;
            case 'cleanup':
                _this.plankyStack.deRez();
                break;
            default:
                Window.alert('[planky] unknown action ' + data.action);
        }
    };
};
// Holds every tunable Planky parameter, persisted through Settings under the
// 'plankyOptions' key. Options missing from the persisted JSON keep their
// DEFAULT_* values.
PlankyOptions = function() {
    var _this = this;

    // Reset all options to their defaults and wipe the persisted value.
    this.factoryReset = function() {
        _this.setDefaults();
        Settings.setValue('plankyOptions', '');
    };

    // Persist the current options as the new saved defaults.
    this.save = function() {
        Settings.setValue('plankyOptions', JSON.stringify(_this.getJSON()));
    };

    // Load persisted options on top of the defaults.
    this.load = function() {
        _this.setDefaults();
        var plankyOptions = Settings.getValue('plankyOptions');
        if (plankyOptions === null || plankyOptions === '') {
            return;
        }
        var options = JSON.parse(plankyOptions);
        // `var` keeps the loop variable local; the original leaked a global.
        for (var option in options) {
            _this[option] = options[option];
        }
    };

    // Plain-object snapshot of every option, suitable for JSON serialization.
    this.getJSON = function() {
        return {
            numLayers: _this.numLayers,
            baseDimension: _this.baseDimension,
            blocksPerLayer: _this.blocksPerLayer,
            blockSize: _this.blockSize,
            blockSpacing: _this.blockSpacing,
            blockHeightVariation: _this.blockHeightVariation,
            gravity: _this.gravity,
            density: _this.density,
            dampingFactor: _this.dampingFactor,
            angularDampingFactor: _this.angularDampingFactor,
            friction: _this.friction,
            restitution: _this.restitution,
            spawnDistance: _this.spawnDistance,
            blockYawOffset: _this.blockYawOffset
        };
    };

    // Restore the compiled-in DEFAULT_* value for every option.
    this.setDefaults = function() {
        _this.numLayers = DEFAULT_NUM_LAYERS;
        _this.baseDimension = DEFAULT_BASE_DIMENSION;
        _this.blocksPerLayer = DEFAULT_BLOCKS_PER_LAYER;
        _this.blockSize = DEFAULT_BLOCK_SIZE;
        _this.blockSpacing = DEFAULT_BLOCK_SPACING;
        _this.blockHeightVariation = DEFAULT_BLOCK_HEIGHT_VARIATION;
        _this.gravity = DEFAULT_GRAVITY;
        _this.density = DEFAULT_DENSITY;
        _this.dampingFactor = DEFAULT_DAMPING_FACTOR;
        _this.angularDampingFactor = DEFAULT_ANGULAR_DAMPING_FACTOR;
        _this.friction = DEFAULT_FRICTION;
        _this.restitution = DEFAULT_RESTITUTION;
        _this.spawnDistance = DEFAULT_SPAWN_DISTANCE;
        _this.blockYawOffset = DEFAULT_BLOCK_YAW_OFFSET;
    };

    this.load();
};
// The PlankyStack exists out of rows and layers
// Owns the tower of plank entities, the ground model underneath it, and the
// edit-mode guide lines. Every entity it spawns is tracked so deRez() can
// remove them all again.
PlankyStack = function() {
    var _this = this;
    this.planks = [];
    this.ground = false;
    this.editLines = [];
    this.options = new PlankyOptions();

    // Delete every entity this stack created and reset the bookkeeping.
    this.deRez = function() {
        _this.planks.forEach(function(plank) {
            Entities.deleteEntity(plank.entity);
        });
        _this.planks = [];
        if (_this.ground) {
            Entities.deleteEntity(_this.ground);
        }
        _this.editLines.forEach(function(line) {
            Entities.deleteEntity(line);
        });
        _this.editLines = [];
        if (_this.centerLine) {
            Entities.deleteEntity(_this.centerLine);
        }
        _this.ground = false;
        _this.centerLine = false;
    };

    // (Re)spawn the stack in front of the avatar, facing the avatar's yaw.
    this.rez = function() {
        if (_this.planks.length > 0) {
            _this.deRez();
        }
        _this.baseRotation = Quat.fromPitchYawRollDegrees(0.0, MyAvatar.bodyYaw, 0.0);
        var basePosition = Vec3.sum(MyAvatar.position, Vec3.multiply(_this.options.spawnDistance, Quat.getFront(_this.baseRotation)));
        basePosition.y = grabLowestJointY();
        _this.basePosition = basePosition;
        _this.refresh();
    };

    // private function: create the ground model on first use; afterwards just
    // move/resize it to the current rez position and rotation.
    var refreshGround = function() {
        if (!_this.ground) {
            _this.ground = Entities.addEntity({
                type: 'Model',
                modelURL: HIFI_PUBLIC_BUCKET + 'eric/models/woodFloor.fbx',
                dimensions: _this.options.baseDimension,
                position: Vec3.sum(_this.basePosition, {y: -(_this.options.baseDimension.y / 2)}),
                rotation: _this.baseRotation,
                shapeType: 'box'
            });
            return;
        }
        // move ground to rez position/rotation
        Entities.editEntity(_this.ground, {dimensions: _this.options.baseDimension, position: Vec3.sum(_this.basePosition, {y: -(_this.options.baseDimension.y / 2)}), rotation: _this.baseRotation});
    };

    // private function: create the edit-mode guide line once; afterwards just
    // toggle its visibility to match the global editMode flag.
    var refreshLines = function() {
        if (_this.editLines.length === 0) {
            _this.editLines.push(Entities.addEntity({
                type: 'Line',
                dimensions: {x: 5, y: 21, z: 5},
                position: Vec3.sum(_this.basePosition, {y: -(_this.options.baseDimension.y / 2)}),
                lineWidth: 7,
                color: {red: 20, green: 20, blue: 20},
                linePoints: [{x: 0, y: 0, z: 0}, {x: 0, y: 10, z: 0}],
                visible: editMode
            }));
            return;
        }
        _this.editLines.forEach(function(line) {
            Entities.editEntity(line, {visible: editMode});
        });
    };

    // private function: delete every plank whose 'layer'/'row' index exceeds
    // maxIndex (used when the user shrinks the stack in the settings UI).
    var trimDimension = function(dimension, maxIndex) {
        var removingPlanks = [];
        _this.planks.forEach(function(plank, index) {
            if (plank[dimension] > maxIndex) {
                removingPlanks.push(index);
            }
        });
        // Splice from the back so earlier indices remain valid.
        removingPlanks.reverse();
        for (var i = 0; i < removingPlanks.length; i++) {
            Entities.deleteEntity(_this.planks[removingPlanks[i]].entity);
            _this.planks.splice(removingPlanks[i], 1);
        }
    };

    // private function: compute the properties for the plank at (layer, row)
    // and either update the existing entity or spawn a new one.
    var createOrUpdate = function(layer, row) {
        // Alternate layers are rotated 90 degrees, like a real Jenga tower.
        var layerRotated = layer % 2 === 0;
        var layerRotation = Quat.fromPitchYawRollDegrees(0, layerRotated ? NO_ANGLE : RIGHT_ANGLE, 0.0);
        var blockPositionXZ = (row - (_this.options.blocksPerLayer / 2) + 0.5) * (_this.options.blockSpacing + _this.options.blockSize.x);
        var localTransform = Vec3.multiplyQbyV(_this.offsetRot, {
            x: (layerRotated ? blockPositionXZ : 0),
            y: (_this.options.blockSize.y / 2) + (_this.options.blockSize.y * layer),
            z: (layerRotated ? 0 : blockPositionXZ)
        });
        var newProperties = {
            type: 'Model',
            modelURL: HIFI_PUBLIC_BUCKET + 'marketplace/hificontent/Games/blocks/block.fbx',
            shapeType: 'box',
            name: 'PlankyBlock' + layer + '-' + row,
            dimensions: Vec3.sum(_this.options.blockSize, {x: 0, y: -((_this.options.blockSize.y * (_this.options.blockHeightVariation / MAXIMUM_PERCENTAGE)) * Math.random()), z: 0}),
            position: Vec3.sum(_this.basePosition, localTransform),
            rotation: Quat.multiply(layerRotation, _this.offsetRot),
            damping: _this.options.dampingFactor,
            restitution: _this.options.restitution,
            friction: _this.options.friction,
            angularDamping: _this.options.angularDampingFactor,
            gravity: _this.options.gravity,
            density: _this.options.density,
            velocity: {x: 0, y: 0, z: 0},
            angularVelocity: Quat.fromPitchYawRollDegrees(0, 0, 0),
            ignoreForCollisions: true
        };
        // Fix: the original used forEach with 'return false' intending to
        // break; forEach cannot be broken out of. some() genuinely stops at
        // the first matching plank.
        var found = _this.planks.some(function(plank) {
            if (plank.layer === layer && plank.row === row) {
                Entities.editEntity(plank.entity, newProperties);
                return true;
            }
            return false;
        });
        if (!found) {
            _this.planks.push({layer: layer, row: row, entity: Entities.addEntity(newProperties)});
        }
    };

    // Push a changed option into the stack; geometry-affecting options force
    // a full refresh.
    this.onValueChanged = function(option, value) {
        _this.options[option] = value;
        if (['numLayers', 'blocksPerLayer', 'blockSize', 'blockSpacing', 'blockHeightVariation'].indexOf(option) !== -1) {
            _this.refresh();
        }
    };

    // Rebuild ground, guide lines, and every plank from the current options.
    this.refresh = function() {
        refreshGround();
        refreshLines();
        trimDimension('layer', _this.options.numLayers - 1);
        trimDimension('row', _this.options.blocksPerLayer - 1);
        _this.offsetRot = Quat.multiply(_this.baseRotation, Quat.fromPitchYawRollDegrees(0.0, _this.options.blockYawOffset, 0.0));
        for (var layer = 0; layer < _this.options.numLayers; layer++) {
            for (var row = 0; row < _this.options.blocksPerLayer; row++) {
                createOrUpdate(layer, row);
            }
        }
        // Outside edit mode the planks become dynamic so the tower can topple.
        if (!editMode) {
            _this.planks.forEach(function(plank) {
                Entities.editEntity(plank.entity, {ignoreForCollisions: false, collisionsWillMove: true});
            });
        }
    };

    // A stack counts as 'found' as soon as it owns at least one plank.
    this.isFound = function() {
        //TODO: identify entities here until one is found
        return _this.planks.length > 0;
    };
};
// Wire up the singletons: one settings window driving one stack.
var settingsWindow = new SettingsWindow();
var plankyStack = new PlankyStack();
settingsWindow.init(plankyStack);
function grabLowestJointY() {
var jointNames = MyAvatar.getJointNames();
@ -47,108 +315,60 @@ function grabLowestJointY() {
return floorY;
}
function getButtonPosX() {
return windowWidth - ((BUTTON_DIMENSIONS.width / 2) + BUTTON_DIMENSIONS.width);
}
// Horizontal toolbar anchored near the top-right corner; the positioning
// callback keeps it there whenever the window is resized.
toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL, "highfidelity.games.planky", function (windowDimensions, toolbar) {
    var anchorX = windowDimensions.x - (toolbar.width * 1.1);
    var anchorY = toolbar.height / 2;
    return {x: anchorX, y: anchorY};
});
var button = Overlays.addOverlay('image', {
x: getButtonPosX(),
y: 10,
button = toolBar.addTool({
width: BUTTON_DIMENSIONS.width,
height: BUTTON_DIMENSIONS.height,
imageURL: HIFI_PUBLIC_BUCKET + 'marketplace/hificontent/Games/blocks/planky_button.svg',
alpha: 0.8
alpha: 0.8,
visible: true
});
// Settings (cog) toggle button. The trailing (true, false) arguments
// presumably mean (selectable, initially-selected) -- verify against the
// ToolBar implementation in toolBars.js.
cogButton = toolBar.addTool({
width: BUTTON_DIMENSIONS.width,
height: BUTTON_DIMENSIONS.height,
imageURL: HIFI_PUBLIC_BUCKET + 'marketplace/hificontent/Games/blocks/cog.svg',
// subImage selects one button-sized cell out of the sprite sheet.
subImage: { x: 0, y: BUTTON_DIMENSIONS.height, width: BUTTON_DIMENSIONS.width, height: BUTTON_DIMENSIONS.height },
alpha: 0.8,
visible: true
}, true, false);
function resetBlocks() {
pieces.forEach(function(piece) {
Entities.deleteEntity(piece);
});
pieces = [];
var avatarRot = Quat.fromPitchYawRollDegrees(0.0, MyAvatar.bodyYaw, 0.0);
basePosition = Vec3.sum(MyAvatar.position, Vec3.multiply(SPAWN_DISTANCE, Quat.getFront(avatarRot)));
basePosition.y = grabLowestJointY() - (BASE_DIMENSION.y / 2);
if (!ground) {
ground = Entities.addEntity({
type: 'Model',
modelURL: HIFI_PUBLIC_BUCKET + 'eric/models/woodFloor.fbx',
dimensions: BASE_DIMENSION,
position: basePosition,
rotation: avatarRot,
shapeType: 'box'
});
} else {
Entities.editEntity(ground, {position: basePosition, rotation: avatarRot});
}
var offsetRot = Quat.multiply(avatarRot, Quat.fromPitchYawRollDegrees(0.0, BLOCK_YAW_OFFSET, 0.0));
basePosition.y += (BASE_DIMENSION.y / 2);
for (var layerIndex = 0; layerIndex < NUM_LAYERS; layerIndex++) {
var layerRotated = layerIndex % 2 === 0;
var offset = -(BLOCK_SPACING);
var layerRotation = Quat.fromPitchYawRollDegrees(0, layerRotated ? 0 : 90, 0.0);
for (var blockIndex = 0; blockIndex < BLOCKS_PER_LAYER; blockIndex++) {
var blockPositionXZ = BLOCK_SIZE.x * blockIndex - (BLOCK_SIZE.x * 3 / 2 - BLOCK_SIZE.x / 2);
var localTransform = Vec3.multiplyQbyV(offsetRot, {
x: (layerRotated ? blockPositionXZ + offset: 0),
y: (BLOCK_SIZE.y / 2) + (BLOCK_SIZE.y * layerIndex),
z: (layerRotated ? 0 : blockPositionXZ + offset)
});
pieces.push(Entities.addEntity({
type: 'Model',
modelURL: HIFI_PUBLIC_BUCKET + 'marketplace/hificontent/Games/blocks/block.fbx',
shapeType: 'box',
name: 'PlankyBlock' + ((layerIndex * BLOCKS_PER_LAYER) + blockIndex),
dimensions: {
x: BLOCK_SIZE.x,
y: BLOCK_SIZE.y - ((BLOCK_SIZE.y * (BLOCK_HEIGHT_VARIATION / MAXIMUM_PERCENTAGE)) * Math.random()),
z: BLOCK_SIZE.z
},
position: {
x: basePosition.x + localTransform.x,
y: basePosition.y + localTransform.y,
z: basePosition.z + localTransform.z
},
rotation: Quat.multiply(layerRotation, offsetRot),
collisionsWillMove: true,
damping: DAMPING_FACTOR,
restitution: RESTITUTION,
friction: FRICTION,
angularDamping: ANGULAR_DAMPING_FACTOR,
gravity: GRAVITY,
density: DENSITY
}));
offset += BLOCK_SPACING;
Controller.mousePressEvent.connect(function(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
if (toolBar.clicked(clickedOverlay) === button) {
if (!plankyStack.isFound()) {
plankyStack.rez();
return;
}
plankyStack.refresh();
} else if (toolBar.clicked(clickedOverlay) === cogButton) {
editMode = !editMode;
toolBar.selectTool(cogButton, editMode);
settingsWindow.webWindow.setVisible(editMode);
if(plankyStack.planks.length) {
plankyStack.refresh();
}
}
}
});
function mousePressEvent(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
if (clickedOverlay === button) {
resetBlocks();
Script.update.connect(function() {
if (windowWidth !== Window.innerWidth) {
windowWidth = Window.innerWidth;
Overlays.editOverlay(button, {x: getButtonPosX()});
Overlays.editOverlay(cogButton, {x: getCogButtonPosX()});
}
}
})
Controller.mousePressEvent.connect(mousePressEvent);
function cleanup() {
Overlays.deleteOverlay(button);
Script.scriptEnding.connect(function() {
toolBar.cleanup();
if (ground) {
Entities.deleteEntity(ground);
}
pieces.forEach(function(piece) {
Entities.deleteEntity(piece);
});
pieces = [];
}
function onUpdate() {
if (windowWidth != Window.innerWidth) {
windowWidth = Window.innerWidth;
Overlays.editOverlay(button, {x: getButtonPosX()});
}
}
Script.update.connect(onUpdate)
Script.scriptEnding.connect(cleanup);
plankyStack.deRez();
});

View file

@ -0,0 +1,288 @@
//
// satellite.js
// games
//
// Created by Bridget Went 7/1/2015.
// Copyright 2015 High Fidelity, Inc.
//
// A game to bring a satellite model into orbit around an animated earth model.
// - Double click to create a new satellite
// - Click on the satellite, drag a vector arrow to specify initial velocity
// - Release mouse to launch the active satellite
// - Orbital movement is calculated using equations of gravitational physics
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Vector helpers -- presumably also the source of the VectorArrow class used
// below for the launch-velocity drag; verify against vector.js.
Script.include('../utilities/tools/vector.js');
// Base S3 bucket for the earth/clouds models used by this game.
var URL = "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Scripts/planets/";
// Satellite orbit mini-game: double-click to spawn a satellite near the
// animated earth, click-drag a vector arrow to set its initial velocity,
// release to launch. Orbits are integrated manually in update().
// NOTE(review): assigned without 'var', so SatelliteGame becomes a global --
// solarsystem.js relies on that to call 'new SatelliteGame()'.
SatelliteGame = function() {
var MAX_RANGE = 50.0;
// World up; used to pick a tangential launch direction via a cross product.
var Y_AXIS = {
x: 0,
y: 1,
z: 0
}
var LIFETIME = 6000;
// Max allowed orbital-energy error (percent) to count as a stable orbit.
var ERROR_THRESH = 20.0;
// Create the spinning earth model
var EARTH_SIZE = 20.0;
var CLOUDS_OFFSET = 0.5;
var SPIN = 0.1;
var ZONE_DIM = 100.0;
var LIGHT_INTENSITY = 1.5;
// Earth: spinning globe + cloud shell + a lighting zone, centered at
// 'position'. (Also assigned without 'var' -- becomes a global.)
Earth = function(position, size) {
this.earth = Entities.addEntity({
type: "Model",
shapeType: 'sphere',
modelURL: URL + "earth.fbx",
position: position,
dimensions: {
x: size,
y: size,
z: size
},
rotation: Quat.angleAxis(180, {
x: 1,
y: 0,
z: 0
}),
// The globe spins at half the cloud speed for a simple parallax effect.
angularVelocity: {
x: 0.00,
y: 0.5 * SPIN,
z: 0.00
},
angularDamping: 0.0,
damping: 0.0,
ignoreCollisions: false,
lifetime: 6000,
collisionsWillMove: false,
visible: true
});
this.clouds = Entities.addEntity({
type: "Model",
shapeType: 'sphere',
modelURL: URL + "clouds.fbx?i=2",
position: position,
dimensions: {
x: size + CLOUDS_OFFSET,
y: size + CLOUDS_OFFSET,
z: size + CLOUDS_OFFSET
},
angularVelocity: {
x: 0.00,
y: SPIN,
z: 0.00
},
angularDamping: 0.0,
damping: 0.0,
ignoreCollisions: false,
lifetime: LIFETIME,
collisionsWillMove: false,
visible: true
});
// Zone that lights the earth from the camera's direction.
this.zone = Entities.addEntity({
type: "Zone",
position: position,
dimensions: {
x: ZONE_DIM,
y: ZONE_DIM,
z: ZONE_DIM
},
keyLightDirection: Vec3.normalize(Vec3.subtract(position, Camera.getPosition())),
keyLightIntensity: LIGHT_INTENSITY
});
// Delete all three entities that make up the earth.
this.cleanup = function() {
Entities.deleteEntity(this.clouds);
Entities.deleteEntity(this.earth);
Entities.deleteEntity(this.zone);
}
}
// Create earth model
var center = Vec3.sum(Camera.getPosition(), Vec3.multiply(MAX_RANGE, Quat.getFront(Camera.getOrientation())));
var distance = Vec3.length(Vec3.subtract(center, Camera.getPosition()));
var earth = new Earth(center, EARTH_SIZE);
var satellites = [];
var SATELLITE_SIZE = 2.0;
var launched = false;
var activeSatellite;
// Reference orbital period and masses for the gravity calculations below.
var PERIOD = 4.0;
var LARGE_BODY_MASS = 16000.0;
var SMALL_BODY_MASS = LARGE_BODY_MASS * 0.000000333;
// One launchable satellite entity. (Assigned without 'var' -- global.)
Satellite = function(position, planetCenter) {
// The Satellite class
this.launched = false;
this.startPosition = position;
this.readyToLaunch = false;
// Spawn radius; later used as the reference for the energy-error check.
this.radius = Vec3.length(Vec3.subtract(position, planetCenter));
this.satellite = Entities.addEntity({
type: "Model",
modelURL: "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Scripts/planets/satellite/satellite.fbx",
position: this.startPosition,
dimensions: {
x: SATELLITE_SIZE,
y: SATELLITE_SIZE,
z: SATELLITE_SIZE
},
angularDamping: 0.0,
damping: 0.0,
ignoreCollisions: false,
lifetime: LIFETIME,
collisionsWillMove: false,
});
this.getProperties = function() {
return Entities.getEntityProperties(this.satellite);
}
// Give the satellite its initial velocity: circular-orbit speed scaled by
// the magnitude and direction of the user's drag arrow.
this.launch = function() {
var prop = Entities.getEntityProperties(this.satellite);
var between = Vec3.subtract(planetCenter, prop.position);
var radius = Vec3.length(between);
// Gravitational constant derived from Kepler's third law at this radius.
this.gravity = (4.0 * Math.PI * Math.PI * Math.pow(radius, 3.0)) / (LARGE_BODY_MASS * PERIOD * PERIOD);
var initialVelocity = Vec3.normalize(Vec3.cross(between, Y_AXIS));
initialVelocity = Vec3.multiply(Math.sqrt((this.gravity * LARGE_BODY_MASS) / radius), initialVelocity);
initialVelocity = Vec3.multiply(this.arrow.magnitude, initialVelocity);
initialVelocity = Vec3.multiply(Vec3.length(initialVelocity), this.arrow.direction);
Entities.editEntity(this.satellite, {
velocity: initialVelocity
});
this.launched = true;
};
// Semi-implicit Euler step: accelerate toward the planet center, then
// advance the position using the updated velocity.
this.update = function(deltaTime) {
var prop = Entities.getEntityProperties(this.satellite);
var between = Vec3.subtract(prop.position, planetCenter);
var radius = Vec3.length(between);
// Negative: acceleration points back along 'between' toward the planet.
var acceleration = -(this.gravity * LARGE_BODY_MASS) * Math.pow(radius, (-2.0));
var speed = acceleration * deltaTime;
var vel = Vec3.multiply(speed, Vec3.normalize(between));
var newVelocity = Vec3.sum(prop.velocity, vel);
var newPos = Vec3.sum(prop.position, Vec3.multiply(newVelocity, deltaTime));
Entities.editEntity(this.satellite, {
velocity: newVelocity,
position: newPos
});
};
}
// Double-click: spawn a new satellite where the pick ray reaches the earth's
// distance, and make it the active satellite.
function mouseDoublePressEvent(event) {
var pickRay = Camera.computePickRay(event.x, event.y);
var addVector = Vec3.multiply(pickRay.direction, distance);
var point = Vec3.sum(Camera.getPosition(), addVector);
// Create a new satellite
activeSatellite = new Satellite(point, center);
satellites.push(activeSatellite);
}
// Click on the active satellite: start dragging its velocity arrow.
function mousePressEvent(event) {
if (!activeSatellite) {
return;
}
// Reset label
if (activeSatellite.arrow) {
activeSatellite.arrow.deleteLabel();
}
var statsPosition = Vec3.sum(Camera.getPosition(), Vec3.multiply(MAX_RANGE * 0.4, Quat.getFront(Camera.getOrientation())));
var pickRay = Camera.computePickRay(event.x, event.y)
var rayPickResult = Entities.findRayIntersection(pickRay, true);
if (rayPickResult.entityID === activeSatellite.satellite) {
// Create a draggable vector arrow at satellite position
activeSatellite.arrow = new VectorArrow(distance, true, "INITIAL VELOCITY", statsPosition);
activeSatellite.arrow.onMousePressEvent(event);
activeSatellite.arrow.isDragging = true;
}
}
// Forward drag movement to the arrow while it is being dragged.
function mouseMoveEvent(event) {
if (!activeSatellite || !activeSatellite.arrow || !activeSatellite.arrow.isDragging) {
return;
}
activeSatellite.arrow.onMouseMoveEvent(event);
}
// Release: finish the drag and launch the active satellite.
function mouseReleaseEvent(event) {
if (!activeSatellite || !activeSatellite.arrow || !activeSatellite.arrow.isDragging) {
return;
}
activeSatellite.arrow.onMouseReleaseEvent(event);
activeSatellite.launch();
activeSatellite.arrow.cleanup();
}
var counter = 0.0;
var CHECK_ENERGY_PERIOD = 500;
// Per-frame: integrate every launched satellite and, every
// CHECK_ENERGY_PERIOD frames, report whether the active one is stable.
function update(deltaTime) {
if (!activeSatellite) {
return;
}
// Update all satellites
for (var i = 0; i < satellites.length; i++) {
if (!satellites[i].launched) {
continue;
}
satellites[i].update(deltaTime);
}
counter++;
if (counter % CHECK_ENERGY_PERIOD == 0) {
var prop = activeSatellite.getProperties();
var error = calcEnergyError(prop.position, Vec3.length(prop.velocity));
if (Math.abs(error) <= ERROR_THRESH) {
activeSatellite.arrow.editLabel("Nice job! The satellite has reached a stable orbit.");
} else {
activeSatellite.arrow.editLabel("Try again! The satellite is in an unstable orbit.");
}
}
}
// Tear down every satellite (and its arrow) plus the earth. Also connected
// as the scriptEnding handler below.
this.endGame = function() {
for (var i = 0; i < satellites.length; i++) {
Entities.deleteEntity(satellites[i].satellite);
// NOTE(review): .arrow only exists once the satellite was clicked --
// confirm a never-clicked satellite cannot reach this cleanup.
satellites[i].arrow.cleanup();
}
earth.cleanup();
}
// Percent deviation of the active satellite's current orbital energy from
// the ideal circular-orbit energy at its spawn radius.
function calcEnergyError(pos, vel) {
//Calculate total energy error for active satellite's orbital motion
var radius = activeSatellite.radius;
var gravity = (4.0 * Math.PI * Math.PI * Math.pow(radius, 3.0)) / (LARGE_BODY_MASS * PERIOD * PERIOD);
var initialVelocityCalculated = Math.sqrt((gravity * LARGE_BODY_MASS) / radius);
var totalEnergy = 0.5 * LARGE_BODY_MASS * Math.pow(initialVelocityCalculated, 2.0) - ((gravity * LARGE_BODY_MASS * SMALL_BODY_MASS) / radius);
var measuredEnergy = 0.5 * LARGE_BODY_MASS * Math.pow(vel, 2.0) - ((gravity * LARGE_BODY_MASS * SMALL_BODY_MASS) / Vec3.length(Vec3.subtract(pos, center)));
var error = ((measuredEnergy - totalEnergy) / totalEnergy) * 100;
return error;
}
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseDoublePressEvent.connect(mouseDoublePressEvent);
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Script.update.connect(update);
Script.scriptEnding.connect(this.endGame);
}

View file

@ -0,0 +1,669 @@
//
// solarsystem.js
// games
//
// Created by Bridget Went, 5/28/15.
// Copyright 2015 High Fidelity, Inc.
//
// The start to a project to build a virtual physics classroom to simulate the solar system, gravity, and orbital physics.
// - A sun with orbiting planets is created in front of the user
// - UI elements allow for adjusting the period, gravity, trails, and energy recalculations
// - Click "PAUSE" to pause the animation and show planet labels
// - In this mode, double-click a planet label to zoom in on that planet
// -Double-clicking on earth label initiates satellite orbiter game
// -Press "TAB" to toggle back to solar system view
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// UI panel helpers (Panel, sliders, checkboxes) and the satellite mini-game.
Script.include('../utilities/tools/cookies.js');
Script.include('games/satellite.js');
var BASE_URL = "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Scripts/planets/planets/";
var NUM_PLANETS = 8;
var trailsEnabled = true;
var energyConserved = true;
var planetView = false;
var earthView = false;
var satelliteGame;
// Screen position/sizing for the settings panel and overlays.
var PANEL_X = 850;
var PANEL_Y = 600;
var BUTTON_SIZE = 20;
var PADDING = 20;
var DAMPING = 0.0;
var LIFETIME = 6000;
// Percent energy drift tolerated before a planet's speed is recalibrated.
var ERROR_THRESH = 2.0;
// update() runs with a fixed timestep of 1/TIME_STEP (see dt below).
var TIME_STEP = 70.0;
// Trail tuning: points per Line entity and the Line entity's bounds/width.
var MAX_POINTS_PER_LINE = 5;
var LINE_DIM = 10;
var LINE_WIDTH = 3.0;
var line;
var planetLines = [];
var trails = [];
var BOUNDS = 200;
// Alert user to move if they are too close to domain bounds
// NOTE(review): TREE_SCALE is not defined in this file -- presumably an
// Interface-provided global; and a top-level 'return' only works because the
// script engine wraps the file -- confirm both.
if (MyAvatar.position.x < BOUNDS || MyAvatar.position.x > TREE_SCALE - BOUNDS || MyAvatar.position.y < BOUNDS || MyAvatar.position.y > TREE_SCALE - BOUNDS || MyAvatar.position.z < BOUNDS || MyAvatar.position.z > TREE_SCALE - BOUNDS) {
Window.alert("Please move at least 200m away from domain bounds.");
return;
}
// Save initial avatar and camera position
var startingPosition = MyAvatar.position;
var startFrame = Window.location.href;
// Place the sun
var MAX_RANGE = 80.0;
var SUN_SIZE = 8.0;
var center = Vec3.sum(startingPosition, Vec3.multiply(MAX_RANGE, Quat.getFront(Camera.getOrientation())));
var theSun = Entities.addEntity({
type: "Model",
modelURL: BASE_URL + "sun.fbx",
position: center,
dimensions: {
x: SUN_SIZE,
y: SUN_SIZE,
z: SUN_SIZE
},
angularDamping: DAMPING,
damping: DAMPING,
ignoreCollisions: false,
lifetime: LIFETIME,
collisionsWillMove: false
});
var planets = [];
var planet_properties = [];
// Reference values
// Innermost orbit radius/size; both grow per planet in initPlanets().
var radius = 7.0;
var T_ref = 1.0;
var size = 1.0;
// Sun mass M, planet mass m, and G chosen so the reference orbit has
// period T_ref (Kepler's third law solved for G).
var M = 250.0;
var m = M * 0.000000333;
var G = (Math.pow(radius, 3.0) / Math.pow((T_ref / (2.0 * Math.PI)), 2.0)) / M;
var G_ref = G;
// Adjust size and distance as number of planets increases
var DELTA_RADIUS = 1.8;
var DELTA_SIZE = 0.2;
// Spawn NUM_PLANETS planet models in circular orbits around theSun and record
// each planet's orbital parameters in planet_properties.
// Fix: the circular-orbit speed v0 is now stored on each property record --
// update() passes properties.v0 into calcEnergyError(), which previously read
// undefined and turned the energy-conservation check into a NaN no-op.
function initPlanets() {
    // Trail colors, indexed by planet (Mercury .. Neptune).
    var LINE_COLORS = [
        {red: 255, green: 255, blue: 255},
        {red: 255, green: 160, blue: 110},
        {red: 10, green: 150, blue: 160},
        {red: 180, green: 70, blue: 10},
        {red: 250, green: 140, blue: 0},
        {red: 235, green: 215, blue: 0},
        {red: 135, green: 205, blue: 240},
        {red: 30, green: 140, blue: 255}
    ];
    for (var i = 0; i < NUM_PLANETS; ++i) {
        // Circular-orbit speed and period at this radius (Newton/Kepler).
        var v0 = Math.sqrt((G * M) / radius);
        var T = (2.0 * Math.PI) * Math.sqrt(Math.pow(radius, 3.0) / (G * M));
        var color = LINE_COLORS[i % LINE_COLORS.length];
        var prop = {
            radius: radius,
            v0: v0,
            position: Vec3.sum(center, {
                x: radius,
                y: 0.0,
                z: 0.0
            }),
            lineColor: color,
            period: T,
            dimensions: size,
            velocity: Vec3.multiply(v0, Vec3.normalize({
                x: 0,
                y: -0.2,
                z: 0.9
            }))
        };
        planet_properties.push(prop);
        planets.push(Entities.addEntity({
            type: "Model",
            modelURL: BASE_URL + (i + 1) + ".fbx",
            position: prop.position,
            dimensions: {
                x: prop.dimensions,
                y: prop.dimensions,
                z: prop.dimensions
            },
            velocity: prop.velocity,
            angularDamping: DAMPING,
            damping: DAMPING,
            ignoreCollisions: false,
            lifetime: LIFETIME,
            collisionsWillMove: true,
        }));
        // Each successive planet orbits further out and is slightly larger
        // (mutates the module-level radius/size reference values).
        radius *= DELTA_RADIUS;
        size += DELTA_SIZE;
    }
}
// Initialize planets
initPlanets();
// Label overlays shown while paused, plus the leader lines pointing at them.
var labels = [];
var labelLines = [];
var labelsShowing = false;
// Text-entity dimensions and placement offsets -- presumably meters; verify.
var LABEL_X = 8.0;
var LABEL_Y = 3.0;
var LABEL_Z = 1.0;
var LABEL_DIST = 8.0;
var TEXT_HEIGHT = 1.0;
// NOTE(review): sunLabel is declared and deleted in hideLabels() but never
// assigned in this file -- confirm whether a sun label was intended.
var sunLabel;
// Show a name + current-speed label (with a leader line) above each planet.
// Invoked when the simulation is paused; hideLabels() is the inverse.
function showLabels() {
    labelsShowing = true;
    var names = ["Mercury", "Venus", "Earth", "Mars", "Jupiter", "Saturn", "Uranus", "Neptune"];
    for (var i = 0; i < NUM_PLANETS; i++) {
        var properties = planet_properties[i];
        var text = names[i] + " Speed: " + Vec3.length(properties.velocity).toFixed(2);
        var labelPos = Vec3.sum(planet_properties[i].position, {x: 0.0, y: LABEL_DIST, z: LABEL_DIST});
        var linePos = planet_properties[i].position;
        // Leader line from the planet up to the label.
        labelLines.push(Entities.addEntity({
            type: "Line",
            position: linePos,
            dimensions: {x: 20, y: 20, z: 20},
            lineWidth: 3.0,
            color: {red: 255, green: 255, blue: 255},
            linePoints: [{x: 0, y: 0, z: 0}, computeLocalPoint(linePos, labelPos)]
        }));
        // The label itself, always turned toward the camera.
        labels.push(Entities.addEntity({
            type: "Text",
            text: text,
            lineHeight: TEXT_HEIGHT,
            dimensions: {x: LABEL_X, y: LABEL_Y, z: LABEL_Z},
            position: labelPos,
            backgroundColor: {red: 10, green: 10, blue: 10},
            textColor: {red: 255, green: 255, blue: 255},
            faceCamera: true
        }));
    }
}
function hideLabels() {
labelsShowing = false;
Entities.deleteEntity(sunLabel);
for (var i = 0; i < NUM_PLANETS; ++i) {
Entities.deleteEntity(labelLines[i]);
Entities.deleteEntity(labels[i]);
}
labels = [];
labelLines = [];
}
// Simulation clock: update() ignores wall-clock deltaTime and advances with
// the fixed timestep dt; 'elapsed' counts whole TIME_STEP periods for trails.
var time = 0.0;
var elapsed;
var counter = 0;
var dt = 1.0 / TIME_STEP;
// Advance the solar system one fixed timestep: semi-implicit Euler gravity
// integration per planet, optional trail drawing, and a periodic
// energy-conservation recalibration.
function update(deltaTime) {
if (paused) {
return;
}
// Fixed timestep: the wall-clock deltaTime argument is deliberately ignored.
deltaTime = dt;
time++;
for (var i = 0; i < NUM_PLANETS; ++i) {
var properties = planet_properties[i];
var between = Vec3.subtract(properties.position, center);
// getAcceleration() is negative, so 'vel' points back toward the sun.
var speed = getAcceleration(properties.radius) * deltaTime;
var vel = Vec3.multiply(speed, Vec3.normalize(between));
// Update velocity and position
properties.velocity = Vec3.sum(properties.velocity, vel);
properties.position = Vec3.sum(properties.position, Vec3.multiply(properties.velocity, deltaTime));
Entities.editEntity(planets[i], {
velocity: properties.velocity,
position: properties.position
});
// Create new or update current trail
if (trailsEnabled) {
var lineStack = planetLines[i];
var point = properties.position;
// Trail points are local to the newest Line entity in the stack.
var prop = Entities.getEntityProperties(lineStack[lineStack.length - 1]);
var linePos = prop.position;
trails[i].push(computeLocalPoint(linePos, point));
Entities.editEntity(lineStack[lineStack.length - 1], {
linePoints: trails[i]
});
if (trails[i].length === MAX_POINTS_PER_LINE) {
trails[i] = newLine(lineStack, point, properties.period, properties.lineColor);
}
}
// Measure total energy every 10 updates, recalibrate velocity if necessary
// NOTE(review): verify properties.v0 is populated (see initPlanets) and
// that calcEnergyError expects the velocity *vector* here rather than a
// scalar speed -- otherwise this check silently evaluates to NaN.
if (energyConserved) {
if (counter % 10 === 0) {
var error = calcEnergyError(planets[i], properties.radius, properties.v0, properties.velocity, properties.position);
if (Math.abs(error) >= ERROR_THRESH) {
var speed = adjustVelocity(planets[i], properties.position);
properties.velocity = Vec3.multiply(speed, Vec3.normalize(properties.velocity));
}
}
}
}
counter++;
if (time % TIME_STEP == 0) {
elapsed++;
}
}
function computeLocalPoint(linePos, worldPoint) {
var localPoint = Vec3.subtract(worldPoint, linePos);
return localPoint;
}
function getAcceleration(radius) {
var acc = -(G * M) * Math.pow(radius, (-2.0));
return acc;
}
// Create a new trail
// Start a fresh trail for the given planet: resets the elapsed-period
// counter, creates the first Line entity, and registers the line stack and
// point list in the module-level planetLines/trails arrays.
// Fix: removed the unused local 'trail' array -- the real point list is the
// one returned by newLine().
function resetTrails(planetIndex) {
    elapsed = 0.0;
    var properties = planet_properties[planetIndex];
    var lineStack = [];
    //add the first line to both the line entity stack and the trail
    trails.push(newLine(lineStack, properties.position, properties.period, properties.lineColor));
    planetLines.push(lineStack);
}
// Create a new line
// While the trail is younger than one orbital period, append a fresh Line
// entity to lineStack; afterwards recycle the oldest line in the stack.
// Returns a new trail point list seeded with the line-local origin.
function newLine(lineStack, point, period, color) {
if (elapsed < period) {
var line = Entities.addEntity({
position: point,
type: "Line",
color: color,
dimensions: {
x: LINE_DIM,
y: LINE_DIM,
z: LINE_DIM
},
lifetime: LIFETIME,
lineWidth: LINE_WIDTH
});
lineStack.push(line);
} else {
// Begin overwriting first lines after one full revolution (one period)
var firstLine = lineStack.shift();
Entities.editEntity(firstLine, {
position: point,
linePoints: [{
x: 0.0,
y: 0.0,
z: 0.0
}]
});
lineStack.push(firstLine);
}
// Seed the trail with the origin point (local to the line at 'point').
var points = [];
points.push(computeLocalPoint(point, point));
return points;
}
// Measure energy error, recalculate velocity to return to initial net energy
// Module-level scratch values: calcEnergyError() intentionally writes
// totalEnergy so adjustVelocity() can reuse it on the same planet.
var totalEnergy;
var measuredEnergy;
var measuredPE;
// Percent deviation of the planet's current total orbital energy from the
// ideal circular-orbit energy at radius. 'planet' is unused; v0/v are
// expected to be scalar speeds.
// NOTE(review): update() passes properties.velocity (a vec3) as 'v', which
// makes Math.pow(v, 2.0) NaN -- confirm the intended argument.
function calcEnergyError(planet, radius, v0, v, pos) {
totalEnergy = 0.5 * M * Math.pow(v0, 2.0) - ((G * M * m) / radius);
measuredEnergy = 0.5 * M * Math.pow(v, 2.0) - ((G * M * m) / Vec3.length(Vec3.subtract(center, pos)));
var error = ((measuredEnergy - totalEnergy) / totalEnergy) * 100;
return error;
}
function adjustVelocity(planet, pos) {
var measuredPE = -(G * M * m) / Vec3.length(Vec3.subtract(center, pos));
return Math.sqrt(2 * (totalEnergy - measuredPE) / M);
}
// Allow user to toggle pausing the model, switch to planet view
// Text overlay just above the settings panel; clicking it is handled in
// mousePressEvent() below.
var pauseButton = Overlays.addOverlay("text", {
backgroundColor: {
red: 200,
green: 200,
blue: 255
},
text: "Pause",
x: PANEL_X,
y: PANEL_Y - 30,
width: 70,
height: 20,
alpha: 1.0,
backgroundAlpha: 0.5,
visible: true
});
// Whether the simulation is currently paused (toggled by the button above).
var paused = false;
// Toggle pause via the Pause overlay button: freezes planet motion, shows
// the labels while paused, and restores the button text/labels on resume.
function mousePressEvent(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({
x: event.x,
y: event.y
});
if (clickedOverlay == pauseButton) {
paused = !paused;
// Zero the entity velocities; update() stops integrating while paused.
for (var i = 0; i < NUM_PLANETS; ++i) {
Entities.editEntity(planets[i], {
velocity: {
x: 0.0,
y: 0.0,
z: 0.0
}
});
}
if (paused && !labelsShowing) {
// Entering pause: red "Paused" button plus planet labels.
Overlays.editOverlay(pauseButton, {
text: "Paused",
backgroundColor: {
red: 255,
green: 50,
blue: 50
}
});
showLabels();
}
if (paused == false && labelsShowing) {
// Resuming: restore the normal button and hide the labels.
Overlays.editOverlay(pauseButton, {
text: "Pause",
backgroundColor: {
red: 200,
green: 200,
blue: 255
}
});
hideLabels();
}
planetView = false;
}
}
function keyPressEvent(event) {
// Jump back to solar system view
if (event.text == "TAB" && planetView) {
if (earthView) {
satelliteGame.endGame();
earthView = false;
}
MyAvatar.position = startingPosition;
}
}
// While paused, double-clicking a planet's label zooms the avatar in on that
// planet; double-clicking Earth's label also starts the satellite mini-game.
function mouseDoublePressEvent(event) {
if (earthView) {
return;
}
var pickRay = Camera.computePickRay(event.x, event.y)
var rayPickResult = Entities.findRayIntersection(pickRay, true);
for (var i = 0; i < NUM_PLANETS; ++i) {
if (rayPickResult.entityID === labels[i]) {
planetView = true;
// i == 2 is Earth: jump far out and launch the satellite game.
if (i == 2) {
MyAvatar.position = Vec3.sum(center, {
x: 200,
y: 200,
z: 200
});
Camera.setPosition(Vec3.sum(center, {
x: 200,
y: 200,
z: 200
}));
earthView = true;
satelliteGame = new SatelliteGame();
} else {
// Other planets: hop just in front of the planet and look at it.
MyAvatar.position = Vec3.sum({
x: 0.0,
y: 0.0,
z: 3.0
}, planet_properties[i].position);
Camera.lookAt(planet_properties[i].position);
}
break;
}
}
}
// Create UI panel
// Settings panel (from cookies.js): sliders for gravity strength and orbital
// period, checkboxes for trails and energy-error recalibration. Each control
// takes (setter, getter, display-formatter) callbacks.
var panel = new Panel(PANEL_X, PANEL_Y);
var panelItems = [];
var g_multiplier = 1.0;
panelItems.push(panel.newSlider("Adjust Gravitational Force: ", 0.1, 5.0,
function(value) {
g_multiplier = value;
G = G_ref * g_multiplier;
},
function() {
return g_multiplier;
},
function(value) {
return value.toFixed(1) + "x";
}));
var period_multiplier = 1.0;
var last_alpha = period_multiplier;
panelItems.push(panel.newSlider("Adjust Orbital Period: ", 0.1, 3.0,
function(value) {
period_multiplier = value;
changePeriod(period_multiplier);
},
function() {
return period_multiplier;
},
function(value) {
return (value).toFixed(2) + "x";
}));
// NOTE(review): re-enabling trails pushes fresh stacks onto trails and
// planetLines without clearing the previous entries, so update() keeps
// indexing the old (emptied) stacks -- confirm intended behavior.
panelItems.push(panel.newCheckbox("Leave Trails: ",
function(value) {
trailsEnabled = value;
if (trailsEnabled) {
for (var i = 0; i < NUM_PLANETS; ++i) {
resetTrails(i);
}
//if trails are off and we've already created trails, remove existing trails
} else if (planetLines.length != 0) {
for (var i = 0; i < NUM_PLANETS; ++i) {
for (var j = 0; j < planetLines[i].length; ++j) {
Entities.deleteEntity(planetLines[i][j]);
}
planetLines[i] = [];
}
}
},
function() {
return trailsEnabled;
},
function(value) {
return value;
}));
panelItems.push(panel.newCheckbox("Energy Error Calculations: ",
function(value) {
energyConserved = value;
},
function() {
return energyConserved;
},
function(value) {
return value;
}));
// Update global G constant, period, poke velocity to new value
function changePeriod(alpha) {
var ratio = last_alpha / alpha;
G = Math.pow(ratio, 2.0) * G;
for (var i = 0; i < NUM_PLANETS; ++i) {
var properties = planet_properties[i];
properties.period = ratio * properties.period;
properties.velocity = Vec3.multiply(ratio, properties.velocity);
}
last_alpha = alpha;
}
// Clean up models, UI panels, lines, and button overlays
function scriptEnding() {
// NOTE(review): satelliteGame is undefined if the mini-game was never
// started -- confirm endGame() is safe to call here in that case.
satelliteGame.endGame();
Entities.deleteEntity(theSun);
for (var i = 0; i < NUM_PLANETS; ++i) {
Entities.deleteEntity(planets[i]);
}
Menu.removeMenu("Developer > Scene");
panel.destroy();
Overlays.deleteOverlay(pauseButton);
// Sweep up any leftover Line/Text entities (trails, labels) near the avatar.
var e = Entities.findEntities(MyAvatar.position, 16000);
for (i = 0; i < e.length; i++) {
var props = Entities.getEntityProperties(e[i]);
if (props.type === "Line" || props.type === "Text") {
Entities.deleteEntity(e[i]);
}
}
};
// Route mouse/keyboard events: the settings panel gets its own wrappers, and
// the simulation's handlers are connected separately afterwards.
Controller.mouseMoveEvent.connect(function panelMouseMoveEvent(event) {
return panel.mouseMoveEvent(event);
});
Controller.mousePressEvent.connect(function panelMousePressEvent(event) {
return panel.mousePressEvent(event);
});
Controller.mouseDoublePressEvent.connect(function panelMouseDoublePressEvent(event) {
return panel.mouseDoublePressEvent(event);
});
Controller.mouseReleaseEvent.connect(function(event) {
return panel.mouseReleaseEvent(event);
});
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseDoublePressEvent.connect(mouseDoublePressEvent);
Controller.keyPressEvent.connect(keyPressEvent);
Script.scriptEnding.connect(scriptEnding);
Script.update.connect(update);

View file

@ -0,0 +1,42 @@
<!-- -->
<!-- #20622: JS Stream Player -->
<!-- ************************* -->
<!-- -->
<!-- Created by Kevin M. Thomas and Thoys 07/17/15. -->
<!-- Copyright 2015 High Fidelity, Inc. -->
<!-- kevintown.net -->
<!-- -->
<!-- JavaScript for the High Fidelity interface that creates a stream player with a UI and keyPressEvents for adding a stream URL in addition to play, stop and volume functionality. -->
<!-- -->
<!-- Distributed under the Apache License, Version 2.0. -->
<!-- See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html -->
<!-- -->
<!DOCTYPE html>
<html lang="en">
<head>
<script src="http://code.jquery.com/jquery-1.11.3.min.js"></script>
<script type="text/javascript">
$(function(){
    if (window.EventBridge !== undefined) {
        EventBridge.scriptEventReceived.connect(function(data) {
            var myData = JSON.parse(data);
            if (myData.action == "changeStream") {
                $('body > audio').attr("src", myData.stream);
            }
            if (myData.action == "changeVolume") {
                $('body > audio').prop("volume", myData.volume);
            }
        });
        // Fix: announce readiness only when the bridge exists. The previous
        // unconditional call threw a ReferenceError when the page was opened
        // outside of Interface (where EventBridge is undefined).
        EventBridge.emitWebEvent("loaded");
    }
});
</script>
</head>
<body>
<!-- Fix: removed the duplicated 'controls' attribute. -->
<audio controls src="" autoplay></audio>
</body>
</html>

View file

@ -0,0 +1,140 @@
<!DOCTYPE html>
<html>
<head>
<link rel="stylesheet" type="text/css" href="style.css">
<script type="text/javascript" src="jquery-2.1.4.min.js"></script>
<script type="text/javascript">
// Map from property key -> PropertyInput widget.
// NOTE(review): used purely as a keyed map; {} would be the conventional
// choice over [] — behaviour is identical here.
var properties = [];
// Serialize a message object and forward it to the controlling script.
function sendWebEvent(data) {
EventBridge.emitWebEvent(JSON.stringify(data));
}
// Base "widget" for one editable property row: a label plus a value cell,
// appended to #properties-list. Subclasses provide createValue()/getValue()
// on their prototypes.
PropertyInput = function(key, label, value, attributes) {
this.key = key;
this.label = label;
this.value = value;
this.attributes = attributes;
var self = this;
// Build the row and attach it to the document immediately.
this.construct = function() {
self.widget = $('<div>').addClass('property').append(self.createLabel()).append(self.createValueDiv());
$('#properties-list').append(self.widget);
};
// Pull the subclass implementations off the instance's prototype so the
// closures above can call them through `self`.
// NOTE(review): __proto__ is non-standard; works in the embedded web view.
this.createValue = self.__proto__.createValue;
this.getValue = self.__proto__.getValue;
this.createValueDiv = function() {
self.inputDiv = $('<div>').addClass('value').append(self.createValue());
return self.inputDiv;
};
// Append an extra button (e.g. "Recalculate spacing") to the value cell.
this.addButton = function(id, buttonText) {
self.inputDiv.append($('<div>').append($('<input>').attr('type', 'button').attr('id', id).val(buttonText)));
};
this.createWidget = function() {
self.widget = $('<div>').addClass('property').append(self.createLabel()).append(self.inputDiv);
return self.widget;
};
this.createLabel = function() {
self.label = $('<div>').addClass('label').text(label);
return self.label;
};
// NOTE(review): assumes the subclass set self.input; CoordinateInput does
// not, so setValue on a coordinate row would fail — verify callers.
this.setValue = function(value) {
self.input.val(value);
};
this.construct();
};
// Shared 'change' handler for every input: report the row's current value
// to the controlling script. The owning row is located through the
// 'var-name' data attribute stamped on each input element.
var valueChangeHandler = function() {
sendWebEvent({
action: 'value-change',
option: $(this).data('var-name'),
value: properties[$(this).data('var-name')].getValue()
});
};
// Property row holding a single numeric value (<input type=number>).
NumberInput = function(key, label, value, attributes) {
PropertyInput.call(this, key, label, value, attributes);
};
NumberInput.prototype = Object.create(PropertyInput.prototype);
NumberInput.prototype.constructor = NumberInput;
NumberInput.prototype.createValue = function() {
this.input = $('<input>').data('var-name', this.key).attr('name', this.key).attr('type', 'number').val(this.value).on('change', valueChangeHandler);
if (this.attributes !== undefined) {
// Optional min/max/step constraints.
this.input.attr(this.attributes);
}
return this.input;
};
NumberInput.prototype.getValue = function() {
return parseFloat(this.input.val());
};
// Property row holding an {x, y, z} coordinate as three numeric inputs.
CoordinateInput = function(key, label, value, attributes) {
PropertyInput.call(this, key, label, value, attributes);
};
CoordinateInput.prototype = Object.create(PropertyInput.prototype);
CoordinateInput.prototype.constructor = CoordinateInput;
CoordinateInput.prototype.createValue = function() {
this.inputX = $('<input>').data('var-name', this.key).attr('name', this.key + '-x').attr('type', 'number').addClass('coord').val(this.value.x).on('change', valueChangeHandler);
this.inputY = $('<input>').data('var-name', this.key).attr('name', this.key + '-y').attr('type', 'number').addClass('coord').val(this.value.y).on('change', valueChangeHandler);
this.inputZ = $('<input>').data('var-name', this.key).attr('name', this.key + '-z').attr('type', 'number').addClass('coord').val(this.value.z).on('change', valueChangeHandler);
if (this.attributes !== undefined) {
// The same min/max/step constraints apply to all three axes.
this.inputX.attr(this.attributes);
this.inputY.attr(this.attributes);
this.inputZ.attr(this.attributes);
}
return [encapsulateInput(this.inputX, 'X'), encapsulateInput(this.inputY, 'Y'), encapsulateInput(this.inputZ, 'Z')];
};
CoordinateInput.prototype.getValue = function() {
return {x: parseFloat(this.inputX.val()), y: parseFloat(this.inputY.val()), z: parseFloat(this.inputZ.val())};
};
// Wrap one axis input together with its label ("X ", "Y ", "Z ").
function encapsulateInput(input, label) {
return $('<div>').addClass('input-area').append(label + ' ').append(input);
}
// Append a section header row to the properties list.
function addHeader(label) {
$('#properties-list').append($('<div>').addClass('section-header').append($('<label>').text(label)));
}
// Page initialization: build every settings row, wire the action buttons,
// and tell the controlling script the UI is ready.
$(function() {
addHeader('Stack Settings');
properties['numLayers'] = new NumberInput('numLayers', 'Layers', 17, {'min': 0, 'max': 300, 'step': 1});
properties['blocksPerLayer'] = new NumberInput('blocksPerLayer', 'Blocks per layer', 4, {'min': 1, 'max': 100, 'step': 1});
properties['blockSize'] = new CoordinateInput('blockSize', 'Block size', {x: 0.2, y: 0.1, z: 0.8}, {'min': 0.05, 'max': 20, 'step': 0.1});
properties['blockSpacing'] = new NumberInput('blockSpacing', 'Block spacing', properties['blockSize'].getValue().x / properties['blocksPerLayer'].getValue(), {'min': 0, 'max': 20, 'step': 0.01});
properties['blockSpacing'].addButton('btn-recalculate-spacing', 'Recalculate spacing');
// Recompute the default spacing from the current size / per-layer count.
$('#btn-recalculate-spacing').on('click', function() {
properties['blockSpacing'].setValue(properties['blockSize'].getValue().x / properties['blocksPerLayer'].getValue());
});
properties['blockHeightVariation'] = new NumberInput('blockHeightVariation', 'Block height variation (%)', 0.1, {'min': 0, 'max': 1, 'step': 0.01});
addHeader('Physics Settings');
properties['gravity'] = new CoordinateInput('gravity', 'Gravity', {x: 0, y: -2.8, z: 0}, {'step': 0.01});
properties['density'] = new NumberInput('density', 'Density', 4000, {'min': 0, 'max': 4000, 'step': 1});
properties['dampingFactor'] = new NumberInput('dampingFactor', 'Damping factor', 0.98, {'min': 0, 'max': 1, 'step': 0.01});
properties['angularDampingFactor'] = new NumberInput('angularDampingFactor', 'Angular damping factor', 0.8, {'min': 0, 'max': 1, 'step': 0.01});
properties['friction'] = new NumberInput('friction', 'Friction', 0.99, {'min': 0, 'max': 1, 'step': 0.01});
properties['restitution'] = new NumberInput('restitution', 'Restitution', 0.0, {'min': 0, 'max': 1, 'step': 0.01});
addHeader('Spawn Settings');
properties['spawnDistance'] = new NumberInput('spawnDistance', 'Spawn distance (meters)', 3);
properties['blockYawOffset'] = new NumberInput('blockYawOffset', 'Block yaw offset (degrees)', 45, {'min': 0, 'max': 360, 'step': 1});
properties['baseDimension'] = new CoordinateInput('baseDimension', 'Base dimension', {x: 7, y: 2, z: 7}, {'min': 0.5, 'max': 200, 'step': 0.1});
addHeader('Actions');
$('#properties-list')
.append($('<input>').val('factory reset').attr('type', 'button').on('click', function() { sendWebEvent({action: 'factory-reset'}); }))
.append($('<input>').val('save as default').attr('type', 'button').on('click', function() { sendWebEvent({action: 'save-default'}); }))
.append($('<input>').val('cleanup planky').attr('type', 'button').on('click', function() { sendWebEvent({action: 'cleanup'}); }));
// Load saved values pushed by the script.
// NOTE(review): setValue on coordinate rows relies on self.input, which
// CoordinateInput never sets — confirm saved options exclude coordinates.
if (window.EventBridge !== undefined) {
EventBridge.scriptEventReceived.connect(function(data) {
data = JSON.parse(data);
if (data.action == 'load') {
$.each(data.options, function(option, value) {
properties[option].setValue(value);
});
}
});
}
// NOTE(review): this call is outside the EventBridge guard and would throw
// in a plain browser — presumably the page only ever runs inside Interface.
sendWebEvent({action: 'loaded'});
});
</script>
</head>
<body class="properties">
<div id="properties-list"></div>
</body>
</html>

331
examples/leaves.js Executable file
View file

@ -0,0 +1,331 @@
//
// Leaves.js
// examples
//
// Created by Bing Shearer on 14 Jul 2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Name stamped on every spawned leaf entity so the squall can find its own.
var leafName = "scriptLeaf";
// A "squall" of falling leaves around `properties.origin`; see the
// leafSquall1 instantiation at the bottom for the expected options.
var leafSquall = function (properties) {
var // Properties
squallOrigin,
squallRadius,
leavesPerMinute = 60,
leafSize = {
x: 0.1,
y: 0.1,
z: 0.1
},
leafFallSpeed = 1, // m/s
leafLifetime = 60, // Seconds
leafSpinMax = 0, // Maximum angular velocity per axis; deg/s
debug = false, // Display origin circle; don't use running on Stack Manager
// Other
squallCircle,
SQUALL_CIRCLE_COLOR = {
red: 255,
green: 0,
blue: 0
},
SQUALL_CIRCLE_ALPHA = 0.5,
SQUALL_CIRCLE_ROTATION = Quat.fromPitchYawRollDegrees(90, 0, 0),
leafProperties,
leaf_MODEL_URL = "https://hifi-public.s3.amazonaws.com/ozan/support/forBing/palmLeaf.fbx",
leafTimer,
leaves = [], // HACK: Work around leaves not always getting velocities
leafVelocities = [], // HACK: Work around leaves not always getting velocities
DEGREES_TO_RADIANS = Math.PI / 180,
leafDeleteOnTearDown = true,
maxLeaves,
leafCount,
nearbyEntities,
complexMovement = false,
movementTime = 0,
// NOTE(review): reads properties.leafSpinMax directly, before
// processProperties() applies defaults — NaN if the caller omits it.
maxSpinRadians = properties.leafSpinMax * DEGREES_TO_RADIANS,
windFactor,
leafDeleteOnGround = false,
floorHeight = null;
// Copy caller-supplied options into the squall's closure state, validating
// the two required fields (origin, radius), then build the entity template
// used for every spawned leaf.
function processProperties() {
    if (!properties.hasOwnProperty("origin")) {
        print("ERROR: Leaf squall origin must be specified");
        return;
    }
    squallOrigin = properties.origin;
    if (!properties.hasOwnProperty("radius")) {
        print("ERROR: Leaf squall radius must be specified");
        return;
    }
    squallRadius = properties.radius;
    if (properties.hasOwnProperty("leavesPerMinute")) {
        leavesPerMinute = properties.leavesPerMinute;
    }
    if (properties.hasOwnProperty("leafSize")) {
        leafSize = properties.leafSize;
    }
    if (properties.hasOwnProperty("leafFallSpeed")) {
        leafFallSpeed = properties.leafFallSpeed;
    }
    if (properties.hasOwnProperty("leafLifetime")) {
        leafLifetime = properties.leafLifetime;
    }
    if (properties.hasOwnProperty("leafSpinMax")) {
        leafSpinMax = properties.leafSpinMax;
    }
    if (properties.hasOwnProperty("debug")) {
        debug = properties.debug;
    }
    if (properties.hasOwnProperty("floorHeight")) {
        floorHeight = properties.floorHeight;
    }
    if (properties.hasOwnProperty("maxLeaves")) {
        maxLeaves = properties.maxLeaves;
    }
    if (properties.hasOwnProperty("complexMovement")) {
        complexMovement = properties.complexMovement;
    }
    if (properties.hasOwnProperty("leafDeleteOnGround")) {
        leafDeleteOnGround = properties.leafDeleteOnGround;
    }
    // FIX: this option was accepted by callers (see the leafSquall1
    // instance below) but never read; honor it like the other options.
    if (properties.hasOwnProperty("leafDeleteOnTearDown")) {
        leafDeleteOnTearDown = properties.leafDeleteOnTearDown;
    }
    if (properties.hasOwnProperty("windFactor")) {
        windFactor = properties.windFactor;
    } else if (complexMovement == true) {
        print("ERROR: Wind Factor must be defined for complex movement");
    }
    // Template for Entities.addEntity(); position and angularVelocity are
    // filled in per leaf by createleaf().
    leafProperties = {
        type: "Model",
        name: leafName,
        modelURL: leaf_MODEL_URL,
        lifetime: leafLifetime,
        dimensions: leafSize,
        velocity: {
            x: 0,
            y: -leafFallSpeed,
            z: 0
        },
        damping: 0,
        angularDamping: 0,
        ignoreForCollisions: true
    };
}
// Spawn one leaf at a random point on the squall disc, and re-apply cached
// velocities to the most recent leaves (see HACK notes below).
function createleaf() {
var angle,
radius,
offset,
leaf,
spin = {
x: 0,
y: 0,
z: 0
},
i;
// HACK: Work around leaves not always getting velocities set at creation
for (i = 0; i < leaves.length; i++) {
Entities.editEntity(leaves[i], leafVelocities[i]);
}
// NOTE(review): placement yaw is scaled by leafSpinMax (deg/s), so leaves
// only spawn within a leafSpinMax-degree arc — a full 360-degree spread
// was presumably intended; confirm before changing.
angle = Math.random() * leafSpinMax;
radius = Math.random() * squallRadius;
offset = Vec3.multiplyQbyV(Quat.fromPitchYawRollDegrees(0, angle, 0), {
x: 0,
y: -0.1,
z: radius
});
leafProperties.position = Vec3.sum(squallOrigin, offset);
// Simple mode: random constant tumble. Complex mode: spin is driven
// later by newLeafMovement(), so start at zero.
if (properties.leafSpinMax > 0 && !complexMovement) {
spin = {
x: Math.random() * maxSpinRadians,
y: Math.random() * maxSpinRadians,
z: Math.random() * maxSpinRadians
};
leafProperties.angularVelocity = spin;
} else if (complexMovement) {
spin = {
x: 0,
y: 0,
z: 0
};
leafProperties.angularVelocity = spin
}
leaf = Entities.addEntity(leafProperties);
// HACK: Work around leaves not always getting velocities set at creation
leaves.push(leaf);
leafVelocities.push({
velocity: leafProperties.velocity,
angularVelocity: spin
});
// Only the five most recent leaves are re-poked by the hack above.
if (leaves.length > 5) {
leaves.shift();
leafVelocities.shift();
}
}
// Create the optional debug overlay circle and start the leaf spawner.
function setUp() {
if (debug) {
squallCircle = Overlays.addOverlay("circle3d", {
size: {
x: 2 * squallRadius,
y: 2 * squallRadius
},
color: SQUALL_CIRCLE_COLOR,
alpha: SQUALL_CIRCLE_ALPHA,
solid: true,
visible: debug,
position: squallOrigin,
rotation: SQUALL_CIRCLE_ROTATION
});
}
// Spawn leaves at the requested rate while under the maxLeaves cap
// (leafCount is refreshed by the one-second counter interval below).
leafTimer = Script.setInterval(function () {
if (leafCount <= maxLeaves - 1) {
createleaf()
}
}, 60000 / leavesPerMinute);
}
// Poll the entities around the squall at 10 Hz and run the movement pass.
Script.setInterval(function () {
nearbyEntities = Entities.findEntities(squallOrigin, squallRadius);
newLeafMovement()
}, 100);
// Per-tick (10 Hz) movement pass for leaves inside the squall radius.
// Relies on nearbyEntities, refreshed by the polling interval above.
function newLeafMovement() { //new additions to leaf code. Operates at 10 Hz or every 100 ms
    movementTime += 0.1;
    var currentLeaf,
        randomRotationSpeed = {
            x: maxSpinRadians * Math.sin(movementTime),
            y: maxSpinRadians * Math.random(),
            z: maxSpinRadians * Math.sin(movementTime / 7)
        };
    for (var i = 0; i < nearbyEntities.length; i++) {
        var entityProperties = Entities.getEntityProperties(nearbyEntities[i]);
        var entityName = entityProperties.name;
        if (leafName === entityName) {
            currentLeaf = nearbyEntities[i];
            var leafHeight = entityProperties.position.y;
            if (complexMovement && leafHeight > floorHeight || complexMovement && floorHeight == null) { //actual new movement code;
                // Steer the leaf toward its "most downhill" local direction,
                // scaled by windFactor, with a pseudo-random tumble.
                var leafCurrentVel = entityProperties.velocity,
                    leafCurrentRot = entityProperties.rotation,
                    yVec = {
                        x: 0,
                        y: 1,
                        z: 0
                    },
                    leafYinWFVec = Vec3.multiplyQbyV(leafCurrentRot, yVec),
                    leafLocalHorVec = Vec3.cross(leafYinWFVec, yVec),
                    leafMostDownVec = Vec3.cross(leafYinWFVec, leafLocalHorVec),
                    leafDesiredVel = Vec3.multiply(leafMostDownVec, windFactor),
                    leafVelDelt = Vec3.subtract(leafDesiredVel, leafCurrentVel),
                    leafNewVel = Vec3.sum(leafCurrentVel, Vec3.multiply(leafVelDelt, windFactor));
                Entities.editEntity(currentLeaf, {
                    angularVelocity: randomRotationSpeed,
                    velocity: leafNewVel
                });
            } else if (floorHeight !== null && leafHeight <= floorHeight) {
                // FIX: guard against floorHeight == null; the original
                // comparison coerced null to 0 and could wrongly freeze or
                // delete leaves in domains with negative y.
                if (!leafDeleteOnGround) {
                    // Pin the leaf where it landed.
                    Entities.editEntity(nearbyEntities[i], {
                        locked: false,
                        velocity: {
                            x: 0,
                            y: 0,
                            z: 0
                        },
                        angularVelocity: {
                            x: 0,
                            y: 0,
                            z: 0
                        }
                    });
                } else {
                    // FIX: was Entity.deleteEntity() — "Entity" is not a
                    // defined interface object and threw a ReferenceError.
                    Entities.deleteEntity(currentLeaf);
                }
            }
        }
    }
}
// Once a second, count how many nearby entities are our leaves; the spawner
// in setUp() uses leafCount to enforce the maxLeaves cap.
// NOTE(review): getLeafCount is assigned without `var` (implicit global).
getLeafCount = Script.setInterval(function () {
leafCount = 0
for (var i = 0; i < nearbyEntities.length; i++) {
var entityName = Entities.getEntityProperties(nearbyEntities[i]).name;
//Stop Leaves at floorHeight
if (leafName === entityName) {
leafCount++;
if (i == nearbyEntities.length - 1) {
//print(leafCount);
}
}
}
}, 1000)
// Shutdown handler: stop the spawner, remove the debug overlay (only
// created when debug is true), and optionally delete the remaining leaves.
function tearDown() {
    Script.clearInterval(leafTimer);
    if (debug) {
        // FIX: the overlay only exists in debug mode; don't try to delete
        // an undefined overlay id.
        Overlays.deleteOverlay(squallCircle);
    }
    // FIX: nearbyEntities stays undefined until the first 100 ms poll has
    // run; guard so an immediate script stop cannot throw here.
    if (leafDeleteOnTearDown && nearbyEntities !== undefined) {
        for (var i = 0; i < nearbyEntities.length; i++) {
            var entityName = Entities.getEntityProperties(nearbyEntities[i]).name;
            if (leafName === entityName) {
                //We have a match - delete this entity
                Entities.editEntity(nearbyEntities[i], {
                    locked: false
                });
                Entities.deleteEntity(nearbyEntities[i]);
            }
        }
    }
}
processProperties();
setUp();
Script.scriptEnding.connect(tearDown);
return {};
};
// Example squall tuned for a specific domain location. Note leafSpinMax
// only applies when complexMovement is false (see createleaf()).
var leafSquall1 = new leafSquall({
origin: {
x: 3071.5,
y: 2170,
z: 6765.3
},
radius: 100,
leavesPerMinute: 30,
leafSize: {
x: 0.3,
y: 0.00,
z: 0.3
},
leafFallSpeed: 0.4,
leafLifetime: 100,
leafSpinMax: 30,
debug: false,
maxLeaves: 100,
leafDeleteOnTearDown: true,
complexMovement: true,
floorHeight: 2143.5,
windFactor: 0.5,
leafDeleteOnGround: false
});
// todo
//deal with depth issue

View file

@ -141,7 +141,7 @@ CameraManager = function() {
// Pick a point INITIAL_ZOOM_DISTANCE in front of the camera to use as a focal point
that.zoomDistance = INITIAL_ZOOM_DISTANCE;
that.targetZoomDistance = that.zoomDistance;
that.targetZoomDistance = that.zoomDistance + 3.0;
var focalPoint = Vec3.sum(Camera.getPosition(),
Vec3.multiply(that.zoomDistance, Quat.getFront(Camera.getOrientation())));
@ -150,6 +150,7 @@ CameraManager = function() {
var xzDist = Math.sqrt(dPos.x * dPos.x + dPos.z * dPos.z);
that.targetPitch = -Math.atan2(dPos.y, xzDist) * 180 / Math.PI;
that.targetPitch += (90 - that.targetPitch) / 3.0; // Swing camera "up" to look down at the focal point
that.targetYaw = Math.atan2(dPos.x, dPos.z) * 180 / Math.PI;
that.pitch = that.targetPitch;
that.yaw = that.targetYaw;

View file

@ -44,6 +44,7 @@
emitStrength: emitStrength,
emitDirection: emitDirection,
color: color,
lifespan: 1.0,
visible: true,
locked: false });
@ -67,13 +68,13 @@
var objs = [];
function Init() {
objs.push(new TestBox());
objs.push(new TestFx({ red: 255, blue: 0, green: 0 },
objs.push(new TestFx({ red: 255, green: 0, blue: 0 },
{ x: 0.5, y: 1.0, z: 0.0 },
100, 3, 1));
objs.push(new TestFx({ red: 0, blue: 255, green: 0 },
objs.push(new TestFx({ red: 0, green: 255, blue: 0 },
{ x: 0, y: 1, z: 0 },
1000, 5, 0.5));
objs.push(new TestFx({ red: 0, blue: 0, green: 255 },
objs.push(new TestFx({ red: 0, green: 0, blue: 255 },
{ x: -0.5, y: 1, z: 0 },
100, 3, 1));
}

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,186 @@
//
// vector.js
// examples
//
// Created by Bridget Went on 7/1/15.
// Copyright 2015 High Fidelity, Inc.
//
// A template for creating vector arrows using line entities. A VectorArrow object creates a
// draggable vector arrow where the user clicked at a specified distance from the viewer.
// The relative magnitude and direction of the vector may be displayed.
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
//
var LINE_DIMENSIONS = 100;
var LIFETIME = 6000;
var RAD_TO_DEG = 180.0 / Math.PI;
var LINE_WIDTH = 4;
var ARROW_WIDTH = 6;
var line, linePosition;
var arrow1, arrow2;
var SCALE = 0.15;
var ANGLE = 150.0;
// Draggable 3D vector arrow drawn with Line entities. A click anchors the
// vector `distance` meters along the pick ray; dragging stretches it and
// orients two short lines as the arrow head. Optionally a Text entity
// shows the relative magnitude and direction.
//   distance      - spawn distance from the camera, in meters
//   showStats     - create/update the Text readout when true
//   statsTitle    - prefix for the readout string
//   statsPosition - world position of the readout
VectorArrow = function(distance, showStats, statsTitle, statsPosition) {
    this.magnitude = 0;
    this.direction = {x: 0, y: 0, z: 0};
    this.showStats = showStats;
    this.isDragging = false;

    // Create the main line and both arrow-head lines, stored in the shared
    // module-level handles (line, arrow1, arrow2, linePosition).
    this.newLine = function(position) {
        linePosition = position;
        line = Entities.addEntity({
            position: linePosition,
            type: "Line",
            color: {red: 255, green: 255, blue: 255},
            dimensions: {
                x: LINE_DIMENSIONS,
                y: LINE_DIMENSIONS,
                z: LINE_DIMENSIONS
            },
            lineWidth: LINE_WIDTH,
            lifetime: LIFETIME,
            linePoints: []
        });
        arrow1 = Entities.addEntity({
            position: {x: 0, y: 0, z: 0},
            type: "Line",
            dimensions: {
                x: LINE_DIMENSIONS,
                y: LINE_DIMENSIONS,
                z: LINE_DIMENSIONS
            },
            color: {red: 255, green: 255, blue: 255},
            lineWidth: ARROW_WIDTH,
            linePoints: [],
        });
        arrow2 = Entities.addEntity({
            position: {x: 0, y: 0, z: 0},
            type: "Line",
            dimensions: {
                x: LINE_DIMENSIONS,
                y: LINE_DIMENSIONS,
                z: LINE_DIMENSIONS
            },
            color: {red: 255, green: 255, blue: 255},
            lineWidth: ARROW_WIDTH,
            linePoints: [],
        });
    }

    var STATS_DIMENSIONS = {
        x: 4.0,
        y: 1.5,
        z: 0.1
    };
    var TEXT_HEIGHT = 0.3;

    // Anchor a new vector where the user clicked and start dragging.
    this.onMousePressEvent = function(event) {
        this.newLine(computeWorldPoint(event));
        if (this.showStats) {
            this.label = Entities.addEntity({
                type: "Text",
                position: statsPosition,
                dimensions: STATS_DIMENSIONS,
                lineHeight: TEXT_HEIGHT,
                faceCamera: true
            });
        }
        this.isDragging = true;
    }

    // Stretch the line to the cursor and keep the arrow head aligned.
    this.onMouseMoveEvent = function(event) {
        if (!this.isDragging) {
            return;
        }
        var worldPoint = computeWorldPoint(event);
        var localPoint = computeLocalPoint(event, linePosition);
        // FIX: `points` was assigned without `var`, leaking an implicit
        // global; the unused `var points = []` in newLine was removed.
        var points = [{x: 0, y: 0, z: 0}, localPoint];
        Entities.editEntity(line, { linePoints: points });
        var nextOffset = Vec3.multiply(SCALE, localPoint);
        var normOffset = Vec3.normalize(localPoint);
        var axis = Vec3.cross(normOffset, Quat.getFront(Camera.getOrientation()) );
        axis = Vec3.cross(axis, normOffset);
        var rotate1 = Quat.angleAxis(ANGLE, axis);
        var rotate2 = Quat.angleAxis(-ANGLE, axis);
        // Rotate arrow head to follow direction of the line
        Entities.editEntity(arrow1, {
            visible: true,
            position: worldPoint,
            linePoints: [{x: 0, y: 0, z: 0}, nextOffset],
            rotation: rotate1
        });
        Entities.editEntity(arrow2, {
            visible: true,
            position: worldPoint,
            linePoints: [{x: 0, y: 0, z: 0}, nextOffset],
            rotation: rotate2
        });
        this.magnitude = Vec3.length(localPoint) * 0.1;
        this.direction = Vec3.normalize(Vec3.subtract(worldPoint, linePosition));
        if (this.showStats) {
            this.editLabel(statsTitle + " Magnitude " + this.magnitude.toFixed(2) + ", Direction: " +
                this.direction.x.toFixed(2) + ", " + this.direction.y.toFixed(2) + ", " + this.direction.z.toFixed(2));
        }
    }

    this.onMouseReleaseEvent = function() {
        this.isDragging = false;
    }

    // Remove the three line entities (the stats label has its own deleter).
    this.cleanup = function() {
        Entities.deleteEntity(line);
        Entities.deleteEntity(arrow1);
        Entities.deleteEntity(arrow2);
    }

    this.deleteLabel = function() {
        Entities.deleteEntity(this.label);
    }

    this.editLabel = function(str) {
        if(!this.showStats) {
            return;
        }
        Entities.editEntity(this.label, {
            text: str
        });
    }

    // World point `distance` meters along the pick ray through the click.
    function computeWorldPoint(event) {
        var pickRay = Camera.computePickRay(event.x, event.y);
        var addVector = Vec3.multiply(pickRay.direction, distance);
        return Vec3.sum(Camera.getPosition(), addVector);
    }

    // The same point expressed relative to the line's anchor position.
    function computeLocalPoint(event, linePosition) {
        var localPoint = Vec3.subtract(computeWorldPoint(event), linePosition);
        return localPoint;
    }
}

View file

@ -0,0 +1,33 @@
//
// #20628: JS Stream Player Domain-Zone-Entity
// ********************************************
//
// Created by Kevin M. Thomas and Thoys 07/20/15.
// Copyright 2015 High Fidelity, Inc.
// kevintown.net
//
// JavaScript for the High Fidelity interface that is an entity script to be placed in a chosen entity inside a domain-zone.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Entity script: lives inside an entity and triggers as a user approaches it.
// Entity script body: when invoked by the entity framework it loads the
// stream-player interface script if it is not already running.
(function() {
const SCRIPT_NAME = "https://dl.dropboxusercontent.com/u/17344741/jsstreamplayer/jsstreamplayerdomain-zone.js";
// True if `script` (case- and whitespace-insensitively) matches the URL of
// any currently running script.
// NOTE(review): `i` leaks as an implicit global (no var in the for-in).
function isScriptRunning(script) {
script = script.toLowerCase().trim();
var runningScripts = ScriptDiscoveryService.getRunning();
for (i in runningScripts) {
if (runningScripts[i].url.toLowerCase().trim() == script) {
return true;
}
}
return false;
};
if (!isScriptRunning(SCRIPT_NAME)) {
Script.load(SCRIPT_NAME);
}
})

View file

@ -0,0 +1,42 @@
<!-- -->
<!-- #20628: JS Stream Player Domain-Zone -->
<!-- ************************************* -->
<!-- -->
<!-- Created by Kevin M. Thomas and Thoys 07/20/15. -->
<!-- Copyright 2015 High Fidelity, Inc. -->
<!-- kevintown.net -->
<!-- -->
<!-- JavaScript for the High Fidelity interface that creates a stream player with a UI for playing a domain-zone specificed stream URL in addition to play, stop and volume functionality which is resident only in the domain-zone. -->
<!-- -->
<!-- Distributed under the Apache License, Version 2.0. -->
<!-- See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html -->
<!-- -->
<!DOCTYPE html>
<html lang="en">
<head>
<script src="http://code.jquery.com/jquery-1.11.3.min.js"></script>
<script type="text/javascript">
$(function(){
if (window.EventBridge !== undefined) {
EventBridge.scriptEventReceived.connect(function(data) {
var myData = JSON.parse(data);
if (myData.action == "changeStream") {
$('body > audio').attr("src", myData.stream);
}
if (myData.action == "changeVolume") {
$('body > audio').prop("volume", myData.volume);
}
});
}
EventBridge.emitWebEvent("loaded");
});
</script>
</head>
<body>
<audio controls src="" controls autoplay></audio>
</body>
</html>

View file

@ -0,0 +1,176 @@
//
// #20628: JS Stream Player Domain-Zone
// *************************************
//
// Created by Kevin M. Thomas, Thoys and Konstantin 07/24/15.
// Copyright 2015 High Fidelity, Inc.
// kevintown.net
//
// JavaScript for the High Fidelity interface that creates a stream player with a UI for playing a domain-zone specified stream URL in addition to play, stop and volume functionality which is resident only in the domain-zone.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Declare variables and set up new WebWindow.
var lastZone = "";
var volume = 0.5;
var stream = "";
var streamWindow = new WebWindow('Stream', "https://dl.dropboxusercontent.com/u/17344741/jsstreamplayer/jsstreamplayerdomain-zone.html", 0, 0, false);
var visible = false;
// Set up toggleStreamPlayButton overlay.
var toggleStreamPlayButton = Overlays.addOverlay("text", {
x: 122,
y: 310,
width: 38,
height: 28,
backgroundColor: { red: 0, green: 0, blue: 0},
color: { red: 255, green: 255, blue: 0},
font: {size: 15},
topMargin: 8,
visible: false,
text: " Play"
});
// Set up toggleStreamStopButton overlay.
var toggleStreamStopButton = Overlays.addOverlay("text", {
x: 166,
y: 310,
width: 40,
height: 28,
backgroundColor: { red: 0, green: 0, blue: 0},
color: { red: 255, green: 255, blue: 0},
font: {size: 15},
topMargin: 8,
visible: false,
text: " Stop"
});
// Set up increaseVolumeButton overlay.
var toggleIncreaseVolumeButton = Overlays.addOverlay("text", {
x: 211,
y: 310,
width: 18,
height: 28,
backgroundColor: { red: 0, green: 0, blue: 0},
color: { red: 255, green: 255, blue: 0},
font: {size: 15},
topMargin: 8,
visible: false,
text: " +"
});
// Set up decreaseVolumeButton overlay.
var toggleDecreaseVolumeButton = Overlays.addOverlay("text", {
x: 234,
y: 310,
width: 15,
height: 28,
backgroundColor: { red: 0, green: 0, blue: 0},
color: { red: 255, green: 255, blue: 0},
font: {size: 15},
topMargin: 8,
visible: false,
text: " -"
});
// Send a "changeStream" message to the WebWindow; the page sets the
// <audio> element's src to `stream` (an empty string stops playback).
function changeStream(stream) {
var streamJSON = {
action: "changeStream",
stream: stream
}
streamWindow.eventBridge.emitScriptEvent(JSON.stringify(streamJSON));
}
// Send a "changeVolume" message to the WebWindow; the page applies
// `volume` to the <audio> element's volume property.
function changeVolume(volume) {
var volumeJSON = {
action: "changeVolume",
volume: volume
}
streamWindow.eventBridge.emitScriptEvent(JSON.stringify(volumeJSON));
}
// Click dispatcher for the four stream-player overlay buttons.
// FIX: the +/- buttons could push `volume` past 1.0 or below 0.0;
// HTMLMediaElement.volume rejects values outside [0, 1], so clamp.
// The repeated overlay hit-test is also hoisted out of the branches.
function mousePressEvent(event) {
    var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
    if (clickedOverlay == toggleStreamPlayButton) {
        changeStream(stream);
        volume = 0.25;
        changeVolume(volume);
    }
    if (clickedOverlay == toggleStreamStopButton) {
        changeStream("");
    }
    if (clickedOverlay == toggleIncreaseVolumeButton) {
        volume = Math.min(volume + 0.25, 1.0);
        changeVolume(volume);
    }
    if (clickedOverlay == toggleDecreaseVolumeButton) {
        volume = Math.max(volume - 0.25, 0.0);
        changeVolume(volume);
    }
}
// A zone is "ours" when a stream URL has been captured from its userData
// and the entity is of type Zone.
function isOurZone(properties) {
return stream != "" && properties.type == "Zone";
}
// Show or hide all four player buttons together; tracks state in `visible`
// so redundant overlay edits are skipped.
function toggleVisible(newVisibility) {
if (newVisibility != visible) {
visible = newVisibility;
Overlays.editOverlay(toggleStreamPlayButton, {visible: visible});
Overlays.editOverlay(toggleStreamStopButton, {visible: visible});
Overlays.editOverlay(toggleIncreaseVolumeButton, {visible: visible});
Overlays.editOverlay(toggleDecreaseVolumeButton, {visible: visible});
}
}
// Leaving the domain invalidates all zone state, so simply stop the script.
Window.domainChanged.connect(function() {
Script.stop();
});
// On entering an entity: capture the stream URL from its userData and, if
// it is a stream zone, remember the zone name and show the player UI.
Entities.enterEntity.connect(function(entityID) {
print("Entered..." + JSON.stringify(entityID));
var properties = Entities.getEntityProperties(entityID);
stream = properties.userData;
if(isOurZone(properties))
{
lastZone = properties.name;
toggleVisible(true);
}
})
// On leaving the remembered zone: hide the UI and stop the stream.
Entities.leaveEntity.connect(function(entityID) {
print("Left..." + JSON.stringify(entityID));
var properties = Entities.getEntityProperties(entityID);
if (properties.name == lastZone && properties.type == "Zone") {
print("Leaving Zone!");
toggleVisible(false);
changeStream("");
}
})
// Shutdown handler: remove the button overlays, silence the stream, and
// destroy the hidden WebWindow.
function onScriptEnding() {
Overlays.deleteOverlay(toggleStreamPlayButton);
Overlays.deleteOverlay(toggleStreamStopButton);
Overlays.deleteOverlay(toggleIncreaseVolumeButton);
Overlays.deleteOverlay(toggleDecreaseVolumeButton);
changeStream("");
streamWindow.deleteLater();
}
// Connect mouse handling and keep the WebWindow hidden (audio still plays).
Controller.mousePressEvent.connect(mousePressEvent);
streamWindow.setVisible(false);
// Clean up when the script stops.
Script.scriptEnding.connect(onScriptEnding);

View file

@ -2,7 +2,7 @@ set(TARGET_NAME interface)
project(${TARGET_NAME})
# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "Faceshift" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK")
set(OPTIONAL_EXTERNALS "Faceshift" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK" "connexionClient")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)
@ -218,33 +218,13 @@ else (APPLE)
"${PROJECT_SOURCE_DIR}/resources"
$<TARGET_FILE_DIR:${TARGET_NAME}>/resources
)
find_package(OpenGL REQUIRED)
if (${OPENGL_INCLUDE_DIR})
include_directories(SYSTEM "${OPENGL_INCLUDE_DIR}")
endif ()
target_link_libraries(${TARGET_NAME} "${OPENGL_LIBRARY}")
# link target to external libraries
if (WIN32)
add_dependency_external_projects(glew)
find_package(GLEW REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${GLEW_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${GLEW_LIBRARIES} wsock32.lib opengl32.lib Winmm.lib)
if (USE_NSIGHT)
# try to find the Nsight package and add it to the build if we find it
find_package(NSIGHT)
if (NSIGHT_FOUND)
include_directories(${NSIGHT_INCLUDE_DIRS})
add_definitions(-DNSIGHT_FOUND)
target_link_libraries(${TARGET_NAME} "${NSIGHT_LIBRARIES}")
endif ()
endif()
# target_link_libraries(${TARGET_NAME} wsock32.lib Winmm.lib)
target_link_libraries(${TARGET_NAME} wsock32.lib Winmm.lib)
else (WIN32)
# Nothing else required on linux apparently
endif()
endif (APPLE)

View file

@ -0,0 +1,79 @@
//
// 3DConnexion.cpp
// hifi
//
// Created by MarcelEdward Verhagen on 09-06-15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef I3D_MOUSE_PARAMS_H
#define I3D_MOUSE_PARAMS_H
// Parameters for the 3D mouse based on the SDK from 3Dconnexion
// Sensor-side settings: which motions are enabled and the speed preset.
// Pure-virtual interface; the platform-specific 3Dconnexion client code
// provides the implementation.
class I3dMouseSensor {
public:
enum Speed {
SPEED_LOW = 0,
SPEED_MID,
SPEED_HIGH
};
virtual bool IsPanZoom() const = 0;
virtual bool IsRotate() const = 0;
virtual Speed GetSpeed() const = 0;
virtual void SetPanZoom(bool isPanZoom) = 0;
virtual void SetRotate(bool isRotate) = 0;
virtual void SetSpeed(Speed speed) = 0;
protected:
// Protected destructor: objects are not meant to be deleted through
// this interface pointer.
virtual ~I3dMouseSensor() {}
};
// Navigation-side settings: pivot behaviour, navigation mode and
// horizon lock.
class I3dMouseNavigation {
public:
enum Pivot {
PIVOT_MANUAL = 0,
PIVOT_AUTO,
PIVOT_AUTO_OVERRIDE
};
enum Navigation {
NAVIGATION_OBJECT_MODE = 0,
NAVIGATION_CAMERA_MODE,
NAVIGATION_FLY_MODE,
NAVIGATION_WALK_MODE,
NAVIGATION_HELICOPTER_MODE
};
enum PivotVisibility {
PIVOT_HIDE = 0,
PIVOT_SHOW,
PIVOT_SHOW_MOVING
};
virtual Navigation GetNavigationMode() const = 0;
virtual Pivot GetPivotMode() const = 0;
virtual PivotVisibility GetPivotVisibility() const = 0;
virtual bool IsLockHorizon() const = 0;
virtual void SetLockHorizon(bool bOn) = 0;
virtual void SetNavigationMode(Navigation navigation) = 0;
virtual void SetPivotMode(Pivot pivot) = 0;
virtual void SetPivotVisibility(PivotVisibility visibility) = 0;
protected:
// Protected destructor: see I3dMouseSensor.
virtual ~I3dMouseNavigation(){}
};
// Combined parameter interface handed to the application; deletable
// through this type (public virtual destructor).
class I3dMouseParam : public I3dMouseSensor, public I3dMouseNavigation {
public:
virtual ~I3dMouseParam() {}
};
#endif

View file

@ -0,0 +1,4 @@
The Mac version does not require any files here; the 3Dconnexion driver should be installed from
http://www.3dconnexion.eu/service/drivers.html
On Windows, the header file Inc/I3dMouseParams.h is required.

View file

@ -115,6 +115,7 @@
#include "devices/MIDIManager.h"
#include "devices/OculusManager.h"
#include "devices/TV3DManager.h"
#include "devices/3Dconnexion.h"
#include "scripting/AccountScriptingInterface.h"
#include "scripting/AudioDeviceScriptingInterface.h"
@ -330,7 +331,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
_lastNackTime(usecTimestampNow()),
_lastSendDownstreamAudioStats(usecTimestampNow()),
_isVSyncOn(true),
_isThrottleFPSEnabled(false),
_isThrottleFPSEnabled(true),
_aboutToQuit(false),
_notifiedPacketVersionMismatchThisDomain(false),
_glWidget(new GLCanvas()),
@ -638,6 +639,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
connect(applicationUpdater.data(), &AutoUpdater::newVersionIsAvailable, dialogsManager.data(), &DialogsManager::showUpdateDialog);
applicationUpdater->checkForUpdate();
// the 3Dconnexion device wants to be initiliazed after a window is displayed.
ConnexionClient::init();
auto& packetReceiver = nodeList->getPacketReceiver();
packetReceiver.registerListener(PacketType::DomainConnectionDenied, this, "handleDomainConnectionDeniedPacket");
}
@ -749,6 +753,7 @@ Application::~Application() {
Leapmotion::destroy();
RealSense::destroy();
ConnexionClient::destroy();
qInstallMessageHandler(NULL); // NOTE: Do this as late as possible so we continue to get our log messages
}
@ -766,8 +771,9 @@ void Application::initializeGL() {
}
#endif
// Where the gpuContext is created and where the TRUE Backend is created and assigned
_gpuContext = std::make_shared<gpu::Context>(new gpu::GLBackend());
// Where the gpuContext is initialized and where the TRUE Backend is created and assigned
gpu::Context::init<gpu::GLBackend>();
_gpuContext = std::make_shared<gpu::Context>();
initDisplay();
qCDebug(interfaceapp, "Initialized Display.");
@ -994,7 +1000,8 @@ void Application::paintGL() {
_compositor.displayOverlayTexture(&renderArgs);
}
if (!OculusManager::isConnected() || OculusManager::allowSwap()) {
PROFILE_RANGE(__FUNCTION__ "/bufferSwap");
_glWidget->swapBuffers();
@ -1006,6 +1013,13 @@ void Application::paintGL() {
_frameCount++;
_numFramesSinceLastResize++;
Stats::getInstance()->setRenderDetails(renderArgs._details);
// Reset the gpu::Context Stages
// Back to the default framebuffer;
gpu::Batch batch;
batch.resetStages();
renderArgs._context->render(batch);
}
void Application::runTests() {
@ -1479,6 +1493,7 @@ void Application::focusOutEvent(QFocusEvent* event) {
_keyboardMouseDevice.focusOutEvent(event);
SixenseManager::getInstance().focusOutEvent();
SDL2Manager::getInstance()->focusOutEvent();
ConnexionData::getInstance().focusOutEvent();
// synthesize events for keys currently pressed, since we may not get their release events
foreach (int key, _keysPressed) {
@ -3263,6 +3278,9 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
renderContext._maxDrawnOverlay3DItems = sceneInterface->getEngineMaxDrawnOverlay3DItems();
renderContext._drawItemStatus = sceneInterface->doEngineDisplayItemStatus();
renderContext._drawHitEffect = sceneInterface->doEngineDisplayHitEffect();
renderContext._occlusionStatus = Menu::getInstance()->isOptionChecked(MenuOption::DebugAmbientOcclusion);
renderArgs->_shouldRender = LODManager::shouldRender;
@ -3378,7 +3396,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
// This was removed in commit 71e59cfa88c6563749594e25494102fe01db38e9 but could be further
// investigated in order to adapt the technique while fixing the head rendering issue,
// but the complexity of the hack suggests that a better approach
_mirrorCamera.setPosition(_myAvatar->getHead()->getEyePosition() +
_mirrorCamera.setPosition(_myAvatar->getDefaultEyePosition() +
_myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_REARVIEW_DISTANCE * _myAvatar->getScale());
}
_mirrorCamera.setProjection(glm::perspective(glm::radians(fov), aspect, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP));
@ -3388,14 +3406,10 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
// set the bounds of rear mirror view
gpu::Vec4i viewport;
if (billboard) {
QSize size = DependencyManager::get<FramebufferCache>()->getFrameBufferSize();
viewport = gpu::Vec4i(0, 0, region.width(), region.height());
} else {
// if not rendering the billboard, the region is in device independent coordinates; must convert to device
QSize size = DependencyManager::get<FramebufferCache>()->getFrameBufferSize();
float ratio = (float)QApplication::desktop()->windowHandle()->devicePixelRatio() * getRenderResolutionScale();
int x = region.x() * ratio;
int y = region.y() * ratio;
int width = region.width() * ratio;
int height = region.height() * ratio;
viewport = gpu::Vec4i(0, 0, width, height);
@ -3599,24 +3613,15 @@ int Application::processOctreeStats(NLPacket& packet, SharedNodePointer sendingN
int statsMessageLength = 0;
const QUuid& nodeUUID = sendingNode->getUUID();
OctreeSceneStats* octreeStats;
// now that we know the node ID, let's add these stats to the stats for that node...
_octreeSceneStatsLock.lockForWrite();
auto it = _octreeServerSceneStats.find(nodeUUID);
if (it != _octreeServerSceneStats.end()) {
octreeStats = &it->second;
statsMessageLength = octreeStats->unpackFromPacket(packet);
} else {
OctreeSceneStats temp;
statsMessageLength = temp.unpackFromPacket(packet);
octreeStats = &temp;
}
OctreeSceneStats& octreeStats = _octreeServerSceneStats[nodeUUID];
statsMessageLength = octreeStats.unpackFromPacket(packet);
_octreeSceneStatsLock.unlock();
VoxelPositionSize rootDetails;
voxelDetailsForCode(octreeStats->getJurisdictionRoot(), rootDetails);
// see if this is the first we've heard of this node...
NodeToJurisdictionMap* jurisdiction = NULL;
QString serverType;
@ -3628,6 +3633,9 @@ int Application::processOctreeStats(NLPacket& packet, SharedNodePointer sendingN
jurisdiction->lockForRead();
if (jurisdiction->find(nodeUUID) == jurisdiction->end()) {
jurisdiction->unlock();
VoxelPositionSize rootDetails;
voxelDetailsForCode(octreeStats.getJurisdictionRoot(), rootDetails);
qCDebug(interfaceapp, "stats from new %s server... [%f, %f, %f, %f]",
qPrintable(serverType),
@ -3640,7 +3648,7 @@ int Application::processOctreeStats(NLPacket& packet, SharedNodePointer sendingN
// but OctreeSceneStats thinks it's just returning a reference to its contents. So we need to make a copy of the
// details from the OctreeSceneStats to construct the JurisdictionMap
JurisdictionMap jurisdictionMap;
jurisdictionMap.copyContents(octreeStats->getJurisdictionRoot(), octreeStats->getJurisdictionEndNodes());
jurisdictionMap.copyContents(octreeStats.getJurisdictionRoot(), octreeStats.getJurisdictionEndNodes());
jurisdiction->lockForWrite();
(*jurisdiction)[nodeUUID] = jurisdictionMap;
jurisdiction->unlock();

View file

@ -13,7 +13,6 @@
#define hifi_GLCanvas_h
#include <QDebug>
#include <gpu/GPUConfig.h>
#include <QGLWidget>
#include <QTimer>

View file

@ -29,6 +29,7 @@
#include "devices/Faceshift.h"
#include "devices/RealSense.h"
#include "devices/SixenseManager.h"
#include "devices/3Dconnexion.h"
#include "MainWindow.h"
#include "scripting/MenuScriptingInterface.h"
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
@ -248,12 +249,8 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableCharacterController, 0, true,
avatar, SLOT(updateMotionBehavior()));
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ShiftHipsForIdleAnimations, 0, false,
avatar, SLOT(updateMotionBehavior()));
MenuWrapper* viewMenu = addMenu("View");
addActionToQMenuAndActionHash(viewMenu, MenuOption::ReloadContent, 0, qApp, SLOT(reloadResourceCaches()));
addCheckableActionToQMenuAndActionHash(viewMenu,
MenuOption::Fullscreen,
@ -332,7 +329,7 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Atmosphere,
0, // QML Qt::SHIFT | Qt::Key_A,
true);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::AmbientOcclusion);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::DebugAmbientOcclusion);
MenuWrapper* ambientLightMenu = renderOptionsMenu->addMenu(MenuOption::RenderAmbientLight);
QActionGroup* ambientLightGroup = new QActionGroup(ambientLightMenu);
@ -370,7 +367,7 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::RenderTargetFramerateVSyncOn, 0, true,
qApp, SLOT(setVSyncEnabled()));
#endif
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::ThrottleFPSIfNotFocus, 0, false,
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::ThrottleFPSIfNotFocus, 0, true,
qApp, SLOT(setThrottleFPSEnabled()));
}
@ -449,6 +446,11 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtVectors, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderFocusIndicator, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowWhosLookingAtMe, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu,
MenuOption::Connexion,
0, false,
&ConnexionClient::getInstance(),
SLOT(toggleConnexion(bool)));
MenuWrapper* handOptionsMenu = developerMenu->addMenu("Hands");
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlignForearmsWithWrists, 0, false);
@ -489,6 +491,7 @@ Menu::Menu() {
#endif
MenuWrapper* networkMenu = developerMenu->addMenu("Network");
addActionToQMenuAndActionHash(networkMenu, MenuOption::ReloadContent, 0, qApp, SLOT(reloadResourceCaches()));
addCheckableActionToQMenuAndActionHash(networkMenu, MenuOption::DisableNackPackets, 0, false,
qApp->getEntityEditPacketSender(),
SLOT(toggleNackPackets()));

View file

@ -134,7 +134,6 @@ namespace MenuOption {
const QString AddressBar = "Show Address Bar";
const QString AlignForearmsWithWrists = "Align Forearms with Wrists";
const QString AlternateIK = "Alternate IK";
const QString AmbientOcclusion = "Ambient Occlusion";
const QString Animations = "Animations...";
const QString Atmosphere = "Atmosphere";
const QString Attachments = "Attachments...";
@ -161,10 +160,12 @@ namespace MenuOption {
const QString CenterPlayerInView = "Center Player In View";
const QString Chat = "Chat...";
const QString Collisions = "Collisions";
const QString Connexion = "Activate 3D Connexion Devices";
const QString Console = "Console...";
const QString ControlWithSpeech = "Control With Speech";
const QString CopyAddress = "Copy Address to Clipboard";
const QString CopyPath = "Copy Path to Clipboard";
const QString DebugAmbientOcclusion = "Debug Ambient Occlusion";
const QString DecreaseAvatarSize = "Decrease Avatar Size";
const QString DeleteBookmark = "Delete Bookmark...";
const QString DisableActivityLogger = "Disable Activity Logger";
@ -272,7 +273,6 @@ namespace MenuOption {
const QString SimpleShadows = "Simple";
const QString SixenseEnabled = "Enable Hydra Support";
const QString SixenseMouseInput = "Enable Sixense Mouse Input";
const QString ShiftHipsForIdleAnimations = "Shift hips for idle animations";
const QString Stars = "Stars";
const QString Stats = "Stats";
const QString StopAllScripts = "Stop All Scripts";

View file

@ -106,7 +106,7 @@ bool ModelPackager::loadModel() {
}
qCDebug(interfaceapp) << "Reading FBX file : " << _fbxInfo.filePath();
QByteArray fbxContents = fbx.readAll();
_geometry = readFBX(fbxContents, QVariantHash());
_geometry = readFBX(fbxContents, QVariantHash(), _fbxInfo.filePath());
// make sure we have some basic mappings
populateBasicMapping(_mapping, _fbxInfo.filePath(), _geometry);

View file

@ -14,8 +14,6 @@
#include <mutex>
#include <QElapsedTimer>
#include <gpu/GPUConfig.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <NumericalConstants.h>
#include <DependencyManager.h>
@ -154,6 +152,7 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
auto state = gpu::StatePointer(new gpu::State());
// enable decal blend
state->setDepthTest(gpu::State::DepthTest(false));
state->setAntialiasedLineEnable(true); // line smoothing also smooth points
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
_starsPipeline.reset(gpu::Pipeline::create(program, state));
@ -207,8 +206,6 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
batch._glUniform1f(_timeSlot, secs);
geometryCache->renderUnitCube(batch);
glHint(GL_POINT_SMOOTH_HINT, GL_NICEST);
static const size_t VERTEX_STRIDE = sizeof(StarVertex);
size_t offset = offsetof(StarVertex, position);
gpu::BufferView posView(vertexBuffer, offset, vertexBuffer->getSize(), VERTEX_STRIDE, positionElement);
@ -217,14 +214,11 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
// Render the stars
batch.setPipeline(_starsPipeline);
batch._glEnable(GL_PROGRAM_POINT_SIZE_EXT);
batch._glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);
batch._glEnable(GL_POINT_SMOOTH);
batch.setInputFormat(streamFormat);
batch.setInputBuffer(VERTICES_SLOT, posView);
batch.setInputBuffer(COLOR_SLOT, colView);
batch.draw(gpu::Primitive::POINTS, STARFIELD_NUM_STARS);
renderArgs->_context->render(batch);
}

View file

@ -443,36 +443,57 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
_skeletonModel.renderBoundingCollisionShapes(*renderArgs->_batch, 0.7f);
}
// Stack indicator spheres
float indicatorOffset = 0.0f;
if (!_displayName.isEmpty() && _displayNameAlpha != 0.0f) {
const float DISPLAY_NAME_INDICATOR_OFFSET = 0.22f;
indicatorOffset = DISPLAY_NAME_INDICATOR_OFFSET;
}
const float INDICATOR_RADIUS = 0.03f;
const float INDICATOR_INDICATOR_OFFSET = 3.0f * INDICATOR_RADIUS;
// If this is the avatar being looked at, render a little ball above their head
if (_isLookAtTarget && Menu::getInstance()->isOptionChecked(MenuOption::RenderFocusIndicator)) {
const float INDICATOR_OFFSET = 0.22f;
const float INDICATOR_RADIUS = 0.03f;
const glm::vec4 LOOK_AT_INDICATOR_COLOR = { 0.8f, 0.0f, 0.0f, 0.75f };
glm::vec3 position = glm::vec3(_position.x, getDisplayNamePosition().y + indicatorOffset, _position.z);
glm::vec3 position = glm::vec3(_position.x, getDisplayNamePosition().y + INDICATOR_OFFSET, _position.z);
Transform transform;
transform.setTranslation(position);
batch.setModelTransform(transform);
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(batch, INDICATOR_RADIUS,
15, 15, LOOK_AT_INDICATOR_COLOR);
indicatorOffset += INDICATOR_INDICATOR_OFFSET;
}
// If the avatar is looking at me, render an indication that they are
if (getHead()->getIsLookingAtMe() && Menu::getInstance()->isOptionChecked(MenuOption::ShowWhosLookingAtMe)) {
const glm::vec4 LOOKING_AT_ME_COLOR = { 0.8f, 0.65f, 0.0f, 0.1f };
glm::vec3 position = glm::vec3(_position.x, getDisplayNamePosition().y + indicatorOffset, _position.z);
Transform transform;
transform.setTranslation(position);
batch.setModelTransform(transform);
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(batch, INDICATOR_RADIUS,
15, 15, LOOKING_AT_ME_COLOR);
// If the avatar is looking at me, indicate that they are
if (getHead()->isLookingAtMe() && Menu::getInstance()->isOptionChecked(MenuOption::ShowWhosLookingAtMe)) {
const glm::vec3 LOOKING_AT_ME_COLOR = { 1.0f, 1.0f, 1.0f };
const float LOOKING_AT_ME_ALPHA_START = 0.8f;
const float LOOKING_AT_ME_DURATION = 0.5f; // seconds
quint64 now = usecTimestampNow();
float alpha = LOOKING_AT_ME_ALPHA_START
* (1.0f - ((float)(now - getHead()->getLookingAtMeStarted()))
/ (LOOKING_AT_ME_DURATION * (float)USECS_PER_SECOND));
if (alpha > 0.0f) {
QSharedPointer<NetworkGeometry> geometry = getHead()->getFaceModel().getGeometry();
if (geometry) {
const float DEFAULT_EYE_DIAMETER = 0.048f; // Typical human eye
const float RADIUS_INCREMENT = 0.005f;
Transform transform;
glm::vec3 position = getHead()->getLeftEyePosition();
transform.setTranslation(position);
batch.setModelTransform(transform);
float eyeDiameter = geometry->getFBXGeometry().leftEyeSize;
if (eyeDiameter == 0.0f) {
eyeDiameter = DEFAULT_EYE_DIAMETER;
}
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(batch,
eyeDiameter * _scale / 2.0f + RADIUS_INCREMENT, 15, 15, glm::vec4(LOOKING_AT_ME_COLOR, alpha));
position = getHead()->getRightEyePosition();
transform.setTranslation(position);
batch.setModelTransform(transform);
eyeDiameter = geometry->getFBXGeometry().rightEyeSize;
if (eyeDiameter == 0.0f) {
eyeDiameter = DEFAULT_EYE_DIAMETER;
}
DependencyManager::get<DeferredLightingEffect>()->renderSolidSphere(batch,
eyeDiameter * _scale / 2.0f + RADIUS_INCREMENT, 15, 15, glm::vec4(LOOKING_AT_ME_COLOR, alpha));
}
}
}
// quick check before falling into the code below:

View file

@ -55,6 +55,8 @@ Head::Head(Avatar* owningAvatar) :
_deltaLeanForward(0.0f),
_isCameraMoving(false),
_isLookingAtMe(false),
_lookingAtMeStarted(0),
_wasLastLookingAtMe(0),
_faceModel(this),
_leftEyeLookAtID(DependencyManager::get<GeometryCache>()->allocateID()),
_rightEyeLookAtID(DependencyManager::get<GeometryCache>()->allocateID())
@ -316,7 +318,7 @@ glm::quat Head::getFinalOrientationInLocalFrame() const {
}
glm::vec3 Head::getCorrectedLookAtPosition() {
if (_isLookingAtMe) {
if (isLookingAtMe()) {
return _correctedLookAtPosition;
} else {
return getLookAtPosition();
@ -324,10 +326,21 @@ glm::vec3 Head::getCorrectedLookAtPosition() {
}
void Head::setCorrectedLookAtPosition(glm::vec3 correctedLookAtPosition) {
    // A fresh gaze (not already looking, including the grace window) starts
    // a new "looking at me" interval; stamp its begin time.
    const bool alreadyLooking = isLookingAtMe();
    if (!alreadyLooking) {
        _lookingAtMeStarted = usecTimestampNow();
    }
    // Record the latest positive sighting and the corrected gaze target.
    _isLookingAtMe = true;
    _wasLastLookingAtMe = usecTimestampNow();
    _correctedLookAtPosition = correctedLookAtPosition;
}
bool Head::isLookingAtMe() {
// Allow for outages such as may be encountered during avatar movement
quint64 now = usecTimestampNow();
const quint64 LOOKING_AT_ME_GAP_ALLOWED = 1000000; // microseconds
return _isLookingAtMe || (now - _wasLastLookingAtMe) < LOOKING_AT_ME_GAP_ALLOWED;
}
glm::quat Head::getCameraOrientation() const {
// NOTE: Head::getCameraOrientation() is not used for orienting the camera "view" while in Oculus mode, so
// you may wonder why this code is here. This method will be called while in Oculus mode to determine how

View file

@ -52,8 +52,9 @@ public:
void setCorrectedLookAtPosition(glm::vec3 correctedLookAtPosition);
glm::vec3 getCorrectedLookAtPosition();
void clearCorrectedLookAtPosition() { _isLookingAtMe = false; }
bool getIsLookingAtMe() { return _isLookingAtMe; }
bool isLookingAtMe();
quint64 getLookingAtMeStarted() { return _lookingAtMeStarted; }
float getScale() const { return _scale; }
glm::vec3 getPosition() const { return _position; }
const glm::vec3& getEyePosition() const { return _eyePosition; }
@ -139,6 +140,8 @@ private:
bool _isCameraMoving;
bool _isLookingAtMe;
quint64 _lookingAtMeStarted;
quint64 _wasLastLookingAtMe;
FaceModel _faceModel;
glm::vec3 _correctedLookAtPosition;

View file

@ -98,7 +98,6 @@ MyAvatar::MyAvatar() :
_lookAtTargetAvatar(),
_shouldRender(true),
_billboardValid(false),
_feetTouchFloor(true),
_eyeContactTarget(LEFT_EYE),
_realWorldFieldOfView("realWorldFieldOfView",
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
@ -166,9 +165,6 @@ void MyAvatar::update(float deltaTime) {
head->setAudioAverageLoudness(audio->getAudioAverageInputLoudness());
simulate(deltaTime);
if (_feetTouchFloor) {
_skeletonModel.updateStandingFoot();
}
}
void MyAvatar::simulate(float deltaTime) {
@ -1140,11 +1136,7 @@ glm::vec3 MyAvatar::getSkeletonPosition() const {
// The avatar is rotated PI about the yAxis, so we have to correct for it
// to get the skeleton offset contribution in the world-frame.
const glm::quat FLIP = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
glm::vec3 skeletonOffset = _skeletonOffset;
if (_feetTouchFloor) {
skeletonOffset += _skeletonModel.getStandingOffset();
}
return _position + getOrientation() * FLIP * skeletonOffset;
return _position + getOrientation() * FLIP * _skeletonOffset;
}
return Avatar::getPosition();
}
@ -1290,7 +1282,6 @@ bool MyAvatar::shouldRenderHead(const RenderArgs* renderArgs) const {
void MyAvatar::updateOrientation(float deltaTime) {
// Smoothly rotate body with arrow keys
float driveLeft = _driveKeys[ROT_LEFT] - _driveKeys[ROT_RIGHT];
float targetSpeed = (_driveKeys[ROT_LEFT] - _driveKeys[ROT_RIGHT]) * YAW_SPEED;
if (targetSpeed != 0.0f) {
const float ROTATION_RAMP_TIMESCALE = 0.1f;
@ -1623,7 +1614,6 @@ void MyAvatar::updateMotionBehavior() {
_motionBehaviors &= ~AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED;
}
_characterController.setEnabled(menu->isOptionChecked(MenuOption::EnableCharacterController));
_feetTouchFloor = menu->isOptionChecked(MenuOption::ShiftHipsForIdleAnimations);
}
//Renders sixense laser pointers for UI selection with controllers

View file

@ -258,7 +258,6 @@ private:
QList<AnimationHandlePointer> _animationHandles;
bool _feetTouchFloor;
eyeContactTarget _eyeContactTarget;
RecorderPointer _recorder;

View file

@ -24,12 +24,6 @@
#include "Util.h"
#include "InterfaceLogging.h"
enum StandingFootState {
LEFT_FOOT,
RIGHT_FOOT,
NO_FOOT
};
SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent) :
Model(parent),
_triangleFanID(DependencyManager::get<GeometryCache>()->allocateID()),
@ -37,9 +31,6 @@ SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent) :
_boundingShape(),
_boundingShapeLocalOffset(0.0f),
_defaultEyeModelPosition(glm::vec3(0.0f, 0.0f, 0.0f)),
_standingFoot(NO_FOOT),
_standingOffset(0.0f),
_clampedFootPosition(0.0f),
_headClipDistance(DEFAULT_NEAR_CLIP),
_isFirstPerson(false)
{
@ -573,65 +564,6 @@ glm::vec3 SkeletonModel::getDefaultEyeModelPosition() const {
return _owningAvatar->getScale() * _defaultEyeModelPosition;
}
/// \return offset of hips after foot animation
/// Tracks which foot is currently "standing" (planted) and computes
/// _standingOffset, the skeleton offset caused by moving feet, so the
/// planted foot can stay pinned while animations shift the joints.
void SkeletonModel::updateStandingFoot() {
    // Skeleton geometry not loaded yet; nothing to measure.
    if (_geometry == NULL) {
        return;
    }
    glm::vec3 offset(0.0f);
    int leftFootIndex = _geometry->getFBXGeometry().leftToeJointIndex;
    int rightFootIndex = _geometry->getFBXGeometry().rightToeJointIndex;
    if (leftFootIndex != -1 && rightFootIndex != -1) {
        glm::vec3 leftPosition, rightPosition;
        getJointPosition(leftFootIndex, leftPosition);
        getJointPosition(rightFootIndex, rightPosition);
        // Candidate plant: whichever toe is lower.
        int lowestFoot = (leftPosition.y < rightPosition.y) ? LEFT_FOOT : RIGHT_FOOT;
        const float MIN_STEP_HEIGHT_THRESHOLD = 0.05f;
        // Only consider switching feet when one toe is clearly higher than the other.
        bool oneFoot = fabsf(leftPosition.y - rightPosition.y) > MIN_STEP_HEIGHT_THRESHOLD;
        int currentFoot = oneFoot ? lowestFoot : _standingFoot;
        if (_standingFoot == NO_FOOT) {
            currentFoot = lowestFoot;
        }
        if (currentFoot != _standingFoot) {
            if (_standingFoot == NO_FOOT) {
                // pick the lowest foot
                glm::vec3 lowestPosition = (currentFoot == LEFT_FOOT) ? leftPosition : rightPosition;
                // we ignore zero length positions which can happen for a few frames until skeleton is fully loaded
                if (glm::length(lowestPosition) > 0.0f) {
                    _standingFoot = currentFoot;
                    _clampedFootPosition = lowestPosition;
                }
            } else {
                // swap feet
                _standingFoot = currentFoot;
                glm::vec3 nextPosition = leftPosition;
                glm::vec3 prevPosition = rightPosition;
                if (_standingFoot == RIGHT_FOOT) {
                    nextPosition = rightPosition;
                    prevPosition = leftPosition;
                }
                // Carry the previous foot's offset over to the new plant so
                // the computed offset doesn't pop on the switch.
                glm::vec3 oldOffset = _clampedFootPosition - prevPosition;
                _clampedFootPosition = oldOffset + nextPosition;
                offset = _clampedFootPosition - nextPosition;
            }
        } else {
            // Same foot still planted: offset is its drift from the clamped position.
            glm::vec3 nextPosition = (_standingFoot == LEFT_FOOT) ? leftPosition : rightPosition;
            offset = _clampedFootPosition - nextPosition;
        }
        // clamp the offset to not exceed some max distance
        const float MAX_STEP_OFFSET = 1.0f;
        float stepDistance = glm::length(offset);
        if (stepDistance > MAX_STEP_OFFSET) {
            offset *= (MAX_STEP_OFFSET / stepDistance);
        }
    }
    _standingOffset = offset;
}
float DENSITY_OF_WATER = 1000.0f; // kg/m^3
float MIN_JOINT_MASS = 1.0f;
float VERY_BIG_MASS = 1.0e6f;

View file

@ -96,10 +96,6 @@ public:
/// \return whether or not the head was found.
glm::vec3 getDefaultEyeModelPosition() const;
/// skeleton offset caused by moving feet
void updateStandingFoot();
const glm::vec3& getStandingOffset() const { return _standingOffset; }
void computeBoundingShape(const FBXGeometry& geometry);
void renderBoundingCollisionShapes(gpu::Batch& batch, float alpha);
float getBoundingShapeRadius() const { return _boundingShape.getRadius(); }
@ -169,9 +165,6 @@ private:
glm::vec3 _boundingShapeLocalOffset;
glm::vec3 _defaultEyeModelPosition;
int _standingFoot;
glm::vec3 _standingOffset;
glm::vec3 _clampedFootPosition;
float _headClipDistance; // Near clip distance to use if no separate head model

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,244 @@
// 3DConnexion.h
// hifi
//
// Created by Marcel Verhagen on 09-06-15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ConnexionClient_h
#define hifi_ConnexionClient_h
#include <qobject.h>
#include <qlibrary.h>
#include "InterfaceLogging.h"
#include "Application.h"
#include "ui/UserInputMapper.h"
#ifndef HAVE_CONNEXIONCLIENT
/// No-op stand-in used when 3Dconnexion support is not compiled in
/// (HAVE_CONNEXIONCLIENT undefined); keeps call sites compiling without
/// per-platform #ifdefs.
class ConnexionClient : public QObject {
    Q_OBJECT
public:
    static ConnexionClient& getInstance();
    static void init() {}
    static void destroy() {}
    /// Always false: no device support in this build.
    static bool Is3dmouseAttached() { return false; }

public slots:
    // Menu toggle handler; intentionally a no-op in the stub.
    void toggleConnexion(bool shouldEnable) { Q_UNUSED(shouldEnable); }
};
#endif // NOT_HAVE_CONNEXIONCLIENT
#ifdef HAVE_CONNEXIONCLIENT
// the windows connexion rawinput
#ifdef _WIN32
#include "I3dMouseParams.h"
#include <QAbstractNativeEventFilter>
#include <QAbstractEventDispatcher>
#include <Winsock2.h>
#include <windows.h>
// windows rawinput parameters
// Holds the Windows RawInput 3D-mouse settings (speed, pan/zoom, rotate,
// navigation/pivot modes). Implements the I3dMouseParam interface; member
// definitions live in the corresponding .cpp.
class MouseParameters : public I3dMouseParam {
public:
    MouseParameters();
    ~MouseParameters();

    // I3dmouseSensor interface
    bool IsPanZoom() const;
    bool IsRotate() const;
    Speed GetSpeed() const;

    void SetPanZoom(bool isPanZoom);
    void SetRotate(bool isRotate);
    void SetSpeed(Speed speed);

    // I3dmouseNavigation interface
    Navigation GetNavigationMode() const;
    Pivot GetPivotMode() const;
    PivotVisibility GetPivotVisibility() const;
    bool IsLockHorizon() const;

    void SetLockHorizon(bool bOn);
    void SetNavigationMode(Navigation navigation);
    void SetPivotMode(Pivot pivot);
    void SetPivotVisibility(PivotVisibility visibility);

    // True when at least one 3D mouse device is present.
    static bool Is3dmouseAttached();

private:
    // Non-copyable: declared but not defined (pre-C++11 idiom).
    MouseParameters(const MouseParameters&);
    const MouseParameters& operator = (const MouseParameters&);

    Navigation fNavigation;
    Pivot fPivot;
    PivotVisibility fPivotVisibility;
    bool fIsLockHorizon;

    bool fIsPanZoom;
    bool fIsRotate;
    Speed fSpeed;
};
/// Windows implementation: bridges 3Dconnexion devices into Qt via the
/// RawInput API. Installed as a native event filter; decodes axis/button
/// data and republishes it through the Move3d / On3dmouseKey* signals.
/// Out-of-line members are defined in the corresponding .cpp.
class ConnexionClient : public QObject, public QAbstractNativeEventFilter {
    Q_OBJECT
public:
    ConnexionClient();
    ~ConnexionClient();

    // Singleton access; init()/destroy() manage the instance lifetime.
    static ConnexionClient& getInstance();
    ConnexionClient* client;
    static void init();
    static void destroy();
    /// True when at least one 3D mouse device is present.
    static bool Is3dmouseAttached();

    I3dMouseParam& MouseParams();
    const I3dMouseParam& MouseParams() const;

    virtual void Move3d(HANDLE device, std::vector<float>& motionData);
    virtual void On3dmouseKeyDown(HANDLE device, int virtualKeyCode);
    virtual void On3dmouseKeyUp(HANDLE device, int virtualKeyCode);

    // Forward every native Windows message to the RawInput filter.
    // (Fixed: dropped the dead `MSG* msg` local that was never used.)
    virtual bool nativeEventFilter(const QByteArray& eventType, void* message, long* result) Q_DECL_OVERRIDE {
        Q_UNUSED(eventType); // only the raw message payload is needed
        return ConnexionClient::RawInputEventFilter(message, result);
    }

public slots:
    void toggleConnexion(bool shouldEnable);

signals:
    void Move3d(std::vector<float>& motionData);
    void On3dmouseKeyDown(int virtualKeyCode);
    void On3dmouseKeyUp(int virtualKeyCode);

private:
    bool InitializeRawInput(HWND hwndTarget);

    static bool RawInputEventFilter(void* msg, long* result);

    void OnRawInput(UINT nInputCode, HRAWINPUT hRawInput);
    UINT GetRawInputBuffer(PRAWINPUT pData, PUINT pcbSize, UINT cbSizeHeader);
    bool TranslateRawInputData(UINT nInputCode, PRAWINPUT pRawInput);
    void On3dmouseInput();

    // Per-device axis snapshot used to coalesce RawInput packets.
    class TInputData {
    public:
        TInputData() : fAxes(6) {}

        bool IsZero() {
            return (0.0f == fAxes[0] && 0.0f == fAxes[1] && 0.0f == fAxes[2] &&
                0.0f == fAxes[3] && 0.0f == fAxes[4] && 0.0f == fAxes[5]);
        }

        int fTimeToLive; // For telling if the device was unplugged while sending data
        bool fIsDirty;
        std::vector<float> fAxes;
    };

    HWND fWindow;

    // Data cache to handle multiple rawinput devices
    std::map< HANDLE, TInputData> fDevice2Data;
    std::map< HANDLE, unsigned long> fDevice2Keystate;

    // 3dmouse parameters
    MouseParameters f3dMouseParams; // Rotate, Pan Zoom etc.

    // use to calculate distance traveled since last event
    DWORD fLast3dmouseInputTime;
};
// the osx connexion api
#else
#include <glm/glm.hpp>
#include "3DconnexionClient/ConnexionClientAPI.h"
// OS X implementation: wraps the 3DconnexionClient framework API.
// Member definitions live in the corresponding .cpp.
class ConnexionClient : public QObject {
    Q_OBJECT
public:
    static ConnexionClient& getInstance();
    // True when at least one 3D mouse device is present.
    static bool Is3dmouseAttached();

    // Lifecycle hooks: init() is expected to run once a window exists,
    // destroy() at shutdown.
    static void init();
    static void destroy();
public slots:
    // Menu handler: enable/disable 3Dconnexion input.
    void toggleConnexion(bool shouldEnable);
};
#endif // __APPLE__
#endif // HAVE_CONNEXIONCLIENT
// connects to the UserInputMapper
/// Bridges 3Dconnexion device state (translation/rotation axes and buttons)
/// into the application's UserInputMapper. Singleton; populated by the
/// platform-specific ConnexionClient code. Out-of-line members are defined
/// in the corresponding .cpp.
class ConnexionData : public QObject {
    Q_OBJECT
public:
    static ConnexionData& getInstance();
    ConnexionData();

    // Each physical axis is exposed as a pair of half-axis channels
    // (positive and negative direction).
    enum PositionChannel {
        POSITION_AXIS_X_POS = 1,
        POSITION_AXIS_X_NEG = 2,
        POSITION_AXIS_Y_POS = 3,
        POSITION_AXIS_Y_NEG = 4,
        POSITION_AXIS_Z_POS = 5,
        POSITION_AXIS_Z_NEG = 6,
        ROTATION_AXIS_X_POS = 7,
        ROTATION_AXIS_X_NEG = 8,
        ROTATION_AXIS_Y_POS = 9,
        ROTATION_AXIS_Y_NEG = 10,
        ROTATION_AXIS_Z_POS = 11,
        ROTATION_AXIS_Z_NEG = 12
    };

    // Physical device buttons.
    enum ButtonChannel {
        BUTTON_1 = 1,
        BUTTON_2 = 2,
        BUTTON_3 = 3
    };

    typedef std::unordered_set<int> ButtonPressedMap;  // channels currently pressed
    typedef std::map<int, float> AxisStateMap;         // channel -> current axis value

    // Current state lookups by channel.
    float getButton(int channel) const;
    float getAxis(int channel) const;

    // Wrap a channel as a UserInputMapper::Input for mapping registration.
    UserInputMapper::Input makeInput(ConnexionData::PositionChannel axis);
    UserInputMapper::Input makeInput(ConnexionData::ButtonChannel button);

    void registerToUserInputMapper(UserInputMapper& mapper);
    void assignDefaultInputMapping(UserInputMapper& mapper);

    // Per-frame state refresh; focusOutEvent clears input on focus loss.
    void update();
    void focusOutEvent();

    int getDeviceID() { return _deviceID; }
    void setDeviceID(int deviceID) { _deviceID = deviceID; }

    QString _name;

    // Latest raw device deltas, written by the platform driver layer.
    glm::vec3 cc_position;
    glm::vec3 cc_rotation;
    int clientId;

    // Entry points for raw device events.
    void setButton(int lastButtonState);
    void handleAxisEvent();

protected:
    int _deviceID = 0;  // 0 until registered with the UserInputMapper

    ButtonPressedMap _buttonPressedMap;
    AxisStateMap _axisStateMap;
};
#endif // defined(hifi_ConnexionClient_h)

View file

@ -91,13 +91,12 @@ private:
int _leftBlinkIndex;
int _rightBlinkIndex;
int _leftEyeOpenIndex;
int _rightEyeOpenIndex;
int _leftEyeDownIndex;
int _rightEyeDownIndex;
int _leftEyeInIndex;
int _rightEyeInIndex;
int _leftEyeOpenIndex;
int _rightEyeOpenIndex;
int _browDownLeftIndex;
int _browDownRightIndex;

View file

@ -11,16 +11,16 @@
//
#include "OculusManager.h"
#include <gpu/GPUConfig.h>
#include <glm/glm.hpp>
#include <QDesktopWidget>
#include <QGuiApplication>
#include <gpu/GPUConfig.h>
#include <QScreen>
#include <CursorManager.h>
#include <QOpenGLTimerQuery>
#include <QGLWidget>
#include <CursorManager.h>
#include <glm/glm.hpp>
#include <avatar/AvatarManager.h>
#include <avatar/MyAvatar.h>

View file

@ -9,13 +9,14 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "TV3DManager.h"
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>
#include "gpu/GLBackend.h"
#include "Application.h"
#include <RenderArgs.h>
#include "TV3DManager.h"
#include "Application.h"
#include "Menu.h"
int TV3DManager::_screenWidth = 1;
@ -63,6 +64,7 @@ void TV3DManager::setFrustum(Camera& whichCamera) {
}
void TV3DManager::configureCamera(Camera& whichCamera, int screenWidth, int screenHeight) {
#ifdef THIS_CURRENTLY_BROKEN_WAITING_FOR_DISPLAY_PLUGINS
if (screenHeight == 0) {
screenHeight = 1; // prevent divide by 0
}
@ -72,6 +74,7 @@ void TV3DManager::configureCamera(Camera& whichCamera, int screenWidth, int scre
setFrustum(whichCamera);
glViewport (0, 0, _screenWidth, _screenHeight); // sets drawing viewport
#endif
}
void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {

View file

@ -17,6 +17,7 @@
#include <glm/glm.hpp>
class Camera;
class RenderArgs;
struct eyeFrustum {
double left;

View file

@ -57,7 +57,7 @@ void OctreePacketProcessor::processPacket(QSharedPointer<NLPacket> packet, Share
if (piggybackBytes) {
// construct a new packet from the piggybacked one
std::unique_ptr<char> buffer = std::unique_ptr<char>(new char[piggybackBytes]);
auto buffer = std::unique_ptr<char[]>(new char[piggybackBytes]);
memcpy(buffer.get(), packet->getPayload() + statsMessageLength, piggybackBytes);
auto newPacket = NLPacket::fromReceivedPacket(std::move(buffer), piggybackBytes, packet->getSenderSockAddr());

View file

@ -57,7 +57,7 @@ WebWindowClass::WebWindowClass(const QString& title, const QString& url, int wid
} else {
auto dialogWidget = new QDialog(Application::getInstance()->getWindow(), Qt::Window);
dialogWidget->setWindowTitle(title);
dialogWidget->setMinimumSize(width, height);
dialogWidget->resize(width, height);
connect(dialogWidget, &QDialog::finished, this, &WebWindowClass::hasClosed);
auto layout = new QVBoxLayout(dialogWidget);

View file

@ -495,7 +495,7 @@ void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * palmData->getFingerDirection();
// Get the angles, scaled between (-0.5,0.5)
float xAngle = (atan2(direction.z, direction.x) + PI_OVER_TWO);
float xAngle = (atan2f(direction.z, direction.x) + PI_OVER_TWO);
float yAngle = 0.5f - ((atan2f(direction.z, direction.y) + (float)PI_OVER_TWO));
// Get the pixel range over which the xAngle and yAngle are scaled

View file

@ -117,7 +117,7 @@ void ApplicationOverlay::renderQmlUi(RenderArgs* renderArgs) {
batch.setProjectionTransform(mat4());
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
batch._glBindTexture(GL_TEXTURE_2D, _uiTexture);
batch._glActiveBindTexture(GL_TEXTURE0, GL_TEXTURE_2D, _uiTexture);
geometryCache->renderUnitQuad(batch, glm::vec4(1));
}

View file

@ -125,8 +125,10 @@ void AudioStatsDialog::renderStats() {
audioInputBufferLatency = (double)_stats->getAudioInputMsecsReadStats().getWindowAverage();
inputRingBufferLatency = (double)_stats->getInputRungBufferMsecsAvailableStats().getWindowAverage();
networkRoundtripLatency = (double) audioMixerNodePointer->getPingMs();
mixerRingBufferLatency = (double)_stats->getMixerAvatarStreamStats()._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
outputRingBufferLatency = (double)downstreamAudioStreamStats._framesAvailableAverage * AudioConstants::NETWORK_FRAME_MSECS;
mixerRingBufferLatency = (double)_stats->getMixerAvatarStreamStats()._framesAvailableAverage *
(double)AudioConstants::NETWORK_FRAME_MSECS;
outputRingBufferLatency = (double)downstreamAudioStreamStats._framesAvailableAverage *
(double)AudioConstants::NETWORK_FRAME_MSECS;
audioOutputBufferLatency = (double)_stats->getAudioOutputMsecsUnplayedStats().getWindowAverage();
}

View file

@ -36,7 +36,6 @@ void Cube3DOverlay::render(RenderArgs* args) {
// TODO: handle registration point??
glm::vec3 position = getPosition();
glm::vec3 center = getCenter();
glm::vec3 dimensions = getDimensions();
glm::quat rotation = getRotation();

View file

@ -62,7 +62,7 @@ void AnimationReader::run() {
QSharedPointer<Resource> animation = _animation.toStrongRef();
if (!animation.isNull()) {
QMetaObject::invokeMethod(animation.data(), "setGeometry",
Q_ARG(const FBXGeometry&, readFBX(_reply->readAll(), QVariantHash())));
Q_ARG(const FBXGeometry&, readFBX(_reply->readAll(), QVariantHash(), _reply->property("url").toString())));
}
_reply->deleteLater();
}

View file

@ -1,5 +1,7 @@
set(TARGET_NAME entities-renderer)
AUTOSCRIBE_SHADER_LIB(gpu model render)
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
setup_hifi_library(Widgets OpenGL Network Script)

View file

@ -14,22 +14,141 @@
#include <DeferredLightingEffect.h>
#include <PerfStat.h>
#include <GeometryCache.h>
#include <AbstractViewStateInterface.h>
#include "EntitiesRendererLogging.h"
#include "RenderableParticleEffectEntityItem.h"
#include "untextured_particle_vert.h"
#include "untextured_particle_frag.h"
#include "textured_particle_vert.h"
#include "textured_particle_frag.h"
class ParticlePayload {
public:
typedef render::Payload<ParticlePayload> Payload;
typedef Payload::DataPointer Pointer;
typedef RenderableParticleEffectEntityItem::Vertex Vertex;
ParticlePayload() : _vertexFormat(std::make_shared<gpu::Stream::Format>()),
_vertexBuffer(std::make_shared<gpu::Buffer>()),
_indexBuffer(std::make_shared<gpu::Buffer>()) {
_vertexFormat->setAttribute(gpu::Stream::POSITION, 0, gpu::Element::VEC3F_XYZ, 0);
_vertexFormat->setAttribute(gpu::Stream::TEXCOORD, 0, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::UV), offsetof(Vertex, uv));
_vertexFormat->setAttribute(gpu::Stream::COLOR, 0, gpu::Element::COLOR_RGBA_32, offsetof(Vertex, rgba));
}
void setPipeline(gpu::PipelinePointer pipeline) { _pipeline = pipeline; }
const gpu::PipelinePointer& getPipeline() const { return _pipeline; }
const Transform& getModelTransform() const { return _modelTransform; }
void setModelTransform(const Transform& modelTransform) { _modelTransform = modelTransform; }
const AABox& getBound() const { return _bound; }
void setBound(AABox& bound) { _bound = bound; }
gpu::BufferPointer getVertexBuffer() { return _vertexBuffer; }
const gpu::BufferPointer& getVertexBuffer() const { return _vertexBuffer; }
gpu::BufferPointer getIndexBuffer() { return _indexBuffer; }
const gpu::BufferPointer& getIndexBuffer() const { return _indexBuffer; }
void setTexture(gpu::TexturePointer texture) { _texture = texture; }
const gpu::TexturePointer& getTexture() const { return _texture; }
bool getVisibleFlag() const { return _visibleFlag; }
void setVisibleFlag(bool visibleFlag) { _visibleFlag = visibleFlag; }
void render(RenderArgs* args) const {
assert(_pipeline);
gpu::Batch& batch = *args->_batch;
batch.setPipeline(_pipeline);
if (_texture) {
batch.setResourceTexture(0, _texture);
}
batch.setModelTransform(_modelTransform);
batch.setInputFormat(_vertexFormat);
batch.setInputBuffer(0, _vertexBuffer, 0, sizeof(Vertex));
batch.setIndexBuffer(gpu::UINT16, _indexBuffer, 0);
auto numIndices = _indexBuffer->getSize() / sizeof(uint16_t);
batch.drawIndexed(gpu::TRIANGLES, numIndices);
}
protected:
Transform _modelTransform;
AABox _bound;
gpu::PipelinePointer _pipeline;
gpu::Stream::FormatPointer _vertexFormat;
gpu::BufferPointer _vertexBuffer;
gpu::BufferPointer _indexBuffer;
gpu::TexturePointer _texture;
bool _visibleFlag = true;
};
namespace render {
template <>
const ItemKey payloadGetKey(const ParticlePayload::Pointer& payload) {
if (payload->getVisibleFlag()) {
return ItemKey::Builder::transparentShape();
} else {
return ItemKey::Builder().withInvisible().build();
}
}
template <>
const Item::Bound payloadGetBound(const ParticlePayload::Pointer& payload) {
return payload->getBound();
}
template <>
void payloadRender(const ParticlePayload::Pointer& payload, RenderArgs* args) {
payload->render(args);
}
}
gpu::PipelinePointer RenderableParticleEffectEntityItem::_texturedPipeline;
gpu::PipelinePointer RenderableParticleEffectEntityItem::_untexturedPipeline;
EntityItemPointer RenderableParticleEffectEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
return std::make_shared<RenderableParticleEffectEntityItem>(entityID, properties);
}
RenderableParticleEffectEntityItem::RenderableParticleEffectEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties) :
ParticleEffectEntityItem(entityItemID, properties) {
_cacheID = DependencyManager::get<GeometryCache>()->allocateID();
// lazy creation of particle system pipeline
if (!_untexturedPipeline && !_texturedPipeline) {
createPipelines();
}
}
void RenderableParticleEffectEntityItem::render(RenderArgs* args) {
Q_ASSERT(getType() == EntityTypes::ParticleEffect);
PerformanceTimer perfTimer("RenderableParticleEffectEntityItem::render");
bool RenderableParticleEffectEntityItem::addToScene(EntityItemPointer self,
render::ScenePointer scene,
render::PendingChanges& pendingChanges) {
auto particlePayload = std::shared_ptr<ParticlePayload>(new ParticlePayload());
particlePayload->setPipeline(_untexturedPipeline);
_renderItemId = scene->allocateID();
auto renderData = ParticlePayload::Pointer(particlePayload);
auto renderPayload = render::PayloadPointer(new ParticlePayload::Payload(renderData));
pendingChanges.resetItem(_renderItemId, renderPayload);
_scene = scene;
return true;
}
void RenderableParticleEffectEntityItem::removeFromScene(EntityItemPointer self,
render::ScenePointer scene,
render::PendingChanges& pendingChanges) {
pendingChanges.removeItem(_renderItemId);
_scene = nullptr;
};
void RenderableParticleEffectEntityItem::update(const quint64& now) {
ParticleEffectEntityItem::update(now);
if (_texturesChangedFlag) {
if (_textures.isEmpty()) {
@ -42,71 +161,155 @@ void RenderableParticleEffectEntityItem::render(RenderArgs* args) {
_texturesChangedFlag = false;
}
bool textured = _texture && _texture->isLoaded();
updateQuads(args, textured);
Q_ASSERT(args->_batch);
gpu::Batch& batch = *args->_batch;
if (textured) {
batch.setResourceTexture(0, _texture->getGPUTexture());
}
batch.setModelTransform(getTransformToCenter());
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, textured);
DependencyManager::get<GeometryCache>()->renderVertices(batch, gpu::QUADS, _cacheID);
};
updateRenderItem();
}
static glm::vec3 zSortAxis;
static bool zSort(const glm::vec3& rhs, const glm::vec3& lhs) {
return glm::dot(rhs, ::zSortAxis) > glm::dot(lhs, ::zSortAxis);
}
void RenderableParticleEffectEntityItem::updateQuads(RenderArgs* args, bool textured) {
float particleRadius = getParticleRadius();
glm::vec4 particleColor(toGlm(getXColor()), getLocalRenderAlpha());
glm::vec3 upOffset = args->_viewFrustum->getUp() * particleRadius;
glm::vec3 rightOffset = args->_viewFrustum->getRight() * particleRadius;
QVector<glm::vec3> vertices;
QVector<glm::vec3> positions;
QVector<glm::vec2> textureCoords;
vertices.reserve(getLivingParticleCount() * VERTS_PER_PARTICLE);
if (textured) {
textureCoords.reserve(getLivingParticleCount() * VERTS_PER_PARTICLE);
}
positions.reserve(getLivingParticleCount());
for (quint32 i = _particleHeadIndex; i != _particleTailIndex; i = (i + 1) % _maxParticles) {
positions.append(_particlePositions[i]);
if (textured) {
textureCoords.append(glm::vec2(0, 1));
textureCoords.append(glm::vec2(1, 1));
textureCoords.append(glm::vec2(1, 0));
textureCoords.append(glm::vec2(0, 0));
}
}
// sort particles back to front
::zSortAxis = args->_viewFrustum->getDirection();
qSort(positions.begin(), positions.end(), zSort);
for (int i = 0; i < positions.size(); i++) {
glm::vec3 pos = (textured) ? positions[i] : _particlePositions[i];
// generate corners of quad aligned to face the camera.
vertices.append(pos + rightOffset + upOffset);
vertices.append(pos - rightOffset + upOffset);
vertices.append(pos - rightOffset - upOffset);
vertices.append(pos + rightOffset - upOffset);
}
if (textured) {
DependencyManager::get<GeometryCache>()->updateVertices(_cacheID, vertices, textureCoords, particleColor);
} else {
DependencyManager::get<GeometryCache>()->updateVertices(_cacheID, vertices, particleColor);
}
uint32_t toRGBA(uint8_t r, uint8_t g, uint8_t b, uint8_t a) {
return ((uint32_t)r | (uint32_t)g << 8 | (uint32_t)b << 16 | (uint32_t)a << 24);
}
void RenderableParticleEffectEntityItem::updateRenderItem() {
if (!_scene) {
return;
}
float particleRadius = getParticleRadius();
auto xcolor = getXColor();
auto alpha = (uint8_t)(glm::clamp(getLocalRenderAlpha(), 0.0f, 1.0f) * 255.0f);
auto rgba = toRGBA(xcolor.red, xcolor.green, xcolor.blue, alpha);
// make a copy of each particle position
std::vector<glm::vec3> positions;
positions.reserve(getLivingParticleCount());
for (quint32 i = _particleHeadIndex; i != _particleTailIndex; i = (i + 1) % _maxParticles) {
positions.push_back(_particlePositions[i]);
}
// sort particles back to front
// NOTE: this is view frustum might be one frame out of date.
auto frustum = AbstractViewStateInterface::instance()->getCurrentViewFrustum();
::zSortAxis = frustum->getDirection();
qSort(positions.begin(), positions.end(), zSort);
// allocate vertices
_vertices.clear();
// build vertices from particle positions
const glm::vec3 upOffset = frustum->getUp() * particleRadius;
const glm::vec3 rightOffset = frustum->getRight() * particleRadius;
for (auto&& pos : positions) {
// generate corners of quad aligned to face the camera.
_vertices.emplace_back(pos + rightOffset + upOffset, glm::vec2(1.0f, 1.0f), rgba);
_vertices.emplace_back(pos - rightOffset + upOffset, glm::vec2(0.0f, 1.0f), rgba);
_vertices.emplace_back(pos - rightOffset - upOffset, glm::vec2(0.0f, 0.0f), rgba);
_vertices.emplace_back(pos + rightOffset - upOffset, glm::vec2(1.0f, 0.0f), rgba);
}
render::PendingChanges pendingChanges;
pendingChanges.updateItem<ParticlePayload>(_renderItemId, [&](ParticlePayload& payload) {
// update vertex buffer
auto vertexBuffer = payload.getVertexBuffer();
size_t numBytes = sizeof(Vertex) * _vertices.size();
vertexBuffer->resize(numBytes);
gpu::Byte* data = vertexBuffer->editData();
memcpy(data, &(_vertices[0]), numBytes);
// FIXME, don't update index buffer if num particles has not changed.
// update index buffer
auto indexBuffer = payload.getIndexBuffer();
const size_t NUM_VERTS_PER_PARTICLE = 4;
const size_t NUM_INDICES_PER_PARTICLE = 6;
auto numQuads = (_vertices.size() / NUM_VERTS_PER_PARTICLE);
numBytes = sizeof(uint16_t) * numQuads * NUM_INDICES_PER_PARTICLE;
indexBuffer->resize(numBytes);
data = indexBuffer->editData();
auto indexPtr = reinterpret_cast<uint16_t*>(data);
for (size_t i = 0; i < numQuads; ++i) {
indexPtr[i * NUM_INDICES_PER_PARTICLE + 0] = i * NUM_VERTS_PER_PARTICLE + 0;
indexPtr[i * NUM_INDICES_PER_PARTICLE + 1] = i * NUM_VERTS_PER_PARTICLE + 1;
indexPtr[i * NUM_INDICES_PER_PARTICLE + 2] = i * NUM_VERTS_PER_PARTICLE + 3;
indexPtr[i * NUM_INDICES_PER_PARTICLE + 3] = i * NUM_VERTS_PER_PARTICLE + 1;
indexPtr[i * NUM_INDICES_PER_PARTICLE + 4] = i * NUM_VERTS_PER_PARTICLE + 2;
indexPtr[i * NUM_INDICES_PER_PARTICLE + 5] = i * NUM_VERTS_PER_PARTICLE + 3;
}
// update transform
glm::quat rot = _transform.getRotation();
glm::vec3 pos = _transform.getTranslation();
Transform t;
t.setRotation(rot);
t.setTranslation(pos);
payload.setModelTransform(t);
// transform _particleMinBound and _particleMaxBound corners into world coords
glm::vec3 d = _particleMaxBound - _particleMinBound;
const size_t NUM_BOX_CORNERS = 8;
glm::vec3 corners[NUM_BOX_CORNERS] = {
pos + rot * (_particleMinBound + glm::vec3(0.0f, 0.0f, 0.0f)),
pos + rot * (_particleMinBound + glm::vec3(d.x, 0.0f, 0.0f)),
pos + rot * (_particleMinBound + glm::vec3(0.0f, d.y, 0.0f)),
pos + rot * (_particleMinBound + glm::vec3(d.x, d.y, 0.0f)),
pos + rot * (_particleMinBound + glm::vec3(0.0f, 0.0f, d.z)),
pos + rot * (_particleMinBound + glm::vec3(d.x, 0.0f, d.z)),
pos + rot * (_particleMinBound + glm::vec3(0.0f, d.y, d.z)),
pos + rot * (_particleMinBound + glm::vec3(d.x, d.y, d.z))
};
glm::vec3 min(FLT_MAX, FLT_MAX, FLT_MAX);
glm::vec3 max = -min;
for (size_t i = 0; i < NUM_BOX_CORNERS; i++) {
min.x = std::min(min.x, corners[i].x);
min.y = std::min(min.y, corners[i].y);
min.z = std::min(min.z, corners[i].z);
max.x = std::max(max.x, corners[i].x);
max.y = std::max(max.y, corners[i].y);
max.z = std::max(max.z, corners[i].z);
}
AABox bound(min, max - min);
payload.setBound(bound);
bool textured = _texture && _texture->isLoaded();
if (textured) {
payload.setTexture(_texture->getGPUTexture());
payload.setPipeline(_texturedPipeline);
} else {
payload.setTexture(nullptr);
payload.setPipeline(_untexturedPipeline);
}
});
_scene->enqueuePendingChanges(pendingChanges);
}
void RenderableParticleEffectEntityItem::createPipelines() {
if (!_untexturedPipeline) {
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, true, gpu::LESS_EQUAL);
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD,
gpu::State::INV_SRC_ALPHA, gpu::State::FACTOR_ALPHA,
gpu::State::BLEND_OP_ADD, gpu::State::ONE);
auto vertShader = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(untextured_particle_vert)));
auto fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(untextured_particle_frag)));
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vertShader, fragShader));
_untexturedPipeline = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
}
if (!_texturedPipeline) {
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, true, gpu::LESS_EQUAL);
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD,
gpu::State::INV_SRC_ALPHA, gpu::State::FACTOR_ALPHA,
gpu::State::BLEND_OP_ADD, gpu::State::ONE);
auto vertShader = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(textured_particle_vert)));
auto fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(textured_particle_frag)));
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vertShader, fragShader));
_texturedPipeline = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
}
}

View file

@ -16,20 +16,35 @@
#include "RenderableEntityItem.h"
class RenderableParticleEffectEntityItem : public ParticleEffectEntityItem {
friend class ParticlePayload;
public:
static EntityItemPointer factory(const EntityItemID& entityID, const EntityItemProperties& properties);
RenderableParticleEffectEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties);
virtual void render(RenderArgs* args);
void updateQuads(RenderArgs* args, bool textured);
virtual void update(const quint64& now) override;
SIMPLE_RENDERABLE();
void updateRenderItem();
virtual bool addToScene(EntityItemPointer self, render::ScenePointer scene, render::PendingChanges& pendingChanges);
virtual void removeFromScene(EntityItemPointer self, render::ScenePointer scene, render::PendingChanges& pendingChanges);
protected:
render::ItemID _renderItemId;
int _cacheID;
const int VERTS_PER_PARTICLE = 4;
struct Vertex {
Vertex(glm::vec3 xyzIn, glm::vec2 uvIn, uint32_t rgbaIn) : xyz(xyzIn), uv(uvIn), rgba(rgbaIn) {}
glm::vec3 xyz;
glm::vec2 uv;
uint32_t rgba;
};
static void createPipelines();
std::vector<Vertex> _vertices;
static gpu::PipelinePointer _untexturedPipeline;
static gpu::PipelinePointer _texturedPipeline;
render::ScenePointer _scene;
NetworkTexturePointer _texture;
};

View file

@ -35,7 +35,7 @@
#include <PolyVoxCore/Material.h>
#include "model/Geometry.h"
#include "gpu/GLBackend.h"
#include "gpu/Context.h"
#include "EntityTreeRenderer.h"
#include "RenderablePolyVoxEntityItem.h"

View file

@ -8,7 +8,6 @@
#include "RenderableWebEntityItem.h"
#include <gpu/GPUConfig.h>
#include <QMouseEvent>
#include <QQuickItem>
#include <QQuickWindow>
@ -24,7 +23,7 @@
#include <GLMHelpers.h>
#include <PathUtils.h>
#include <TextureCache.h>
#include <gpu/GLBackend.h>
#include <gpu/Context.h>
#include "EntityTreeRenderer.h"
@ -178,8 +177,7 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
batch.setModelTransform(getTransformToCenter());
bool textured = false, culled = false, emissive = false;
if (_texture) {
batch._glActiveTexture(GL_TEXTURE0);
batch._glBindTexture(GL_TEXTURE_2D, _texture);
batch._glActiveBindTexture(GL_TEXTURE0, GL_TEXTURE_2D, _texture);
textured = emissive = true;
}

View file

@ -0,0 +1,20 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// fragment shader
//
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
uniform sampler2D colorMap;
varying vec4 varColor;
varying vec2 varTexCoord;
void main(void) {
vec4 color = texture2D(colorMap, varTexCoord);
gl_FragColor = color * varColor;
}

View file

@ -0,0 +1,28 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// particle vertex shader
//
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
varying vec4 varColor;
varying vec2 varTexCoord;
void main(void) {
// pass along the color & uvs to fragment shader
varColor = gl_Color;
varTexCoord = gl_MultiTexCoord0.xy;
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
<$transformModelToClipPos(cam, obj, gl_Vertex, gl_Position)$>
}

View file

@ -0,0 +1,16 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// fragment shader
//
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
varying vec4 varColor;
void main(void) {
gl_FragColor = varColor;
}

View file

@ -0,0 +1,24 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// particle vertex shader
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
varying vec4 varColor;
void main(void) {
// pass along the diffuse color
varColor = gl_Color;
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
<$transformModelToClipPos(cam, obj, gl_Vertex, gl_Position)$>
}

View file

@ -114,7 +114,8 @@ _glowLevelChanged(false),
_localRenderAlphaChanged(false),
_defaultSettings(true),
_naturalDimensions(1.0f, 1.0f, 1.0f)
_naturalDimensions(1.0f, 1.0f, 1.0f),
_naturalPosition(0.0f, 0.0f, 0.0f)
{
}
@ -128,6 +129,11 @@ void EntityItemProperties::setSittingPoints(const QVector<SittingPoint>& sitting
}
}
void EntityItemProperties::calculateNaturalPosition(const glm::vec3& min, const glm::vec3& max) {
glm::vec3 halfDimension = (max - min) / 2.0f;
_naturalPosition = max - halfDimension;
}
bool EntityItemProperties::animationSettingsChanged() const {
return _animationSettingsChanged;
}
@ -378,6 +384,7 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
COPY_PROPERTY_TO_QSCRIPTVALUE(dimensions);
if (!skipDefaults) {
COPY_PROPERTY_TO_QSCRIPTVALUE(naturalDimensions); // gettable, but not settable
COPY_PROPERTY_TO_QSCRIPTVALUE(naturalPosition);
}
COPY_PROPERTY_TO_QSCRIPTVALUE(rotation);
COPY_PROPERTY_TO_QSCRIPTVALUE(velocity);

View file

@ -192,7 +192,10 @@ public:
const glm::vec3& getNaturalDimensions() const { return _naturalDimensions; }
void setNaturalDimensions(const glm::vec3& value) { _naturalDimensions = value; }
const glm::vec3& getNaturalPosition() const { return _naturalPosition; }
void calculateNaturalPosition(const glm::vec3& min, const glm::vec3& max);
const QStringList& getTextureNames() const { return _textureNames; }
void setTextureNames(const QStringList& value) { _textureNames = value; }
@ -232,6 +235,7 @@ private:
QVector<SittingPoint> _sittingPoints;
QStringList _textureNames;
glm::vec3 _naturalDimensions;
glm::vec3 _naturalPosition;
};
Q_DECLARE_METATYPE(EntityItemProperties);

View file

@ -118,6 +118,7 @@ EntityItemProperties EntityScriptingInterface::getEntityProperties(QUuid identit
results.setSittingPoints(geometry->sittingPoints);
Extents meshExtents = geometry->getUnscaledMeshExtents();
results.setNaturalDimensions(meshExtents.maximum - meshExtents.minimum);
results.calculateNaturalPosition(meshExtents.minimum, meshExtents.maximum);
}
}

View file

@ -39,6 +39,7 @@
#include "EntityTree.h"
#include "EntityTreeElement.h"
#include "EntitiesLogging.h"
#include "EntityScriptingInterface.h"
#include "ParticleEffectEntityItem.h"
const xColor ParticleEffectEntityItem::DEFAULT_COLOR = { 255, 255, 255 };
@ -92,6 +93,75 @@ ParticleEffectEntityItem::ParticleEffectEntityItem(const EntityItemID& entityIte
ParticleEffectEntityItem::~ParticleEffectEntityItem() {
}
void ParticleEffectEntityItem::setDimensions(const glm::vec3& value) {
computeAndUpdateDimensions();
}
void ParticleEffectEntityItem::setLifespan(float lifespan) {
_lifespan = lifespan;
computeAndUpdateDimensions();
}
void ParticleEffectEntityItem::setEmitDirection(glm::vec3 emitDirection) {
_emitDirection = glm::normalize(emitDirection);
computeAndUpdateDimensions();
}
void ParticleEffectEntityItem::setEmitStrength(float emitStrength) {
_emitStrength = emitStrength;
computeAndUpdateDimensions();
}
void ParticleEffectEntityItem::setLocalGravity(float localGravity) {
_localGravity = localGravity;
computeAndUpdateDimensions();
}
void ParticleEffectEntityItem::setParticleRadius(float particleRadius) {
_particleRadius = particleRadius;
computeAndUpdateDimensions();
}
void ParticleEffectEntityItem::computeAndUpdateDimensions() {
const float t = _lifespan * 1.1f; // add 10% extra time, to account for incremental timer accumulation error.
const float MAX_RANDOM_FACTOR = (0.5f * 0.25f);
const float maxOffset = (MAX_RANDOM_FACTOR * _emitStrength) + _particleRadius;
// bounds for x and z is easy to compute because there is no at^2 term.
float xMax = (_emitDirection.x * _emitStrength + maxOffset) * t;
float xMin = (_emitDirection.x * _emitStrength - maxOffset) * t;
float zMax = (_emitDirection.z * _emitStrength + maxOffset) * t;
float zMin = (_emitDirection.z * _emitStrength - maxOffset) * t;
// yEnd is where the particle will end.
float a = _localGravity;
float atSquared = a * t * t;
float v = _emitDirection.y * _emitStrength + maxOffset;
float vt = v * t;
float yEnd = 0.5f * atSquared + vt;
// yApex is where the particle is at it's apex.
float yApexT = (-v / a);
float yApex = 0.0f;
// only set apex if it's within the lifespan of the particle.
if (yApexT >= 0.0f && yApexT <= t) {
yApex = -(v * v) / (2.0f * a);
}
float yMax = std::max(yApex, yEnd);
float yMin = std::min(yApex, yEnd);
// times 2 because dimensions are diameters not radii.
glm::vec3 dims(2.0f * std::max(fabsf(xMin), fabsf(xMax)),
2.0f * std::max(fabsf(yMin), fabsf(yMax)),
2.0f * std::max(fabsf(zMin), fabsf(zMax)));
EntityItem::setDimensions(dims);
}
EntityItemProperties ParticleEffectEntityItem::getProperties() const {
EntityItemProperties properties = EntityItem::getProperties(); // get the properties from our base class
@ -245,7 +315,7 @@ bool ParticleEffectEntityItem::isAnimatingSomething() const {
}
bool ParticleEffectEntityItem::needsToCallUpdate() const {
return isAnimatingSomething() ? true : EntityItem::needsToCallUpdate();
return true;
}
void ParticleEffectEntityItem::update(const quint64& now) {
@ -260,13 +330,6 @@ void ParticleEffectEntityItem::update(const quint64& now) {
if (isAnimatingSomething()) {
stepSimulation(deltaTime);
// update the dimensions
glm::vec3 dims;
dims.x = glm::max(glm::abs(_particleMinBound.x), glm::abs(_particleMaxBound.x)) * 2.0f;
dims.y = glm::max(glm::abs(_particleMinBound.y), glm::abs(_particleMaxBound.y)) * 2.0f;
dims.z = glm::max(glm::abs(_particleMinBound.z), glm::abs(_particleMaxBound.z)) * 2.0f;
setDimensions(dims);
}
EntityItem::update(now); // let our base class handle it's updates...
@ -319,7 +382,7 @@ void ParticleEffectEntityItem::setAnimationSettings(const QString& value) {
qCDebug(entities) << "ParticleEffectEntityItem::setAnimationSettings() calling setAnimationFrameIndex()...";
qCDebug(entities) << " settings:" << value;
qCDebug(entities) << " settingsMap[frameIndex]:" << settingsMap["frameIndex"];
qCDebug(entities" frameIndex: %20.5f", frameIndex);
qCDebug(entities, " frameIndex: %20.5f", frameIndex);
}
#endif

View file

@ -86,12 +86,14 @@ public:
void setAnimationLastFrame(float lastFrame) { _animationLoop.setLastFrame(lastFrame); }
float getAnimationLastFrame() const { return _animationLoop.getLastFrame(); }
virtual void setDimensions(const glm::vec3& value) override;
static const quint32 DEFAULT_MAX_PARTICLES;
void setMaxParticles(quint32 maxParticles);
quint32 getMaxParticles() const { return _maxParticles; }
static const float DEFAULT_LIFESPAN;
void setLifespan(float lifespan) { _lifespan = lifespan; }
void setLifespan(float lifespan);
float getLifespan() const { return _lifespan; }
static const float DEFAULT_EMIT_RATE;
@ -99,21 +101,23 @@ public:
float getEmitRate() const { return _emitRate; }
static const glm::vec3 DEFAULT_EMIT_DIRECTION;
void setEmitDirection(glm::vec3 emitDirection) { _emitDirection = glm::normalize(emitDirection); }
void setEmitDirection(glm::vec3 emitDirection);
const glm::vec3& getEmitDirection() const { return _emitDirection; }
static const float DEFAULT_EMIT_STRENGTH;
void setEmitStrength(float emitStrength) { _emitStrength = emitStrength; }
void setEmitStrength(float emitStrength);
float getEmitStrength() const { return _emitStrength; }
static const float DEFAULT_LOCAL_GRAVITY;
void setLocalGravity(float localGravity) { _localGravity = localGravity; }
void setLocalGravity(float localGravity);
float getLocalGravity() const { return _localGravity; }
static const float DEFAULT_PARTICLE_RADIUS;
void setParticleRadius(float particleRadius) { _particleRadius = particleRadius; }
void setParticleRadius(float particleRadius);
float getParticleRadius() const { return _particleRadius; }
void computeAndUpdateDimensions();
bool getAnimationIsPlaying() const { return _animationLoop.isRunning(); }
float getAnimationFrameIndex() const { return _animationLoop.getFrameIndex(); }
float getAnimationFPS() const { return _animationLoop.getFPS(); }

View file

@ -17,6 +17,7 @@
#include <QTextStream>
#include <QtDebug>
#include <QtEndian>
#include <QFileInfo>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/quaternion.hpp>
@ -1454,9 +1455,21 @@ void buildModelMesh(ExtractedMesh& extracted) {
}
#endif // USE_MODEL_MESH
QByteArray fileOnUrl(const QByteArray& filenameString, const QString& url) {
QString path = QFileInfo(url).path();
QByteArray filename = filenameString;
QFileInfo checkFile(path + "/" + filename.replace('\\', '/'));
//check if the file exists at the RelativeFileName
if (checkFile.exists() && checkFile.isFile()) {
filename = filename.replace('\\', '/');
} else {
// there is no texture at the fbx dir with the filename added. Assume it is in the fbx dir.
filename = filename.mid(qMax(filename.lastIndexOf('\\'), filename.lastIndexOf('/')) + 1);
}
return filename;
}
FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping, bool loadLightmaps, float lightmapLevel) {
FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
QHash<QString, ExtractedMesh> meshes;
QHash<QString, QString> modelIDsToNames;
QHash<QString, int> meshIDsToMeshIndices;
@ -1781,9 +1794,8 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
TextureParam tex;
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == "RelativeFilename") {
// trim off any path information
QByteArray filename = subobject.properties.at(0).toByteArray();
filename = filename.mid(qMax(filename.lastIndexOf('\\'), filename.lastIndexOf('/')) + 1);
filename = fileOnUrl(filename, url);
textureFilenames.insert(getID(object.properties), filename);
} else if (subobject.name == "TextureName") {
// trim the name from the timestamp
@ -1857,7 +1869,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == "RelativeFilename") {
filename = subobject.properties.at(0).toByteArray();
filename = filename.mid(qMax(filename.lastIndexOf('\\'), filename.lastIndexOf('/')) + 1);
filename = fileOnUrl(filename, url);
} else if (subobject.name == "Content" && !subobject.properties.isEmpty()) {
content = subobject.properties.at(0).toByteArray();
@ -2616,10 +2628,17 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
buildModelMesh(extracted);
# endif
if (extracted.mesh.isEye) {
if (maxJointIndex == geometry.leftEyeJointIndex) {
geometry.leftEyeSize = extracted.mesh.meshExtents.largestDimension() * offsetScale;
} else {
geometry.rightEyeSize = extracted.mesh.meshExtents.largestDimension() * offsetScale;
}
}
geometry.meshes.append(extracted.mesh);
int meshIndex = geometry.meshes.size() - 1;
meshIDsToMeshIndices.insert(it.key(), meshIndex);
}
// now that all joints have been scanned, compute a collision shape for each joint
@ -2710,12 +2729,12 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
return geometry;
}
FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping, bool loadLightmaps, float lightmapLevel) {
FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
QBuffer buffer(const_cast<QByteArray*>(&model));
buffer.open(QIODevice::ReadOnly);
return readFBX(&buffer, mapping, loadLightmaps, lightmapLevel);
return readFBX(&buffer, mapping, url, loadLightmaps, lightmapLevel);
}
FBXGeometry readFBX(QIODevice* device, const QVariantHash& mapping, bool loadLightmaps, float lightmapLevel) {
return extractFBXGeometry(parseFBX(device), mapping, loadLightmaps, lightmapLevel);
FBXGeometry readFBX(QIODevice* device, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
return extractFBXGeometry(parseFBX(device), mapping, url, loadLightmaps, lightmapLevel);
}

View file

@ -232,7 +232,10 @@ public:
int rightHandJointIndex = -1;
int leftToeJointIndex = -1;
int rightToeJointIndex = -1;
float leftEyeSize = 0.0f; // Maximum mesh extents dimension
float rightEyeSize = 0.0f;
QVector<int> humanIKJointIndices;
glm::vec3 palmDirection;
@ -268,10 +271,10 @@ Q_DECLARE_METATYPE(FBXGeometry)
/// Reads FBX geometry from the supplied model and mapping data.
/// \exception QString if an error occurs in parsing
FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping, bool loadLightmaps = true, float lightmapLevel = 1.0f);
FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping, const QString& url = "", bool loadLightmaps = true, float lightmapLevel = 1.0f);
/// Reads FBX geometry from the supplied model and mapping data.
/// \exception QString if an error occurs in parsing
FBXGeometry readFBX(QIODevice* device, const QVariantHash& mapping, bool loadLightmaps = true, float lightmapLevel = 1.0f);
FBXGeometry readFBX(QIODevice* device, const QVariantHash& mapping, const QString& url = "", bool loadLightmaps = true, float lightmapLevel = 1.0f);
#endif // hifi_FBXReader_h

View file

@ -195,7 +195,10 @@ void OBJFace::addFrom(const OBJFace* face, int index) { // add using data from f
}
bool OBJReader::isValidTexture(const QByteArray &filename) {
QUrl candidateUrl = url->resolved(QUrl(filename));
if (!_url) {
return false;
}
QUrl candidateUrl = _url->resolved(QUrl(filename));
QNetworkReply *netReply = request(candidateUrl, true);
bool isValid = netReply->isFinished() && (netReply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt() == 200);
netReply->deleteLater();
@ -242,7 +245,7 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
} else if ((token == "map_Kd") || (token == "map_Ks")) {
QByteArray filename = QUrl(tokenizer.getLineAsDatum()).fileName().toUtf8();
if (filename.endsWith(".tga")) {
qCDebug(modelformat) << "OBJ Reader WARNING: currently ignoring tga texture " << filename << " in " << url;
qCDebug(modelformat) << "OBJ Reader WARNING: currently ignoring tga texture " << filename << " in " << _url;
break;
}
if (isValidTexture(filename)) {
@ -252,7 +255,7 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
currentMaterial.specularTextureFilename = filename;
}
} else {
qCDebug(modelformat) << "OBJ Reader WARNING: " << url << " ignoring missing texture " << filename;
qCDebug(modelformat) << "OBJ Reader WARNING: " << _url << " ignoring missing texture " << filename;
}
}
}
@ -303,7 +306,8 @@ bool OBJReader::parseOBJGroup(OBJTokenizer& tokenizer, const QVariantHash& mappi
}
QByteArray token = tokenizer.getDatum();
//qCDebug(modelformat) << token;
if (token == "g") {
// we don't support separate objects in the same file, so treat "o" the same as "g".
if (token == "g" || token == "o") {
if (sawG) {
// we've encountered the beginning of the next group.
tokenizer.pushBackToken(OBJTokenizer::DATUM_TOKEN);
@ -316,7 +320,7 @@ bool OBJReader::parseOBJGroup(OBJTokenizer& tokenizer, const QVariantHash& mappi
QByteArray groupName = tokenizer.getDatum();
currentGroup = groupName;
//qCDebug(modelformat) << "new group:" << groupName;
} else if (token == "mtllib") {
} else if (token == "mtllib" && _url) {
if (tokenizer.nextToken() != OBJTokenizer::DATUM_TOKEN) {
break;
}
@ -325,13 +329,15 @@ bool OBJReader::parseOBJGroup(OBJTokenizer& tokenizer, const QVariantHash& mappi
break; // Some files use mtllib over and over again for the same libraryName
}
librariesSeen[libraryName] = true;
QUrl libraryUrl = url->resolved(QUrl(libraryName).fileName()); // Throw away any path part of libraryName, and merge against original url.
// Throw away any path part of libraryName, and merge against original url.
QUrl libraryUrl = _url->resolved(QUrl(libraryName).fileName());
qCDebug(modelformat) << "OBJ Reader new library:" << libraryName << " at:" << libraryUrl;
QNetworkReply* netReply = request(libraryUrl, false);
if (netReply->isFinished() && (netReply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt() == 200)) {
parseMaterialLibrary(netReply);
} else {
qCDebug(modelformat) << "OBJ Reader " << libraryName << " did not answer. Got " << netReply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString();
qCDebug(modelformat) << "OBJ Reader " << libraryName << " did not answer. Got "
<< netReply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString();
}
netReply->deleteLater();
} else if (token == "usemtl") {
@ -406,10 +412,10 @@ FBXGeometry OBJReader::readOBJ(QIODevice* device, const QVariantHash& mapping, Q
OBJTokenizer tokenizer(device);
float scaleGuess = 1.0f;
this->url = url;
_url = url;
geometry.meshExtents.reset();
geometry.meshes.append(FBXMesh());
try {
// call parseOBJGroup as long as it's returning true. Each successful call will
// add a new meshPart to the geometry's single mesh.
@ -417,7 +423,7 @@ FBXGeometry OBJReader::readOBJ(QIODevice* device, const QVariantHash& mapping, Q
FBXMesh& mesh = geometry.meshes[0];
mesh.meshIndex = 0;
geometry.joints.resize(1);
geometry.joints[0].isFree = false;
geometry.joints[0].parentIndex = -1;
@ -440,37 +446,44 @@ FBXGeometry OBJReader::readOBJ(QIODevice* device, const QVariantHash& mapping, Q
0, 0, 1, 0,
0, 0, 0, 1);
mesh.clusters.append(cluster);
// Some .obj files use the convention that a group with uv coordinates that doesn't define a material, should use a texture with the same basename as the .obj file.
QString filename = url->fileName();
int extIndex = filename.lastIndexOf('.'); // by construction, this does not fail
QString basename = filename.remove(extIndex + 1, sizeof("obj"));
OBJMaterial& preDefinedMaterial = materials[SMART_DEFAULT_MATERIAL_NAME];
preDefinedMaterial.diffuseColor = glm::vec3(1.0f);
QVector<QByteArray> extensions = {"jpg", "jpeg", "png", "tga"};
QByteArray base = basename.toUtf8(), textName = "";
for (int i = 0; i < extensions.count(); i++) {
QByteArray candidateString = base + extensions[i];
if (isValidTexture(candidateString)) {
textName = candidateString;
break;
// Some .obj files use the convention that a group with uv coordinates that doesn't define a material, should use
// a texture with the same basename as the .obj file.
if (url) {
QString filename = url->fileName();
int extIndex = filename.lastIndexOf('.'); // by construction, this does not fail
QString basename = filename.remove(extIndex + 1, sizeof("obj"));
OBJMaterial& preDefinedMaterial = materials[SMART_DEFAULT_MATERIAL_NAME];
preDefinedMaterial.diffuseColor = glm::vec3(1.0f);
QVector<QByteArray> extensions = {"jpg", "jpeg", "png", "tga"};
QByteArray base = basename.toUtf8(), textName = "";
for (int i = 0; i < extensions.count(); i++) {
QByteArray candidateString = base + extensions[i];
if (isValidTexture(candidateString)) {
textName = candidateString;
break;
}
}
if (!textName.isEmpty()) {
preDefinedMaterial.diffuseTextureFilename = textName;
}
materials[SMART_DEFAULT_MATERIAL_NAME] = preDefinedMaterial;
}
if (!textName.isEmpty()) {
preDefinedMaterial.diffuseTextureFilename = textName;
}
materials[SMART_DEFAULT_MATERIAL_NAME] = preDefinedMaterial;
for (int i = 0, meshPartCount = 0; i < mesh.parts.count(); i++, meshPartCount++) {
FBXMeshPart& meshPart = mesh.parts[i];
FaceGroup faceGroup = faceGroups[meshPartCount];
OBJFace leadFace = faceGroup[0]; // All the faces in the same group will have the same name and material.
QString groupMaterialName = leadFace.materialName;
if (groupMaterialName.isEmpty() && (leadFace.textureUVIndices.count() > 0)) {
qCDebug(modelformat) << "OBJ Reader WARNING: " << url << " needs a texture that isn't specified. Using default mechanism.";
qCDebug(modelformat) << "OBJ Reader WARNING: " << url
<< " needs a texture that isn't specified. Using default mechanism.";
groupMaterialName = SMART_DEFAULT_MATERIAL_NAME;
} else if (!groupMaterialName.isEmpty() && !materials.contains(groupMaterialName)) {
qCDebug(modelformat) << "OBJ Reader WARNING: " << url << " specifies a material " << groupMaterialName << " that is not defined. Using default mechanism.";
qCDebug(modelformat) << "OBJ Reader WARNING: " << url
<< " specifies a material " << groupMaterialName
<< " that is not defined. Using default mechanism.";
groupMaterialName = SMART_DEFAULT_MATERIAL_NAME;
}
if (!groupMaterialName.isEmpty()) {

View file

@ -69,12 +69,13 @@ public:
QVector<FaceGroup> faceGroups;
QString currentMaterialName;
QHash<QString, OBJMaterial> materials;
QUrl* url;
QNetworkReply* request(QUrl& url, bool isTest);
FBXGeometry readOBJ(const QByteArray& model, const QVariantHash& mapping);
FBXGeometry readOBJ(QIODevice* device, const QVariantHash& mapping, QUrl* url);
private:
QUrl* _url = nullptr;
QHash<QByteArray, bool> librariesSeen;
bool parseOBJGroup(OBJTokenizer& tokenizer, const QVariantHash& mapping, FBXGeometry& geometry, float& scaleGuess);
void parseMaterialLibrary(QIODevice* device);
@ -83,4 +84,4 @@ private:
// What are these utilities doing here? One is used by fbx loading code in VHACD Utils, and the other a general debugging utility.
void setMeshPartDefaults(FBXMeshPart& meshPart, QString materialID);
void fbxDebugDump(const FBXGeometry& fbxgeo);
void fbxDebugDump(const FBXGeometry& fbxgeo);

View file

@ -21,23 +21,26 @@ elseif (WIN32)
if (USE_NSIGHT)
# try to find the Nsight package and add it to the build if we find it
# note that this will also enable NSIGHT profilers in all the projects linking gpu
find_package(NSIGHT)
if (NSIGHT_FOUND)
include_directories(${NSIGHT_INCLUDE_DIRS})
add_definitions(-DNSIGHT_FOUND)
target_include_directories(${TARGET_NAME} PUBLIC ${NSIGHT_INCLUDE_DIRS})
target_compile_definitions(${TARGET_NAME} PUBLIC NSIGHT_FOUND)
target_link_libraries(${TARGET_NAME} "${NSIGHT_LIBRARIES}")
endif ()
endif()
elseif (ANDROID)
target_link_libraries(${TARGET_NAME} "-lGLESv3" "-lEGL")
else ()
find_package(GLEW REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${GLEW_INCLUDE_DIRS})
find_package(OpenGL REQUIRED)
if (${OPENGL_INCLUDE_DIR})
include_directories(SYSTEM "${OPENGL_INCLUDE_DIR}")
endif ()
target_link_libraries(${TARGET_NAME} "${OPENGL_LIBRARY}")
target_include_directories(${TARGET_NAME} PUBLIC ${OPENGL_INCLUDE_DIR})
target_link_libraries(${TARGET_NAME} "${GLEW_LIBRARIES}" "${OPENGL_LIBRARY}")
endif (APPLE)

View file

@ -8,13 +8,9 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <string.h>
#include "Batch.h"
#include "GPUConfig.h"
#include <QDebug>
#include <GLMHelpers.h>
#if defined(NSIGHT_FOUND)
#include "nvToolsExt.h"
@ -106,36 +102,6 @@ void Batch::drawIndexedInstanced(uint32 nbInstances, Primitive primitiveType, ui
_params.push_back(nbInstances);
}
void Batch::clearFramebuffer(Framebuffer::Masks targets, const Vec4& color, float depth, int stencil, bool enableScissor) {
ADD_COMMAND(clearFramebuffer);
_params.push_back(enableScissor);
_params.push_back(stencil);
_params.push_back(depth);
_params.push_back(color.w);
_params.push_back(color.z);
_params.push_back(color.y);
_params.push_back(color.x);
_params.push_back(targets);
}
void Batch::clearColorFramebuffer(Framebuffer::Masks targets, const Vec4& color, bool enableScissor) {
clearFramebuffer(targets & Framebuffer::BUFFER_COLORS, color, 1.0f, 0, enableScissor);
}
void Batch::clearDepthFramebuffer(float depth, bool enableScissor) {
clearFramebuffer(Framebuffer::BUFFER_DEPTH, Vec4(0.0f), depth, 0, enableScissor);
}
void Batch::clearStencilFramebuffer(int stencil, bool enableScissor) {
clearFramebuffer(Framebuffer::BUFFER_STENCIL, Vec4(0.0f), 1.0f, stencil, enableScissor);
}
void Batch::clearDepthStencilFramebuffer(float depth, int stencil, bool enableScissor) {
clearFramebuffer(Framebuffer::BUFFER_DEPTHSTENCIL, Vec4(0.0f), depth, stencil, enableScissor);
}
void Batch::setInputFormat(const Stream::FormatPointer& format) {
ADD_COMMAND(setInputFormat);
@ -255,6 +221,35 @@ void Batch::setFramebuffer(const FramebufferPointer& framebuffer) {
}
void Batch::clearFramebuffer(Framebuffer::Masks targets, const Vec4& color, float depth, int stencil, bool enableScissor) {
ADD_COMMAND(clearFramebuffer);
_params.push_back(enableScissor);
_params.push_back(stencil);
_params.push_back(depth);
_params.push_back(color.w);
_params.push_back(color.z);
_params.push_back(color.y);
_params.push_back(color.x);
_params.push_back(targets);
}
void Batch::clearColorFramebuffer(Framebuffer::Masks targets, const Vec4& color, bool enableScissor) {
clearFramebuffer(targets & Framebuffer::BUFFER_COLORS, color, 1.0f, 0, enableScissor);
}
void Batch::clearDepthFramebuffer(float depth, bool enableScissor) {
clearFramebuffer(Framebuffer::BUFFER_DEPTH, Vec4(0.0f), depth, 0, enableScissor);
}
void Batch::clearStencilFramebuffer(int stencil, bool enableScissor) {
clearFramebuffer(Framebuffer::BUFFER_STENCIL, Vec4(0.0f), 1.0f, stencil, enableScissor);
}
void Batch::clearDepthStencilFramebuffer(float depth, int stencil, bool enableScissor) {
clearFramebuffer(Framebuffer::BUFFER_DEPTHSTENCIL, Vec4(0.0f), depth, stencil, enableScissor);
}
void Batch::blit(const FramebufferPointer& src, const Vec4i& srcViewport,
const FramebufferPointer& dst, const Vec4i& dstViewport) {
ADD_COMMAND(blit);
@ -289,15 +284,7 @@ void Batch::getQuery(const QueryPointer& query) {
_params.push_back(_queries.cache(query));
}
void push_back(Batch::Params& params, const vec3& v) {
params.push_back(v.x);
params.push_back(v.y);
params.push_back(v.z);
void Batch::resetStages() {
ADD_COMMAND(resetStages);
}
void push_back(Batch::Params& params, const vec4& v) {
params.push_back(v.x);
params.push_back(v.y);
params.push_back(v.z);
params.push_back(v.a);
}

View file

@ -54,15 +54,6 @@ public:
void drawInstanced(uint32 nbInstances, Primitive primitiveType, uint32 nbVertices, uint32 startVertex = 0, uint32 startInstance = 0);
void drawIndexedInstanced(uint32 nbInstances, Primitive primitiveType, uint32 nbIndices, uint32 startIndex = 0, uint32 startInstance = 0);
// Clear framebuffer layers
// Targets can be any of the render buffers contained in the Framebuffer
// Optionally the scissor test can be enabled locally for this command and to restrict the clearing command to the pixels contained in the scissor rectangle
void clearFramebuffer(Framebuffer::Masks targets, const Vec4& color, float depth, int stencil, bool enableScissor = false);
void clearColorFramebuffer(Framebuffer::Masks targets, const Vec4& color, bool enableScissor = false); // not a command, just a shortcut for clearFramebuffer, mask out targets to make sure it touches only color targets
void clearDepthFramebuffer(float depth, bool enableScissor = false); // not a command, just a shortcut for clearFramebuffer, it touches only depth target
void clearStencilFramebuffer(int stencil, bool enableScissor = false); // not a command, just a shortcut for clearFramebuffer, it touches only stencil target
void clearDepthStencilFramebuffer(float depth, int stencil, bool enableScissor = false); // not a command, just a shortcut for clearFramebuffer, it touches depth and stencil target
// Input Stage
// InputFormat
// InputBuffers
@ -103,44 +94,36 @@ public:
void setResourceTexture(uint32 slot, const TexturePointer& view);
void setResourceTexture(uint32 slot, const TextureView& view); // not a command, just a shortcut from a TextureView
// Framebuffer Stage
// Ouput Stage
void setFramebuffer(const FramebufferPointer& framebuffer);
void blit(const FramebufferPointer& src, const Vec4i& srcViewport,
const FramebufferPointer& dst, const Vec4i& dstViewport);
// Clear framebuffer layers
// Targets can be any of the render buffers contained in the currnetly bound Framebuffer
// Optionally the scissor test can be enabled locally for this command and to restrict the clearing command to the pixels contained in the scissor rectangle
void clearFramebuffer(Framebuffer::Masks targets, const Vec4& color, float depth, int stencil, bool enableScissor = false);
void clearColorFramebuffer(Framebuffer::Masks targets, const Vec4& color, bool enableScissor = false); // not a command, just a shortcut for clearFramebuffer, mask out targets to make sure it touches only color targets
void clearDepthFramebuffer(float depth, bool enableScissor = false); // not a command, just a shortcut for clearFramebuffer, it touches only depth target
void clearStencilFramebuffer(int stencil, bool enableScissor = false); // not a command, just a shortcut for clearFramebuffer, it touches only stencil target
void clearDepthStencilFramebuffer(float depth, int stencil, bool enableScissor = false); // not a command, just a shortcut for clearFramebuffer, it touches depth and stencil target
void blit(const FramebufferPointer& src, const Vec4i& srcViewport, const FramebufferPointer& dst, const Vec4i& dstViewport);
// Query Section
void beginQuery(const QueryPointer& query);
void endQuery(const QueryPointer& query);
void getQuery(const QueryPointer& query);
// Reset the stage caches and states
void resetStages();
// TODO: As long as we have gl calls explicitely issued from interface
// code, we need to be able to record and batch these calls. THe long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
// For now, instead of calling the raw gl Call, use the equivalent call on the batch so the call is beeing recorded
// THe implementation of these functions is in GLBackend.cpp
void _glEnable(unsigned int cap);
void _glDisable(unsigned int cap);
void _glActiveBindTexture(unsigned int unit, unsigned int target, unsigned int texture);
void _glEnableClientState(unsigned int array);
void _glDisableClientState(unsigned int array);
void _glCullFace(unsigned int mode);
void _glAlphaFunc(unsigned int func, float ref);
void _glDepthFunc(unsigned int func);
void _glDepthMask(unsigned char flag);
void _glDepthRange(float zNear, float zFar);
void _glBindBuffer(unsigned int target, unsigned int buffer);
void _glBindTexture(unsigned int target, unsigned int texture);
void _glActiveTexture(unsigned int texture);
void _glTexParameteri(unsigned int target, unsigned int pname, int param);
void _glDrawBuffers(int n, const unsigned int* bufs);
void _glUseProgram(unsigned int program);
void _glUniform1i(int location, int v0);
void _glUniform1f(int location, float v0);
void _glUniform2f(int location, float v0, float v1);
@ -150,9 +133,6 @@ public:
void _glUniform4iv(int location, int count, const int* value);
void _glUniformMatrix4fv(int location, int count, unsigned char transpose, const float* value);
void _glEnableVertexAttribArray(int location);
void _glDisableVertexAttribArray(int location);
void _glColor4f(float red, float green, float blue, float alpha);
void _glLineWidth(float width);
@ -162,8 +142,6 @@ public:
COMMAND_drawInstanced,
COMMAND_drawIndexedInstanced,
COMMAND_clearFramebuffer,
COMMAND_setInputFormat,
COMMAND_setInputBuffer,
COMMAND_setIndexBuffer,
@ -181,37 +159,20 @@ public:
COMMAND_setResourceTexture,
COMMAND_setFramebuffer,
COMMAND_clearFramebuffer,
COMMAND_blit,
COMMAND_beginQuery,
COMMAND_endQuery,
COMMAND_getQuery,
COMMAND_resetStages,
// TODO: As long as we have gl calls explicitely issued from interface
// code, we need to be able to record and batch these calls. THe long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
COMMAND_glEnable,
COMMAND_glDisable,
COMMAND_glActiveBindTexture,
COMMAND_glEnableClientState,
COMMAND_glDisableClientState,
COMMAND_glCullFace,
COMMAND_glAlphaFunc,
COMMAND_glDepthFunc,
COMMAND_glDepthMask,
COMMAND_glDepthRange,
COMMAND_glBindBuffer,
COMMAND_glBindTexture,
COMMAND_glActiveTexture,
COMMAND_glTexParameteri,
COMMAND_glDrawBuffers,
COMMAND_glUseProgram,
COMMAND_glUniform1i,
COMMAND_glUniform1f,
COMMAND_glUniform2f,
@ -221,9 +182,6 @@ public:
COMMAND_glUniform4iv,
COMMAND_glUniformMatrix4fv,
COMMAND_glEnableVertexAttribArray,
COMMAND_glDisableVertexAttribArray,
COMMAND_glColor4f,
COMMAND_glLineWidth,

View file

@ -21,10 +21,9 @@
<@def VERSION_HEADER #version 120
#extension GL_EXT_gpu_shader4 : enable@>
<@else@>
<@def GPU_FEATURE_PROFILE GPU_LEGACY@>
<@def GPU_TRANSFORM_PROFILE GPU_LEGACY@>
<@def VERSION_HEADER #version 120
#extension GL_EXT_gpu_shader4 : enable@>
<@def GPU_FEATURE_PROFILE GPU_CORE@>
<@def GPU_TRANSFORM_PROFILE GPU_CORE@>
<@def VERSION_HEADER #version 430 compatibility@>
<@endif@>
<@endif@>

View file

@ -10,13 +10,16 @@
//
#include "Context.h"
// this include should disappear! as soon as the gpu::Context is in place
#include "GLBackend.h"
using namespace gpu;
Context::Context(Backend* backend) :
_backend(backend) {
Context::CreateBackend Context::_createBackendCallback = nullptr;
Context::MakeProgram Context::_makeProgramCallback = nullptr;
std::once_flag Context::_initialized;
Context::Context() {
if (_createBackendCallback) {
_backend.reset(_createBackendCallback());
}
}
Context::Context(const Context& context) {
@ -26,8 +29,8 @@ Context::~Context() {
}
bool Context::makeProgram(Shader& shader, const Shader::BindingSet& bindings) {
if (shader.isProgram()) {
return GLBackend::makeProgram(shader, bindings);
if (shader.isProgram() && _makeProgramCallback) {
return _makeProgramCallback(shader, bindings);
}
return false;
}
@ -45,3 +48,4 @@ void Context::syncCache() {
void Context::downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage) {
_backend->downloadFramebuffer(srcFramebuffer, region, destImage);
}

View file

@ -12,6 +12,7 @@
#define hifi_gpu_Context_h
#include <assert.h>
#include <mutex>
#include "Batch.h"
@ -26,13 +27,12 @@ namespace gpu {
class Backend {
public:
virtual~ Backend() {};
virtual void render(Batch& batch) = 0;
virtual void syncCache() = 0;
virtual void downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage) = 0;
class TransformObject {
public:
Mat4 _model;
@ -118,7 +118,21 @@ protected:
class Context {
public:
Context(Backend* backend);
typedef Backend* (*CreateBackend)();
typedef bool (*MakeProgram)(Shader& shader, const Shader::BindingSet& bindings);
// This one call must happen before any context is created or used (Shader::MakeProgram) in order to setup the Backend and any singleton data needed
template <class T>
static void init() {
std::call_once(_initialized, [] {
_createBackendCallback = T::createBackend;
_makeProgramCallback = T::makeProgram;
T::init();
});
}
Context();
~Context();
void render(Batch& batch);
@ -132,13 +146,17 @@ public:
protected:
Context(const Context& context);
std::unique_ptr<Backend> _backend;
// This function can only be called by "static Shader::makeProgram()"
// makeProgramShader(...) make a program shader ready to be used in a Batch.
// It compiles the sub shaders, link them and defines the Slots and their bindings.
// If the shader passed is not a program, nothing happens.
static bool makeProgram(Shader& shader, const Shader::BindingSet& bindings = Shader::BindingSet());
static bool makeProgram(Shader& shader, const Shader::BindingSet& bindings);
std::unique_ptr<Backend> _backend;
static CreateBackend _createBackendCallback;
static MakeProgram _makeProgramCallback;
static std::once_flag _initialized;
friend class Shader;
};

View file

@ -8,9 +8,9 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <mutex>
#include "GPULogging.h"
#include "GLBackendShared.h"
#include <mutex>
#include <glm/gtc/type_ptr.hpp>
using namespace gpu;
@ -21,7 +21,6 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::GLBackend::do_drawIndexed),
(&::gpu::GLBackend::do_drawInstanced),
(&::gpu::GLBackend::do_drawIndexedInstanced),
(&::gpu::GLBackend::do_clearFramebuffer),
(&::gpu::GLBackend::do_setInputFormat),
(&::gpu::GLBackend::do_setInputBuffer),
@ -40,34 +39,17 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::GLBackend::do_setResourceTexture),
(&::gpu::GLBackend::do_setFramebuffer),
(&::gpu::GLBackend::do_clearFramebuffer),
(&::gpu::GLBackend::do_blit),
(&::gpu::GLBackend::do_beginQuery),
(&::gpu::GLBackend::do_endQuery),
(&::gpu::GLBackend::do_getQuery),
(&::gpu::GLBackend::do_glEnable),
(&::gpu::GLBackend::do_glDisable),
(&::gpu::GLBackend::do_resetStages),
(&::gpu::GLBackend::do_glEnableClientState),
(&::gpu::GLBackend::do_glDisableClientState),
(&::gpu::GLBackend::do_glActiveBindTexture),
(&::gpu::GLBackend::do_glCullFace),
(&::gpu::GLBackend::do_glAlphaFunc),
(&::gpu::GLBackend::do_glDepthFunc),
(&::gpu::GLBackend::do_glDepthMask),
(&::gpu::GLBackend::do_glDepthRange),
(&::gpu::GLBackend::do_glBindBuffer),
(&::gpu::GLBackend::do_glBindTexture),
(&::gpu::GLBackend::do_glActiveTexture),
(&::gpu::GLBackend::do_glTexParameteri),
(&::gpu::GLBackend::do_glDrawBuffers),
(&::gpu::GLBackend::do_glUseProgram),
(&::gpu::GLBackend::do_glUniform1i),
(&::gpu::GLBackend::do_glUniform1f),
(&::gpu::GLBackend::do_glUniform2f),
@ -77,19 +59,11 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::GLBackend::do_glUniform4iv),
(&::gpu::GLBackend::do_glUniformMatrix4fv),
(&::gpu::GLBackend::do_glEnableVertexAttribArray),
(&::gpu::GLBackend::do_glDisableVertexAttribArray),
(&::gpu::GLBackend::do_glColor4f),
(&::gpu::GLBackend::do_glLineWidth),
};
GLBackend::GLBackend() :
_input(),
_transform(),
_pipeline(),
_output()
{
void GLBackend::init() {
static std::once_flag once;
std::call_once(once, [] {
qCDebug(gpulogging) << "GL Version: " << QString((const char*) glGetString(GL_VERSION));
@ -115,6 +89,13 @@ GLBackend::GLBackend() :
#endif
#if defined(Q_OS_LINUX)
GLenum err = glewInit();
if (GLEW_OK != err) {
/* Problem: glewInit failed, something is seriously wrong. */
qCDebug(gpulogging, "Error: %s\n", glewGetErrorString(err));
}
qCDebug(gpulogging, "Status: Using GLEW %s\n", glewGetString(GLEW_VERSION));
// TODO: Write the correct code for Linux...
/* if (wglewGetExtension("WGL_EXT_swap_control")) {
int swapInterval = wglGetSwapIntervalEXT();
@ -122,12 +103,25 @@ GLBackend::GLBackend() :
}*/
#endif
});
}
Backend* GLBackend::createBackend() {
return new GLBackend();
}
GLBackend::GLBackend() :
_input(),
_transform(),
_pipeline(),
_output()
{
initInput();
initTransform();
}
GLBackend::~GLBackend() {
resetStages();
killInput();
killTransform();
}
@ -246,67 +240,18 @@ void GLBackend::do_drawIndexedInstanced(Batch& batch, uint32 paramOffset) {
(void) CHECK_GL_ERROR();
}
void GLBackend::do_clearFramebuffer(Batch& batch, uint32 paramOffset) {
void GLBackend::do_resetStages(Batch& batch, uint32 paramOffset) {
resetStages();
}
uint32 masks = batch._params[paramOffset + 7]._uint;
Vec4 color;
color.x = batch._params[paramOffset + 6]._float;
color.y = batch._params[paramOffset + 5]._float;
color.z = batch._params[paramOffset + 4]._float;
color.w = batch._params[paramOffset + 3]._float;
float depth = batch._params[paramOffset + 2]._float;
int stencil = batch._params[paramOffset + 1]._int;
int useScissor = batch._params[paramOffset + 0]._int;
GLuint glmask = 0;
if (masks & Framebuffer::BUFFER_STENCIL) {
glClearStencil(stencil);
glmask |= GL_STENCIL_BUFFER_BIT;
}
if (masks & Framebuffer::BUFFER_DEPTH) {
glClearDepth(depth);
glmask |= GL_DEPTH_BUFFER_BIT;
}
std::vector<GLenum> drawBuffers;
if (masks & Framebuffer::BUFFER_COLORS) {
for (unsigned int i = 0; i < Framebuffer::MAX_NUM_RENDER_BUFFERS; i++) {
if (masks & (1 << i)) {
drawBuffers.push_back(GL_COLOR_ATTACHMENT0 + i);
}
}
if (!drawBuffers.empty()) {
glDrawBuffers(drawBuffers.size(), drawBuffers.data());
glClearColor(color.x, color.y, color.z, color.w);
glmask |= GL_COLOR_BUFFER_BIT;
}
// Force the color mask cache to WRITE_ALL if not the case
do_setStateColorWriteMask(State::ColorMask::WRITE_ALL);
}
// Apply scissor if needed and if not already on
bool doEnableScissor = (useScissor && (!_pipeline._stateCache.scissorEnable));
if (doEnableScissor) {
glEnable(GL_SCISSOR_TEST);
}
glClear(glmask);
// Restore scissor if needed
if (doEnableScissor) {
glDisable(GL_SCISSOR_TEST);
}
// Restore the color draw buffers only if a frmaebuffer is bound
if (_output._framebuffer && !drawBuffers.empty()) {
auto glFramebuffer = syncGPUObject(*_output._framebuffer);
if (glFramebuffer) {
glDrawBuffers(glFramebuffer->_colorBuffers.size(), glFramebuffer->_colorBuffers.data());
}
}
void GLBackend::resetStages() {
resetInputStage();
resetPipelineStage();
resetTransformStage();
resetUniformStage();
resetResourceStage();
resetOutputStage();
resetQueryStage();
(void) CHECK_GL_ERROR();
}
@ -320,211 +265,24 @@ void GLBackend::do_clearFramebuffer(Batch& batch, uint32 paramOffset) {
//#define DO_IT_NOW(call, offset) runLastCommand();
#define DO_IT_NOW(call, offset)
void Batch::_glEnable(GLenum cap) {
ADD_COMMAND_GL(glEnable);
_params.push_back(cap);
DO_IT_NOW(_glEnable, 1);
}
void GLBackend::do_glEnable(Batch& batch, uint32 paramOffset) {
glEnable(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDisable(GLenum cap) {
ADD_COMMAND_GL(glDisable);
_params.push_back(cap);
DO_IT_NOW(_glDisable, 1);
}
void GLBackend::do_glDisable(Batch& batch, uint32 paramOffset) {
glDisable(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glEnableClientState(GLenum array) {
ADD_COMMAND_GL(glEnableClientState);
_params.push_back(array);
DO_IT_NOW(_glEnableClientState, 1);
}
void GLBackend::do_glEnableClientState(Batch& batch, uint32 paramOffset) {
glEnableClientState(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDisableClientState(GLenum array) {
ADD_COMMAND_GL(glDisableClientState);
_params.push_back(array);
DO_IT_NOW(_glDisableClientState, 1);
}
void GLBackend::do_glDisableClientState(Batch& batch, uint32 paramOffset) {
glDisableClientState(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glCullFace(GLenum mode) {
ADD_COMMAND_GL(glCullFace);
_params.push_back(mode);
DO_IT_NOW(_glCullFace, 1);
}
void GLBackend::do_glCullFace(Batch& batch, uint32 paramOffset) {
glCullFace(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glAlphaFunc(GLenum func, GLclampf ref) {
ADD_COMMAND_GL(glAlphaFunc);
_params.push_back(ref);
_params.push_back(func);
DO_IT_NOW(_glAlphaFunc, 2);
}
void GLBackend::do_glAlphaFunc(Batch& batch, uint32 paramOffset) {
glAlphaFunc(
batch._params[paramOffset + 1]._uint,
batch._params[paramOffset + 0]._float);
(void) CHECK_GL_ERROR();
}
void Batch::_glDepthFunc(GLenum func) {
ADD_COMMAND_GL(glDepthFunc);
_params.push_back(func);
DO_IT_NOW(_glDepthFunc, 1);
}
void GLBackend::do_glDepthFunc(Batch& batch, uint32 paramOffset) {
glDepthFunc(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDepthMask(GLboolean flag) {
ADD_COMMAND_GL(glDepthMask);
_params.push_back(flag);
DO_IT_NOW(_glDepthMask, 1);
}
void GLBackend::do_glDepthMask(Batch& batch, uint32 paramOffset) {
glDepthMask(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDepthRange(GLfloat zNear, GLfloat zFar) {
ADD_COMMAND_GL(glDepthRange);
_params.push_back(zFar);
_params.push_back(zNear);
DO_IT_NOW(_glDepthRange, 2);
}
void GLBackend::do_glDepthRange(Batch& batch, uint32 paramOffset) {
glDepthRange(
batch._params[paramOffset + 1]._float,
batch._params[paramOffset + 0]._float);
(void) CHECK_GL_ERROR();
}
void Batch::_glBindBuffer(GLenum target, GLuint buffer) {
ADD_COMMAND_GL(glBindBuffer);
_params.push_back(buffer);
_params.push_back(target);
DO_IT_NOW(_glBindBuffer, 2);
}
void GLBackend::do_glBindBuffer(Batch& batch, uint32 paramOffset) {
glBindBuffer(
batch._params[paramOffset + 1]._uint,
batch._params[paramOffset + 0]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glBindTexture(GLenum target, GLuint texture) {
ADD_COMMAND_GL(glBindTexture);
void Batch::_glActiveBindTexture(GLenum unit, GLenum target, GLuint texture) {
ADD_COMMAND_GL(glActiveBindTexture);
_params.push_back(texture);
_params.push_back(target);
_params.push_back(unit);
DO_IT_NOW(_glBindTexture, 2);
DO_IT_NOW(_glActiveBindTexture, 3);
}
void GLBackend::do_glBindTexture(Batch& batch, uint32 paramOffset) {
void GLBackend::do_glActiveBindTexture(Batch& batch, uint32 paramOffset) {
glActiveTexture(batch._params[paramOffset + 2]._uint);
glBindTexture(
batch._params[paramOffset + 1]._uint,
batch._params[paramOffset + 0]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glActiveTexture(GLenum texture) {
ADD_COMMAND_GL(glActiveTexture);
_params.push_back(texture);
DO_IT_NOW(_glActiveTexture, 1);
}
void GLBackend::do_glActiveTexture(Batch& batch, uint32 paramOffset) {
glActiveTexture(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glTexParameteri(GLenum target, GLenum pname, GLint param) {
ADD_COMMAND_GL(glTexParameteri);
_params.push_back(param);
_params.push_back(pname);
_params.push_back(target);
DO_IT_NOW(glTexParameteri, 3);
}
void GLBackend::do_glTexParameteri(Batch& batch, uint32 paramOffset) {
glTexParameteri(batch._params[paramOffset + 2]._uint,
batch._params[paramOffset + 1]._uint,
batch._params[paramOffset + 0]._int);
(void) CHECK_GL_ERROR();
}
void Batch::_glDrawBuffers(GLsizei n, const GLenum* bufs) {
ADD_COMMAND_GL(glDrawBuffers);
_params.push_back(cacheData(n * sizeof(GLenum), bufs));
_params.push_back(n);
DO_IT_NOW(_glDrawBuffers, 2);
}
void GLBackend::do_glDrawBuffers(Batch& batch, uint32 paramOffset) {
glDrawBuffers(
batch._params[paramOffset + 1]._uint,
(const GLenum*)batch.editData(batch._params[paramOffset + 0]._uint));
(void) CHECK_GL_ERROR();
}
void Batch::_glUseProgram(GLuint program) {
ADD_COMMAND_GL(glUseProgram);
_params.push_back(program);
DO_IT_NOW(_glUseProgram, 1);
}
void GLBackend::do_glUseProgram(Batch& batch, uint32 paramOffset) {
_pipeline._program = batch._params[paramOffset]._uint;
// for this call we still want to execute the glUseProgram in the order of the glCOmmand to avoid any issue
_pipeline._invalidProgram = false;
glUseProgram(_pipeline._program);
(void) CHECK_GL_ERROR();
}
void Batch::_glUniform1i(GLint location, GLint v0) {
if (location < 0) {
return;
@ -725,30 +483,6 @@ void GLBackend::do_glUniformMatrix4fv(Batch& batch, uint32 paramOffset) {
(void) CHECK_GL_ERROR();
}
void Batch::_glEnableVertexAttribArray(GLint location) {
ADD_COMMAND_GL(glEnableVertexAttribArray);
_params.push_back(location);
DO_IT_NOW(_glEnableVertexAttribArray, 1);
}
void GLBackend::do_glEnableVertexAttribArray(Batch& batch, uint32 paramOffset) {
glEnableVertexAttribArray(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDisableVertexAttribArray(GLint location) {
ADD_COMMAND_GL(glDisableVertexAttribArray);
_params.push_back(location);
DO_IT_NOW(_glDisableVertexAttribArray, 1);
}
void GLBackend::do_glDisableVertexAttribArray(Batch& batch, uint32 paramOffset) {
glDisableVertexAttribArray(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glColor4f(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha) {
ADD_COMMAND_GL(glColor4f);

View file

@ -18,16 +18,21 @@
#include "GPUConfig.h"
#include "Context.h"
#include "Batch.h"
namespace gpu {
class GLBackend : public Backend {
public:
// Context Backend static interface required
friend class Context;
static void init();
static Backend* createBackend();
static bool makeProgram(Shader& shader, const Shader::BindingSet& bindings);
explicit GLBackend(bool syncCache);
GLBackend();
public:
virtual ~GLBackend();
virtual void render(Batch& batch);
@ -49,7 +54,6 @@ public:
static void checkGLStackStable(std::function<void()> f);
static bool makeProgram(Shader& shader, const Shader::BindingSet& bindings = Shader::BindingSet());
class GLBuffer : public GPUObject {
@ -91,9 +95,9 @@ public:
#if (GPU_TRANSFORM_PROFILE == GPU_CORE)
#else
GLuint _transformObject_model = -1;
GLuint _transformCamera_viewInverse = -1;
GLuint _transformCamera_viewport = -1;
GLint _transformObject_model = -1;
GLint _transformCamera_viewInverse = -1;
GLint _transformCamera_viewport = -1;
#endif
GLShader();
@ -195,8 +199,17 @@ public:
static const int MAX_NUM_ATTRIBUTES = Stream::NUM_INPUT_SLOTS;
static const int MAX_NUM_INPUT_BUFFERS = 16;
uint32 getNumInputBuffers() const { return _input._buffersState.size(); }
uint32 getNumInputBuffers() const { return _input._invalidBuffers.size(); }
// this is the maximum per shader stage on the low end apple
// TODO make it platform dependant at init time
static const int MAX_NUM_UNIFORM_BUFFERS = 12;
uint32 getMaxNumUniformBuffers() const { return MAX_NUM_UNIFORM_BUFFERS; }
// this is the maximum per shader stage on the low end apple
// TODO make it platform dependant at init time
static const int MAX_NUM_RESOURCE_TEXTURES = 16;
uint32 getMaxNumResourceTextures() const { return MAX_NUM_RESOURCE_TEXTURES; }
// The State setters called by the GLState::Commands when a new state is assigned
void do_setStateFillMode(int32 mode);
@ -241,8 +254,6 @@ protected:
void do_drawInstanced(Batch& batch, uint32 paramOffset);
void do_drawIndexedInstanced(Batch& batch, uint32 paramOffset);
void do_clearFramebuffer(Batch& batch, uint32 paramOffset);
// Input Stage
void do_setInputFormat(Batch& batch, uint32 paramOffset);
void do_setInputBuffer(Batch& batch, uint32 paramOffset);
@ -252,12 +263,16 @@ protected:
void killInput();
void syncInputStateCache();
void updateInput();
void resetInputStage();
struct InputStageState {
bool _invalidFormat = true;
Stream::FormatPointer _format;
typedef std::bitset<MAX_NUM_ATTRIBUTES> ActivationCache;
ActivationCache _attributeActivation;
typedef std::bitset<MAX_NUM_INPUT_BUFFERS> BuffersState;
BuffersState _buffersState;
BuffersState _invalidBuffers;
Buffers _buffers;
Offsets _bufferOffsets;
@ -268,23 +283,20 @@ protected:
Offset _indexBufferOffset;
Type _indexBufferType;
typedef std::bitset<MAX_NUM_ATTRIBUTES> ActivationCache;
ActivationCache _attributeActivation;
GLuint _defaultVAO;
InputStageState() :
_invalidFormat(true),
_format(0),
_buffersState(0),
_buffers(_buffersState.size(), BufferPointer(0)),
_bufferOffsets(_buffersState.size(), 0),
_bufferStrides(_buffersState.size(), 0),
_bufferVBOs(_buffersState.size(), 0),
_attributeActivation(0),
_invalidBuffers(0),
_buffers(_invalidBuffers.size(), BufferPointer(0)),
_bufferOffsets(_invalidBuffers.size(), 0),
_bufferStrides(_invalidBuffers.size(), 0),
_bufferVBOs(_invalidBuffers.size(), 0),
_indexBuffer(0),
_indexBufferOffset(0),
_indexBufferType(UINT32),
_attributeActivation(0),
_defaultVAO(0)
{}
} _input;
@ -300,6 +312,7 @@ protected:
// Synchronize the state cache of this Backend with the actual real state of the GL Context
void syncTransformStateCache();
void updateTransform();
void resetTransformStage();
struct TransformStageState {
TransformObject _transformObject;
TransformCamera _transformCamera;
@ -332,12 +345,31 @@ protected:
// Uniform Stage
void do_setUniformBuffer(Batch& batch, uint32 paramOffset);
void releaseUniformBuffer(int slot);
void resetUniformStage();
struct UniformStageState {
Buffers _buffers;
UniformStageState():
_buffers(MAX_NUM_UNIFORM_BUFFERS, nullptr)
{}
} _uniform;
// Resource Stage
void do_setResourceTexture(Batch& batch, uint32 paramOffset);
struct UniformStageState {
};
void releaseResourceTexture(int slot);
void resetResourceStage();
struct ResourceStageState {
Textures _textures;
ResourceStageState():
_textures(MAX_NUM_RESOURCE_TEXTURES, nullptr)
{}
} _resource;
// Pipeline Stage
void do_setPipeline(Batch& batch, uint32 paramOffset);
void do_setStateBlendFactor(Batch& batch, uint32 paramOffset);
@ -351,6 +383,7 @@ protected:
void syncPipelineStateCache();
// Grab the actual gl state into it's gpu::State equivalent. THis is used by the above call syncPipleineStateCache()
void getCurrentGLState(State::Data& state);
void resetPipelineStage();
struct PipelineStageState {
@ -385,10 +418,12 @@ protected:
// Output stage
void do_setFramebuffer(Batch& batch, uint32 paramOffset);
void do_clearFramebuffer(Batch& batch, uint32 paramOffset);
void do_blit(Batch& batch, uint32 paramOffset);
// Synchronize the state cache of this Backend with the actual real state of the GL Context
void syncOutputStateCache();
void resetOutputStage();
struct OutputStageState {
@ -403,31 +438,20 @@ protected:
void do_endQuery(Batch& batch, uint32 paramOffset);
void do_getQuery(Batch& batch, uint32 paramOffset);
void resetQueryStage();
struct QueryStageState {
};
// Reset stages
void do_resetStages(Batch& batch, uint32 paramOffset);
void resetStages();
// TODO: As long as we have gl calls explicitely issued from interface
// code, we need to be able to record and batch these calls. THe long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
void do_glEnable(Batch& batch, uint32 paramOffset);
void do_glDisable(Batch& batch, uint32 paramOffset);
void do_glActiveBindTexture(Batch& batch, uint32 paramOffset);
void do_glEnableClientState(Batch& batch, uint32 paramOffset);
void do_glDisableClientState(Batch& batch, uint32 paramOffset);
void do_glCullFace(Batch& batch, uint32 paramOffset);
void do_glAlphaFunc(Batch& batch, uint32 paramOffset);
void do_glDepthFunc(Batch& batch, uint32 paramOffset);
void do_glDepthMask(Batch& batch, uint32 paramOffset);
void do_glDepthRange(Batch& batch, uint32 paramOffset);
void do_glBindBuffer(Batch& batch, uint32 paramOffset);
void do_glBindTexture(Batch& batch, uint32 paramOffset);
void do_glActiveTexture(Batch& batch, uint32 paramOffset);
void do_glTexParameteri(Batch& batch, uint32 paramOffset);
void do_glDrawBuffers(Batch& batch, uint32 paramOffset);
void do_glUseProgram(Batch& batch, uint32 paramOffset);
void do_glUniform1i(Batch& batch, uint32 paramOffset);
void do_glUniform1f(Batch& batch, uint32 paramOffset);
void do_glUniform2f(Batch& batch, uint32 paramOffset);
@ -437,9 +461,6 @@ protected:
void do_glUniform4iv(Batch& batch, uint32 paramOffset);
void do_glUniformMatrix4fv(Batch& batch, uint32 paramOffset);
void do_glEnableVertexAttribArray(Batch& batch, uint32 paramOffset);
void do_glDisableVertexAttribArray(Batch& batch, uint32 paramOffset);
void do_glColor4f(Batch& batch, uint32 paramOffset);
void do_glLineWidth(Batch& batch, uint32 paramOffset);

View file

@ -52,7 +52,7 @@ void GLBackend::do_setInputBuffer(Batch& batch, uint32 paramOffset) {
}
if (isModified) {
_input._buffersState.set(channel);
_input._invalidBuffers.set(channel);
}
}
}
@ -154,7 +154,7 @@ void GLBackend::updateInput() {
_stats._ISNumFormatChanges++;
}
if (_input._buffersState.any()) {
if (_input._invalidBuffers.any()) {
int numBuffers = _input._buffers.size();
auto buffer = _input._buffers.data();
auto vbo = _input._bufferVBOs.data();
@ -162,7 +162,7 @@ void GLBackend::updateInput() {
auto stride = _input._bufferStrides.data();
for (int bufferNum = 0; bufferNum < numBuffers; bufferNum++) {
if (_input._buffersState.test(bufferNum)) {
if (_input._invalidBuffers.test(bufferNum)) {
glBindVertexBuffer(bufferNum, (*vbo), (*offset), (*stride));
}
buffer++;
@ -170,11 +170,11 @@ void GLBackend::updateInput() {
offset++;
stride++;
}
_input._buffersState.reset();
_input._invalidBuffers.reset();
(void) CHECK_GL_ERROR();
}
#else
if (_input._invalidFormat || _input._buffersState.any()) {
if (_input._invalidFormat || _input._invalidBuffers.any()) {
if (_input._invalidFormat) {
InputStageState::ActivationCache newActivation;
@ -232,7 +232,7 @@ void GLBackend::updateInput() {
if ((channelIt).first < buffers.size()) {
int bufferNum = (channelIt).first;
if (_input._buffersState.test(bufferNum) || _input._invalidFormat) {
if (_input._invalidBuffers.test(bufferNum) || _input._invalidFormat) {
// GLuint vbo = gpu::GLBackend::getBufferID((*buffers[bufferNum]));
GLuint vbo = _input._bufferVBOs[bufferNum];
if (boundVBO != vbo) {
@ -240,7 +240,7 @@ void GLBackend::updateInput() {
(void) CHECK_GL_ERROR();
boundVBO = vbo;
}
_input._buffersState[bufferNum] = false;
_input._invalidBuffers[bufferNum] = false;
for (unsigned int i = 0; i < channel._slots.size(); i++) {
const Stream::Attribute& attrib = attributes.at(channel._slots[i]);
@ -285,6 +285,51 @@ void GLBackend::updateInput() {
#endif
}
void GLBackend::resetInputStage() {
// Reset index buffer
_input._indexBufferType = UINT32;
_input._indexBufferOffset = 0;
_input._indexBuffer.reset();
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
(void) CHECK_GL_ERROR();
#if defined(SUPPORT_VAO)
// TODO
#else
glBindBuffer(GL_ARRAY_BUFFER, 0);
size_t i = 0;
#if defined(SUPPORT_LEGACY_OPENGL)
for (; i < NUM_CLASSIC_ATTRIBS; i++) {
glDisableClientState(attributeSlotToClassicAttribName[i]);
}
glVertexPointer(4, GL_FLOAT, 0, 0);
glNormalPointer(GL_FLOAT, 0, 0);
glColorPointer(4, GL_FLOAT, 0, 0);
glTexCoordPointer(4, GL_FLOAT, 0, 0);
#endif
for (; i < _input._attributeActivation.size(); i++) {
glDisableVertexAttribArray(i);
glVertexAttribPointer(i, 4, GL_FLOAT, GL_FALSE, 0, 0);
}
#endif
// Reset vertex buffer and format
_input._format.reset();
_input._invalidFormat = false;
_input._attributeActivation.reset();
for (int i = 0; i < _input._buffers.size(); i++) {
_input._buffers[i].reset();
_input._bufferOffsets[i] = 0;
_input._bufferStrides[i] = 0;
_input._bufferVBOs[i] = 0;
}
_input._invalidBuffers.reset();
}
void GLBackend::do_setIndexBuffer(Batch& batch, uint32 paramOffset) {
_input._indexBufferType = (Type) batch._params[paramOffset + 2]._uint;

View file

@ -177,10 +177,16 @@ void GLBackend::syncOutputStateCache() {
_output._framebuffer.reset();
}
void GLBackend::resetOutputStage() {
if (_output._framebuffer) {
_output._framebuffer.reset();
_output._drawFBO = 0;
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}
}
void GLBackend::do_setFramebuffer(Batch& batch, uint32 paramOffset) {
auto framebuffer = batch._framebuffers.get(batch._params[paramOffset]._uint);
if (_output._framebuffer != framebuffer) {
auto newFBO = getFramebufferID(framebuffer);
if (_output._drawFBO != newFBO) {
@ -191,6 +197,86 @@ void GLBackend::do_setFramebuffer(Batch& batch, uint32 paramOffset) {
}
}
void GLBackend::do_clearFramebuffer(Batch& batch, uint32 paramOffset) {
uint32 masks = batch._params[paramOffset + 7]._uint;
Vec4 color;
color.x = batch._params[paramOffset + 6]._float;
color.y = batch._params[paramOffset + 5]._float;
color.z = batch._params[paramOffset + 4]._float;
color.w = batch._params[paramOffset + 3]._float;
float depth = batch._params[paramOffset + 2]._float;
int stencil = batch._params[paramOffset + 1]._int;
int useScissor = batch._params[paramOffset + 0]._int;
GLuint glmask = 0;
if (masks & Framebuffer::BUFFER_STENCIL) {
glClearStencil(stencil);
glmask |= GL_STENCIL_BUFFER_BIT;
// TODO: we will probably need to also check the write mask of stencil like we do
// for depth buffer, but as would say a famous Fez owner "We'll cross that bridge when we come to it"
}
bool restoreDepthMask = false;
if (masks & Framebuffer::BUFFER_DEPTH) {
glClearDepth(depth);
glmask |= GL_DEPTH_BUFFER_BIT;
bool cacheDepthMask = _pipeline._stateCache.depthTest.getWriteMask();
if (!cacheDepthMask) {
restoreDepthMask = true;
glDepthMask(GL_TRUE);
}
}
std::vector<GLenum> drawBuffers;
if (masks & Framebuffer::BUFFER_COLORS) {
for (unsigned int i = 0; i < Framebuffer::MAX_NUM_RENDER_BUFFERS; i++) {
if (masks & (1 << i)) {
drawBuffers.push_back(GL_COLOR_ATTACHMENT0 + i);
}
}
if (!drawBuffers.empty()) {
glDrawBuffers(drawBuffers.size(), drawBuffers.data());
glClearColor(color.x, color.y, color.z, color.w);
glmask |= GL_COLOR_BUFFER_BIT;
}
// Force the color mask cache to WRITE_ALL if not the case
do_setStateColorWriteMask(State::ColorMask::WRITE_ALL);
}
// Apply scissor if needed and if not already on
bool doEnableScissor = (useScissor && (!_pipeline._stateCache.scissorEnable));
if (doEnableScissor) {
glEnable(GL_SCISSOR_TEST);
}
// Clear!
glClear(glmask);
// Restore scissor if needed
if (doEnableScissor) {
glDisable(GL_SCISSOR_TEST);
}
// Restore write mask meaning turn back off
if (restoreDepthMask) {
glDepthMask(GL_FALSE);
}
// Restore the color draw buffers only if a frmaebuffer is bound
if (_output._framebuffer && !drawBuffers.empty()) {
auto glFramebuffer = syncGPUObject(*_output._framebuffer);
if (glFramebuffer) {
glDrawBuffers(glFramebuffer->_colorBuffers.size(), glFramebuffer->_colorBuffers.data());
}
}
(void) CHECK_GL_ERROR();
}
void GLBackend::do_blit(Batch& batch, uint32 paramOffset) {
auto srcframebuffer = batch._framebuffers.get(batch._params[paramOffset]._uint);
Vec4i srcvp;
@ -203,19 +289,31 @@ void GLBackend::do_blit(Batch& batch, uint32 paramOffset) {
for (size_t i = 0; i < 4; ++i) {
dstvp[i] = batch._params[paramOffset + 6 + i]._int;
}
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, getFramebufferID(dstframebuffer));
// Assign dest framebuffer if not bound already
auto newDrawFBO = getFramebufferID(dstframebuffer);
if (_output._drawFBO != newDrawFBO) {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, newDrawFBO);
}
// always bind the read fbo
glBindFramebuffer(GL_READ_FRAMEBUFFER, getFramebufferID(srcframebuffer));
// Blit!
glBlitFramebuffer(srcvp.x, srcvp.y, srcvp.z, srcvp.w,
dstvp.x, dstvp.y, dstvp.z, dstvp.w,
GL_COLOR_BUFFER_BIT, GL_LINEAR);
(void) CHECK_GL_ERROR();
if (_output._framebuffer) {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, getFramebufferID(_output._framebuffer));
// Always clean the read fbo to 0
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
// Restore draw fbo if changed
if (_output._drawFBO != newDrawFBO) {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, _output._drawFBO);
}
}
(void) CHECK_GL_ERROR();
}
void GLBackend::downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage) {
auto readFBO = gpu::GLBackend::getFramebufferID(srcFramebuffer);

View file

@ -164,15 +164,74 @@ void GLBackend::updatePipeline() {
#endif
}
void GLBackend::resetPipelineStage() {
// First reset State to default
State::Signature resetSignature(0);
resetPipelineState(resetSignature);
_pipeline._state = nullptr;
_pipeline._invalidState = false;
// Second the shader side
_pipeline._invalidProgram = false;
_pipeline._program = 0;
_pipeline._pipeline.reset();
glUseProgram(0);
}
void GLBackend::releaseUniformBuffer(int slot) {
#if (GPU_FEATURE_PROFILE == GPU_CORE)
auto& buf = _uniform._buffers[slot];
if (buf) {
auto* object = Backend::getGPUObject<GLBackend::GLBuffer>(*buf);
if (object) {
GLuint bo = object->_buffer;
glBindBufferBase(GL_UNIFORM_BUFFER, slot, 0); // RELEASE
(void) CHECK_GL_ERROR();
}
buf.reset();
}
#endif
}
void GLBackend::resetUniformStage() {
for (int i = 0; i < _uniform._buffers.size(); i++) {
releaseUniformBuffer(i);
}
}
void GLBackend::do_setUniformBuffer(Batch& batch, uint32 paramOffset) {
GLuint slot = batch._params[paramOffset + 3]._uint;
BufferPointer uniformBuffer = batch._buffers.get(batch._params[paramOffset + 2]._uint);
GLintptr rangeStart = batch._params[paramOffset + 1]._uint;
GLsizeiptr rangeSize = batch._params[paramOffset + 0]._uint;
#if (GPU_FEATURE_PROFILE == GPU_CORE)
GLuint bo = getBufferID(*uniformBuffer);
glBindBufferRange(GL_UNIFORM_BUFFER, slot, bo, rangeStart, rangeSize);
if (!uniformBuffer) {
releaseUniformBuffer(slot);
return;
}
// check cache before thinking
if (_uniform._buffers[slot] == uniformBuffer) {
return;
}
// Sync BufferObject
auto* object = GLBackend::syncGPUObject(*uniformBuffer);
if (object) {
glBindBufferRange(GL_UNIFORM_BUFFER, slot, object->_buffer, rangeStart, rangeSize);
_uniform._buffers[slot] = uniformBuffer;
(void) CHECK_GL_ERROR();
} else {
releaseResourceTexture(slot);
return;
}
#else
// because we rely on the program uniform mechanism we need to have
// the program bound, thank you MacOSX Legacy profile.
@ -184,19 +243,49 @@ void GLBackend::do_setUniformBuffer(Batch& batch, uint32 paramOffset) {
// NOT working so we ll stick to the uniform float array until we move to core profile
// GLuint bo = getBufferID(*uniformBuffer);
//glUniformBufferEXT(_shader._program, slot, bo);
#endif
(void) CHECK_GL_ERROR();
#endif
}
void GLBackend::releaseResourceTexture(int slot) {
auto& tex = _resource._textures[slot];
if (tex) {
auto* object = Backend::getGPUObject<GLBackend::GLTexture>(*tex);
if (object) {
GLuint to = object->_texture;
GLuint target = object->_target;
glActiveTexture(GL_TEXTURE0 + slot);
glBindTexture(target, 0); // RELEASE
(void) CHECK_GL_ERROR();
}
tex.reset();
}
}
void GLBackend::resetResourceStage() {
for (int i = 0; i < _resource._textures.size(); i++) {
releaseResourceTexture(i);
}
}
void GLBackend::do_setResourceTexture(Batch& batch, uint32 paramOffset) {
GLuint slot = batch._params[paramOffset + 1]._uint;
TexturePointer uniformTexture = batch._textures.get(batch._params[paramOffset + 0]._uint);
TexturePointer resourceTexture = batch._textures.get(batch._params[paramOffset + 0]._uint);
if (!uniformTexture) {
if (!resourceTexture) {
releaseResourceTexture(slot);
return;
}
// check cache before thinking
if (_resource._textures[slot] == resourceTexture) {
return;
}
GLTexture* object = GLBackend::syncGPUObject(*uniformTexture);
// Always make sure the GLObject is in sync
GLTexture* object = GLBackend::syncGPUObject(*resourceTexture);
if (object) {
GLuint to = object->_texture;
GLuint target = object->_target;
@ -205,7 +294,10 @@ void GLBackend::do_setResourceTexture(Batch& batch, uint32 paramOffset) {
(void) CHECK_GL_ERROR();
_resource._textures[slot] = resourceTexture;
} else {
releaseResourceTexture(slot);
return;
}
}

View file

@ -8,7 +8,6 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GPULogging.h"
#include "GLBackendShared.h"
@ -104,3 +103,7 @@ void GLBackend::do_getQuery(Batch& batch, uint32 paramOffset) {
(void)CHECK_GL_ERROR();
}
}
void GLBackend::resetQueryStage() {
}

View file

@ -8,7 +8,6 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GPULogging.h"
#include "GLBackendShared.h"
#include "Format.h"
@ -542,7 +541,12 @@ ElementResource getFormatFromGLUniform(GLenum gltype) {
};
int makeUniformSlots(GLuint glprogram, const Shader::BindingSet& slotBindings, Shader::SlotSet& uniforms, Shader::SlotSet& textures, Shader::SlotSet& samplers) {
int makeUniformSlots(GLuint glprogram, const Shader::BindingSet& slotBindings,
Shader::SlotSet& uniforms, Shader::SlotSet& textures, Shader::SlotSet& samplers
#if (GPU_FEATURE_PROFILE == GPU_LEGACY)
, Shader::SlotSet& fakeBuffers
#endif
) {
GLint uniformsCount = 0;
#if (GPU_FEATURE_PROFILE == GPU_LEGACY)
@ -583,6 +587,15 @@ int makeUniformSlots(GLuint glprogram, const Shader::BindingSet& slotBindings, S
}
if (elementResource._resource == Resource::BUFFER) {
#if (GPU_FEATURE_PROFILE == GPU_LEGACY)
// if in legacy profile, we fake the uniform buffer with an array
// this is where we detect it assuming it's explicitely assinged a binding
auto requestedBinding = slotBindings.find(std::string(sname));
if (requestedBinding != slotBindings.end()) {
// found one buffer!
fakeBuffers.insert(Shader::Slot(sname, location, elementResource._element, elementResource._resource));
}
#endif
uniforms.insert(Shader::Slot(sname, location, elementResource._element, elementResource._resource));
} else {
// For texture/Sampler, the location is the actual binding value
@ -640,14 +653,13 @@ int makeUniformBlockSlots(GLuint glprogram, const Shader::BindingSet& slotBindin
GLchar name[NAME_LENGTH];
GLint length = 0;
GLint size = 0;
GLenum type = 0;
GLint binding = -1;
glGetActiveUniformBlockiv(glprogram, i, GL_UNIFORM_BLOCK_NAME_LENGTH, &length);
glGetActiveUniformBlockName(glprogram, i, NAME_LENGTH, &length, name);
glGetActiveUniformBlockiv(glprogram, i, GL_UNIFORM_BLOCK_BINDING, &binding);
glGetActiveUniformBlockiv(glprogram, i, GL_UNIFORM_BLOCK_DATA_SIZE, &size);
GLuint blockIndex = glGetUniformBlockIndex(glprogram, name);
// CHeck if there is a requested binding for this block
@ -738,8 +750,12 @@ bool GLBackend::makeProgram(Shader& shader, const Shader::BindingSet& slotBindin
Shader::SlotSet uniforms;
Shader::SlotSet textures;
Shader::SlotSet samplers;
#if (GPU_FEATURE_PROFILE == GPU_CORE)
makeUniformSlots(object->_program, slotBindings, uniforms, textures, samplers);
#else
makeUniformSlots(object->_program, slotBindings, uniforms, textures, samplers, buffers);
#endif
Shader::SlotSet inputs;
makeInputSlots(object->_program, slotBindings, inputs);

View file

@ -11,11 +11,10 @@
#ifndef hifi_gpu_GLBackend_Shared_h
#define hifi_gpu_GLBackend_Shared_h
#include "GLBackend.h"
#include <QDebug>
#include "Batch.h"
#include "GPULogging.h"
#include "GLBackend.h"
static const GLenum _primitiveToGLmode[gpu::NUM_PRIMITIVES] = {
GL_POINTS,

View file

@ -482,9 +482,15 @@ void GLBackend::syncPipelineStateCache() {
State::Data state;
glEnable(GL_TEXTURE_CUBE_MAP_SEAMLESS);
// Point size is always on
glHint(GL_POINT_SMOOTH_HINT, GL_NICEST);
glEnable(GL_PROGRAM_POINT_SIZE_EXT);
glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);
getCurrentGLState(state);
State::Signature signature = State::evalSignature(state);
_pipeline._stateCache = state;
_pipeline._stateSignatureCache = signature;
}

View file

@ -184,4 +184,6 @@ void GLBackend::updateTransform() {
_transform._invalidView = _transform._invalidProj = _transform._invalidModel = _transform._invalidViewport = false;
}
void GLBackend::resetTransformStage() {
}

View file

@ -34,11 +34,11 @@
#elif defined(ANDROID)
#else
#include <GL/gl.h>
#include <GL/glext.h>
#define GPU_FEATURE_PROFILE GPU_LEGACY
#define GPU_TRANSFORM_PROFILE GPU_LEGACY
#include <GL/glew.h>
#define GPU_FEATURE_PROFILE GPU_CORE
#define GPU_TRANSFORM_PROFILE GPU_CORE
#endif

View file

@ -13,13 +13,13 @@
#include <bitset>
#include <map>
#include <qurl.h>
#include <glm/glm.hpp>
#include "gpu/Resource.h"
#include "gpu/Texture.h"
#include <qurl.h>
namespace model {

View file

@ -32,7 +32,7 @@ std::unique_ptr<NLPacket> NLPacket::create(PacketType type, qint64 size, bool is
return packet;
}
std::unique_ptr<NLPacket> NLPacket::fromReceivedPacket(std::unique_ptr<char> data, qint64 size,
std::unique_ptr<NLPacket> NLPacket::fromReceivedPacket(std::unique_ptr<char[]> data, qint64 size,
const HifiSockAddr& senderSockAddr) {
// Fail with null data
Q_ASSERT(data);
@ -87,7 +87,7 @@ NLPacket::NLPacket(const NLPacket& other) : Packet(other) {
_sourceID = other._sourceID;
}
NLPacket::NLPacket(std::unique_ptr<char> data, qint64 size, const HifiSockAddr& senderSockAddr) :
NLPacket::NLPacket(std::unique_ptr<char[]> data, qint64 size, const HifiSockAddr& senderSockAddr) :
Packet(std::move(data), size, senderSockAddr)
{
// sanity check before we decrease the payloadSize with the payloadCapacity

View file

@ -20,7 +20,6 @@
class NLPacket : public udt::Packet {
Q_OBJECT
public:
// this is used by the Octree classes - must be known at compile time
static const int MAX_PACKET_HEADER_SIZE =
sizeof(udt::Packet::SequenceNumberAndBitField) + sizeof(udt::Packet::MessageNumberAndBitField) +
@ -29,7 +28,7 @@ public:
static std::unique_ptr<NLPacket> create(PacketType type, qint64 size = -1,
bool isReliable = false, bool isPartOfMessage = false);
static std::unique_ptr<NLPacket> fromReceivedPacket(std::unique_ptr<char> data, qint64 size,
static std::unique_ptr<NLPacket> fromReceivedPacket(std::unique_ptr<char[]> data, qint64 size,
const HifiSockAddr& senderSockAddr);
static std::unique_ptr<NLPacket> fromBase(std::unique_ptr<Packet> packet);
@ -63,7 +62,7 @@ public:
protected:
NLPacket(PacketType type, qint64 size = -1, bool forceReliable = false, bool isPartOfMessage = false);
NLPacket(std::unique_ptr<char> data, qint64 size, const HifiSockAddr& senderSockAddr);
NLPacket(std::unique_ptr<char[]> data, qint64 size, const HifiSockAddr& senderSockAddr);
NLPacket(std::unique_ptr<Packet> packet);
NLPacket(const NLPacket& other);
NLPacket(NLPacket&& other);

View file

@ -33,7 +33,7 @@ std::unique_ptr<BasePacket> BasePacket::create(qint64 size) {
return packet;
}
std::unique_ptr<BasePacket> BasePacket::fromReceivedPacket(std::unique_ptr<char> data,
std::unique_ptr<BasePacket> BasePacket::fromReceivedPacket(std::unique_ptr<char[]> data,
qint64 size, const HifiSockAddr& senderSockAddr) {
// Fail with invalid size
Q_ASSERT(size >= 0);
@ -64,7 +64,7 @@ BasePacket::BasePacket(qint64 size) {
_payloadStart = _packet.get();
}
BasePacket::BasePacket(std::unique_ptr<char> data, qint64 size, const HifiSockAddr& senderSockAddr) :
BasePacket::BasePacket(std::unique_ptr<char[]> data, qint64 size, const HifiSockAddr& senderSockAddr) :
_packetSize(size),
_packet(std::move(data)),
_payloadStart(_packet.get()),
@ -83,7 +83,7 @@ BasePacket::BasePacket(const BasePacket& other) :
BasePacket& BasePacket::operator=(const BasePacket& other) {
_packetSize = other._packetSize;
_packet = std::unique_ptr<char>(new char[_packetSize]);
_packet = std::unique_ptr<char[]>(new char[_packetSize]);
memcpy(_packet.get(), other._packet.get(), _packetSize);
_payloadStart = _packet.get() + (other._payloadStart - other._packet.get());

View file

@ -27,7 +27,7 @@ public:
static const qint64 PACKET_WRITE_ERROR;
static std::unique_ptr<BasePacket> create(qint64 size = -1);
static std::unique_ptr<BasePacket> fromReceivedPacket(std::unique_ptr<char> data, qint64 size,
static std::unique_ptr<BasePacket> fromReceivedPacket(std::unique_ptr<char[]> data, qint64 size,
const HifiSockAddr& senderSockAddr);
// Current level's header size
@ -78,7 +78,7 @@ public:
protected:
BasePacket(qint64 size);
BasePacket(std::unique_ptr<char> data, qint64 size, const HifiSockAddr& senderSockAddr);
BasePacket(std::unique_ptr<char[]> data, qint64 size, const HifiSockAddr& senderSockAddr);
BasePacket(const BasePacket& other);
BasePacket& operator=(const BasePacket& other);
BasePacket(BasePacket&& other);
@ -91,7 +91,7 @@ protected:
void adjustPayloadStartAndCapacity(qint64 headerSize, bool shouldDecreasePayloadSize = false);
qint64 _packetSize = 0; // Total size of the allocated memory
std::unique_ptr<char> _packet; // Allocated memory
std::unique_ptr<char[]> _packet; // Allocated memory
char* _payloadStart = nullptr; // Start of the payload
qint64 _payloadCapacity = 0; // Total capacity of the payload

View file

@ -25,7 +25,7 @@ int ControlPacket::maxPayloadSize() {
return BasePacket::maxPayloadSize() - ControlPacket::localHeaderSize();
}
std::unique_ptr<ControlPacket> ControlPacket::fromReceivedPacket(std::unique_ptr<char> data, qint64 size,
std::unique_ptr<ControlPacket> ControlPacket::fromReceivedPacket(std::unique_ptr<char[]> data, qint64 size,
const HifiSockAddr &senderSockAddr) {
// Fail with null data
Q_ASSERT(data);
@ -56,7 +56,7 @@ ControlPacket::ControlPacket(Type type, qint64 size) :
writeType();
}
ControlPacket::ControlPacket(std::unique_ptr<char> data, qint64 size, const HifiSockAddr& senderSockAddr) :
ControlPacket::ControlPacket(std::unique_ptr<char[]> data, qint64 size, const HifiSockAddr& senderSockAddr) :
BasePacket(std::move(data), size, senderSockAddr)
{
// sanity check before we decrease the payloadSize with the payloadCapacity

View file

@ -34,7 +34,7 @@ public:
};
static std::unique_ptr<ControlPacket> create(Type type, qint64 size = -1);
static std::unique_ptr<ControlPacket> fromReceivedPacket(std::unique_ptr<char> data, qint64 size,
static std::unique_ptr<ControlPacket> fromReceivedPacket(std::unique_ptr<char[]> data, qint64 size,
const HifiSockAddr& senderSockAddr);
// Current level's header size
static int localHeaderSize();
@ -48,7 +48,7 @@ public:
private:
ControlPacket(Type type, qint64 size = -1);
ControlPacket(std::unique_ptr<char> data, qint64 size, const HifiSockAddr& senderSockAddr);
ControlPacket(std::unique_ptr<char[]> data, qint64 size, const HifiSockAddr& senderSockAddr);
ControlPacket(ControlPacket&& other);
ControlPacket(const ControlPacket& other) = delete;

View file

@ -35,7 +35,7 @@ std::unique_ptr<Packet> Packet::create(qint64 size, bool isReliable, bool isPart
return packet;
}
std::unique_ptr<Packet> Packet::fromReceivedPacket(std::unique_ptr<char> data, qint64 size, const HifiSockAddr& senderSockAddr) {
std::unique_ptr<Packet> Packet::fromReceivedPacket(std::unique_ptr<char[]> data, qint64 size, const HifiSockAddr& senderSockAddr) {
// Fail with invalid size
Q_ASSERT(size >= 0);
@ -62,7 +62,7 @@ Packet::Packet(qint64 size, bool isReliable, bool isPartOfMessage) :
writeHeader();
}
Packet::Packet(std::unique_ptr<char> data, qint64 size, const HifiSockAddr& senderSockAddr) :
Packet::Packet(std::unique_ptr<char[]> data, qint64 size, const HifiSockAddr& senderSockAddr) :
BasePacket(std::move(data), size, senderSockAddr)
{
readHeader();

View file

@ -33,7 +33,7 @@ public:
using MessageNumberAndBitField = uint32_t;
static std::unique_ptr<Packet> create(qint64 size = -1, bool isReliable = false, bool isPartOfMessage = false);
static std::unique_ptr<Packet> fromReceivedPacket(std::unique_ptr<char> data, qint64 size, const HifiSockAddr& senderSockAddr);
static std::unique_ptr<Packet> fromReceivedPacket(std::unique_ptr<char[]> data, qint64 size, const HifiSockAddr& senderSockAddr);
// Provided for convenience, try to limit use
static std::unique_ptr<Packet> createCopy(const Packet& other);
@ -53,7 +53,7 @@ public:
protected:
Packet(qint64 size, bool isReliable = false, bool isPartOfMessage = false);
Packet(std::unique_ptr<char> data, qint64 size, const HifiSockAddr& senderSockAddr);
Packet(std::unique_ptr<char[]> data, qint64 size, const HifiSockAddr& senderSockAddr);
Packet(const Packet& other);
Packet(Packet&& other);

View file

@ -135,7 +135,7 @@ void Socket::readPendingDatagrams() {
// setup a buffer to read the packet into
int packetSizeWithHeader = _udpSocket.pendingDatagramSize();
std::unique_ptr<char> buffer = std::unique_ptr<char>(new char[packetSizeWithHeader]);
auto buffer = std::unique_ptr<char[]>(new char[packetSizeWithHeader]);
// pull the datagram
_udpSocket.readDatagram(buffer.get(), packetSizeWithHeader,

View file

@ -0,0 +1,251 @@
//
// AmbientOcclusionEffect.cpp
// libraries/render-utils/src/
//
// Created by Niraj Venkat on 7/15/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <glm/gtc/random.hpp>
#include <PathUtils.h>
#include <SharedUtil.h>
#include <gpu/Context.h>
#include "gpu/StandardShaderLib.h"
#include "AmbientOcclusionEffect.h"
#include "TextureCache.h"
#include "FramebufferCache.h"
#include "DependencyManager.h"
#include "ViewFrustum.h"
#include "GeometryCache.h"
#include "ambient_occlusion_vert.h"
#include "ambient_occlusion_frag.h"
#include "gaussian_blur_vertical_vert.h"
#include "gaussian_blur_horizontal_vert.h"
#include "gaussian_blur_frag.h"
#include "occlusion_blend_frag.h"
// Initialize uniform state to sane defaults. The cached uniform locations
// start out invalid (-1) and are resolved the first time
// getOcclusionPipeline() builds the occlusion program; the g_* values are
// the default AO tuning parameters consumed in run().
AmbientOcclusion::AmbientOcclusion() :
    _gScaleLoc(-1),
    _gBiasLoc(-1),
    _gSampleRadiusLoc(-1),
    _gIntensityLoc(-1),
    _bufferWidthLoc(-1),
    _bufferHeightLoc(-1),
    g_scale(1.0f),
    g_bias(1.0f),
    g_sample_rad(1.0f),
    g_intensity(1.0f)
{
}
/// Lazily builds (and thereafter returns) the pipeline that computes the raw
/// ambient-occlusion buffer from the primary depth and normal textures.
/// Side effects on first call: caches the AO uniform locations and allocates
/// the occlusion framebuffer/texture at the current frame buffer size.
const gpu::PipelinePointer& AmbientOcclusion::getOcclusionPipeline() {
    if (!_occlusionPipeline) {
        auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(ambient_occlusion_vert)));
        auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(ambient_occlusion_frag)));
        gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));

        gpu::Shader::BindingSet slotBindings;
        slotBindings.insert(gpu::Shader::Binding(std::string("depthTexture"), 0));
        slotBindings.insert(gpu::Shader::Binding(std::string("normalTexture"), 1));
        gpu::Shader::makeProgram(*program, slotBindings);

        // Cache the uniform locations once; run() feeds them every frame.
        _gScaleLoc = program->getUniforms().findLocation("g_scale");
        _gBiasLoc = program->getUniforms().findLocation("g_bias");
        _gSampleRadiusLoc = program->getUniforms().findLocation("g_sample_rad");
        _gIntensityLoc = program->getUniforms().findLocation("g_intensity");
        _bufferWidthLoc = program->getUniforms().findLocation("bufferWidth");
        _bufferHeightLoc = program->getUniforms().findLocation("bufferHeight");

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());
        state->setDepthTest(false, false, gpu::LESS_EQUAL);

        // Blend on transparent
        state->setBlendFunction(false,
            gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
            gpu::State::DEST_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ZERO);

        // Fetch the frame buffer size once instead of performing two separate
        // DependencyManager lookups in a single expression.
        auto frameBufferSize = DependencyManager::get<FramebufferCache>()->getFrameBufferSize();

        // Link the occlusion FBO to texture
        _occlusionBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create(gpu::Element::COLOR_RGBA_32,
            frameBufferSize.width(), frameBufferSize.height()));
        auto format = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
        auto width = _occlusionBuffer->getWidth();
        auto height = _occlusionBuffer->getHeight();
        auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT);
        _occlusionTexture = gpu::TexturePointer(gpu::Texture::create2D(format, width, height, defaultSampler));

        // Good to go add the brand new pipeline
        _occlusionPipeline.reset(gpu::Pipeline::create(program, state));
    }
    return _occlusionPipeline;
}
/// Lazily builds (and thereafter returns) the vertical gaussian blur pipeline.
/// Side effects on first call: allocates the vertical-blur framebuffer/texture
/// at the current frame buffer size.
const gpu::PipelinePointer& AmbientOcclusion::getVBlurPipeline() {
    if (!_vBlurPipeline) {
        auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(gaussian_blur_vertical_vert)));
        auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(gaussian_blur_frag)));
        gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));

        gpu::Shader::BindingSet slotBindings;
        gpu::Shader::makeProgram(*program, slotBindings);

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());
        state->setDepthTest(false, false, gpu::LESS_EQUAL);

        // Blend on transparent
        state->setBlendFunction(false,
            gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
            gpu::State::DEST_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ZERO);

        // Fetch the frame buffer size once instead of performing two separate
        // DependencyManager lookups in a single expression.
        auto frameBufferSize = DependencyManager::get<FramebufferCache>()->getFrameBufferSize();

        // Link the vertical blur FBO to texture (comment previously said
        // "horizontal" here; this is the vertical pass)
        _vBlurBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create(gpu::Element::COLOR_RGBA_32,
            frameBufferSize.width(), frameBufferSize.height()));
        auto format = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
        auto width = _vBlurBuffer->getWidth();
        auto height = _vBlurBuffer->getHeight();
        auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT);
        _vBlurTexture = gpu::TexturePointer(gpu::Texture::create2D(format, width, height, defaultSampler));

        // Good to go add the brand new pipeline
        _vBlurPipeline.reset(gpu::Pipeline::create(program, state));
    }
    return _vBlurPipeline;
}
/// Lazily builds (and thereafter returns) the horizontal gaussian blur
/// pipeline. Side effects on first call: allocates the horizontal-blur
/// framebuffer/texture at the current frame buffer size.
const gpu::PipelinePointer& AmbientOcclusion::getHBlurPipeline() {
    if (!_hBlurPipeline) {
        auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(gaussian_blur_horizontal_vert)));
        auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(gaussian_blur_frag)));
        gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));

        gpu::Shader::BindingSet slotBindings;
        gpu::Shader::makeProgram(*program, slotBindings);

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());
        state->setDepthTest(false, false, gpu::LESS_EQUAL);

        // Blend on transparent
        state->setBlendFunction(false,
            gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
            gpu::State::DEST_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ZERO);

        // Fetch the frame buffer size once instead of performing two separate
        // DependencyManager lookups in a single expression.
        auto frameBufferSize = DependencyManager::get<FramebufferCache>()->getFrameBufferSize();

        // Link the horizontal blur FBO to texture
        _hBlurBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create(gpu::Element::COLOR_RGBA_32,
            frameBufferSize.width(), frameBufferSize.height()));
        auto format = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
        auto width = _hBlurBuffer->getWidth();
        auto height = _hBlurBuffer->getHeight();
        auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT);
        _hBlurTexture = gpu::TexturePointer(gpu::Texture::create2D(format, width, height, defaultSampler));

        // Good to go add the brand new pipeline
        _hBlurPipeline.reset(gpu::Pipeline::create(program, state));
    }
    return _hBlurPipeline;
}
/// Lazily builds (and thereafter returns) the final blend pipeline, which
/// composites the blurred occlusion texture over the primary color buffer.
const gpu::PipelinePointer& AmbientOcclusion::getBlendPipeline() {
    if (!_blendPipeline) {
        // Fullscreen vertex stage reused from the occlusion pass, paired with
        // the occlusion blend fragment stage.
        auto vertexShader = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(ambient_occlusion_vert)));
        auto pixelShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(occlusion_blend_frag)));
        auto blendProgram = gpu::ShaderPointer(gpu::Shader::createProgram(vertexShader, pixelShader));

        // The blurred occlusion result is sampled from texture unit 0.
        gpu::Shader::BindingSet bindings;
        bindings.insert(gpu::Shader::Binding(std::string("blurredOcclusionTexture"), 0));
        gpu::Shader::makeProgram(*blendProgram, bindings);

        auto blendState = gpu::StatePointer(new gpu::State());
        blendState->setDepthTest(false, false, gpu::LESS_EQUAL);

        // Blend on transparent
        blendState->setBlendFunction(true,
            gpu::State::INV_SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::SRC_ALPHA);

        _blendPipeline.reset(gpu::Pipeline::create(blendProgram, blendState));
    }
    return _blendPipeline;
}
// Records and submits the full ambient-occlusion pass for this frame:
//   1. compute raw occlusion from the primary depth + normal textures,
//   2. blur the result vertically, then horizontally (separable blur),
//   3. blend the blurred occlusion over the primary framebuffer.
// NOTE(review): batch commands are order-dependent GPU state; the sequence of
// setResourceTexture/setFramebuffer/setPipeline calls must not be reordered.
void AmbientOcclusion::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext) {
assert(renderContext->args);
assert(renderContext->args->_viewFrustum);
RenderArgs* args = renderContext->args;
gpu::Batch batch;
// Set up camera transforms from the current view frustum.
glm::mat4 projMat;
Transform viewMat;
args->_viewFrustum->evalProjectionMatrix(projMat);
args->_viewFrustum->evalViewTransform(viewMat);
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
batch.setModelTransform(Transform());
// Occlusion step
// Ensure the pipeline (and the cached uniform locations it resolves) exists
// before the uniforms are written below.
getOcclusionPipeline();
batch.setResourceTexture(0, DependencyManager::get<FramebufferCache>()->getPrimaryDepthTexture());
batch.setResourceTexture(1, DependencyManager::get<FramebufferCache>()->getPrimaryNormalTexture());
_occlusionBuffer->setRenderBuffer(0, _occlusionTexture);
batch.setFramebuffer(_occlusionBuffer);
// Occlusion uniforms
g_scale = 1.0f;
g_bias = 1.0f;
g_sample_rad = 1.0f;
g_intensity = 1.0f;
// Bind the first gpu::Pipeline we need - for calculating occlusion buffer
batch.setPipeline(getOcclusionPipeline());
batch._glUniform1f(_gScaleLoc, g_scale);
batch._glUniform1f(_gBiasLoc, g_bias);
batch._glUniform1f(_gSampleRadiusLoc, g_sample_rad);
batch._glUniform1f(_gIntensityLoc, g_intensity);
batch._glUniform1f(_bufferWidthLoc, DependencyManager::get<FramebufferCache>()->getFrameBufferSize().width());
batch._glUniform1f(_bufferHeightLoc, DependencyManager::get<FramebufferCache>()->getFrameBufferSize().height());
// Fullscreen quad in normalized device coordinates, shared by all passes.
glm::vec4 color(0.0f, 0.0f, 0.0f, 1.0f);
glm::vec2 bottomLeft(-1.0f, -1.0f);
glm::vec2 topRight(1.0f, 1.0f);
glm::vec2 texCoordTopLeft(0.0f, 0.0f);
glm::vec2 texCoordBottomRight(1.0f, 1.0f);
DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color);
// Vertical blur step
// Input: raw occlusion texture; output: _vBlurTexture.
getVBlurPipeline();
batch.setResourceTexture(0, _occlusionTexture);
_vBlurBuffer->setRenderBuffer(0, _vBlurTexture);
batch.setFramebuffer(_vBlurBuffer);
// Bind the second gpu::Pipeline we need - for calculating blur buffer
batch.setPipeline(getVBlurPipeline());
DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color);
// Horizontal blur step
// Input: vertically blurred texture; output: _hBlurTexture.
getHBlurPipeline();
batch.setResourceTexture(0, _vBlurTexture);
_hBlurBuffer->setRenderBuffer(0, _hBlurTexture);
batch.setFramebuffer(_hBlurBuffer);
// Bind the third gpu::Pipeline we need - for calculating blur buffer
batch.setPipeline(getHBlurPipeline());
DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color);
// Blend step
// Composite the fully blurred occlusion over the primary framebuffer.
getBlendPipeline();
batch.setResourceTexture(0, _hBlurTexture);
batch.setFramebuffer(DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer());
// Bind the fourth gpu::Pipeline we need - for blending the primary color buffer with blurred occlusion texture
batch.setPipeline(getBlendPipeline());
DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color);
// Ready to render
args->_context->syncCache();
args->_context->render((batch));
}

View file

@ -0,0 +1,61 @@
//
// AmbientOcclusionEffect.h
// libraries/render-utils/src/
//
// Created by Niraj Venkat on 7/15/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AmbientOcclusionEffect_h
#define hifi_AmbientOcclusionEffect_h
#include <DependencyManager.h>
#include "render/DrawTask.h"
/// Render job that computes a screen-space ambient-occlusion term from the
/// primary depth/normal buffers, blurs it with a separable two-pass blur,
/// and blends the result over the primary framebuffer (see run()).
class AmbientOcclusion {
public:
    AmbientOcclusion();

    /// Records and submits the full AO pass for the current frame.
    void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);

    typedef render::Job::Model<AmbientOcclusion> JobModel;

    // Lazy pipeline builders: each constructs its pipeline (and any backing
    // framebuffer/texture) on first call, then returns the cached instance.
    const gpu::PipelinePointer& getOcclusionPipeline();
    const gpu::PipelinePointer& getHBlurPipeline();
    const gpu::PipelinePointer& getVBlurPipeline();
    const gpu::PipelinePointer& getBlendPipeline();

private:
    // Uniform locations for the AO program; -1 (invalid) until
    // getOcclusionPipeline() resolves them. Brace-initialized so the members
    // are never read uninitialized.
    gpu::int32 _gScaleLoc{ -1 };
    gpu::int32 _gBiasLoc{ -1 };
    gpu::int32 _gSampleRadiusLoc{ -1 };
    gpu::int32 _gIntensityLoc{ -1 };
    gpu::int32 _bufferWidthLoc{ -1 };
    gpu::int32 _bufferHeightLoc{ -1 };

    // AO tuning parameters fed to the shader each frame by run().
    float g_scale{ 1.0f };
    float g_bias{ 1.0f };
    float g_sample_rad{ 1.0f };
    float g_intensity{ 1.0f };

    // Cached pipelines, built lazily by the getters above.
    gpu::PipelinePointer _occlusionPipeline;
    gpu::PipelinePointer _hBlurPipeline;
    gpu::PipelinePointer _vBlurPipeline;
    gpu::PipelinePointer _blendPipeline;

    // Intermediate render targets: raw occlusion, then the two blur passes.
    gpu::FramebufferPointer _occlusionBuffer;
    gpu::FramebufferPointer _hBlurBuffer;
    gpu::FramebufferPointer _vBlurBuffer;
    gpu::TexturePointer _occlusionTexture;
    gpu::TexturePointer _hBlurTexture;
    gpu::TexturePointer _vBlurTexture;
};
#endif // hifi_AmbientOcclusionEffect_h

View file

@ -12,12 +12,12 @@
#include "DeferredLightingEffect.h"
#include <GLMHelpers.h>
#include <gpu/GPUConfig.h>
#include <PathUtils.h>
#include <ViewFrustum.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <gpu/StandardShaderLib.h>
#include <PathUtils.h>
#include <ViewFrustum.h>
#include "AbstractViewStateInterface.h"
#include "GeometryCache.h"
@ -291,7 +291,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
locations = &_directionalAmbientSphereLightCascadedShadowMapLocations;
}
batch.setPipeline(program);
batch._glUniform3fv(locations->shadowDistances, 1, (const GLfloat*) &_viewState->getShadowDistances());
batch._glUniform3fv(locations->shadowDistances, 1, (const float*) &_viewState->getShadowDistances());
} else {
if (useSkyboxCubemap) {
@ -325,7 +325,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
sh = (*_skybox->getCubemap()->getIrradiance());
}
for (int i =0; i <gpu::SphericalHarmonics::NUM_COEFFICIENTS; i++) {
batch._glUniform4fv(locations->ambientSphere + i, 1, (const GLfloat*) (&sh) + i * 4);
batch._glUniform4fv(locations->ambientSphere + i, 1, (const float*) (&sh) + i * 4);
}
}
@ -340,7 +340,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
if (_atmosphere && (locations->atmosphereBufferUnit >= 0)) {
batch.setUniformBuffer(locations->atmosphereBufferUnit, _atmosphere->getDataBuffer());
}
batch._glUniformMatrix4fv(locations->invViewMat, 1, false, reinterpret_cast< const GLfloat* >(&invViewMat));
batch._glUniformMatrix4fv(locations->invViewMat, 1, false, reinterpret_cast< const float* >(&invViewMat));
}
float left, right, bottom, top, nearVal, farVal;
@ -419,9 +419,9 @@ void DeferredLightingEffect::render(RenderArgs* args) {
batch._glUniform2f(_pointLightLocations.depthTexCoordOffset, depthTexCoordOffsetS, depthTexCoordOffsetT);
batch._glUniform2f(_pointLightLocations.depthTexCoordScale, depthTexCoordScaleS, depthTexCoordScaleT);
batch._glUniformMatrix4fv(_pointLightLocations.invViewMat, 1, false, reinterpret_cast< const GLfloat* >(&invViewMat));
batch._glUniformMatrix4fv(_pointLightLocations.invViewMat, 1, false, reinterpret_cast< const float* >(&invViewMat));
batch._glUniformMatrix4fv(_pointLightLocations.texcoordMat, 1, false, reinterpret_cast< const GLfloat* >(&texcoordMat));
batch._glUniformMatrix4fv(_pointLightLocations.texcoordMat, 1, false, reinterpret_cast< const float* >(&texcoordMat));
for (auto lightID : _pointLights) {
auto& light = _allocatedLights[lightID];
@ -467,9 +467,9 @@ void DeferredLightingEffect::render(RenderArgs* args) {
batch._glUniform2f(_spotLightLocations.depthTexCoordOffset, depthTexCoordOffsetS, depthTexCoordOffsetT);
batch._glUniform2f(_spotLightLocations.depthTexCoordScale, depthTexCoordScaleS, depthTexCoordScaleT);
batch._glUniformMatrix4fv(_spotLightLocations.invViewMat, 1, false, reinterpret_cast< const GLfloat* >(&invViewMat));
batch._glUniformMatrix4fv(_spotLightLocations.invViewMat, 1, false, reinterpret_cast< const float* >(&invViewMat));
batch._glUniformMatrix4fv(_spotLightLocations.texcoordMat, 1, false, reinterpret_cast< const GLfloat* >(&texcoordMat));
batch._glUniformMatrix4fv(_spotLightLocations.texcoordMat, 1, false, reinterpret_cast< const float* >(&texcoordMat));
for (auto lightID : _spotLights) {
auto light = _allocatedLights[lightID];
@ -489,7 +489,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
if ((eyeHalfPlaneDistance > -nearRadius) &&
(glm::distance(eyePoint, glm::vec3(light->getPosition())) < expandedRadius + nearRadius)) {
coneParam.w = 0.0f;
batch._glUniform4fv(_spotLightLocations.coneParam, 1, reinterpret_cast< const GLfloat* >(&coneParam));
batch._glUniform4fv(_spotLightLocations.coneParam, 1, reinterpret_cast< const float* >(&coneParam));
Transform model;
model.setTranslation(glm::vec3(0.0f, 0.0f, -1.0f));
@ -509,7 +509,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
batch.setViewTransform(viewMat);
} else {
coneParam.w = 1.0f;
batch._glUniform4fv(_spotLightLocations.coneParam, 1, reinterpret_cast< const GLfloat* >(&coneParam));
batch._glUniform4fv(_spotLightLocations.coneParam, 1, reinterpret_cast< const float* >(&coneParam));
Transform model;
model.setTranslation(light->getPosition());
@ -595,9 +595,9 @@ void DeferredLightingEffect::loadLightProgram(const char* vertSource, const char
slotBindings.insert(gpu::Shader::Binding(std::string("depthMap"), 3));
slotBindings.insert(gpu::Shader::Binding(std::string("shadowMap"), 4));
slotBindings.insert(gpu::Shader::Binding(std::string("skyboxMap"), 5));
const GLint LIGHT_GPU_SLOT = 3;
const int LIGHT_GPU_SLOT = 3;
slotBindings.insert(gpu::Shader::Binding(std::string("lightBuffer"), LIGHT_GPU_SLOT));
const GLint ATMOSPHERE_GPU_SLOT = 4;
const int ATMOSPHERE_GPU_SLOT = 4;
slotBindings.insert(gpu::Shader::Binding(std::string("atmosphereBufferUnit"), ATMOSPHERE_GPU_SLOT));
gpu::Shader::makeProgram(*program, slotBindings);
@ -614,13 +614,8 @@ void DeferredLightingEffect::loadLightProgram(const char* vertSource, const char
locations.texcoordMat = program->getUniforms().findLocation("texcoordMat");
locations.coneParam = program->getUniforms().findLocation("coneParam");
#if (GPU_FEATURE_PROFILE == GPU_CORE)
locations.lightBufferUnit = program->getBuffers().findLocation("lightBuffer");
locations.atmosphereBufferUnit = program->getBuffers().findLocation("atmosphereBufferUnit");
#else
locations.lightBufferUnit = program->getUniforms().findLocation("lightBuffer");
locations.atmosphereBufferUnit = program->getUniforms().findLocation("atmosphereBufferUnit");
#endif
auto state = std::make_shared<gpu::State>();
if (lightVolume) {
@ -677,10 +672,10 @@ model::MeshPointer DeferredLightingEffect::getSpotLightMesh() {
int ringFloatOffset = slices * 3;
GLfloat* vertexData = new GLfloat[verticesSize];
GLfloat* vertexRing0 = vertexData;
GLfloat* vertexRing1 = vertexRing0 + ringFloatOffset;
GLfloat* vertexRing2 = vertexRing1 + ringFloatOffset;
float* vertexData = new float[verticesSize];
float* vertexRing0 = vertexData;
float* vertexRing1 = vertexRing0 + ringFloatOffset;
float* vertexRing2 = vertexRing1 + ringFloatOffset;
for (int i = 0; i < slices; i++) {
float theta = TWO_PI * i / slices;
@ -746,7 +741,7 @@ model::MeshPointer DeferredLightingEffect::getSpotLightMesh() {
*(index++) = capVertex;
}
_spotLightMesh->setIndexBuffer(gpu::BufferView(new gpu::Buffer(sizeof(GLushort) * indices, (gpu::Byte*) indexData), gpu::Element::INDEX_UINT16));
_spotLightMesh->setIndexBuffer(gpu::BufferView(new gpu::Buffer(sizeof(unsigned short) * indices, (gpu::Byte*) indexData), gpu::Element::INDEX_UINT16));
delete[] indexData;
model::Mesh::Part part(0, indices, 0, model::Mesh::TRIANGLES);

Some files were not shown because too many files have changed in this diff Show more