Mirror of https://github.com/overte-org/overte.git (synced 2025-04-13 23:27:34 +02:00)

Commit 2c66af67e9: Merge howard/rig into andrew/caesium
51 changed files with 2570 additions and 314 deletions
examples/afk.js (new file, 100 lines)
@@ -0,0 +1,100 @@
//
// #20485: AFK - Away From Keyboard Setting
// *****************************************
//
// Created by Kevin M. Thomas and Thoys 07/16/15.
// Copyright 2015 High Fidelity, Inc.
// kevintown.net
//
// JavaScript for the High Fidelity interface that creates an away from keyboard functionality by providing a UI and keyPressEvent which will mute toggle the connected microphone, face tracking dde and set the avatar to a hand raise pose.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//


var originalOutputDevice;
var originalName;
var muted = false;
var wasAudioEnabled;
var afkText = "AFK - I Will Return!\n";

// Set up toggleMuteButton text overlay.
var toggleMuteButton = Overlays.addOverlay("text", {
    x: 10,
    y: 275,
    width: 60,
    height: 28,
    backgroundColor: { red: 0, green: 0, blue: 0},
    color: { red: 255, green: 255, blue: 0},
    font: {size: 15},
    topMargin: 8
});

// Function that overlays text upon state change.
function onMuteStateChanged() {
    Overlays.editOverlay(toggleMuteButton, muted ? {text: "Go Live", leftMargin: 5} : {text: "Go AFK", leftMargin: 5});
}

function toggleMute() {
    if (!muted) {
        if (!AudioDevice.getMuted()) {
            AudioDevice.toggleMute();
        }
        originalOutputDevice = AudioDevice.getOutputDevice();
        Menu.setIsOptionChecked("Mute Face Tracking", true);
        originalName = MyAvatar.displayName;
        AudioDevice.setOutputDevice("none");
        MyAvatar.displayName = afkText + MyAvatar.displayName;
        MyAvatar.setJointData("LeftShoulder", Quat.fromPitchYawRollDegrees(0, 180, 0));
        MyAvatar.setJointData("RightShoulder", Quat.fromPitchYawRollDegrees(0, 180, 0));
    } else {
        if (AudioDevice.getMuted()) {
            AudioDevice.toggleMute();
        }
        AudioDevice.setOutputDevice(originalOutputDevice);
        Menu.setIsOptionChecked("Mute Face Tracking", false);
        MyAvatar.setJointData("LeftShoulder", Quat.fromPitchYawRollDegrees(0, 0, 0));
        MyAvatar.setJointData("RightShoulder", Quat.fromPitchYawRollDegrees(0, 0, 0));
        MyAvatar.clearJointData("LeftShoulder");
        MyAvatar.clearJointData("RightShoulder");
        MyAvatar.displayName = originalName;
    }
    muted = !muted;
    onMuteStateChanged();
}

// Function that adds mousePressEvent functionality to toggle mic mute, AFK message above display name and toggle avatar arms upward.
function mousePressEvent(event) {
    if (Overlays.getOverlayAtPoint({x: event.x, y: event.y}) == toggleMuteButton) {
        toggleMute();
    }
}

// Call functions.
onMuteStateChanged();

//AudioDevice.muteToggled.connect(onMuteStateChanged);
Controller.mousePressEvent.connect(mousePressEvent);

// Function that adds keyPressEvent functionality to toggle mic mute, AFK message above display name and toggle avatar arms upward.
Controller.keyPressEvent.connect(function(event) {
    if (event.text == "y") {
        toggleMute();
    }
});

// Function that sets a timeout value of 1 second so that the display name does not get overwritten in the event of a crash.
Script.setTimeout(function() {
    MyAvatar.displayName = MyAvatar.displayName.replace(afkText, "");
}, 1000);

// Function that calls upon exit to restore avatar display name to original state.
Script.scriptEnding.connect(function(){
    if (muted) {
        AudioDevice.setOutputDevice(originalOutputDevice);
        Overlays.deleteOverlay(toggleMuteButton);
        MyAvatar.displayName = originalName;
    }
    Overlays.deleteOverlay(toggleMuteButton);
});
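The AFK pose above is nothing more than per-joint rotation overrides that are later cleared. A minimal sketch of the same pattern, reusing the MyAvatar calls and rotation values from afk.js (the function names here are illustrative, not part of the commit):

// Sketch: apply and later clear a joint override, as afk.js does for the hand-raise pose.
var HAND_RAISE_ROTATION = Quat.fromPitchYawRollDegrees(0, 180, 0); // same value afk.js uses

function applyAfkPose() {
    MyAvatar.setJointData("LeftShoulder", HAND_RAISE_ROTATION);
    MyAvatar.setJointData("RightShoulder", HAND_RAISE_ROTATION);
}

function clearAfkPose() {
    // Clearing the joint data hands the joints back to normal animation.
    MyAvatar.clearJointData("LeftShoulder");
    MyAvatar.clearJointData("RightShoulder");
}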
@@ -329,7 +329,7 @@ var toolBar = (function () {
Script.setTimeout(resize, RESIZE_INTERVAL);
} else {
print("Can't add model: Model would be out of bounds.");
Window.alert("Can't add model: Model would be out of bounds.");
}
}

@@ -374,7 +374,7 @@ var toolBar = (function () {
});
} else {
print("Can't create box: Box would be out of bounds.");
Window.alert("Can't create box: Box would be out of bounds.");
}
return true;
}

@@ -390,7 +390,7 @@ var toolBar = (function () {
color: { red: 255, green: 0, blue: 0 }
});
} else {
print("Can't create sphere: Sphere would be out of bounds.");
Window.alert("Can't create sphere: Sphere would be out of bounds.");
}
return true;
}

@@ -413,7 +413,7 @@ var toolBar = (function () {
cutoff: 180, // in degrees
});
} else {
print("Can't create Light: Light would be out of bounds.");
Window.alert("Can't create Light: Light would be out of bounds.");
}
return true;
}

@@ -433,7 +433,7 @@ var toolBar = (function () {
lineHeight: 0.06
});
} else {
print("Can't create box: Text would be out of bounds.");
Window.alert("Can't create box: Text would be out of bounds.");
}
return true;
}

@@ -449,7 +449,7 @@ var toolBar = (function () {
sourceUrl: "https://highfidelity.com/",
});
} else {
print("Can't create Web Entity: would be out of bounds.");
Window.alert("Can't create Web Entity: would be out of bounds.");
}
return true;
}

@@ -464,7 +464,7 @@ var toolBar = (function () {
dimensions: { x: 10, y: 10, z: 10 },
});
} else {
print("Can't create box: Text would be out of bounds.");
Window.alert("Can't create box: Text would be out of bounds.");
}
return true;
}

@@ -482,7 +482,7 @@ var toolBar = (function () {
voxelSurfaceStyle: 1
});
} else {
print("Can't create PolyVox: would be out of bounds.");
Window.alert("Can't create PolyVox: would be out of bounds.");
}
return true;
}

@@ -1068,13 +1068,16 @@ function importSVO(importURL) {
if (Clipboard.getClipboardContentsLargestDimension() < VERY_LARGE) {
position = getPositionToCreateEntity();
}
var pastedEntityIDs = Clipboard.pasteEntities(position);

if (isActive) {
selectionManager.setSelections(pastedEntityIDs);
}
if (position.x > 0 && position.y > 0 && position.z > 0) {
var pastedEntityIDs = Clipboard.pasteEntities(position);

if (isActive) {
selectionManager.setSelections(pastedEntityIDs);
}
Window.raiseMainWindow();
} else {
Window.alert("Can't import objects: objects would be out of bounds.");
}
} else {
Window.alert("There was an error importing the entity file.");
}
examples/example/audio/jsstreamplayer.js (new file, 145 lines)
@@ -0,0 +1,145 @@
//
// #20622: JS Stream Player
// *************************
//
// Created by Kevin M. Thomas and Thoys 07/17/15.
// Copyright 2015 High Fidelity, Inc.
// kevintown.net
//
// JavaScript for the High Fidelity interface that creates a stream player with a UI and keyPressEvents for adding a stream URL in addition to play, stop and volume functionality.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//


// Declare HiFi public bucket.
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";

// Declare variables and set up new WebWindow.
var stream;
var volume = 1;
var streamWindow = new WebWindow('Stream', HIFI_PUBLIC_BUCKET + "examples/html/jsstreamplayer.html", 0, 0, false);

// Set up toggleStreamURLButton overlay.
var toggleStreamURLButton = Overlays.addOverlay("text", {
    x: 76,
    y: 275,
    width: 40,
    height: 28,
    backgroundColor: {red: 0, green: 0, blue: 0},
    color: {red: 255, green: 255, blue: 0},
    font: {size: 15},
    topMargin: 8,
    text: " URL"
});

// Set up toggleStreamPlayButton overlay.
var toggleStreamPlayButton = Overlays.addOverlay("text", {
    x: 122,
    y: 275,
    width: 38,
    height: 28,
    backgroundColor: { red: 0, green: 0, blue: 0},
    color: { red: 255, green: 255, blue: 0},
    font: {size: 15},
    topMargin: 8,
    text: " Play"
});

// Set up toggleStreamStopButton overlay.
var toggleStreamStopButton = Overlays.addOverlay("text", {
    x: 166,
    y: 275,
    width: 40,
    height: 28,
    backgroundColor: { red: 0, green: 0, blue: 0},
    color: { red: 255, green: 255, blue: 0},
    font: {size: 15},
    topMargin: 8,
    text: " Stop"
});

// Set up increaseVolumeButton overlay.
var toggleIncreaseVolumeButton = Overlays.addOverlay("text", {
    x: 211,
    y: 275,
    width: 18,
    height: 28,
    backgroundColor: { red: 0, green: 0, blue: 0},
    color: { red: 255, green: 255, blue: 0},
    font: {size: 15},
    topMargin: 8,
    text: " +"
});

// Set up decreaseVolumeButton overlay.
var toggleDecreaseVolumeButton = Overlays.addOverlay("text", {
    x: 234,
    y: 275,
    width: 15,
    height: 28,
    backgroundColor: { red: 0, green: 0, blue: 0},
    color: { red: 255, green: 255, blue: 0},
    font: {size: 15},
    topMargin: 8,
    text: " -"
});

// Function that adds mousePressEvent functionality to connect UI to enter stream URL, play and stop stream.
function mousePressEvent(event) {
    if (Overlays.getOverlayAtPoint({x: event.x, y: event.y}) == toggleStreamURLButton) {
        stream = Window.prompt("Enter Stream: ");
        var streamJSON = {
            action: "changeStream",
            stream: stream
        }
        streamWindow.eventBridge.emitScriptEvent(JSON.stringify(streamJSON));
    }
    if (Overlays.getOverlayAtPoint({x: event.x, y: event.y}) == toggleStreamPlayButton) {
        var streamJSON = {
            action: "changeStream",
            stream: stream
        }
        streamWindow.eventBridge.emitScriptEvent(JSON.stringify(streamJSON));
    }
    if (Overlays.getOverlayAtPoint({x: event.x, y: event.y}) == toggleStreamStopButton) {
        var streamJSON = {
            action: "changeStream",
            stream: ""
        }
        streamWindow.eventBridge.emitScriptEvent(JSON.stringify(streamJSON));
    }
    if (Overlays.getOverlayAtPoint({x: event.x, y: event.y}) == toggleIncreaseVolumeButton) {
        volume += 0.2;
        var volumeJSON = {
            action: "changeVolume",
            volume: volume
        }
        streamWindow.eventBridge.emitScriptEvent(JSON.stringify(volumeJSON));
    }
    if (Overlays.getOverlayAtPoint({x: event.x, y: event.y}) == toggleDecreaseVolumeButton) {
        volume -= 0.2;
        var volumeJSON = {
            action: "changeVolume",
            volume: volume
        }
        streamWindow.eventBridge.emitScriptEvent(JSON.stringify(volumeJSON));
    }
}

// Call function.
Controller.mousePressEvent.connect(mousePressEvent);
streamWindow.setVisible(false);

// Function to delete overlays upon exit.
function onScriptEnding() {
    Overlays.deleteOverlay(toggleStreamURLButton);
    Overlays.deleteOverlay(toggleStreamPlayButton);
    Overlays.deleteOverlay(toggleStreamStopButton);
    Overlays.deleteOverlay(toggleIncreaseVolumeButton);
    Overlays.deleteOverlay(toggleDecreaseVolumeButton);
}

// Call function.
Script.scriptEnding.connect(onScriptEnding);
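The player page and the Interface script talk over the WebWindow event bridge using small JSON messages; the two message shapes are defined in the script above and consumed by examples/html/jsstreamplayer.html further down. A minimal sketch of driving the page from a script, assuming the same `streamWindow` object as above (the URL and volume values are placeholders):

// Sketch: the two message shapes the stream player page understands.
streamWindow.eventBridge.emitScriptEvent(JSON.stringify({
    action: "changeStream",
    stream: "http://example.com/some-stream.mp3"  // placeholder stream URL
}));
streamWindow.eventBridge.emitScriptEvent(JSON.stringify({
    action: "changeVolume",
    volume: 0.8
}));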
@@ -0,0 +1,33 @@
//
// #20628: JS Stream Player Domain-Zone-Entity
// ********************************************
//
// Created by Kevin M. Thomas and Thoys 07/20/15.
// Copyright 2015 High Fidelity, Inc.
// kevintown.net
//
// JavaScript for the High Fidelity interface that is an entity script to be placed in a chosen entity inside a domain-zone.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//


// Function which exists inside of an entity which triggers as a user approaches it.
(function() {
    const SCRIPT_NAME = "https://dl.dropboxusercontent.com/u/17344741/jsstreamplayer/jsstreamplayerdomain-zone.js";
    function isScriptRunning(script) {
        script = script.toLowerCase().trim();
        var runningScripts = ScriptDiscoveryService.getRunning();
        for (i in runningScripts) {
            if (runningScripts[i].url.toLowerCase().trim() == script) {
                return true;
            }
        }
        return false;
    };

    if (!isScriptRunning(SCRIPT_NAME)) {
        Script.load(SCRIPT_NAME);
    }
})
@@ -12,29 +12,297 @@
//
HIFI_PUBLIC_BUCKET = "http://s3.amazonaws.com/hifi-public/";

const NUM_LAYERS = 16;
const BASE_DIMENSION = { x: 7, y: 2, z: 7 };
const BLOCKS_PER_LAYER = 3;
const BLOCK_SIZE = {x: 0.2, y: 0.1, z: 0.8};
const BLOCK_SPACING = BLOCK_SIZE.x / 3;
Script.include("../../libraries/toolBars.js");

const DEFAULT_NUM_LAYERS = 16;
const DEFAULT_BASE_DIMENSION = { x: 7, y: 2, z: 7 };
const DEFAULT_BLOCKS_PER_LAYER = 3;
const DEFAULT_BLOCK_SIZE = {x: 0.2, y: 0.1, z: 0.8};
const DEFAULT_BLOCK_SPACING = DEFAULT_BLOCK_SIZE.x / DEFAULT_BLOCKS_PER_LAYER;
// BLOCK_HEIGHT_VARIATION removes a random percentages of the default block height per block. (for example 0.001 %)
const BLOCK_HEIGHT_VARIATION = 0.001;
const GRAVITY = {x: 0, y: -2.8, z: 0};
const DENSITY = 2000;
const DAMPING_FACTOR = 0.98;
const ANGULAR_DAMPING_FACTOR = 0.8;
const FRICTION = 0.99;
const RESTITUTION = 0.0;
const SPAWN_DISTANCE = 3;
const BLOCK_YAW_OFFSET = 45;
const DEFAULT_BLOCK_HEIGHT_VARIATION = 0.001;
const DEFAULT_GRAVITY = {x: 0, y: -2.8, z: 0};
const DEFAULT_DENSITY = 2000;
const DEFAULT_DAMPING_FACTOR = 0.98;
const DEFAULT_ANGULAR_DAMPING_FACTOR = 0.8;
const DEFAULT_FRICTION = 0.99;
const DEFAULT_RESTITUTION = 0.0;
const DEFAULT_SPAWN_DISTANCE = 3;
const DEFAULT_BLOCK_YAW_OFFSET = 45;

var editMode = false;

const BUTTON_DIMENSIONS = {width: 49, height: 49};
const MAXIMUM_PERCENTAGE = 100.0;
const NO_ANGLE = 0;
const RIGHT_ANGLE = 90;

var windowWidth = Window.innerWidth;
var size;
var pieces = [];
var ground = false;
var layerRotated = false;
var button;
var cogButton;
var toolBar;

SettingsWindow = function() {
var _this = this;
this.plankyStack = null;
this.webWindow = null;
this.init = function(plankyStack) {
_this.webWindow = new WebWindow('Planky', Script.resolvePath('../../html/plankySettings.html'), 255, 500, true);
_this.webWindow.setVisible(false);
_this.webWindow.eventBridge.webEventReceived.connect(_this.onWebEventReceived);
_this.plankyStack = plankyStack;
};
this.sendData = function(data) {
_this.webWindow.eventBridge.emitScriptEvent(JSON.stringify(data));
};
this.onWebEventReceived = function(data) {
data = JSON.parse(data);
switch (data.action) {
case 'loaded':
_this.sendData({action: 'load', options: _this.plankyStack.options.getJSON()})
break;
case 'value-change':
_this.plankyStack.onValueChanged(data.option, data.value);
break;
case 'factory-reset':
_this.plankyStack.options.factoryReset();
_this.sendData({action: 'load', options: _this.plankyStack.options.getJSON()})
break;
case 'save-default':
_this.plankyStack.options.save();
break;
case 'cleanup':
_this.plankyStack.deRez();
break;
default:
Window.alert('[planky] unknown action ' + data.action);
}
};
};

PlankyOptions = function() {
var _this = this;
this.factoryReset = function() {
_this.setDefaults();
Settings.setValue('plankyOptions', '');
};
this.save = function() {
Settings.setValue('plankyOptions', JSON.stringify(_this.getJSON()));
};
this.load = function() {
_this.setDefaults();
var plankyOptions = Settings.getValue('plankyOptions')
if (plankyOptions === null || plankyOptions === '') {
return;
}
var options = JSON.parse(plankyOptions);
for (option in options) {
_this[option] = options[option];
}
};
this.getJSON = function() {
return {
numLayers: _this.numLayers,
baseDimension: _this.baseDimension,
blocksPerLayer: _this.blocksPerLayer,
blockSize: _this.blockSize,
blockSpacing: _this.blockSpacing,
blockHeightVariation: _this.blockHeightVariation,
gravity: _this.gravity,
density: _this.density,
dampingFactor: _this.dampingFactor,
angularDampingFactor: _this.angularDampingFactor,
friction: _this.friction,
restitution: _this.restitution,
spawnDistance: _this.spawnDistance,
blockYawOffset: _this.blockYawOffset,
};
}
this.setDefaults = function() {
_this.numLayers = DEFAULT_NUM_LAYERS;
_this.baseDimension = DEFAULT_BASE_DIMENSION;
_this.blocksPerLayer = DEFAULT_BLOCKS_PER_LAYER;
_this.blockSize = DEFAULT_BLOCK_SIZE;
_this.blockSpacing = DEFAULT_BLOCK_SPACING;
_this.blockHeightVariation = DEFAULT_BLOCK_HEIGHT_VARIATION;
_this.gravity = DEFAULT_GRAVITY;
_this.density = DEFAULT_DENSITY;
_this.dampingFactor = DEFAULT_DAMPING_FACTOR;
_this.angularDampingFactor = DEFAULT_ANGULAR_DAMPING_FACTOR;
_this.friction = DEFAULT_FRICTION;
_this.restitution = DEFAULT_RESTITUTION;
_this.spawnDistance = DEFAULT_SPAWN_DISTANCE;
_this.blockYawOffset = DEFAULT_BLOCK_YAW_OFFSET;
};
this.load();
};

// The PlankyStack exists out of rows and layers
PlankyStack = function() {
var _this = this;
this.planks = [];
this.ground = false;
this.editLines = [];
this.options = new PlankyOptions();

this.deRez = function() {
_this.planks.forEach(function(plank) {
Entities.deleteEntity(plank.entity);
});
_this.planks = [];
if (_this.ground) {
Entities.deleteEntity(_this.ground);
}
_this.editLines.forEach(function(line) {
Entities.deleteEntity(line);
})
_this.editLines = [];
if (_this.centerLine) {
Entities.deleteEntity(_this.centerLine);
}
_this.ground = false;
_this.centerLine = false;
};

this.rez = function() {
if (_this.planks.length > 0) {
_this.deRez();
}
_this.baseRotation = Quat.fromPitchYawRollDegrees(0.0, MyAvatar.bodyYaw, 0.0);
var basePosition = Vec3.sum(MyAvatar.position, Vec3.multiply(_this.options.spawnDistance, Quat.getFront(_this.baseRotation)));
basePosition.y = grabLowestJointY();
_this.basePosition = basePosition;
_this.refresh();
};

//private function
var refreshGround = function() {
if (!_this.ground) {
_this.ground = Entities.addEntity({
type: 'Model',
modelURL: HIFI_PUBLIC_BUCKET + 'eric/models/woodFloor.fbx',
dimensions: _this.options.baseDimension,
position: Vec3.sum(_this.basePosition, {y: -(_this.options.baseDimension.y / 2)}),
rotation: _this.baseRotation,
shapeType: 'box'
});
return;
}
// move ground to rez position/rotation
Entities.editEntity(_this.ground, {dimensions: _this.options.baseDimension, position: Vec3.sum(_this.basePosition, {y: -(_this.options.baseDimension.y / 2)}), rotation: _this.baseRotation});
};

var refreshLines = function() {
if (_this.editLines.length === 0) {
_this.editLines.push(Entities.addEntity({
type: 'Line',
dimensions: {x: 5, y: 21, z: 5},
position: Vec3.sum(_this.basePosition, {y: -(_this.options.baseDimension.y / 2)}),
lineWidth: 7,
color: {red: 20, green: 20, blue: 20},
linePoints: [{x: 0, y: 0, z: 0}, {x: 0, y: 10, z: 0}],
visible: editMode
}));
return;
}
_this.editLines.forEach(function(line) {
Entities.editEntity(line, {visible: editMode});
})
};

var trimDimension = function(dimension, maxIndex) {
var removingPlanks = [];
_this.planks.forEach(function(plank, index, object) {
if (plank[dimension] > maxIndex) {
removingPlanks.push(index);
}
});
removingPlanks.reverse();
for (var i = 0; i < removingPlanks.length; i++) {
Entities.deleteEntity(_this.planks[removingPlanks[i]].entity);
_this.planks.splice(removingPlanks[i], 1);
}
};

var createOrUpdate = function(layer, row) {
var found = false;
var layerRotated = layer % 2 === 0;
var layerRotation = Quat.fromPitchYawRollDegrees(0, layerRotated ? NO_ANGLE : RIGHT_ANGLE, 0.0);
var blockPositionXZ = (row - (_this.options.blocksPerLayer / 2) + 0.5) * (_this.options.blockSpacing + _this.options.blockSize.x);
var localTransform = Vec3.multiplyQbyV(_this.offsetRot, {
x: (layerRotated ? blockPositionXZ : 0),
y: (_this.options.blockSize.y / 2) + (_this.options.blockSize.y * layer),
z: (layerRotated ? 0 : blockPositionXZ)
});
var newProperties = {
type: 'Model',
modelURL: HIFI_PUBLIC_BUCKET + 'marketplace/hificontent/Games/blocks/block.fbx',
shapeType: 'box',
name: 'PlankyBlock' + layer + '-' + row,
dimensions: Vec3.sum(_this.options.blockSize, {x: 0, y: -((_this.options.blockSize.y * (_this.options.blockHeightVariation / MAXIMUM_PERCENTAGE)) * Math.random()), z: 0}),
position: Vec3.sum(_this.basePosition, localTransform),
rotation: Quat.multiply(layerRotation, _this.offsetRot),
damping: _this.options.dampingFactor,
restitution: _this.options.restitution,
friction: _this.options.friction,
angularDamping: _this.options.angularDampingFactor,
gravity: _this.options.gravity,
density: _this.options.density,
velocity: {x: 0, y: 0, z: 0},
angularVelocity: Quat.fromPitchYawRollDegrees(0, 0, 0),
ignoreForCollisions: true
};
_this.planks.forEach(function(plank, index, object) {
if (plank.layer === layer && plank.row === row) {
Entities.editEntity(plank.entity, newProperties);
found = true;
// break loop:
return false;
}
});
if (!found) {
_this.planks.push({layer: layer, row: row, entity: Entities.addEntity(newProperties)})
}
};

this.onValueChanged = function(option, value) {
_this.options[option] = value;
if (['numLayers', 'blocksPerLayer', 'blockSize', 'blockSpacing', 'blockHeightVariation'].indexOf(option) !== -1) {
_this.refresh();
}
};

this.refresh = function() {
refreshGround();
refreshLines();
trimDimension('layer', _this.options.numLayers - 1);
trimDimension('row', _this.options.blocksPerLayer - 1);
_this.offsetRot = Quat.multiply(_this.baseRotation, Quat.fromPitchYawRollDegrees(0.0, _this.options.blockYawOffset, 0.0));
for (var layer = 0; layer < _this.options.numLayers; layer++) {
for (var row = 0; row < _this.options.blocksPerLayer; row++) {
createOrUpdate(layer, row);
}
}
if (!editMode) {
_this.planks.forEach(function(plank, index, object) {
Entities.editEntity(plank.entity, {ignoreForCollisions: false, collisionsWillMove: true});
});
}
};

this.isFound = function() {
//TODO: identify entities here until one is found
return _this.planks.length > 0;
};
};

var settingsWindow = new SettingsWindow();
var plankyStack = new PlankyStack();
settingsWindow.init(plankyStack);

function grabLowestJointY() {
var jointNames = MyAvatar.getJointNames();

@@ -47,108 +315,60 @@ function grabLowestJointY() {
return floorY;
}

function getButtonPosX() {
return windowWidth - ((BUTTON_DIMENSIONS.width / 2) + BUTTON_DIMENSIONS.width);
}
toolBar = new ToolBar(0, 0, ToolBar.HORIZONTAL, "highfidelity.games.planky", function (windowDimensions, toolbar) {
return {
x: windowDimensions.x - (toolbar.width * 1.1),
y: toolbar.height / 2
};
});

var button = Overlays.addOverlay('image', {
x: getButtonPosX(),
y: 10,
button = toolBar.addTool({
width: BUTTON_DIMENSIONS.width,
height: BUTTON_DIMENSIONS.height,
imageURL: HIFI_PUBLIC_BUCKET + 'marketplace/hificontent/Games/blocks/planky_button.svg',
alpha: 0.8
alpha: 0.8,
visible: true
});

cogButton = toolBar.addTool({
width: BUTTON_DIMENSIONS.width,
height: BUTTON_DIMENSIONS.height,
imageURL: HIFI_PUBLIC_BUCKET + 'marketplace/hificontent/Games/blocks/cog.svg',
subImage: { x: 0, y: BUTTON_DIMENSIONS.height, width: BUTTON_DIMENSIONS.width, height: BUTTON_DIMENSIONS.height },
alpha: 0.8,
visible: true
}, true, false);

function resetBlocks() {
pieces.forEach(function(piece) {
Entities.deleteEntity(piece);
});
pieces = [];
var avatarRot = Quat.fromPitchYawRollDegrees(0.0, MyAvatar.bodyYaw, 0.0);
basePosition = Vec3.sum(MyAvatar.position, Vec3.multiply(SPAWN_DISTANCE, Quat.getFront(avatarRot)));
basePosition.y = grabLowestJointY() - (BASE_DIMENSION.y / 2);
if (!ground) {
ground = Entities.addEntity({
type: 'Model',
modelURL: HIFI_PUBLIC_BUCKET + 'eric/models/woodFloor.fbx',
dimensions: BASE_DIMENSION,
position: basePosition,
rotation: avatarRot,
shapeType: 'box'
});
} else {
Entities.editEntity(ground, {position: basePosition, rotation: avatarRot});
}
var offsetRot = Quat.multiply(avatarRot, Quat.fromPitchYawRollDegrees(0.0, BLOCK_YAW_OFFSET, 0.0));
basePosition.y += (BASE_DIMENSION.y / 2);
for (var layerIndex = 0; layerIndex < NUM_LAYERS; layerIndex++) {
var layerRotated = layerIndex % 2 === 0;
var offset = -(BLOCK_SPACING);
var layerRotation = Quat.fromPitchYawRollDegrees(0, layerRotated ? 0 : 90, 0.0);
for (var blockIndex = 0; blockIndex < BLOCKS_PER_LAYER; blockIndex++) {
var blockPositionXZ = BLOCK_SIZE.x * blockIndex - (BLOCK_SIZE.x * 3 / 2 - BLOCK_SIZE.x / 2);
var localTransform = Vec3.multiplyQbyV(offsetRot, {
x: (layerRotated ? blockPositionXZ + offset: 0),
y: (BLOCK_SIZE.y / 2) + (BLOCK_SIZE.y * layerIndex),
z: (layerRotated ? 0 : blockPositionXZ + offset)
});
pieces.push(Entities.addEntity({
type: 'Model',
modelURL: HIFI_PUBLIC_BUCKET + 'marketplace/hificontent/Games/blocks/block.fbx',
shapeType: 'box',
name: 'PlankyBlock' + ((layerIndex * BLOCKS_PER_LAYER) + blockIndex),
dimensions: {
x: BLOCK_SIZE.x,
y: BLOCK_SIZE.y - ((BLOCK_SIZE.y * (BLOCK_HEIGHT_VARIATION / MAXIMUM_PERCENTAGE)) * Math.random()),
z: BLOCK_SIZE.z
},
position: {
x: basePosition.x + localTransform.x,
y: basePosition.y + localTransform.y,
z: basePosition.z + localTransform.z
},
rotation: Quat.multiply(layerRotation, offsetRot),
collisionsWillMove: true,
damping: DAMPING_FACTOR,
restitution: RESTITUTION,
friction: FRICTION,
angularDamping: ANGULAR_DAMPING_FACTOR,
gravity: GRAVITY,
density: DENSITY
}));
offset += BLOCK_SPACING;
Controller.mousePressEvent.connect(function(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
if (toolBar.clicked(clickedOverlay) === button) {
if (!plankyStack.isFound()) {
plankyStack.rez();
return;
}
plankyStack.refresh();
} else if (toolBar.clicked(clickedOverlay) === cogButton) {
editMode = !editMode;
toolBar.selectTool(cogButton, editMode);
settingsWindow.webWindow.setVisible(editMode);
if(plankyStack.planks.length) {
plankyStack.refresh();
}
}
}
});

function mousePressEvent(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({x: event.x, y: event.y});
if (clickedOverlay === button) {
resetBlocks();
Script.update.connect(function() {
if (windowWidth !== Window.innerWidth) {
windowWidth = Window.innerWidth;
Overlays.editOverlay(button, {x: getButtonPosX()});
Overlays.editOverlay(cogButton, {x: getCogButtonPosX()});
}
}
})

Controller.mousePressEvent.connect(mousePressEvent);

function cleanup() {
Overlays.deleteOverlay(button);
Script.scriptEnding.connect(function() {
toolBar.cleanup();
if (ground) {
Entities.deleteEntity(ground);
}
pieces.forEach(function(piece) {
Entities.deleteEntity(piece);
});
pieces = [];
}

function onUpdate() {
if (windowWidth != Window.innerWidth) {
windowWidth = Window.innerWidth;
Overlays.editOverlay(button, {x: getButtonPosX()});
}
}

Script.update.connect(onUpdate)
Script.scriptEnding.connect(cleanup);
plankyStack.deRez();
});
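Planky's settings persistence above is a plain JSON round-trip through the Settings API. A minimal sketch of the same pattern in isolation, reusing the 'plankyOptions' key from the code above (the options object here is an illustrative subset):

// Sketch: save and restore an options object the way PlankyOptions.save()/load() do.
var KEY = 'plankyOptions';
var options = { numLayers: 16, blocksPerLayer: 3 };  // illustrative subset of the real defaults

Settings.setValue(KEY, JSON.stringify(options));      // what 'save-default' triggers

var saved = Settings.getValue(KEY);
if (saved !== null && saved !== '') {                 // load, falling back to defaults otherwise
    var parsed = JSON.parse(saved);
    for (var option in parsed) {
        options[option] = parsed[option];
    }
}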
examples/html/jsstreamplayer.html (new file, 42 lines)
@@ -0,0 +1,42 @@
<!-- -->
<!-- #20622: JS Stream Player -->
<!-- ************************* -->
<!-- -->
<!-- Created by Kevin M. Thomas and Thoys 07/17/15. -->
<!-- Copyright 2015 High Fidelity, Inc. -->
<!-- kevintown.net -->
<!-- -->
<!-- JavaScript for the High Fidelity interface that creates a stream player with a UI and keyPressEvents for adding a stream URL in addition to play, stop and volume functionality. -->
<!-- -->
<!-- Distributed under the Apache License, Version 2.0. -->
<!-- See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html -->
<!-- -->


<!DOCTYPE html>
<html lang="en">
<head>
    <script src="http://code.jquery.com/jquery-1.11.3.min.js"></script>

    <script type="text/javascript">
        $(function(){
            if (window.EventBridge !== undefined) {
                EventBridge.scriptEventReceived.connect(function(data) {
                    var myData = JSON.parse(data);
                    if (myData.action == "changeStream") {
                        $('body > audio').attr("src", myData.stream);
                    }
                    if (myData.action == "changeVolume") {
                        $('body > audio').prop("volume", myData.volume);
                    }
                });
            }
            EventBridge.emitWebEvent("loaded");
        });
    </script>
</head>

<body>
    <audio controls src="" autoplay></audio>
</body>
</html>
examples/html/plankySettings.html (new file, 140 lines)
@@ -0,0 +1,140 @@
<!DOCTYPE html>
<html>
<head>
    <link rel="stylesheet" type="text/css" href="style.css">
    <script type="text/javascript" src="jquery-2.1.4.min.js"></script>
    <script type="text/javascript">
        var properties = [];
        function sendWebEvent(data) {
            EventBridge.emitWebEvent(JSON.stringify(data));
        }
        PropertyInput = function(key, label, value, attributes) {
            this.key = key;
            this.label = label;
            this.value = value;
            this.attributes = attributes;
            var self = this;
            this.construct = function() {
                self.widget = $('<div>').addClass('property').append(self.createLabel()).append(self.createValueDiv());
                $('#properties-list').append(self.widget);
            };
            this.createValue = self.__proto__.createValue;
            this.getValue = self.__proto__.getValue;
            this.createValueDiv = function() {
                self.inputDiv = $('<div>').addClass('value').append(self.createValue());
                return self.inputDiv;
            };
            this.addButton = function(id, buttonText) {
                self.inputDiv.append($('<div>').append($('<input>').attr('type', 'button').attr('id', id).val(buttonText)));
            };
            this.createWidget = function() {
                self.widget = $('<div>').addClass('property').append(self.createLabel()).append(self.inputDiv);
                return self.widget;
            };
            this.createLabel = function() {
                self.label = $('<div>').addClass('label').text(label);
                return self.label;
            };
            this.setValue = function(value) {
                self.input.val(value);
            };
            this.construct();
        };

        var valueChangeHandler = function() {
            sendWebEvent({
                action: 'value-change',
                option: $(this).data('var-name'),
                value: properties[$(this).data('var-name')].getValue()
            });
        };

        NumberInput = function(key, label, value, attributes) {
            PropertyInput.call(this, key, label, value, attributes);
        };
        NumberInput.prototype = Object.create(PropertyInput.prototype);
        NumberInput.prototype.constructor = NumberInput;
        NumberInput.prototype.createValue = function() {
            this.input = $('<input>').data('var-name', this.key).attr('name', this.key).attr('type', 'number').val(this.value).on('change', valueChangeHandler);
            if (this.attributes !== undefined) {
                this.input.attr(this.attributes);
            }
            return this.input;
        };
        NumberInput.prototype.getValue = function() {
            return parseFloat(this.input.val());
        };

        CoordinateInput = function(key, label, value, attributes) {
            PropertyInput.call(this, key, label, value, attributes);
        };
        CoordinateInput.prototype = Object.create(PropertyInput.prototype);
        CoordinateInput.prototype.constructor = CoordinateInput;
        CoordinateInput.prototype.createValue = function() {
            this.inputX = $('<input>').data('var-name', this.key).attr('name', this.key + '-x').attr('type', 'number').addClass('coord').val(this.value.x).on('change', valueChangeHandler);
            this.inputY = $('<input>').data('var-name', this.key).attr('name', this.key + '-y').attr('type', 'number').addClass('coord').val(this.value.y).on('change', valueChangeHandler);
            this.inputZ = $('<input>').data('var-name', this.key).attr('name', this.key + '-z').attr('type', 'number').addClass('coord').val(this.value.z).on('change', valueChangeHandler);
            if (this.attributes !== undefined) {
                this.inputX.attr(this.attributes);
                this.inputY.attr(this.attributes);
                this.inputZ.attr(this.attributes);
            }
            return [encapsulateInput(this.inputX, 'X'), encapsulateInput(this.inputY, 'Y'), encapsulateInput(this.inputZ, 'Z')];
        };
        CoordinateInput.prototype.getValue = function() {
            return {x: parseFloat(this.inputX.val()), y: parseFloat(this.inputY.val()), z: parseFloat(this.inputZ.val())};
        };
        function encapsulateInput(input, label) {
            return $('<div>').addClass('input-area').append(label + ' ').append(input);
        }

        function addHeader(label) {
            $('#properties-list').append($('<div>').addClass('section-header').append($('<label>').text(label)));
        }

        $(function() {
            addHeader('Stack Settings');
            properties['numLayers'] = new NumberInput('numLayers', 'Layers', 17, {'min': 0, 'max': 300, 'step': 1});
            properties['blocksPerLayer'] = new NumberInput('blocksPerLayer', 'Blocks per layer', 4, {'min': 1, 'max': 100, 'step': 1});
            properties['blockSize'] = new CoordinateInput('blockSize', 'Block size', {x: 0.2, y: 0.1, z: 0.8}, {'min': 0.05, 'max': 20, 'step': 0.1});
            properties['blockSpacing'] = new NumberInput('blockSpacing', 'Block spacing', properties['blockSize'].getValue().x / properties['blocksPerLayer'].getValue(), {'min': 0, 'max': 20, 'step': 0.01});
            properties['blockSpacing'].addButton('btn-recalculate-spacing', 'Recalculate spacing');
            $('#btn-recalculate-spacing').on('click', function() {
                properties['blockSpacing'].setValue(properties['blockSize'].getValue().x / properties['blocksPerLayer'].getValue());
            });
            properties['blockHeightVariation'] = new NumberInput('blockHeightVariation', 'Block height variation (%)', 0.1, {'min': 0, 'max': 1, 'step': 0.01});
            addHeader('Physics Settings');
            properties['gravity'] = new CoordinateInput('gravity', 'Gravity', {x: 0, y: -2.8, z: 0}, {'step': 0.01});
            properties['density'] = new NumberInput('density', 'Density', 4000, {'min': 0, 'max': 4000, 'step': 1});
            properties['dampingFactor'] = new NumberInput('dampingFactor', 'Damping factor', 0.98, {'min': 0, 'max': 1, 'step': 0.01});
            properties['angularDampingFactor'] = new NumberInput('angularDampingFactor', 'Angular damping factor', 0.8, {'min': 0, 'max': 1, 'step': 0.01});
            properties['friction'] = new NumberInput('friction', 'Friction', 0.99, {'min': 0, 'max': 1, 'step': 0.01});
            properties['restitution'] = new NumberInput('restitution', 'Restitution', 0.0, {'min': 0, 'max': 1, 'step': 0.01});
            addHeader('Spawn Settings');
            properties['spawnDistance'] = new NumberInput('spawnDistance', 'Spawn distance (meters)', 3);
            properties['blockYawOffset'] = new NumberInput('blockYawOffset', 'Block yaw offset (degrees)', 45, {'min': 0, 'max': 360, 'step': 1});
            properties['baseDimension'] = new CoordinateInput('baseDimension', 'Base dimension', {x: 7, y: 2, z: 7}, {'min': 0.5, 'max': 200, 'step': 0.1});
            addHeader('Actions');
            $('#properties-list')
                .append($('<input>').val('factory reset').attr('type', 'button').on('click', function() { sendWebEvent({action: 'factory-reset'}); }))
                .append($('<input>').val('save as default').attr('type', 'button').on('click', function() { sendWebEvent({action: 'save-default'}); }))
                .append($('<input>').val('cleanup planky').attr('type', 'button').on('click', function() { sendWebEvent({action: 'cleanup'}); }));
            if (window.EventBridge !== undefined) {
                EventBridge.scriptEventReceived.connect(function(data) {
                    data = JSON.parse(data);
                    if (data.action == 'load') {
                        $.each(data.options, function(option, value) {
                            properties[option].setValue(value);
                        });
                    }
                });
            }
            sendWebEvent({action: 'loaded'});
        });
    </script>
</head>
<body class="properties">
    <div id="properties-list"></div>
</body>
</html>
examples/leaves.js (new executable file, 331 lines)
@@ -0,0 +1,331 @@
//
// Leaves.js
// examples
//
// Created by Bing Shearer on 14 Jul 2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var leafName = "scriptLeaf";
var leafSquall = function (properties) {
    var // Properties
        squallOrigin,
        squallRadius,
        leavesPerMinute = 60,
        leafSize = {
            x: 0.1,
            y: 0.1,
            z: 0.1
        },
        leafFallSpeed = 1, // m/s
        leafLifetime = 60, // Seconds
        leafSpinMax = 0, // Maximum angular velocity per axis; deg/s
        debug = false, // Display origin circle; don't use running on Stack Manager
        // Other
        squallCircle,
        SQUALL_CIRCLE_COLOR = {
            red: 255,
            green: 0,
            blue: 0
        },
        SQUALL_CIRCLE_ALPHA = 0.5,
        SQUALL_CIRCLE_ROTATION = Quat.fromPitchYawRollDegrees(90, 0, 0),
        leafProperties,
        leaf_MODEL_URL = "https://hifi-public.s3.amazonaws.com/ozan/support/forBing/palmLeaf.fbx",
        leafTimer,
        leaves = [], // HACK: Work around leaves not always getting velocities
        leafVelocities = [], // HACK: Work around leaves not always getting velocities
        DEGREES_TO_RADIANS = Math.PI / 180,
        leafDeleteOnTearDown = true,
        maxLeaves,
        leafCount,
        nearbyEntities,
        complexMovement = false,
        movementTime = 0,
        maxSpinRadians = properties.leafSpinMax * DEGREES_TO_RADIANS,
        windFactor,
        leafDeleteOnGround = false,
        floorHeight = null;


    function processProperties() {
        if (!properties.hasOwnProperty("origin")) {
            print("ERROR: Leaf squall origin must be specified");
            return;
        }
        squallOrigin = properties.origin;

        if (!properties.hasOwnProperty("radius")) {
            print("ERROR: Leaf squall radius must be specified");
            return;
        }
        squallRadius = properties.radius;

        if (properties.hasOwnProperty("leavesPerMinute")) {
            leavesPerMinute = properties.leavesPerMinute;
        }

        if (properties.hasOwnProperty("leafSize")) {
            leafSize = properties.leafSize;
        }

        if (properties.hasOwnProperty("leafFallSpeed")) {
            leafFallSpeed = properties.leafFallSpeed;
        }

        if (properties.hasOwnProperty("leafLifetime")) {
            leafLifetime = properties.leafLifetime;
        }

        if (properties.hasOwnProperty("leafSpinMax")) {
            leafSpinMax = properties.leafSpinMax;
        }

        if (properties.hasOwnProperty("debug")) {
            debug = properties.debug;
        }
        if (properties.hasOwnProperty("floorHeight")) {
            floorHeight = properties.floorHeight;
        }
        if (properties.hasOwnProperty("maxLeaves")) {
            maxLeaves = properties.maxLeaves;
        }
        if (properties.hasOwnProperty("complexMovement")) {
            complexMovement = properties.complexMovement;
        }
        if (properties.hasOwnProperty("leafDeleteOnGround")) {
            leafDeleteOnGround = properties.leafDeleteOnGround;
        }
        if (properties.hasOwnProperty("windFactor")) {
            windFactor = properties.windFactor;
        } else if (complexMovement == true){
            print("ERROR: Wind Factor must be defined for complex movement")
        }

        leafProperties = {
            type: "Model",
            name: leafName,
            modelURL: leaf_MODEL_URL,
            lifetime: leafLifetime,
            dimensions: leafSize,
            velocity: {
                x: 0,
                y: -leafFallSpeed,
                z: 0
            },
            damping: 0,
            angularDamping: 0,
            ignoreForCollisions: true

        };
    }

    function createleaf() {
        var angle,
            radius,
            offset,
            leaf,
            spin = {
                x: 0,
                y: 0,
                z: 0
            },
            i;

        // HACK: Work around leaves not always getting velocities set at creation
        for (i = 0; i < leaves.length; i++) {
            Entities.editEntity(leaves[i], leafVelocities[i]);
        }

        angle = Math.random() * leafSpinMax;
        radius = Math.random() * squallRadius;
        offset = Vec3.multiplyQbyV(Quat.fromPitchYawRollDegrees(0, angle, 0), {
            x: 0,
            y: -0.1,
            z: radius
        });
        leafProperties.position = Vec3.sum(squallOrigin, offset);
        if (properties.leafSpinMax > 0 && !complexMovement) {
            spin = {
                x: Math.random() * maxSpinRadians,
                y: Math.random() * maxSpinRadians,
                z: Math.random() * maxSpinRadians
            };
            leafProperties.angularVelocity = spin;
        } else if (complexMovement) {
            spin = {
                x: 0,
                y: 0,
                z: 0
            };
            leafProperties.angularVelocity = spin
        }
        leaf = Entities.addEntity(leafProperties);

        // HACK: Work around leaves not always getting velocities set at creation
        leaves.push(leaf);
        leafVelocities.push({
            velocity: leafProperties.velocity,
            angularVelocity: spin
        });
        if (leaves.length > 5) {
            leaves.shift();
            leafVelocities.shift();
        }
    }

    function setUp() {
        if (debug) {
            squallCircle = Overlays.addOverlay("circle3d", {
                size: {
                    x: 2 * squallRadius,
                    y: 2 * squallRadius
                },
                color: SQUALL_CIRCLE_COLOR,
                alpha: SQUALL_CIRCLE_ALPHA,
                solid: true,
                visible: debug,
                position: squallOrigin,
                rotation: SQUALL_CIRCLE_ROTATION
            });
        }

        leafTimer = Script.setInterval(function () {
            if (leafCount <= maxLeaves - 1) {
                createleaf()
            }
        }, 60000 / leavesPerMinute);
    }
    Script.setInterval(function () {
        nearbyEntities = Entities.findEntities(squallOrigin, squallRadius);
        newLeafMovement()
    }, 100);

    function newLeafMovement() { //new additions to leaf code. Operates at 10 Hz or every 100 ms
        movementTime += 0.1;
        var currentLeaf,
            randomRotationSpeed = {
                x: maxSpinRadians * Math.sin(movementTime),
                y: maxSpinRadians * Math.random(),
                z: maxSpinRadians * Math.sin(movementTime / 7)
            };
        for (var i = 0; i < nearbyEntities.length; i++) {
            var entityProperties = Entities.getEntityProperties(nearbyEntities[i]);
            var entityName = entityProperties.name;
            if (leafName === entityName) {
                currentLeaf = nearbyEntities[i];
                var leafHeight = entityProperties.position.y;
                if (complexMovement && leafHeight > floorHeight || complexMovement && floorHeight == null) { //actual new movement code;
                    var leafCurrentVel = entityProperties.velocity,
                        leafCurrentRot = entityProperties.rotation,
                        yVec = {
                            x: 0,
                            y: 1,
                            z: 0
                        },
                        leafYinWFVec = Vec3.multiplyQbyV(leafCurrentRot, yVec),
                        leafLocalHorVec = Vec3.cross(leafYinWFVec, yVec),
                        leafMostDownVec = Vec3.cross(leafYinWFVec, leafLocalHorVec),
                        leafDesiredVel = Vec3.multiply(leafMostDownVec, windFactor),
                        leafVelDelt = Vec3.subtract(leafDesiredVel, leafCurrentVel),
                        leafNewVel = Vec3.sum(leafCurrentVel, Vec3.multiply(leafVelDelt, windFactor));
                    Entities.editEntity(currentLeaf, {
                        angularVelocity: randomRotationSpeed,
                        velocity: leafNewVel
                    })
                } else if (leafHeight <= floorHeight) {
                    if (!leafDeleteOnGround) {
                        Entities.editEntity(nearbyEntities[i], {
                            locked: false,
                            velocity: {
                                x: 0,
                                y: 0,
                                z: 0
                            },
                            angularVelocity: {
                                x: 0,
                                y: 0,
                                z: 0
                            }
                        })
                    } else {
                        Entities.deleteEntity(currentLeaf); // was "Entity.deleteEntity"; Entities is the scripting interface
                    }
                }
            }
        }
    }



    getLeafCount = Script.setInterval(function () {
        leafCount = 0
        for (var i = 0; i < nearbyEntities.length; i++) {
            var entityName = Entities.getEntityProperties(nearbyEntities[i]).name;
            //Stop Leaves at floorHeight
            if (leafName === entityName) {
                leafCount++;
                if (i == nearbyEntities.length - 1) {
                    //print(leafCount);
                }
            }
        }
    }, 1000)



    function tearDown() {
        Script.clearInterval(leafTimer);
        Overlays.deleteOverlay(squallCircle);
        if (leafDeleteOnTearDown) {
            for (var i = 0; i < nearbyEntities.length; i++) {
                var entityName = Entities.getEntityProperties(nearbyEntities[i]).name;
                if (leafName === entityName) {
                    //We have a match - delete this entity
                    Entities.editEntity(nearbyEntities[i], {
                        locked: false
                    });
                    Entities.deleteEntity(nearbyEntities[i]);
                }
            }
        }
    }



    processProperties();
    setUp();
    Script.scriptEnding.connect(tearDown);

    return {};
};

var leafSquall1 = new leafSquall({
    origin: {
        x: 3071.5,
        y: 2170,
        z: 6765.3
    },
    radius: 100,
    leavesPerMinute: 30,
    leafSize: {
        x: 0.3,
        y: 0.00,
        z: 0.3
    },
    leafFallSpeed: 0.4,
    leafLifetime: 100,
    leafSpinMax: 30,
    debug: false,
    maxLeaves: 100,
    leafDeleteOnTearDown: true,
    complexMovement: true,
    floorHeight: 2143.5,
    windFactor: 0.5,
    leafDeleteOnGround: false
});

// todo
//deal with depth issue
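Only origin and radius are required by processProperties(); every other property falls back to the defaults declared at the top of leafSquall. A minimal invocation, with placeholder coordinates, would look like this:

// Sketch: smallest valid leafSquall configuration; all other properties use the defaults above.
var simpleSquall = new leafSquall({
    origin: { x: 0, y: 10, z: 0 },  // placeholder position
    radius: 10
});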
@@ -44,6 +44,7 @@
emitStrength: emitStrength,
emitDirection: emitDirection,
color: color,
lifespan: 1.0,
visible: true,
locked: false });


@@ -67,13 +68,13 @@
var objs = [];
function Init() {
objs.push(new TestBox());
objs.push(new TestFx({ red: 255, blue: 0, green: 0 },
objs.push(new TestFx({ red: 255, green: 0, blue: 0 },
{ x: 0.5, y: 1.0, z: 0.0 },
100, 3, 1));
objs.push(new TestFx({ red: 0, blue: 255, green: 0 },
objs.push(new TestFx({ red: 0, green: 255, blue: 0 },
{ x: 0, y: 1, z: 0 },
1000, 5, 0.5));
objs.push(new TestFx({ red: 0, blue: 0, green: 255 },
objs.push(new TestFx({ red: 0, green: 0, blue: 255 },
{ x: -0.5, y: 1, z: 0 },
100, 3, 1));
}
examples/zones/jsstreamplayerdomain-zone-entity.js (new file, 33 lines)
@@ -0,0 +1,33 @@
//
// #20628: JS Stream Player Domain-Zone-Entity
// ********************************************
//
// Created by Kevin M. Thomas and Thoys 07/20/15.
// Copyright 2015 High Fidelity, Inc.
// kevintown.net
//
// JavaScript for the High Fidelity interface that is an entity script to be placed in a chosen entity inside a domain-zone.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//


// Function which exists inside of an entity which triggers as a user approaches it.
(function() {
    const SCRIPT_NAME = "https://dl.dropboxusercontent.com/u/17344741/jsstreamplayer/jsstreamplayerdomain-zone.js";
    function isScriptRunning(script) {
        script = script.toLowerCase().trim();
        var runningScripts = ScriptDiscoveryService.getRunning();
        for (i in runningScripts) {
            if (runningScripts[i].url.toLowerCase().trim() == script) {
                return true;
            }
        }
        return false;
    };

    if (!isScriptRunning(SCRIPT_NAME)) {
        Script.load(SCRIPT_NAME);
    }
})
examples/zones/jsstreamplayerdomain-zone.html (new file, 42 lines)
@@ -0,0 +1,42 @@
<!-- -->
<!-- #20628: JS Stream Player Domain-Zone -->
<!-- ************************************* -->
<!-- -->
<!-- Created by Kevin M. Thomas and Thoys 07/20/15. -->
<!-- Copyright 2015 High Fidelity, Inc. -->
<!-- kevintown.net -->
<!-- -->
<!-- JavaScript for the High Fidelity interface that creates a stream player with a UI for playing a domain-zone specified stream URL in addition to play, stop and volume functionality which is resident only in the domain-zone. -->
<!-- -->
<!-- Distributed under the Apache License, Version 2.0. -->
<!-- See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html -->
<!-- -->


<!DOCTYPE html>
<html lang="en">
<head>
    <script src="http://code.jquery.com/jquery-1.11.3.min.js"></script>

    <script type="text/javascript">
        $(function(){
            if (window.EventBridge !== undefined) {
                EventBridge.scriptEventReceived.connect(function(data) {
                    var myData = JSON.parse(data);
                    if (myData.action == "changeStream") {
                        $('body > audio').attr("src", myData.stream);
                    }
                    if (myData.action == "changeVolume") {
                        $('body > audio').prop("volume", myData.volume);
                    }
                });
            }
            EventBridge.emitWebEvent("loaded");
        });
    </script>
</head>

<body>
    <audio controls src="" autoplay></audio>
</body>
</html>
176
examples/zones/jsstreamplayerdomain-zone.js
Normal file
176
examples/zones/jsstreamplayerdomain-zone.js
Normal file
|
@ -0,0 +1,176 @@
|
|||
//
// #20628: JS Stream Player Domain-Zone
// *************************************
//
// Created by Kevin M. Thomas, Thoys and Konstantin 07/24/15.
// Copyright 2015 High Fidelity, Inc.
// kevintown.net
//
// JavaScript for the High Fidelity interface that creates a stream player with a UI for playing a domain-zone specified
// stream URL, in addition to play, stop and volume functionality, and which is resident only in the domain-zone.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//


// Declare variables and set up new WebWindow.
var lastZone = "";
var volume = 0.5;
var stream = "";
var streamWindow = new WebWindow('Stream', "https://dl.dropboxusercontent.com/u/17344741/jsstreamplayer/jsstreamplayerdomain-zone.html", 0, 0, false);
var visible = false;

// Set up toggleStreamPlayButton overlay.
var toggleStreamPlayButton = Overlays.addOverlay("text", {
    x: 122,
    y: 310,
    width: 38,
    height: 28,
    backgroundColor: { red: 0, green: 0, blue: 0},
    color: { red: 255, green: 255, blue: 0},
    font: {size: 15},
    topMargin: 8,
    visible: false,
    text: " Play"
});

// Set up toggleStreamStopButton overlay.
var toggleStreamStopButton = Overlays.addOverlay("text", {
    x: 166,
    y: 310,
    width: 40,
    height: 28,
    backgroundColor: { red: 0, green: 0, blue: 0},
    color: { red: 255, green: 255, blue: 0},
    font: {size: 15},
    topMargin: 8,
    visible: false,
    text: " Stop"
});

// Set up increaseVolumeButton overlay.
var toggleIncreaseVolumeButton = Overlays.addOverlay("text", {
    x: 211,
    y: 310,
    width: 18,
    height: 28,
    backgroundColor: { red: 0, green: 0, blue: 0},
    color: { red: 255, green: 255, blue: 0},
    font: {size: 15},
    topMargin: 8,
    visible: false,
    text: " +"
});

// Set up decreaseVolumeButton overlay.
var toggleDecreaseVolumeButton = Overlays.addOverlay("text", {
    x: 234,
    y: 310,
    width: 15,
    height: 28,
    backgroundColor: { red: 0, green: 0, blue: 0},
    color: { red: 255, green: 255, blue: 0},
    font: {size: 15},
    topMargin: 8,
    visible: false,
    text: " -"
});

// Function to change JSON object stream.
function changeStream(stream) {
    var streamJSON = {
        action: "changeStream",
        stream: stream
    };
    streamWindow.eventBridge.emitScriptEvent(JSON.stringify(streamJSON));
}

// Function to change JSON object volume.
function changeVolume(volume) {
    var volumeJSON = {
        action: "changeVolume",
        volume: volume
    };
    streamWindow.eventBridge.emitScriptEvent(JSON.stringify(volumeJSON));
}

// Function that adds mousePressEvent functionality to connect UI to enter stream URL, play and stop stream.
function mousePressEvent(event) {
    if (Overlays.getOverlayAtPoint({x: event.x, y: event.y}) == toggleStreamPlayButton) {
        changeStream(stream);
        volume = 0.25;
        changeVolume(volume);
    }
    if (Overlays.getOverlayAtPoint({x: event.x, y: event.y}) == toggleStreamStopButton) {
        changeStream("");
    }
    if (Overlays.getOverlayAtPoint({x: event.x, y: event.y}) == toggleIncreaseVolumeButton) {
        volume += 0.25;
        changeVolume(volume);
    }
    if (Overlays.getOverlayAtPoint({x: event.x, y: event.y}) == toggleDecreaseVolumeButton) {
        volume -= 0.25;
        changeVolume(volume);
    }
}

// Function that returns whether we are in the proper zone.
function isOurZone(properties) {
    return stream != "" && properties.type == "Zone";
}

// Function to toggle overlay visibility.
function toggleVisible(newVisibility) {
    if (newVisibility != visible) {
        visible = newVisibility;
        Overlays.editOverlay(toggleStreamPlayButton, {visible: visible});
        Overlays.editOverlay(toggleStreamStopButton, {visible: visible});
        Overlays.editOverlay(toggleIncreaseVolumeButton, {visible: visible});
        Overlays.editOverlay(toggleDecreaseVolumeButton, {visible: visible});
    }
}

// Stop the script when the avatar changes domain.
Window.domainChanged.connect(function() {
    Script.stop();
});

// Check whether the avatar has entered a stream zone.
Entities.enterEntity.connect(function(entityID) {
    print("Entered..." + JSON.stringify(entityID));
    var properties = Entities.getEntityProperties(entityID);
    stream = properties.userData;
    if (isOurZone(properties)) {
        lastZone = properties.name;
        toggleVisible(true);
    }
});

// Check whether the avatar is leaving the zone.
Entities.leaveEntity.connect(function(entityID) {
    print("Left..." + JSON.stringify(entityID));
    var properties = Entities.getEntityProperties(entityID);
    if (properties.name == lastZone && properties.type == "Zone") {
        print("Leaving Zone!");
        toggleVisible(false);
        changeStream("");
    }
});

// Function to delete overlays upon exit.
function onScriptEnding() {
    Overlays.deleteOverlay(toggleStreamPlayButton);
    Overlays.deleteOverlay(toggleStreamStopButton);
    Overlays.deleteOverlay(toggleIncreaseVolumeButton);
    Overlays.deleteOverlay(toggleDecreaseVolumeButton);
    changeStream("");
    streamWindow.deleteLater();
}

// Connect mouse and hide WebWindow.
Controller.mousePressEvent.connect(mousePressEvent);
streamWindow.setVisible(false);

// Call function upon ending script.
Script.scriptEnding.connect(onScriptEnding);

|
@ -330,7 +330,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
|
|||
_lastNackTime(usecTimestampNow()),
|
||||
_lastSendDownstreamAudioStats(usecTimestampNow()),
|
||||
_isVSyncOn(true),
|
||||
_isThrottleFPSEnabled(false),
|
||||
_isThrottleFPSEnabled(true),
|
||||
_aboutToQuit(false),
|
||||
_notifiedPacketVersionMismatchThisDomain(false),
|
||||
_glWidget(new GLCanvas()),
|
||||
|
@ -3265,6 +3265,8 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
|
|||
|
||||
renderContext._drawItemStatus = sceneInterface->doEngineDisplayItemStatus();
|
||||
|
||||
renderContext._occlusionStatus = Menu::getInstance()->isOptionChecked(MenuOption::DebugAmbientOcclusion);
|
||||
|
||||
renderArgs->_shouldRender = LODManager::shouldRender;
|
||||
|
||||
renderContext.args = renderArgs;
|
||||
|
@ -3596,24 +3598,15 @@ int Application::processOctreeStats(NLPacket& packet, SharedNodePointer sendingN
|
|||
int statsMessageLength = 0;
|
||||
|
||||
const QUuid& nodeUUID = sendingNode->getUUID();
|
||||
OctreeSceneStats* octreeStats;
|
||||
|
||||
|
||||
// now that we know the node ID, let's add these stats to the stats for that node...
|
||||
_octreeSceneStatsLock.lockForWrite();
|
||||
auto it = _octreeServerSceneStats.find(nodeUUID);
|
||||
if (it != _octreeServerSceneStats.end()) {
|
||||
octreeStats = &it->second;
|
||||
statsMessageLength = octreeStats->unpackFromPacket(packet);
|
||||
} else {
|
||||
OctreeSceneStats temp;
|
||||
statsMessageLength = temp.unpackFromPacket(packet);
|
||||
octreeStats = &temp;
|
||||
}
|
||||
|
||||
OctreeSceneStats& octreeStats = _octreeServerSceneStats[nodeUUID];
|
||||
statsMessageLength = octreeStats.unpackFromPacket(packet);
|
||||
|
||||
_octreeSceneStatsLock.unlock();
|
||||
|
||||
VoxelPositionSize rootDetails;
|
||||
voxelDetailsForCode(octreeStats->getJurisdictionRoot(), rootDetails);
|
||||
|
||||
// see if this is the first we've heard of this node...
|
||||
NodeToJurisdictionMap* jurisdiction = NULL;
|
||||
QString serverType;
|
||||
|
@ -3625,6 +3618,9 @@ int Application::processOctreeStats(NLPacket& packet, SharedNodePointer sendingN
|
|||
jurisdiction->lockForRead();
|
||||
if (jurisdiction->find(nodeUUID) == jurisdiction->end()) {
|
||||
jurisdiction->unlock();
|
||||
|
||||
VoxelPositionSize rootDetails;
|
||||
voxelDetailsForCode(octreeStats.getJurisdictionRoot(), rootDetails);
|
||||
|
||||
qCDebug(interfaceapp, "stats from new %s server... [%f, %f, %f, %f]",
|
||||
qPrintable(serverType),
|
||||
|
@ -3637,7 +3633,7 @@ int Application::processOctreeStats(NLPacket& packet, SharedNodePointer sendingN
|
|||
// but OctreeSceneStats thinks it's just returning a reference to its contents. So we need to make a copy of the
|
||||
// details from the OctreeSceneStats to construct the JurisdictionMap
|
||||
JurisdictionMap jurisdictionMap;
|
||||
jurisdictionMap.copyContents(octreeStats->getJurisdictionRoot(), octreeStats->getJurisdictionEndNodes());
|
||||
jurisdictionMap.copyContents(octreeStats.getJurisdictionRoot(), octreeStats.getJurisdictionEndNodes());
|
||||
jurisdiction->lockForWrite();
|
||||
(*jurisdiction)[nodeUUID] = jurisdictionMap;
|
||||
jurisdiction->unlock();
|
||||
|
|
|
@ -330,7 +330,7 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Atmosphere,
|
||||
0, // QML Qt::SHIFT | Qt::Key_A,
|
||||
true);
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::AmbientOcclusion);
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::DebugAmbientOcclusion);
|
||||
|
||||
MenuWrapper* ambientLightMenu = renderOptionsMenu->addMenu(MenuOption::RenderAmbientLight);
|
||||
QActionGroup* ambientLightGroup = new QActionGroup(ambientLightMenu);
|
||||
|
@ -368,7 +368,7 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::RenderTargetFramerateVSyncOn, 0, true,
|
||||
qApp, SLOT(setVSyncEnabled()));
|
||||
#endif
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::ThrottleFPSIfNotFocus, 0, false,
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::ThrottleFPSIfNotFocus, 0, true,
|
||||
qApp, SLOT(setThrottleFPSEnabled()));
|
||||
}
|
||||
|
||||
|
|
|
@ -134,7 +134,6 @@ namespace MenuOption {
|
|||
const QString AddressBar = "Show Address Bar";
|
||||
const QString AlignForearmsWithWrists = "Align Forearms with Wrists";
|
||||
const QString AlternateIK = "Alternate IK";
|
||||
const QString AmbientOcclusion = "Ambient Occlusion";
|
||||
const QString Animations = "Animations...";
|
||||
const QString Atmosphere = "Atmosphere";
|
||||
const QString Attachments = "Attachments...";
|
||||
|
@ -165,6 +164,7 @@ namespace MenuOption {
|
|||
const QString ControlWithSpeech = "Control With Speech";
|
||||
const QString CopyAddress = "Copy Address to Clipboard";
|
||||
const QString CopyPath = "Copy Path to Clipboard";
|
||||
const QString DebugAmbientOcclusion = "Debug Ambient Occlusion";
|
||||
const QString DecreaseAvatarSize = "Decrease Avatar Size";
|
||||
const QString DeleteBookmark = "Delete Bookmark...";
|
||||
const QString DisableActivityLogger = "Disable Activity Logger";
|
||||
|
|
|
@ -56,9 +56,9 @@ Head::Head(Avatar* owningAvatar) :
|
|||
_deltaLeanForward(0.0f),
|
||||
_isCameraMoving(false),
|
||||
_isLookingAtMe(false),
|
||||
_faceModel(this, std::make_shared<AvatarRig>()),
|
||||
_lookingAtMeStarted(0),
|
||||
_wasLastLookingAtMe(0),
|
||||
_faceModel(this, std::make_shared<AvatarRig>()),
|
||||
_leftEyeLookAtID(DependencyManager::get<GeometryCache>()->allocateID()),
|
||||
_rightEyeLookAtID(DependencyManager::get<GeometryCache>()->allocateID())
|
||||
{
|
||||
|
|
|
@ -738,6 +738,7 @@ void MyAvatar::loadData() {
|
|||
setCollisionSoundURL(settings.value("collisionSoundURL", DEFAULT_AVATAR_COLLISION_SOUND_URL).toString());
|
||||
|
||||
settings.endGroup();
|
||||
_rig->setEnableRig(settings.value("enableRig").toBool());
|
||||
}
|
||||
|
||||
void MyAvatar::saveAttachmentData(const AttachmentData& attachment) const {
|
||||
|
|
|
@ -115,6 +115,11 @@ void SkeletonModel::updateClusterMatrices() {
|
|||
}
|
||||
}
|
||||
|
||||
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
|
||||
_rig->computeMotionAnimationState(deltaTime, _owningAvatar->getPosition(), _owningAvatar->getVelocity(), _owningAvatar->getOrientation());
|
||||
Model::updateRig(deltaTime, parentTransform);
|
||||
}
|
||||
|
||||
void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
|
||||
setTranslation(_owningAvatar->getSkeletonPosition());
|
||||
static const glm::quat refOrientation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
|
||||
|
|
|
@ -29,7 +29,8 @@ public:
|
|||
|
||||
virtual void initJointStates(QVector<JointState> states);
|
||||
|
||||
void simulate(float deltaTime, bool fullUpdate = true);
|
||||
virtual void simulate(float deltaTime, bool fullUpdate = true);
|
||||
virtual void updateRig(float deltaTime, glm::mat4 parentTransform);
|
||||
|
||||
void renderIKConstraints(gpu::Batch& batch);
|
||||
|
||||
|
|
|
@ -341,17 +341,16 @@ glm::mat4 Rig::getJointVisibleTransform(int jointIndex) const {
|
|||
return maybeCauterizeHead(jointIndex).getVisibleTransform();
|
||||
}
|
||||
|
||||
void Rig::simulateInternal(float deltaTime, glm::mat4 parentTransform, const glm::vec3& worldPosition, const glm::quat& worldRotation) {
|
||||
glm::vec3 front = worldRotation * IDENTITY_FRONT;
|
||||
glm::vec3 delta = worldPosition - _lastPosition ;
|
||||
float forwardSpeed = glm::dot(delta, front) / deltaTime;
|
||||
float rotationalSpeed = glm::angle(front, _lastFront) / deltaTime;
|
||||
bool isWalking = std::abs(forwardSpeed) > 0.01;
|
||||
bool isTurning = std::abs(rotationalSpeed) > 0.5;
|
||||
void Rig::computeMotionAnimationState(float deltaTime, const glm::vec3& worldPosition, const glm::vec3& worldVelocity, const glm::quat& worldRotation) {
|
||||
|
||||
// Crude, until we have blending:
|
||||
const float EXPECTED_INTERVAL = 1.0f / 60.0f;
|
||||
if (deltaTime >= EXPECTED_INTERVAL) {
|
||||
if (_enableRig) {
|
||||
glm::vec3 front = worldRotation * IDENTITY_FRONT;
|
||||
float forwardSpeed = glm::dot(worldVelocity, front);
|
||||
float rotationalSpeed = glm::angle(front, _lastFront) / deltaTime;
|
||||
bool isWalking = std::abs(forwardSpeed) > 0.01;
|
||||
bool isTurning = std::abs(rotationalSpeed) > 0.5;
|
||||
|
||||
// Crude, until we have blending:
|
||||
isTurning = isTurning && !isWalking; // Only one of walk/turn, walk wins.
|
||||
isTurning = false; // FIXME
|
||||
bool isIdle = !isWalking && !isTurning;
|
||||
|
@ -361,26 +360,23 @@ void Rig::simulateInternal(float deltaTime, glm::mat4 parentTransform, const glm
|
|||
QString toStop = singleRole(_isWalking && !isWalking, _isTurning && !isTurning, _isIdle && !isIdle);
|
||||
if (!toStop.isEmpty()) {
|
||||
//qCDebug(animation) << "isTurning" << isTurning << "fronts" << front << _lastFront << glm::angle(front, _lastFront) << rotationalSpeed;
|
||||
//stopAnimationByRole(toStop);
|
||||
stopAnimationByRole(toStop);
|
||||
}
|
||||
QString newRole = singleRole(isWalking && !_isWalking, isTurning && !_isTurning, isIdle && !_isIdle);
|
||||
if (!newRole.isEmpty()) {
|
||||
//startAnimationByRole(newRole);
|
||||
qCDebug(animation) << deltaTime << ":" /*<< _lastPosition << worldPosition << "=>" */<< delta << "." << front << "=> " << forwardSpeed << newRole;
|
||||
/*if (newRole == "idle") {
|
||||
qCDebug(animation) << deltaTime << ":" << _lastPosition << worldPosition << "=>" << delta;
|
||||
}*/
|
||||
startAnimationByRole(newRole);
|
||||
qCDebug(animation) << deltaTime << ":" << worldVelocity << "." << front << "=> " << forwardSpeed << newRole;
|
||||
}
|
||||
|
||||
_lastPosition = worldPosition;
|
||||
_positions[(++_positionIndex) % _positions.count()] = worldPosition; // exp. alt. to above line
|
||||
_lastFront = front;
|
||||
_isWalking = isWalking;
|
||||
_isTurning = isTurning;
|
||||
_isIdle = isIdle;
|
||||
}
|
||||
}
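
The block above reduces avatar motion to three mutually exclusive roles (walk wins over turn, and the diff then forces isTurning off with a FIXME). As a rough standalone illustration of the classification only (not part of this diff; the enum, the function name, and the acos-based angle computation are my assumptions):

#include <glm/glm.hpp>
#include <cmath>

enum class MotionRole { Idle, Walk, Turn };

// Thresholds mirror the values used in Rig::computeMotionAnimationState above.
MotionRole classifyMotion(const glm::vec3& worldVelocity, const glm::vec3& front,
                          const glm::vec3& lastFront, float deltaTime) {
    float forwardSpeed = glm::dot(worldVelocity, front);
    float angleBetween = std::acos(glm::clamp(glm::dot(glm::normalize(front), glm::normalize(lastFront)), -1.0f, 1.0f));
    float rotationalSpeed = angleBetween / deltaTime;
    bool isWalking = std::abs(forwardSpeed) > 0.01f;
    bool isTurning = std::abs(rotationalSpeed) > 0.5f && !isWalking; // only one of walk/turn, walk wins
    return isWalking ? MotionRole::Walk : (isTurning ? MotionRole::Turn : MotionRole::Idle);
}
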
|
||||
|
||||
// update animations
|
||||
void Rig::updateAnimations(float deltaTime, glm::mat4 parentTransform) {
|
||||
foreach (const AnimationHandlePointer& handle, _runningAnimations) {
|
||||
handle->simulate(deltaTime);
|
||||
}
|
||||
|
|
|
@ -108,7 +108,10 @@ public:
|
|||
void setJointTransform(int jointIndex, glm::mat4 newTransform);
|
||||
glm::mat4 getJointVisibleTransform(int jointIndex) const;
|
||||
void setJointVisibleTransform(int jointIndex, glm::mat4 newTransform);
|
||||
void simulateInternal(float deltaTime, glm::mat4 parentTransform, const glm::vec3& worldPosition, const glm::quat& worldRotation);
|
||||
// Start or stop animations as needed.
|
||||
void computeMotionAnimationState(float deltaTime, const glm::vec3& worldPosition, const glm::vec3& worldVelocity, const glm::quat& worldRotation);
|
||||
// Regardless of who started the animations or how many, update the joints.
|
||||
void updateAnimations(float deltaTime, glm::mat4 parentTransform);
|
||||
bool setJointPosition(int jointIndex, const glm::vec3& position, const glm::quat& rotation, bool useRotation,
|
||||
int lastFreeIndex, bool allIntermediatesFree, const glm::vec3& alignment, float priority,
|
||||
const QVector<int>& freeLineage, glm::mat4 parentTransform);
|
||||
|
@ -132,6 +135,7 @@ public:
|
|||
virtual bool getIsFirstPerson() const { return _isFirstPerson; }
|
||||
|
||||
bool getJointsAreDirty() { return _jointsAreDirty; }
|
||||
void setEnableRig(bool isEnabled) { _enableRig = isEnabled; }
|
||||
|
||||
protected:
|
||||
QVector<JointState> _jointStates;
|
||||
|
@ -146,15 +150,12 @@ public:
|
|||
bool _jointsAreDirty = false;
|
||||
int _neckJointIndex = -1;
|
||||
|
||||
bool _enableRig;
|
||||
bool _isWalking;
|
||||
bool _isTurning;
|
||||
bool _isIdle;
|
||||
glm::vec3 _lastFront;
|
||||
glm::vec3 _lastPosition;
|
||||
// or, experimentally...
|
||||
QVector<glm::vec3> _positions = QVector<glm::vec3>(4);
|
||||
QVector<float> _timeIntervals = QVector<float>(4);
|
||||
int _positionIndex;
|
||||
};
|
||||
|
||||
#endif /* defined(__hifi__Rig__) */
|
||||
|
|
|
@ -1087,7 +1087,6 @@ void AvatarData::setJointMappingsFromNetworkReply() {
|
|||
}
|
||||
|
||||
networkReply->deleteLater();
|
||||
emit jointMappingLoaded();
|
||||
}
|
||||
|
||||
void AvatarData::sendAvatarDataPacket() {
|
||||
|
|
|
@ -307,9 +307,6 @@ public:
|
|||
|
||||
bool shouldDie() const { return _owningAvatarMixer.isNull() || getUsecsSinceLastUpdate() > AVATAR_SILENCE_THRESHOLD_USECS; }
|
||||
|
||||
signals:
|
||||
void jointMappingLoaded(); // So that test cases or anyone waiting on asynchronous loading can be informed.
|
||||
|
||||
public slots:
|
||||
void sendAvatarDataPacket();
|
||||
void sendIdentityPacket();
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
set(TARGET_NAME entities-renderer)
|
||||
|
||||
AUTOSCRIBE_SHADER_LIB(gpu model render)
|
||||
|
||||
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
|
||||
setup_hifi_library(Widgets OpenGL Network Script)
|
||||
|
||||
|
|
|
@ -14,22 +14,141 @@
|
|||
#include <DeferredLightingEffect.h>
|
||||
#include <PerfStat.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <AbstractViewStateInterface.h>
|
||||
#include "EntitiesRendererLogging.h"
|
||||
|
||||
#include "RenderableParticleEffectEntityItem.h"
|
||||
|
||||
#include "untextured_particle_vert.h"
|
||||
#include "untextured_particle_frag.h"
|
||||
#include "textured_particle_vert.h"
|
||||
#include "textured_particle_frag.h"
|
||||
|
||||
class ParticlePayload {
|
||||
public:
|
||||
typedef render::Payload<ParticlePayload> Payload;
|
||||
typedef Payload::DataPointer Pointer;
|
||||
typedef RenderableParticleEffectEntityItem::Vertex Vertex;
|
||||
|
||||
ParticlePayload() : _vertexFormat(std::make_shared<gpu::Stream::Format>()),
|
||||
_vertexBuffer(std::make_shared<gpu::Buffer>()),
|
||||
_indexBuffer(std::make_shared<gpu::Buffer>()) {
|
||||
_vertexFormat->setAttribute(gpu::Stream::POSITION, 0, gpu::Element::VEC3F_XYZ, 0);
|
||||
_vertexFormat->setAttribute(gpu::Stream::TEXCOORD, 0, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::UV), offsetof(Vertex, uv));
|
||||
_vertexFormat->setAttribute(gpu::Stream::COLOR, 0, gpu::Element::COLOR_RGBA_32, offsetof(Vertex, rgba));
|
||||
}
|
||||
|
||||
void setPipeline(gpu::PipelinePointer pipeline) { _pipeline = pipeline; }
|
||||
const gpu::PipelinePointer& getPipeline() const { return _pipeline; }
|
||||
|
||||
const Transform& getModelTransform() const { return _modelTransform; }
|
||||
void setModelTransform(const Transform& modelTransform) { _modelTransform = modelTransform; }
|
||||
|
||||
const AABox& getBound() const { return _bound; }
|
||||
void setBound(AABox& bound) { _bound = bound; }
|
||||
|
||||
gpu::BufferPointer getVertexBuffer() { return _vertexBuffer; }
|
||||
const gpu::BufferPointer& getVertexBuffer() const { return _vertexBuffer; }
|
||||
|
||||
gpu::BufferPointer getIndexBuffer() { return _indexBuffer; }
|
||||
const gpu::BufferPointer& getIndexBuffer() const { return _indexBuffer; }
|
||||
|
||||
void setTexture(gpu::TexturePointer texture) { _texture = texture; }
|
||||
const gpu::TexturePointer& getTexture() const { return _texture; }
|
||||
|
||||
bool getVisibleFlag() const { return _visibleFlag; }
|
||||
void setVisibleFlag(bool visibleFlag) { _visibleFlag = visibleFlag; }
|
||||
|
||||
void render(RenderArgs* args) const {
|
||||
assert(_pipeline);
|
||||
|
||||
gpu::Batch& batch = *args->_batch;
|
||||
batch.setPipeline(_pipeline);
|
||||
|
||||
if (_texture) {
|
||||
batch.setResourceTexture(0, _texture);
|
||||
}
|
||||
|
||||
batch.setModelTransform(_modelTransform);
|
||||
batch.setInputFormat(_vertexFormat);
|
||||
batch.setInputBuffer(0, _vertexBuffer, 0, sizeof(Vertex));
|
||||
batch.setIndexBuffer(gpu::UINT16, _indexBuffer, 0);
|
||||
|
||||
auto numIndices = _indexBuffer->getSize() / sizeof(uint16_t);
|
||||
batch.drawIndexed(gpu::TRIANGLES, numIndices);
|
||||
}
|
||||
|
||||
protected:
|
||||
Transform _modelTransform;
|
||||
AABox _bound;
|
||||
gpu::PipelinePointer _pipeline;
|
||||
gpu::Stream::FormatPointer _vertexFormat;
|
||||
gpu::BufferPointer _vertexBuffer;
|
||||
gpu::BufferPointer _indexBuffer;
|
||||
gpu::TexturePointer _texture;
|
||||
bool _visibleFlag = true;
|
||||
};
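
The vertex format built in the ParticlePayload constructor above describes one interleaved buffer bound with stride sizeof(Vertex): POSITION at offset 0, TEXCOORD at offsetof(Vertex, uv), COLOR at offsetof(Vertex, rgba). A minimal sketch of that layout, mirroring the Vertex struct declared in RenderableParticleEffectEntityItem.h later in this diff (offsets in the comments assume the default glm vector configuration):

#include <cstdint>
#include <cstddef>
#include <glm/glm.hpp>

struct Vertex {
    glm::vec3 xyz;   // POSITION -> attribute offset 0
    glm::vec2 uv;    // TEXCOORD -> attribute offset offsetof(Vertex, uv)
    uint32_t rgba;   // COLOR    -> attribute offset offsetof(Vertex, rgba), RGBA bytes packed by toRGBA()
};

static_assert(offsetof(Vertex, xyz) == 0, "the POSITION attribute is registered at offset 0");
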
|
||||
|
||||
namespace render {
|
||||
template <>
|
||||
const ItemKey payloadGetKey(const ParticlePayload::Pointer& payload) {
|
||||
if (payload->getVisibleFlag()) {
|
||||
return ItemKey::Builder::transparentShape();
|
||||
} else {
|
||||
return ItemKey::Builder().withInvisible().build();
|
||||
}
|
||||
}
|
||||
|
||||
template <>
|
||||
const Item::Bound payloadGetBound(const ParticlePayload::Pointer& payload) {
|
||||
return payload->getBound();
|
||||
}
|
||||
|
||||
template <>
|
||||
void payloadRender(const ParticlePayload::Pointer& payload, RenderArgs* args) {
|
||||
payload->render(args);
|
||||
}
|
||||
}
|
||||
|
||||
gpu::PipelinePointer RenderableParticleEffectEntityItem::_texturedPipeline;
|
||||
gpu::PipelinePointer RenderableParticleEffectEntityItem::_untexturedPipeline;
|
||||
|
||||
EntityItemPointer RenderableParticleEffectEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
|
||||
return std::make_shared<RenderableParticleEffectEntityItem>(entityID, properties);
|
||||
}
|
||||
|
||||
RenderableParticleEffectEntityItem::RenderableParticleEffectEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties) :
|
||||
ParticleEffectEntityItem(entityItemID, properties) {
|
||||
_cacheID = DependencyManager::get<GeometryCache>()->allocateID();
|
||||
|
||||
// lazy creation of particle system pipeline
|
||||
if (!_untexturedPipeline && !_texturedPipeline) {
|
||||
createPipelines();
|
||||
}
|
||||
}
|
||||
|
||||
void RenderableParticleEffectEntityItem::render(RenderArgs* args) {
|
||||
Q_ASSERT(getType() == EntityTypes::ParticleEffect);
|
||||
PerformanceTimer perfTimer("RenderableParticleEffectEntityItem::render");
|
||||
bool RenderableParticleEffectEntityItem::addToScene(EntityItemPointer self,
|
||||
render::ScenePointer scene,
|
||||
render::PendingChanges& pendingChanges) {
|
||||
|
||||
auto particlePayload = std::shared_ptr<ParticlePayload>(new ParticlePayload());
|
||||
particlePayload->setPipeline(_untexturedPipeline);
|
||||
_renderItemId = scene->allocateID();
|
||||
auto renderData = ParticlePayload::Pointer(particlePayload);
|
||||
auto renderPayload = render::PayloadPointer(new ParticlePayload::Payload(renderData));
|
||||
pendingChanges.resetItem(_renderItemId, renderPayload);
|
||||
_scene = scene;
|
||||
return true;
|
||||
}
|
||||
|
||||
void RenderableParticleEffectEntityItem::removeFromScene(EntityItemPointer self,
|
||||
render::ScenePointer scene,
|
||||
render::PendingChanges& pendingChanges) {
|
||||
pendingChanges.removeItem(_renderItemId);
|
||||
_scene = nullptr;
|
||||
};
|
||||
|
||||
void RenderableParticleEffectEntityItem::update(const quint64& now) {
|
||||
ParticleEffectEntityItem::update(now);
|
||||
|
||||
if (_texturesChangedFlag) {
|
||||
if (_textures.isEmpty()) {
|
||||
|
@ -42,71 +161,155 @@ void RenderableParticleEffectEntityItem::render(RenderArgs* args) {
|
|||
_texturesChangedFlag = false;
|
||||
}
|
||||
|
||||
bool textured = _texture && _texture->isLoaded();
|
||||
updateQuads(args, textured);
|
||||
|
||||
Q_ASSERT(args->_batch);
|
||||
gpu::Batch& batch = *args->_batch;
|
||||
if (textured) {
|
||||
batch.setResourceTexture(0, _texture->getGPUTexture());
|
||||
}
|
||||
batch.setModelTransform(getTransformToCenter());
|
||||
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(batch, textured);
|
||||
DependencyManager::get<GeometryCache>()->renderVertices(batch, gpu::QUADS, _cacheID);
|
||||
};
|
||||
updateRenderItem();
|
||||
}
|
||||
|
||||
static glm::vec3 zSortAxis;
|
||||
static bool zSort(const glm::vec3& rhs, const glm::vec3& lhs) {
|
||||
return glm::dot(rhs, ::zSortAxis) > glm::dot(lhs, ::zSortAxis);
|
||||
}
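
zSort orders positions by descending dot product with the view direction, so the particles farthest along the view axis come first and the alpha-blended quads are drawn back to front; the file-scope ::zSortAxis exists only to feed that fixed-signature comparator. An equivalent capture-based sketch (illustrative only, not part of this diff):

#include <algorithm>
#include <vector>
#include <glm/glm.hpp>

void sortBackToFront(std::vector<glm::vec3>& positions, const glm::vec3& viewDirection) {
    std::sort(positions.begin(), positions.end(),
              [&viewDirection](const glm::vec3& a, const glm::vec3& b) {
                  // larger projection onto the view direction == farther away == drawn earlier
                  return glm::dot(a, viewDirection) > glm::dot(b, viewDirection);
              });
}
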
|
||||
|
||||
void RenderableParticleEffectEntityItem::updateQuads(RenderArgs* args, bool textured) {
|
||||
float particleRadius = getParticleRadius();
|
||||
glm::vec4 particleColor(toGlm(getXColor()), getLocalRenderAlpha());
|
||||
|
||||
glm::vec3 upOffset = args->_viewFrustum->getUp() * particleRadius;
|
||||
glm::vec3 rightOffset = args->_viewFrustum->getRight() * particleRadius;
|
||||
|
||||
QVector<glm::vec3> vertices;
|
||||
QVector<glm::vec3> positions;
|
||||
QVector<glm::vec2> textureCoords;
|
||||
vertices.reserve(getLivingParticleCount() * VERTS_PER_PARTICLE);
|
||||
|
||||
if (textured) {
|
||||
textureCoords.reserve(getLivingParticleCount() * VERTS_PER_PARTICLE);
|
||||
}
|
||||
positions.reserve(getLivingParticleCount());
|
||||
|
||||
|
||||
for (quint32 i = _particleHeadIndex; i != _particleTailIndex; i = (i + 1) % _maxParticles) {
|
||||
positions.append(_particlePositions[i]);
|
||||
if (textured) {
|
||||
textureCoords.append(glm::vec2(0, 1));
|
||||
textureCoords.append(glm::vec2(1, 1));
|
||||
textureCoords.append(glm::vec2(1, 0));
|
||||
textureCoords.append(glm::vec2(0, 0));
|
||||
}
|
||||
}
|
||||
|
||||
// sort particles back to front
|
||||
::zSortAxis = args->_viewFrustum->getDirection();
|
||||
qSort(positions.begin(), positions.end(), zSort);
|
||||
|
||||
for (int i = 0; i < positions.size(); i++) {
|
||||
glm::vec3 pos = (textured) ? positions[i] : _particlePositions[i];
|
||||
|
||||
// generate corners of quad aligned to face the camera.
|
||||
vertices.append(pos + rightOffset + upOffset);
|
||||
vertices.append(pos - rightOffset + upOffset);
|
||||
vertices.append(pos - rightOffset - upOffset);
|
||||
vertices.append(pos + rightOffset - upOffset);
|
||||
|
||||
}
|
||||
|
||||
if (textured) {
|
||||
DependencyManager::get<GeometryCache>()->updateVertices(_cacheID, vertices, textureCoords, particleColor);
|
||||
} else {
|
||||
DependencyManager::get<GeometryCache>()->updateVertices(_cacheID, vertices, particleColor);
|
||||
}
|
||||
uint32_t toRGBA(uint8_t r, uint8_t g, uint8_t b, uint8_t a) {
|
||||
return ((uint32_t)r | (uint32_t)g << 8 | (uint32_t)b << 16 | (uint32_t)a << 24);
|
||||
}
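
toRGBA packs the four channel bytes so that, on a little-endian target, the in-memory byte order is R, G, B, A, which is what the COLOR_RGBA_32 vertex attribute above consumes. A small standalone check of that layout (illustrative only; the little-endian assumption is mine, not stated in the diff):

#include <cassert>
#include <cstdint>
#include <cstring>

static uint32_t toRGBA(uint8_t r, uint8_t g, uint8_t b, uint8_t a) {
    return ((uint32_t)r | (uint32_t)g << 8 | (uint32_t)b << 16 | (uint32_t)a << 24);
}

int main() {
    uint32_t packed = toRGBA(0x11, 0x22, 0x33, 0x44);
    uint8_t bytes[4];
    std::memcpy(bytes, &packed, sizeof(packed));
    // On a little-endian machine the low byte (red) lands first in memory.
    assert(bytes[0] == 0x11 && bytes[1] == 0x22 && bytes[2] == 0x33 && bytes[3] == 0x44);
    return 0;
}
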
|
||||
|
||||
void RenderableParticleEffectEntityItem::updateRenderItem() {
|
||||
|
||||
if (!_scene) {
|
||||
return;
|
||||
}
|
||||
|
||||
float particleRadius = getParticleRadius();
|
||||
auto xcolor = getXColor();
|
||||
auto alpha = (uint8_t)(glm::clamp(getLocalRenderAlpha(), 0.0f, 1.0f) * 255.0f);
|
||||
auto rgba = toRGBA(xcolor.red, xcolor.green, xcolor.blue, alpha);
|
||||
|
||||
// make a copy of each particle position
|
||||
std::vector<glm::vec3> positions;
|
||||
positions.reserve(getLivingParticleCount());
|
||||
for (quint32 i = _particleHeadIndex; i != _particleTailIndex; i = (i + 1) % _maxParticles) {
|
||||
positions.push_back(_particlePositions[i]);
|
||||
}
|
||||
|
||||
// sort particles back to front
|
||||
// NOTE: this view frustum might be one frame out of date.
|
||||
auto frustum = AbstractViewStateInterface::instance()->getCurrentViewFrustum();
|
||||
::zSortAxis = frustum->getDirection();
|
||||
qSort(positions.begin(), positions.end(), zSort);
|
||||
|
||||
// allocate vertices
|
||||
_vertices.clear();
|
||||
|
||||
// build vertices from particle positions
|
||||
const glm::vec3 upOffset = frustum->getUp() * particleRadius;
|
||||
const glm::vec3 rightOffset = frustum->getRight() * particleRadius;
|
||||
for (auto&& pos : positions) {
|
||||
// generate corners of quad aligned to face the camera.
|
||||
_vertices.emplace_back(pos + rightOffset + upOffset, glm::vec2(1.0f, 1.0f), rgba);
|
||||
_vertices.emplace_back(pos - rightOffset + upOffset, glm::vec2(0.0f, 1.0f), rgba);
|
||||
_vertices.emplace_back(pos - rightOffset - upOffset, glm::vec2(0.0f, 0.0f), rgba);
|
||||
_vertices.emplace_back(pos + rightOffset - upOffset, glm::vec2(1.0f, 0.0f), rgba);
|
||||
}
|
||||
|
||||
render::PendingChanges pendingChanges;
|
||||
pendingChanges.updateItem<ParticlePayload>(_renderItemId, [&](ParticlePayload& payload) {
|
||||
|
||||
// update vertex buffer
|
||||
auto vertexBuffer = payload.getVertexBuffer();
|
||||
size_t numBytes = sizeof(Vertex) * _vertices.size();
|
||||
vertexBuffer->resize(numBytes);
|
||||
gpu::Byte* data = vertexBuffer->editData();
|
||||
memcpy(data, &(_vertices[0]), numBytes);
|
||||
|
||||
// FIXME, don't update index buffer if num particles has not changed.
|
||||
// update index buffer
|
||||
auto indexBuffer = payload.getIndexBuffer();
|
||||
const size_t NUM_VERTS_PER_PARTICLE = 4;
|
||||
const size_t NUM_INDICES_PER_PARTICLE = 6;
|
||||
auto numQuads = (_vertices.size() / NUM_VERTS_PER_PARTICLE);
|
||||
numBytes = sizeof(uint16_t) * numQuads * NUM_INDICES_PER_PARTICLE;
|
||||
indexBuffer->resize(numBytes);
|
||||
data = indexBuffer->editData();
|
||||
auto indexPtr = reinterpret_cast<uint16_t*>(data);
|
||||
for (size_t i = 0; i < numQuads; ++i) {
|
||||
indexPtr[i * NUM_INDICES_PER_PARTICLE + 0] = i * NUM_VERTS_PER_PARTICLE + 0;
|
||||
indexPtr[i * NUM_INDICES_PER_PARTICLE + 1] = i * NUM_VERTS_PER_PARTICLE + 1;
|
||||
indexPtr[i * NUM_INDICES_PER_PARTICLE + 2] = i * NUM_VERTS_PER_PARTICLE + 3;
|
||||
indexPtr[i * NUM_INDICES_PER_PARTICLE + 3] = i * NUM_VERTS_PER_PARTICLE + 1;
|
||||
indexPtr[i * NUM_INDICES_PER_PARTICLE + 4] = i * NUM_VERTS_PER_PARTICLE + 2;
|
||||
indexPtr[i * NUM_INDICES_PER_PARTICLE + 5] = i * NUM_VERTS_PER_PARTICLE + 3;
|
||||
}
|
||||
|
||||
// update transform
|
||||
glm::quat rot = _transform.getRotation();
|
||||
glm::vec3 pos = _transform.getTranslation();
|
||||
Transform t;
|
||||
t.setRotation(rot);
|
||||
t.setTranslation(pos);
|
||||
payload.setModelTransform(t);
|
||||
|
||||
// transform _particleMinBound and _particleMaxBound corners into world coords
|
||||
glm::vec3 d = _particleMaxBound - _particleMinBound;
|
||||
const size_t NUM_BOX_CORNERS = 8;
|
||||
glm::vec3 corners[NUM_BOX_CORNERS] = {
|
||||
pos + rot * (_particleMinBound + glm::vec3(0.0f, 0.0f, 0.0f)),
|
||||
pos + rot * (_particleMinBound + glm::vec3(d.x, 0.0f, 0.0f)),
|
||||
pos + rot * (_particleMinBound + glm::vec3(0.0f, d.y, 0.0f)),
|
||||
pos + rot * (_particleMinBound + glm::vec3(d.x, d.y, 0.0f)),
|
||||
pos + rot * (_particleMinBound + glm::vec3(0.0f, 0.0f, d.z)),
|
||||
pos + rot * (_particleMinBound + glm::vec3(d.x, 0.0f, d.z)),
|
||||
pos + rot * (_particleMinBound + glm::vec3(0.0f, d.y, d.z)),
|
||||
pos + rot * (_particleMinBound + glm::vec3(d.x, d.y, d.z))
|
||||
};
|
||||
glm::vec3 min(FLT_MAX, FLT_MAX, FLT_MAX);
|
||||
glm::vec3 max = -min;
|
||||
for (size_t i = 0; i < NUM_BOX_CORNERS; i++) {
|
||||
min.x = std::min(min.x, corners[i].x);
|
||||
min.y = std::min(min.y, corners[i].y);
|
||||
min.z = std::min(min.z, corners[i].z);
|
||||
max.x = std::max(max.x, corners[i].x);
|
||||
max.y = std::max(max.y, corners[i].y);
|
||||
max.z = std::max(max.z, corners[i].z);
|
||||
}
|
||||
AABox bound(min, max - min);
|
||||
payload.setBound(bound);
|
||||
|
||||
bool textured = _texture && _texture->isLoaded();
|
||||
if (textured) {
|
||||
payload.setTexture(_texture->getGPUTexture());
|
||||
payload.setPipeline(_texturedPipeline);
|
||||
} else {
|
||||
payload.setTexture(nullptr);
|
||||
payload.setPipeline(_untexturedPipeline);
|
||||
}
|
||||
});
|
||||
|
||||
_scene->enqueuePendingChanges(pendingChanges);
|
||||
}
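
Each particle contributes 4 vertices and 6 indices in the loop above: the quad (0, 1, 2, 3) is split into the triangles (0, 1, 3) and (1, 2, 3). A standalone sketch of the same expansion (the helper name is mine); note that because the index buffer is gpu::UINT16, a single payload can only address 65536 distinct vertices:

#include <cstddef>
#include <cstdint>
#include <vector>

std::vector<uint16_t> buildQuadIndices(size_t numQuads) {
    std::vector<uint16_t> indices;
    indices.reserve(numQuads * 6);
    for (size_t q = 0; q < numQuads; ++q) {
        uint16_t base = (uint16_t)(q * 4);
        // two triangles per quad: (0, 1, 3) and (1, 2, 3), offset by the quad's base vertex
        const uint16_t quad[6] = {
            (uint16_t)(base + 0), (uint16_t)(base + 1), (uint16_t)(base + 3),
            (uint16_t)(base + 1), (uint16_t)(base + 2), (uint16_t)(base + 3)
        };
        indices.insert(indices.end(), quad, quad + 6);
    }
    return indices;
}
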
|
||||
|
||||
void RenderableParticleEffectEntityItem::createPipelines() {
|
||||
if (!_untexturedPipeline) {
|
||||
auto state = std::make_shared<gpu::State>();
|
||||
state->setCullMode(gpu::State::CULL_BACK);
|
||||
state->setDepthTest(true, true, gpu::LESS_EQUAL);
|
||||
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD,
|
||||
gpu::State::INV_SRC_ALPHA, gpu::State::FACTOR_ALPHA,
|
||||
gpu::State::BLEND_OP_ADD, gpu::State::ONE);
|
||||
auto vertShader = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(untextured_particle_vert)));
|
||||
auto fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(untextured_particle_frag)));
|
||||
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vertShader, fragShader));
|
||||
_untexturedPipeline = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
|
||||
}
|
||||
if (!_texturedPipeline) {
|
||||
auto state = std::make_shared<gpu::State>();
|
||||
state->setCullMode(gpu::State::CULL_BACK);
|
||||
state->setDepthTest(true, true, gpu::LESS_EQUAL);
|
||||
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD,
|
||||
gpu::State::INV_SRC_ALPHA, gpu::State::FACTOR_ALPHA,
|
||||
gpu::State::BLEND_OP_ADD, gpu::State::ONE);
|
||||
auto vertShader = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(textured_particle_vert)));
|
||||
auto fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(textured_particle_frag)));
|
||||
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vertShader, fragShader));
|
||||
_texturedPipeline = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -16,20 +16,35 @@
|
|||
#include "RenderableEntityItem.h"
|
||||
|
||||
class RenderableParticleEffectEntityItem : public ParticleEffectEntityItem {
|
||||
friend class ParticlePayload;
|
||||
public:
|
||||
static EntityItemPointer factory(const EntityItemID& entityID, const EntityItemProperties& properties);
|
||||
RenderableParticleEffectEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties);
|
||||
virtual void render(RenderArgs* args);
|
||||
|
||||
void updateQuads(RenderArgs* args, bool textured);
|
||||
virtual void update(const quint64& now) override;
|
||||
|
||||
SIMPLE_RENDERABLE();
|
||||
void updateRenderItem();
|
||||
|
||||
virtual bool addToScene(EntityItemPointer self, render::ScenePointer scene, render::PendingChanges& pendingChanges);
|
||||
virtual void removeFromScene(EntityItemPointer self, render::ScenePointer scene, render::PendingChanges& pendingChanges);
|
||||
|
||||
protected:
|
||||
render::ItemID _renderItemId;
|
||||
|
||||
int _cacheID;
|
||||
const int VERTS_PER_PARTICLE = 4;
|
||||
struct Vertex {
|
||||
Vertex(glm::vec3 xyzIn, glm::vec2 uvIn, uint32_t rgbaIn) : xyz(xyzIn), uv(uvIn), rgba(rgbaIn) {}
|
||||
glm::vec3 xyz;
|
||||
glm::vec2 uv;
|
||||
uint32_t rgba;
|
||||
};
|
||||
|
||||
static void createPipelines();
|
||||
|
||||
std::vector<Vertex> _vertices;
|
||||
static gpu::PipelinePointer _untexturedPipeline;
|
||||
static gpu::PipelinePointer _texturedPipeline;
|
||||
|
||||
render::ScenePointer _scene;
|
||||
NetworkTexturePointer _texture;
|
||||
};
|
||||
|
||||
|
|
20
libraries/entities-renderer/src/textured_particle.slf
Normal file
|
@ -0,0 +1,20 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
// fragment shader
|
||||
//
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
uniform sampler2D colorMap;
|
||||
|
||||
varying vec4 varColor;
|
||||
varying vec2 varTexCoord;
|
||||
|
||||
void main(void) {
|
||||
vec4 color = texture2D(colorMap, varTexCoord);
|
||||
gl_FragColor = color * varColor;
|
||||
}
|
28
libraries/entities-renderer/src/textured_particle.slv
Normal file
|
@ -0,0 +1,28 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// particle vertex shader
|
||||
//
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include gpu/Transform.slh@>
|
||||
|
||||
<$declareStandardTransform()$>
|
||||
|
||||
varying vec4 varColor;
|
||||
varying vec2 varTexCoord;
|
||||
|
||||
void main(void) {
|
||||
// pass along the color & uvs to fragment shader
|
||||
varColor = gl_Color;
|
||||
varTexCoord = gl_MultiTexCoord0.xy;
|
||||
|
||||
TransformCamera cam = getTransformCamera();
|
||||
TransformObject obj = getTransformObject();
|
||||
<$transformModelToClipPos(cam, obj, gl_Vertex, gl_Position)$>
|
||||
}
|
16
libraries/entities-renderer/src/untextured_particle.slf
Normal file
|
@ -0,0 +1,16 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
// fragment shader
|
||||
//
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
varying vec4 varColor;
|
||||
|
||||
void main(void) {
|
||||
gl_FragColor = varColor;
|
||||
}
|
24
libraries/entities-renderer/src/untextured_particle.slv
Normal file
|
@ -0,0 +1,24 @@
|
|||
<@include gpu/Config.slh@>
|
||||
<$VERSION_HEADER$>
|
||||
// Generated on <$_SCRIBE_DATE$>
|
||||
//
|
||||
// particle vertex shader
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
<@include gpu/Transform.slh@>
|
||||
|
||||
<$declareStandardTransform()$>
|
||||
|
||||
varying vec4 varColor;
|
||||
|
||||
void main(void) {
|
||||
// pass along the diffuse color
|
||||
varColor = gl_Color;
|
||||
|
||||
TransformCamera cam = getTransformCamera();
|
||||
TransformObject obj = getTransformObject();
|
||||
<$transformModelToClipPos(cam, obj, gl_Vertex, gl_Position)$>
|
||||
}
|
|
@ -39,6 +39,7 @@
|
|||
#include "EntityTree.h"
|
||||
#include "EntityTreeElement.h"
|
||||
#include "EntitiesLogging.h"
|
||||
#include "EntityScriptingInterface.h"
|
||||
#include "ParticleEffectEntityItem.h"
|
||||
|
||||
const xColor ParticleEffectEntityItem::DEFAULT_COLOR = { 255, 255, 255 };
|
||||
|
@ -92,6 +93,75 @@ ParticleEffectEntityItem::ParticleEffectEntityItem(const EntityItemID& entityIte
|
|||
ParticleEffectEntityItem::~ParticleEffectEntityItem() {
|
||||
}
|
||||
|
||||
void ParticleEffectEntityItem::setDimensions(const glm::vec3& value) {
|
||||
computeAndUpdateDimensions();
|
||||
}
|
||||
|
||||
void ParticleEffectEntityItem::setLifespan(float lifespan) {
|
||||
_lifespan = lifespan;
|
||||
computeAndUpdateDimensions();
|
||||
}
|
||||
|
||||
void ParticleEffectEntityItem::setEmitDirection(glm::vec3 emitDirection) {
|
||||
_emitDirection = glm::normalize(emitDirection);
|
||||
computeAndUpdateDimensions();
|
||||
}
|
||||
|
||||
void ParticleEffectEntityItem::setEmitStrength(float emitStrength) {
|
||||
_emitStrength = emitStrength;
|
||||
computeAndUpdateDimensions();
|
||||
}
|
||||
|
||||
void ParticleEffectEntityItem::setLocalGravity(float localGravity) {
|
||||
_localGravity = localGravity;
|
||||
computeAndUpdateDimensions();
|
||||
}
|
||||
|
||||
void ParticleEffectEntityItem::setParticleRadius(float particleRadius) {
|
||||
_particleRadius = particleRadius;
|
||||
computeAndUpdateDimensions();
|
||||
}
|
||||
|
||||
void ParticleEffectEntityItem::computeAndUpdateDimensions() {
|
||||
|
||||
const float t = _lifespan * 1.1f; // add 10% extra time, to account for incremental timer accumulation error.
|
||||
const float MAX_RANDOM_FACTOR = (0.5f * 0.25f);
|
||||
const float maxOffset = (MAX_RANDOM_FACTOR * _emitStrength) + _particleRadius;
|
||||
|
||||
// bounds for x and z are easy to compute because there is no at^2 term.
|
||||
float xMax = (_emitDirection.x * _emitStrength + maxOffset) * t;
|
||||
float xMin = (_emitDirection.x * _emitStrength - maxOffset) * t;
|
||||
|
||||
float zMax = (_emitDirection.z * _emitStrength + maxOffset) * t;
|
||||
float zMin = (_emitDirection.z * _emitStrength - maxOffset) * t;
|
||||
|
||||
// yEnd is where the particle will end.
|
||||
float a = _localGravity;
|
||||
float atSquared = a * t * t;
|
||||
float v = _emitDirection.y * _emitStrength + maxOffset;
|
||||
float vt = v * t;
|
||||
float yEnd = 0.5f * atSquared + vt;
|
||||
|
||||
// yApex is where the particle is at its apex.
|
||||
float yApexT = (-v / a);
|
||||
float yApex = 0.0f;
|
||||
|
||||
// only set apex if it's within the lifespan of the particle.
|
||||
if (yApexT >= 0.0f && yApexT <= t) {
|
||||
yApex = -(v * v) / (2.0f * a);
|
||||
}
|
||||
|
||||
float yMax = std::max(yApex, yEnd);
|
||||
float yMin = std::min(yApex, yEnd);
|
||||
|
||||
// times 2 because dimensions are diameters not radii.
|
||||
glm::vec3 dims(2.0f * std::max(fabs(xMin), fabs(xMax)),
|
||||
2.0f * std::max(fabs(yMin), fabs(yMax)),
|
||||
2.0f * std::max(fabs(zMin), fabs(zMax)));
|
||||
|
||||
EntityItem::setDimensions(dims);
|
||||
}
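
The vertical extent computed above is plain constant-acceleration kinematics. With initial vertical speed v = _emitDirection.y * _emitStrength + maxOffset and acceleration a = _localGravity, the height over time is

    y(t) = \tfrac{1}{2} a t^{2} + v t

so yEnd is y(t) evaluated at t = 1.1 \cdot \_lifespan, and the apex, which is only used when 0 \le t_{\text{apex}} \le t, is

    t_{\text{apex}} = -\frac{v}{a}, \qquad y_{\text{apex}} = y(t_{\text{apex}}) = -\frac{v^{2}}{2a}

The reported y dimension is then 2 \cdot \max(|y_{\text{apex}}|, |y_{\text{end}}|), since dimensions are diameters rather than radii.
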
|
||||
|
||||
EntityItemProperties ParticleEffectEntityItem::getProperties() const {
|
||||
EntityItemProperties properties = EntityItem::getProperties(); // get the properties from our base class
|
||||
|
||||
|
@ -245,7 +315,7 @@ bool ParticleEffectEntityItem::isAnimatingSomething() const {
|
|||
}
|
||||
|
||||
bool ParticleEffectEntityItem::needsToCallUpdate() const {
|
||||
return isAnimatingSomething() ? true : EntityItem::needsToCallUpdate();
|
||||
return true;
|
||||
}
|
||||
|
||||
void ParticleEffectEntityItem::update(const quint64& now) {
|
||||
|
@ -260,13 +330,6 @@ void ParticleEffectEntityItem::update(const quint64& now) {
|
|||
|
||||
if (isAnimatingSomething()) {
|
||||
stepSimulation(deltaTime);
|
||||
|
||||
// update the dimensions
|
||||
glm::vec3 dims;
|
||||
dims.x = glm::max(glm::abs(_particleMinBound.x), glm::abs(_particleMaxBound.x)) * 2.0f;
|
||||
dims.y = glm::max(glm::abs(_particleMinBound.y), glm::abs(_particleMaxBound.y)) * 2.0f;
|
||||
dims.z = glm::max(glm::abs(_particleMinBound.z), glm::abs(_particleMaxBound.z)) * 2.0f;
|
||||
setDimensions(dims);
|
||||
}
|
||||
|
||||
EntityItem::update(now); // let our base class handle it's updates...
|
||||
|
@ -319,7 +382,7 @@ void ParticleEffectEntityItem::setAnimationSettings(const QString& value) {
|
|||
qCDebug(entities) << "ParticleEffectEntityItem::setAnimationSettings() calling setAnimationFrameIndex()...";
|
||||
qCDebug(entities) << " settings:" << value;
|
||||
qCDebug(entities) << " settingsMap[frameIndex]:" << settingsMap["frameIndex"];
|
||||
qCDebug(entities" frameIndex: %20.5f", frameIndex);
|
||||
qCDebug(entities, " frameIndex: %20.5f", frameIndex);
|
||||
}
|
||||
#endif
|
||||
|
||||
|
|
|
@ -86,12 +86,14 @@ public:
|
|||
void setAnimationLastFrame(float lastFrame) { _animationLoop.setLastFrame(lastFrame); }
|
||||
float getAnimationLastFrame() const { return _animationLoop.getLastFrame(); }
|
||||
|
||||
virtual void setDimensions(const glm::vec3& value) override;
|
||||
|
||||
static const quint32 DEFAULT_MAX_PARTICLES;
|
||||
void setMaxParticles(quint32 maxParticles);
|
||||
quint32 getMaxParticles() const { return _maxParticles; }
|
||||
|
||||
static const float DEFAULT_LIFESPAN;
|
||||
void setLifespan(float lifespan) { _lifespan = lifespan; }
|
||||
void setLifespan(float lifespan);
|
||||
float getLifespan() const { return _lifespan; }
|
||||
|
||||
static const float DEFAULT_EMIT_RATE;
|
||||
|
@ -99,21 +101,23 @@ public:
|
|||
float getEmitRate() const { return _emitRate; }
|
||||
|
||||
static const glm::vec3 DEFAULT_EMIT_DIRECTION;
|
||||
void setEmitDirection(glm::vec3 emitDirection) { _emitDirection = glm::normalize(emitDirection); }
|
||||
void setEmitDirection(glm::vec3 emitDirection);
|
||||
const glm::vec3& getEmitDirection() const { return _emitDirection; }
|
||||
|
||||
static const float DEFAULT_EMIT_STRENGTH;
|
||||
void setEmitStrength(float emitStrength) { _emitStrength = emitStrength; }
|
||||
void setEmitStrength(float emitStrength);
|
||||
float getEmitStrength() const { return _emitStrength; }
|
||||
|
||||
static const float DEFAULT_LOCAL_GRAVITY;
|
||||
void setLocalGravity(float localGravity) { _localGravity = localGravity; }
|
||||
void setLocalGravity(float localGravity);
|
||||
float getLocalGravity() const { return _localGravity; }
|
||||
|
||||
static const float DEFAULT_PARTICLE_RADIUS;
|
||||
void setParticleRadius(float particleRadius) { _particleRadius = particleRadius; }
|
||||
void setParticleRadius(float particleRadius);
|
||||
float getParticleRadius() const { return _particleRadius; }
|
||||
|
||||
void computeAndUpdateDimensions();
|
||||
|
||||
bool getAnimationIsPlaying() const { return _animationLoop.isRunning(); }
|
||||
float getAnimationFrameIndex() const { return _animationLoop.getFrameIndex(); }
|
||||
float getAnimationFPS() const { return _animationLoop.getFPS(); }
|
||||
|
|
|
@ -194,7 +194,10 @@ void OBJFace::addFrom(const OBJFace* face, int index) { // add using data from f
|
|||
}
|
||||
|
||||
bool OBJReader::isValidTexture(const QByteArray &filename) {
|
||||
QUrl candidateUrl = url->resolved(QUrl(filename));
|
||||
if (!_url) {
|
||||
return false;
|
||||
}
|
||||
QUrl candidateUrl = _url->resolved(QUrl(filename));
|
||||
QNetworkReply *netReply = request(candidateUrl, true);
|
||||
bool isValid = netReply->isFinished() && (netReply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt() == 200);
|
||||
netReply->deleteLater();
|
||||
|
@ -241,7 +244,7 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
|
|||
} else if ((token == "map_Kd") || (token == "map_Ks")) {
|
||||
QByteArray filename = QUrl(tokenizer.getLineAsDatum()).fileName().toUtf8();
|
||||
if (filename.endsWith(".tga")) {
|
||||
qCDebug(modelformat) << "OBJ Reader WARNING: currently ignoring tga texture " << filename << " in " << url;
|
||||
qCDebug(modelformat) << "OBJ Reader WARNING: currently ignoring tga texture " << filename << " in " << _url;
|
||||
break;
|
||||
}
|
||||
if (isValidTexture(filename)) {
|
||||
|
@ -251,7 +254,7 @@ void OBJReader::parseMaterialLibrary(QIODevice* device) {
|
|||
currentMaterial.specularTextureFilename = filename;
|
||||
}
|
||||
} else {
|
||||
qCDebug(modelformat) << "OBJ Reader WARNING: " << url << " ignoring missing texture " << filename;
|
||||
qCDebug(modelformat) << "OBJ Reader WARNING: " << _url << " ignoring missing texture " << filename;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -315,7 +318,7 @@ bool OBJReader::parseOBJGroup(OBJTokenizer& tokenizer, const QVariantHash& mappi
|
|||
QByteArray groupName = tokenizer.getDatum();
|
||||
currentGroup = groupName;
|
||||
//qCDebug(modelformat) << "new group:" << groupName;
|
||||
} else if (token == "mtllib") {
|
||||
} else if (token == "mtllib" && _url) {
|
||||
if (tokenizer.nextToken() != OBJTokenizer::DATUM_TOKEN) {
|
||||
break;
|
||||
}
|
||||
|
@ -324,13 +327,15 @@ bool OBJReader::parseOBJGroup(OBJTokenizer& tokenizer, const QVariantHash& mappi
|
|||
break; // Some files use mtllib over and over again for the same libraryName
|
||||
}
|
||||
librariesSeen[libraryName] = true;
|
||||
QUrl libraryUrl = url->resolved(QUrl(libraryName).fileName()); // Throw away any path part of libraryName, and merge against original url.
|
||||
// Throw away any path part of libraryName, and merge against original url.
|
||||
QUrl libraryUrl = _url->resolved(QUrl(libraryName).fileName());
|
||||
qCDebug(modelformat) << "OBJ Reader new library:" << libraryName << " at:" << libraryUrl;
|
||||
QNetworkReply* netReply = request(libraryUrl, false);
|
||||
if (netReply->isFinished() && (netReply->attribute(QNetworkRequest::HttpStatusCodeAttribute).toInt() == 200)) {
|
||||
parseMaterialLibrary(netReply);
|
||||
} else {
|
||||
qCDebug(modelformat) << "OBJ Reader " << libraryName << " did not answer. Got " << netReply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString();
|
||||
qCDebug(modelformat) << "OBJ Reader " << libraryName << " did not answer. Got "
|
||||
<< netReply->attribute(QNetworkRequest::HttpReasonPhraseAttribute).toString();
|
||||
}
|
||||
netReply->deleteLater();
|
||||
} else if (token == "usemtl") {
|
||||
|
@ -405,10 +410,10 @@ FBXGeometry OBJReader::readOBJ(QIODevice* device, const QVariantHash& mapping, Q
|
|||
OBJTokenizer tokenizer(device);
|
||||
float scaleGuess = 1.0f;
|
||||
|
||||
this->url = url;
|
||||
_url = url;
|
||||
geometry.meshExtents.reset();
|
||||
geometry.meshes.append(FBXMesh());
|
||||
|
||||
|
||||
try {
|
||||
// call parseOBJGroup as long as it's returning true. Each successful call will
|
||||
// add a new meshPart to the geometry's single mesh.
|
||||
|
@ -416,7 +421,7 @@ FBXGeometry OBJReader::readOBJ(QIODevice* device, const QVariantHash& mapping, Q
|
|||
|
||||
FBXMesh& mesh = geometry.meshes[0];
|
||||
mesh.meshIndex = 0;
|
||||
|
||||
|
||||
geometry.joints.resize(1);
|
||||
geometry.joints[0].isFree = false;
|
||||
geometry.joints[0].parentIndex = -1;
|
||||
|
@ -437,37 +442,44 @@ FBXGeometry OBJReader::readOBJ(QIODevice* device, const QVariantHash& mapping, Q
|
|||
0, 0, 1, 0,
|
||||
0, 0, 0, 1);
|
||||
mesh.clusters.append(cluster);
|
||||
|
||||
// Some .obj files use the convention that a group with uv coordinates that doesn't define a material, should use a texture with the same basename as the .obj file.
|
||||
QString filename = url->fileName();
|
||||
int extIndex = filename.lastIndexOf('.'); // by construction, this does not fail
|
||||
QString basename = filename.remove(extIndex + 1, sizeof("obj"));
|
||||
OBJMaterial& preDefinedMaterial = materials[SMART_DEFAULT_MATERIAL_NAME];
|
||||
preDefinedMaterial.diffuseColor = glm::vec3(1.0f);
|
||||
QVector<QByteArray> extensions = {"jpg", "jpeg", "png", "tga"};
|
||||
QByteArray base = basename.toUtf8(), textName = "";
|
||||
for (int i = 0; i < extensions.count(); i++) {
|
||||
QByteArray candidateString = base + extensions[i];
|
||||
if (isValidTexture(candidateString)) {
|
||||
textName = candidateString;
|
||||
break;
|
||||
|
||||
// Some .obj files use the convention that a group with uv coordinates that doesn't define a material, should use
|
||||
// a texture with the same basename as the .obj file.
|
||||
if (url) {
|
||||
QString filename = url->fileName();
|
||||
int extIndex = filename.lastIndexOf('.'); // by construction, this does not fail
|
||||
QString basename = filename.remove(extIndex + 1, sizeof("obj"));
|
||||
OBJMaterial& preDefinedMaterial = materials[SMART_DEFAULT_MATERIAL_NAME];
|
||||
preDefinedMaterial.diffuseColor = glm::vec3(1.0f);
|
||||
QVector<QByteArray> extensions = {"jpg", "jpeg", "png", "tga"};
|
||||
QByteArray base = basename.toUtf8(), textName = "";
|
||||
for (int i = 0; i < extensions.count(); i++) {
|
||||
QByteArray candidateString = base + extensions[i];
|
||||
if (isValidTexture(candidateString)) {
|
||||
textName = candidateString;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!textName.isEmpty()) {
|
||||
preDefinedMaterial.diffuseTextureFilename = textName;
|
||||
}
|
||||
materials[SMART_DEFAULT_MATERIAL_NAME] = preDefinedMaterial;
|
||||
}
|
||||
if (!textName.isEmpty()) {
|
||||
preDefinedMaterial.diffuseTextureFilename = textName;
|
||||
}
|
||||
materials[SMART_DEFAULT_MATERIAL_NAME] = preDefinedMaterial;
|
||||
|
||||
|
||||
for (int i = 0, meshPartCount = 0; i < mesh.parts.count(); i++, meshPartCount++) {
|
||||
FBXMeshPart& meshPart = mesh.parts[i];
|
||||
FaceGroup faceGroup = faceGroups[meshPartCount];
|
||||
OBJFace leadFace = faceGroup[0]; // All the faces in the same group will have the same name and material.
|
||||
QString groupMaterialName = leadFace.materialName;
|
||||
if (groupMaterialName.isEmpty() && (leadFace.textureUVIndices.count() > 0)) {
|
||||
qCDebug(modelformat) << "OBJ Reader WARNING: " << url << " needs a texture that isn't specified. Using default mechanism.";
|
||||
qCDebug(modelformat) << "OBJ Reader WARNING: " << url
|
||||
<< " needs a texture that isn't specified. Using default mechanism.";
|
||||
groupMaterialName = SMART_DEFAULT_MATERIAL_NAME;
|
||||
} else if (!groupMaterialName.isEmpty() && !materials.contains(groupMaterialName)) {
|
||||
qCDebug(modelformat) << "OBJ Reader WARNING: " << url << " specifies a material " << groupMaterialName << " that is not defined. Using default mechanism.";
|
||||
qCDebug(modelformat) << "OBJ Reader WARNING: " << url
|
||||
<< " specifies a material " << groupMaterialName
|
||||
<< " that is not defined. Using default mechanism.";
|
||||
groupMaterialName = SMART_DEFAULT_MATERIAL_NAME;
|
||||
}
|
||||
if (!groupMaterialName.isEmpty()) {
|
||||
|
|
|
@ -69,12 +69,13 @@ public:
|
|||
QVector<FaceGroup> faceGroups;
|
||||
QString currentMaterialName;
|
||||
QHash<QString, OBJMaterial> materials;
|
||||
QUrl* url;
|
||||
|
||||
|
||||
QNetworkReply* request(QUrl& url, bool isTest);
|
||||
FBXGeometry readOBJ(const QByteArray& model, const QVariantHash& mapping);
|
||||
FBXGeometry readOBJ(QIODevice* device, const QVariantHash& mapping, QUrl* url);
|
||||
private:
|
||||
QUrl* _url = nullptr;
|
||||
|
||||
QHash<QByteArray, bool> librariesSeen;
|
||||
bool parseOBJGroup(OBJTokenizer& tokenizer, const QVariantHash& mapping, FBXGeometry& geometry, float& scaleGuess);
|
||||
void parseMaterialLibrary(QIODevice* device);
|
||||
|
@ -83,4 +84,4 @@ private:
|
|||
|
||||
// What are these utilities doing here? One is used by fbx loading code in VHACD Utils, and the other a general debugging utility.
|
||||
void setMeshPartDefaults(FBXMeshPart& meshPart, QString materialID);
|
||||
void fbxDebugDump(const FBXGeometry& fbxgeo);
|
||||
void fbxDebugDump(const FBXGeometry& fbxgeo);
|
||||
|
|
255
libraries/render-utils/src/AmbientOcclusionEffect.cpp
Normal file
|
@ -0,0 +1,255 @@
//
// AmbientOcclusionEffect.cpp
// libraries/render-utils/src/
//
// Created by Niraj Venkat on 7/15/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

// include this before QOpenGLFramebufferObject, which includes an earlier version of OpenGL
#include <gpu/GPUConfig.h>

#include <gpu/GLBackend.h>

#include <glm/gtc/random.hpp>

#include <PathUtils.h>
#include <SharedUtil.h>

#include "gpu/StandardShaderLib.h"
#include "AmbientOcclusionEffect.h"
#include "TextureCache.h"
#include "FramebufferCache.h"
#include "DependencyManager.h"
#include "ViewFrustum.h"
#include "GeometryCache.h"

#include "ambient_occlusion_vert.h"
#include "ambient_occlusion_frag.h"
#include "gaussian_blur_vertical_vert.h"
#include "gaussian_blur_horizontal_vert.h"
#include "gaussian_blur_frag.h"
#include "occlusion_blend_frag.h"


AmbientOcclusion::AmbientOcclusion() {
}

const gpu::PipelinePointer& AmbientOcclusion::getOcclusionPipeline() {
    if (!_occlusionPipeline) {
        auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(ambient_occlusion_vert)));
        auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(ambient_occlusion_frag)));
        gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));

        gpu::Shader::BindingSet slotBindings;
        slotBindings.insert(gpu::Shader::Binding(std::string("depthTexture"), 0));
        slotBindings.insert(gpu::Shader::Binding(std::string("normalTexture"), 1));

        gpu::Shader::makeProgram(*program, slotBindings);

        _gScaleLoc = program->getUniforms().findLocation("g_scale");
        _gBiasLoc = program->getUniforms().findLocation("g_bias");
        _gSampleRadiusLoc = program->getUniforms().findLocation("g_sample_rad");
        _gIntensityLoc = program->getUniforms().findLocation("g_intensity");
        _bufferWidthLoc = program->getUniforms().findLocation("bufferWidth");
        _bufferHeightLoc = program->getUniforms().findLocation("bufferHeight");

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());

        state->setDepthTest(false, false, gpu::LESS_EQUAL);

        // Blend on transparent
        state->setBlendFunction(false,
            gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
            gpu::State::DEST_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ZERO);

        // Link the occlusion FBO to texture
        _occlusionBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create(gpu::Element::COLOR_RGBA_32,
            DependencyManager::get<FramebufferCache>()->getFrameBufferSize().width(), DependencyManager::get<FramebufferCache>()->getFrameBufferSize().height()));
        auto format = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
        auto width = _occlusionBuffer->getWidth();
        auto height = _occlusionBuffer->getHeight();
        auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT);
        _occlusionTexture = gpu::TexturePointer(gpu::Texture::create2D(format, width, height, defaultSampler));

        // Good to go add the brand new pipeline
        _occlusionPipeline.reset(gpu::Pipeline::create(program, state));
    }
    return _occlusionPipeline;
}

const gpu::PipelinePointer& AmbientOcclusion::getVBlurPipeline() {
    if (!_vBlurPipeline) {
        auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(gaussian_blur_vertical_vert)));
        auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(gaussian_blur_frag)));
        gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));

        gpu::Shader::BindingSet slotBindings;
        gpu::Shader::makeProgram(*program, slotBindings);

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());

        state->setDepthTest(false, false, gpu::LESS_EQUAL);

        // Blend on transparent
        state->setBlendFunction(false,
            gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
            gpu::State::DEST_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ZERO);

        // Link the vertical blur FBO to texture
        _vBlurBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create(gpu::Element::COLOR_RGBA_32,
            DependencyManager::get<FramebufferCache>()->getFrameBufferSize().width(), DependencyManager::get<FramebufferCache>()->getFrameBufferSize().height()));
        auto format = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
        auto width = _vBlurBuffer->getWidth();
        auto height = _vBlurBuffer->getHeight();
        auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT);
        _vBlurTexture = gpu::TexturePointer(gpu::Texture::create2D(format, width, height, defaultSampler));

        // Good to go add the brand new pipeline
        _vBlurPipeline.reset(gpu::Pipeline::create(program, state));
    }
    return _vBlurPipeline;
}

const gpu::PipelinePointer& AmbientOcclusion::getHBlurPipeline() {
    if (!_hBlurPipeline) {
        auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(gaussian_blur_horizontal_vert)));
        auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(gaussian_blur_frag)));
        gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));

        gpu::Shader::BindingSet slotBindings;
        gpu::Shader::makeProgram(*program, slotBindings);

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());

        state->setDepthTest(false, false, gpu::LESS_EQUAL);

        // Blend on transparent
        state->setBlendFunction(false,
            gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
            gpu::State::DEST_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ZERO);

        // Link the horizontal blur FBO to texture
        _hBlurBuffer = gpu::FramebufferPointer(gpu::Framebuffer::create(gpu::Element::COLOR_RGBA_32,
            DependencyManager::get<FramebufferCache>()->getFrameBufferSize().width(), DependencyManager::get<FramebufferCache>()->getFrameBufferSize().height()));
        auto format = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
        auto width = _hBlurBuffer->getWidth();
        auto height = _hBlurBuffer->getHeight();
        auto defaultSampler = gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT);
        _hBlurTexture = gpu::TexturePointer(gpu::Texture::create2D(format, width, height, defaultSampler));

        // Good to go add the brand new pipeline
        _hBlurPipeline.reset(gpu::Pipeline::create(program, state));
    }
    return _hBlurPipeline;
}

const gpu::PipelinePointer& AmbientOcclusion::getBlendPipeline() {
    if (!_blendPipeline) {
        auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(ambient_occlusion_vert)));
        auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(occlusion_blend_frag)));
        gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));

        gpu::Shader::BindingSet slotBindings;
        slotBindings.insert(gpu::Shader::Binding(std::string("blurredOcclusionTexture"), 0));

        gpu::Shader::makeProgram(*program, slotBindings);

        gpu::StatePointer state = gpu::StatePointer(new gpu::State());

        state->setDepthTest(false, false, gpu::LESS_EQUAL);

        // Blend on transparent
        state->setBlendFunction(true,
            gpu::State::SRC_COLOR, gpu::State::BLEND_OP_ADD, gpu::State::DEST_COLOR);

        // Good to go add the brand new pipeline
        _blendPipeline.reset(gpu::Pipeline::create(program, state));
    }
    return _blendPipeline;
}

void AmbientOcclusion::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext) {
    assert(renderContext->args);
    assert(renderContext->args->_viewFrustum);
    RenderArgs* args = renderContext->args;
    auto& scene = sceneContext->_scene;

    gpu::Batch batch;

    glm::mat4 projMat;
    Transform viewMat;
    args->_viewFrustum->evalProjectionMatrix(projMat);
    args->_viewFrustum->evalViewTransform(viewMat);
    batch.setProjectionTransform(projMat);
    batch.setViewTransform(viewMat);
    batch.setModelTransform(Transform());

    // Occlusion step
    getOcclusionPipeline();
    batch.setResourceTexture(0, DependencyManager::get<FramebufferCache>()->getPrimaryDepthTexture());
    batch.setResourceTexture(1, DependencyManager::get<FramebufferCache>()->getPrimaryNormalTexture());
    _occlusionBuffer->setRenderBuffer(0, _occlusionTexture);
    batch.setFramebuffer(_occlusionBuffer);

    // Occlusion uniforms
    g_scale = 1.0f;
    g_bias = 1.0f;
    g_sample_rad = 1.0f;
    g_intensity = 1.0f;

    // Bind the first gpu::Pipeline we need - for calculating occlusion buffer
    batch.setPipeline(getOcclusionPipeline());
    batch._glUniform1f(_gScaleLoc, g_scale);
    batch._glUniform1f(_gBiasLoc, g_bias);
    batch._glUniform1f(_gSampleRadiusLoc, g_sample_rad);
    batch._glUniform1f(_gIntensityLoc, g_intensity);
    batch._glUniform1f(_bufferWidthLoc, DependencyManager::get<FramebufferCache>()->getFrameBufferSize().width());
    batch._glUniform1f(_bufferHeightLoc, DependencyManager::get<FramebufferCache>()->getFrameBufferSize().height());

    glm::vec4 color(0.0f, 0.0f, 0.0f, 1.0f);
    glm::vec2 bottomLeft(-1.0f, -1.0f);
    glm::vec2 topRight(1.0f, 1.0f);
    glm::vec2 texCoordTopLeft(0.0f, 0.0f);
    glm::vec2 texCoordBottomRight(1.0f, 1.0f);
    DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color);

    // Vertical blur step
    getVBlurPipeline();
    batch.setResourceTexture(0, _occlusionTexture);
    _vBlurBuffer->setRenderBuffer(0, _vBlurTexture);
    batch.setFramebuffer(_vBlurBuffer);

    // Bind the second gpu::Pipeline we need - for calculating blur buffer
    batch.setPipeline(getVBlurPipeline());

    DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color);

    // Horizontal blur step
    getHBlurPipeline();
    batch.setResourceTexture(0, _vBlurTexture);
    _hBlurBuffer->setRenderBuffer(0, _hBlurTexture);
    batch.setFramebuffer(_hBlurBuffer);

    // Bind the third gpu::Pipeline we need - for calculating blur buffer
    batch.setPipeline(getHBlurPipeline());

    DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color);

    // Blend step
    getBlendPipeline();
    batch.setResourceTexture(0, _hBlurTexture);
    batch.setFramebuffer(DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer());

    // Bind the fourth gpu::Pipeline we need - for blending the primary color buffer with blurred occlusion texture
    batch.setPipeline(getBlendPipeline());

    DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, texCoordTopLeft, texCoordBottomRight, color);

    // Ready to render
    args->_context->syncCache();
    args->_context->render(batch);
}
61
libraries/render-utils/src/AmbientOcclusionEffect.h
Normal file
@@ -0,0 +1,61 @@
//
// AmbientOcclusionEffect.h
// libraries/render-utils/src/
//
// Created by Niraj Venkat on 7/15/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_AmbientOcclusionEffect_h
#define hifi_AmbientOcclusionEffect_h

#include <DependencyManager.h>

#include "render/DrawTask.h"

class AmbientOcclusion {
public:

    AmbientOcclusion();

    void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
    typedef render::Job::Model<AmbientOcclusion> JobModel;

    const gpu::PipelinePointer& getOcclusionPipeline();
    const gpu::PipelinePointer& getHBlurPipeline();
    const gpu::PipelinePointer& getVBlurPipeline();
    const gpu::PipelinePointer& getBlendPipeline();

private:

    // Uniforms for AO
    gpu::int32 _gScaleLoc;
    gpu::int32 _gBiasLoc;
    gpu::int32 _gSampleRadiusLoc;
    gpu::int32 _gIntensityLoc;
    gpu::int32 _bufferWidthLoc;
    gpu::int32 _bufferHeightLoc;
    float g_scale;
    float g_bias;
    float g_sample_rad;
    float g_intensity;

    gpu::PipelinePointer _occlusionPipeline;
    gpu::PipelinePointer _hBlurPipeline;
    gpu::PipelinePointer _vBlurPipeline;
    gpu::PipelinePointer _blendPipeline;

    gpu::FramebufferPointer _occlusionBuffer;
    gpu::FramebufferPointer _hBlurBuffer;
    gpu::FramebufferPointer _vBlurBuffer;

    gpu::TexturePointer _occlusionTexture;
    gpu::TexturePointer _hBlurTexture;
    gpu::TexturePointer _vBlurTexture;

};

#endif // hifi_AmbientOcclusionEffect_h
@@ -1325,12 +1325,16 @@ void Model::simulate(float deltaTime, bool fullUpdate) {
    }
}

//virtual
void Model::updateRig(float deltaTime, glm::mat4 parentTransform) {
    _rig->updateAnimations(deltaTime, parentTransform);
}
void Model::simulateInternal(float deltaTime) {
    // update the world space transforms for all joints

    const FBXGeometry& geometry = _geometry->getFBXGeometry();
    glm::mat4 parentTransform = glm::scale(_scale) * glm::translate(_offset) * geometry.offset;
    _rig->simulateInternal(deltaTime, parentTransform, getTranslation(), getRotation());
    updateRig(deltaTime, parentTransform);

    glm::mat4 modelToWorld = glm::mat4_cast(_rotation);
    for (int i = 0; i < _meshStates.size(); i++) {
@@ -269,6 +269,7 @@
    void snapToRegistrationPoint();

    void simulateInternal(float deltaTime);
    virtual void updateRig(float deltaTime, glm::mat4 parentTransform);

    /// Updates the state of the joint at the specified index.
    virtual void updateJointState(int index);
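The new virtual updateRig() hook gives Model subclasses a single place to override how the rig is advanced each frame, while Model::simulateInternal() continues to call it unconditionally. A minimal sketch of how a subclass could use the hook; the subclass name and its behavior are illustrative assumptions, not part of this changeset:

// Hypothetical subclass, for illustration only - not in this changeset.
class ProceduralModel : public Model {
protected:
    virtual void updateRig(float deltaTime, glm::mat4 parentTransform) {
        // A subclass could feed procedural data into the rig here before (or instead of)
        // the default animation update.
        Model::updateRig(deltaTime, parentTransform); // default: _rig->updateAnimations(...)
    }
};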
@@ -22,6 +22,7 @@
#include "TextureCache.h"

#include "render/DrawStatus.h"
#include "AmbientOcclusionEffect.h"

#include "overlay3D_vert.h"
#include "overlay3D_frag.h"
@@ -80,6 +81,11 @@ RenderDeferredTask::RenderDeferredTask() : Task() {
    _jobs.push_back(Job(new DrawLight::JobModel("DrawLight")));
    _jobs.push_back(Job(new RenderDeferred::JobModel("RenderDeferred")));
    _jobs.push_back(Job(new ResolveDeferred::JobModel("ResolveDeferred")));
    _jobs.push_back(Job(new AmbientOcclusion::JobModel("AmbientOcclusion")));

    _jobs.back().setEnabled(false);
    _occlusionJobIndex = _jobs.size() - 1;

    _jobs.push_back(Job(new FetchItems::JobModel("FetchTransparent",
        FetchItems(
            ItemFilter::Builder::transparentShape().withoutLayered(),
@@ -93,10 +99,13 @@ RenderDeferredTask::RenderDeferredTask() : Task() {
    _jobs.push_back(Job(new DrawTransparentDeferred::JobModel("TransparentDeferred", _jobs.back().getOutput())));

    _jobs.push_back(Job(new render::DrawStatus::JobModel("DrawStatus", renderedOpaques)));

    _jobs.back().setEnabled(false);
    _drawStatusJobIndex = _jobs.size() - 1;

    _jobs.push_back(Job(new DrawOverlay3D::JobModel("DrawOverlay3D")));

    _jobs.push_back(Job(new ResetGLState::JobModel()));

    // Give ourselves 3 frames of timer queries
@@ -125,6 +134,9 @@ void RenderDeferredTask::run(const SceneContextPointer& sceneContext, const Rend
    // Make sure we turn the displayItemStatus on/off
    setDrawItemStatus(renderContext->_drawItemStatus);

    // TODO: turn on/off AO through menu item
    setOcclusionStatus(renderContext->_occlusionStatus);

    renderContext->args->_context->syncCache();

    for (auto job : _jobs) {
@@ -82,6 +82,11 @@
    void setDrawItemStatus(bool draw) { if (_drawStatusJobIndex >= 0) { _jobs[_drawStatusJobIndex].setEnabled(draw); } }
    bool doDrawItemStatus() const { if (_drawStatusJobIndex >= 0) { return _jobs[_drawStatusJobIndex].isEnabled(); } else { return false; } }

    int _occlusionJobIndex = -1;

    void setOcclusionStatus(bool draw) { if (_occlusionJobIndex >= 0) { _jobs[_occlusionJobIndex].setEnabled(draw); } }
    bool doOcclusionStatus() const { if (_occlusionJobIndex >= 0) { return _jobs[_occlusionJobIndex].isEnabled(); } else { return false; } }

    virtual void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
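The TODO in run() notes that ambient occlusion should eventually be driven by a menu item. A minimal sketch of how application code could flip the new flag before running the task; the Menu accessor shown is an assumption based on the DebugAmbientOcclusion entry added to Menu.h further down, and only _occlusionStatus and setOcclusionStatus() come from this changeset:

// Sketch only: wiring the new flag from application code (the Menu call is assumed).
void updateRenderContext(render::RenderContextPointer renderContext) {
    bool aoEnabled = Menu::getInstance()->isOptionChecked(MenuOption::DebugAmbientOcclusion);
    renderContext->_occlusionStatus = aoEnabled; // RenderDeferredTask::run() forwards this to setOcclusionStatus()
}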
109
libraries/render-utils/src/ambient_occlusion.slf
Normal file
@@ -0,0 +1,109 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// ambient_occlusion.frag
// fragment shader
//
// Created by Niraj Venkat on 7/15/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include DeferredBufferWrite.slh@>

<@include gpu/Transform.slh@>

<$declareStandardTransform()$>

varying vec2 varTexcoord;

uniform sampler2D depthTexture;
uniform sampler2D normalTexture;

uniform float g_scale;
uniform float g_bias;
uniform float g_sample_rad;
uniform float g_intensity;
uniform float bufferWidth;
uniform float bufferHeight;

#define SAMPLE_COUNT 4

float getRandom(vec2 uv) {
    return fract(sin(dot(uv.xy, vec2(12.9898, 78.233))) * 43758.5453);
}

void main(void) {
    vec3 sampleKernel[4] = { vec3(0.2, 0.0, 0.0),
                             vec3(0.0, 0.2, 0.0),
                             vec3(0.0, 0.0, 0.2),
                             vec3(0.2, 0.2, 0.2) };

    TransformCamera cam = getTransformCamera();
    TransformObject obj = getTransformObject();

    vec3 eyeDir = vec3(0.0, 0.0, -3.0);
    vec3 cameraPositionWorldSpace;
    <$transformEyeToWorldDir(cam, eyeDir, cameraPositionWorldSpace)$>

    vec4 depthColor = texture2D(depthTexture, varTexcoord);

    // z in non linear range [0,1]
    float depthVal = depthColor.r;
    // conversion into NDC [-1,1]
    float zNDC = depthVal * 2.0 - 1.0;
    float n = 1.0; // the near plane
    float f = 30.0; // the far plane
    float l = -1.0; // left
    float r = 1.0; // right
    float b = -1.0; // bottom
    float t = 1.0; // top

    // conversion into eye space
    float zEye = 2*f*n / (zNDC*(f-n)-(f+n));
    // Converting from pixel coordinates to NDC
    float xNDC = gl_FragCoord.x/bufferWidth * 2.0 - 1.0;
    float yNDC = gl_FragCoord.y/bufferHeight * 2.0 - 1.0;
    // Unprojecting X and Y from NDC to eye space
    float xEye = -zEye*(xNDC*(r-l)+(r+l))/(2.0*n);
    float yEye = -zEye*(yNDC*(t-b)+(t+b))/(2.0*n);
    vec3 currentFragEyeSpace = vec3(xEye, yEye, zEye);
    vec3 currentFragWorldSpace;
    <$transformEyeToWorldDir(cam, currentFragEyeSpace, currentFragWorldSpace)$>

    vec3 cameraToPositionRay = normalize(currentFragWorldSpace - cameraPositionWorldSpace);
    vec3 origin = cameraToPositionRay * depthVal + cameraPositionWorldSpace;

    vec3 normal = normalize(texture2D(normalTexture, varTexcoord).xyz);
    //normal = normalize(normal * normalMatrix);

    vec3 rvec = vec3(getRandom(varTexcoord.xy), getRandom(varTexcoord.yx), getRandom(varTexcoord.xx)) * 2.0 - 1.0;
    vec3 tangent = normalize(rvec - normal * dot(rvec, normal));
    vec3 bitangent = cross(normal, tangent);
    mat3 tbn = mat3(tangent, bitangent, normal);

    float occlusion = 0.0;

    for (int i = 0; i < SAMPLE_COUNT; ++i) {
        vec3 samplePos = origin + (tbn * sampleKernel[i]) * g_sample_rad;
        vec4 offset = cam._projectionViewUntranslated * vec4(samplePos, 1.0);

        offset.xy = (offset.xy / offset.w) * 0.5 + 0.5;
        float depth = length(samplePos - cameraPositionWorldSpace);

        float sampleDepthVal = texture2D(depthTexture, offset.xy).r;

        float rangeDelta = abs(depthVal - sampleDepthVal);
        float rangeCheck = smoothstep(0.0, 1.0, g_sample_rad / rangeDelta);

        occlusion += rangeCheck * step(sampleDepthVal, depth);
    }

    occlusion = 1.0 - occlusion / float(SAMPLE_COUNT);
    occlusion = clamp(pow(occlusion, g_intensity), 0.0, 1.0);
    gl_FragColor = vec4(vec3(occlusion), 1.0);
}
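The depth reconstruction in this shader follows the standard OpenGL perspective-projection relations, but with hard-coded frustum constants (n = 1, f = 30 and a symmetric l = -1, r = 1, b = -1, t = 1) rather than the camera's actual parameters. For reference, the relations the zEye, xEye and yEye expressions above are applying are:

z_{ndc} = \frac{f+n}{f-n} + \frac{2fn}{(f-n)\,z_{eye}} \quad\Longrightarrow\quad z_{eye} = \frac{2fn}{z_{ndc}(f-n) - (f+n)}

x_{eye} = \frac{-z_{eye}\,\big(x_{ndc}(r-l) + (r+l)\big)}{2n}, \qquad y_{eye} = \frac{-z_{eye}\,\big(y_{ndc}(t-b) + (t+b)\big)}{2n}

Because the constants are fixed rather than taken from the active projection, the reconstructed positions are only approximate whenever the real near/far planes or field of view differ from these values.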
24
libraries/render-utils/src/ambient_occlusion.slv
Normal file
@@ -0,0 +1,24 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// ambient_occlusion.vert
// vertex shader
//
// Created by Niraj Venkat on 7/15/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include gpu/Transform.slh@>

<$declareStandardTransform()$>

varying vec2 varTexcoord;

void main(void) {
    varTexcoord = gl_MultiTexCoord0.xy;
    gl_Position = gl_Vertex;
}
42
libraries/render-utils/src/gaussian_blur.slf
Normal file
@@ -0,0 +1,42 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// gaussian_blur.frag
// fragment shader
//
// Created by Niraj Venkat on 7/17/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include DeferredBufferWrite.slh@>

// the interpolated normal
//varying vec4 interpolatedNormal;

varying vec2 varTexcoord;
varying vec2 varBlurTexcoords[14];

uniform sampler2D occlusionTexture;

void main(void) {
    gl_FragColor = vec4(0.0);
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[0]) * 0.0044299121055113265;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[1]) * 0.00895781211794;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[2]) * 0.0215963866053;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[3]) * 0.0443683338718;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[4]) * 0.0776744219933;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[5]) * 0.115876621105;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[6]) * 0.147308056121;
    gl_FragColor += texture2D(occlusionTexture, varTexcoord) * 0.159576912161;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[7]) * 0.147308056121;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[8]) * 0.115876621105;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[9]) * 0.0776744219933;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[10]) * 0.0443683338718;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[11]) * 0.0215963866053;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[12]) * 0.00895781211794;
    gl_FragColor += texture2D(occlusionTexture, varBlurTexcoords[13]) * 0.0044299121055113265;
}
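The fifteen hard-coded weights above sum to 1 and appear consistent with a normalized Gaussian of roughly sigma = 2.5, with the outermost taps absorbing the remaining tail. A small standalone sketch of how such a kernel can be generated; the sigma value is an assumption on my part (the shader only ships the constants), so the printed values closely match but do not exactly reproduce the outermost weights:

// gaussian_weights.cpp - standalone sketch, not part of this changeset.
#include <cmath>
#include <cstdio>
#include <vector>

std::vector<float> gaussianWeights(int radius, float sigma) {
    std::vector<float> weights(2 * radius + 1);
    float sum = 0.0f;
    for (int i = -radius; i <= radius; ++i) {
        float w = std::exp(-(i * i) / (2.0f * sigma * sigma));
        weights[i + radius] = w;
        sum += w;
    }
    for (auto& w : weights) {
        w /= sum; // normalize so the blur preserves overall brightness
    }
    return weights;
}

int main() {
    for (float w : gaussianWeights(7, 2.5f)) {
        printf("%f\n", w);
    }
    return 0;
}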
41
libraries/render-utils/src/gaussian_blur_horizontal.slv
Normal file
@@ -0,0 +1,41 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// gaussian_blur_horizontal.vert
// vertex shader
//
// Created by Niraj Venkat on 7/17/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include gpu/Transform.slh@>

<$declareStandardTransform()$>

varying vec2 varTexcoord;
varying vec2 varBlurTexcoords[14];

void main(void) {
    varTexcoord = gl_MultiTexCoord0.xy;
    gl_Position = gl_Vertex;

    varBlurTexcoords[0] = varTexcoord + vec2(-0.028, 0.0);
    varBlurTexcoords[1] = varTexcoord + vec2(-0.024, 0.0);
    varBlurTexcoords[2] = varTexcoord + vec2(-0.020, 0.0);
    varBlurTexcoords[3] = varTexcoord + vec2(-0.016, 0.0);
    varBlurTexcoords[4] = varTexcoord + vec2(-0.012, 0.0);
    varBlurTexcoords[5] = varTexcoord + vec2(-0.008, 0.0);
    varBlurTexcoords[6] = varTexcoord + vec2(-0.004, 0.0);
    varBlurTexcoords[7] = varTexcoord + vec2(0.004, 0.0);
    varBlurTexcoords[8] = varTexcoord + vec2(0.008, 0.0);
    varBlurTexcoords[9] = varTexcoord + vec2(0.012, 0.0);
    varBlurTexcoords[10] = varTexcoord + vec2(0.016, 0.0);
    varBlurTexcoords[11] = varTexcoord + vec2(0.020, 0.0);
    varBlurTexcoords[12] = varTexcoord + vec2(0.024, 0.0);
    varBlurTexcoords[13] = varTexcoord + vec2(0.028, 0.0);
}
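Note that the tap offsets here are expressed directly in UV space with a fixed 0.004 spacing, so the blur covers a fixed fraction of the framebuffer rather than a fixed number of pixels: on a 1000-pixel-wide buffer each step is 4 pixels, on a 2000-pixel buffer it is 8. A pixel-based spacing would instead be derived from the buffer size, e.g. (a sketch, not part of the changeset):

// Sketch: converting a desired per-tap spacing in pixels into the UV offset used above.
float texelSpacingU(float pixelSpacing, float bufferWidth) {
    return pixelSpacing / bufferWidth; // 4 px on a 1000 px wide buffer -> 0.004
}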
41
libraries/render-utils/src/gaussian_blur_vertical.slv
Normal file
@@ -0,0 +1,41 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// gaussian_blur_vertical.vert
// vertex shader
//
// Created by Niraj Venkat on 7/17/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include gpu/Transform.slh@>

<$declareStandardTransform()$>

varying vec2 varTexcoord;
varying vec2 varBlurTexcoords[14];

void main(void) {
    varTexcoord = gl_MultiTexCoord0.xy;
    gl_Position = gl_Vertex;

    varBlurTexcoords[0] = varTexcoord + vec2(0.0, -0.028);
    varBlurTexcoords[1] = varTexcoord + vec2(0.0, -0.024);
    varBlurTexcoords[2] = varTexcoord + vec2(0.0, -0.020);
    varBlurTexcoords[3] = varTexcoord + vec2(0.0, -0.016);
    varBlurTexcoords[4] = varTexcoord + vec2(0.0, -0.012);
    varBlurTexcoords[5] = varTexcoord + vec2(0.0, -0.008);
    varBlurTexcoords[6] = varTexcoord + vec2(0.0, -0.004);
    varBlurTexcoords[7] = varTexcoord + vec2(0.0, 0.004);
    varBlurTexcoords[8] = varTexcoord + vec2(0.0, 0.008);
    varBlurTexcoords[9] = varTexcoord + vec2(0.0, 0.012);
    varBlurTexcoords[10] = varTexcoord + vec2(0.0, 0.016);
    varBlurTexcoords[11] = varTexcoord + vec2(0.0, 0.020);
    varBlurTexcoords[12] = varTexcoord + vec2(0.0, 0.024);
    varBlurTexcoords[13] = varTexcoord + vec2(0.0, 0.028);
}
29
libraries/render-utils/src/occlusion_blend.slf
Normal file
@@ -0,0 +1,29 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// occlusion_blend.frag
// fragment shader
//
// Created by Niraj Venkat on 7/20/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

<@include DeferredBufferWrite.slh@>

varying vec2 varTexcoord;

uniform sampler2D blurredOcclusionTexture;

void main(void) {
    vec4 occlusionColor = texture2D(blurredOcclusionTexture, varTexcoord);

    if (occlusionColor.r > 0.8 && occlusionColor.r <= 1.0) {
        gl_FragColor = vec4(vec3(0.0), 0.0);
    } else {
        gl_FragColor = vec4(vec3(occlusionColor.r), 1.0);
    }
}
@@ -51,6 +51,8 @@

    bool _drawItemStatus = false;

    bool _occlusionStatus = false;

    RenderContext() {}
};
typedef std::shared_ptr<RenderContext> RenderContextPointer;
@@ -1,7 +1,7 @@
# Declare dependencies
macro (setup_testcase_dependencies)
  # link in the shared libraries
  link_hifi_libraries(shared animation gpu fbx model avatars networking audio)
  link_hifi_libraries(shared animation gpu fbx model)

  copy_dlls_beside_windows_executable()
endmacro ()
@@ -40,11 +40,9 @@
*/

#include <iostream>
#include <PathUtils.h>

#include "AvatarData.h"
#include "OBJReader.h"
#include "FBXReader.h"
#include "OBJReader.h"

#include "AvatarRig.h" // We might later test Rig vs AvatarRig separately, but for now, we're concentrating on the main use case.
#include "RigTests.h"
@@ -52,24 +50,7 @@
QTEST_MAIN(RigTests)

void RigTests::initTestCase() {

    // There are two good ways we could organize this:
    // 1. Create a MyAvatar the same way that Interface does, and poke at it.
    //    We can't do that because MyAvatar (and even Avatar) are in interface, not a library, and our build system won't allow that dependency.
    // 2. Create just the minimum skeleton in the most direct way possible, using only very basic library APIs (such as fbx).
    //    I don't think we can do that because not everything we need is exposed directly from, e.g., the fst and fbx readers.
    // So here we do neither. Using as much as we can from AvatarData (which is in the avatar and further requires network and audio), and
    // duplicating whatever other code we need from (My)Avatar. Ugh. We may refactor that later, but right now, cleaning this up is not on our critical path.

    // Joint mapping from fst. FIXME: Do we need this???
    /*auto avatar = std::make_shared<AvatarData>();
    QEventLoop loop; // Create an event loop that will quit when we get the finished signal
    QObject::connect(avatar.get(), &AvatarData::jointMappingLoaded, &loop, &QEventLoop::quit);
    avatar->setSkeletonModelURL(QUrl("https://hifi-public.s3.amazonaws.com/marketplace/contents/4a690585-3fa3-499e-9f8b-fd1226e561b1/e47e6898027aa40f1beb6adecc6a7db5.fst")); // Zach fst
    loop.exec();*/ // Blocking all further tests until signalled.

    // Joint geometry from fbx.
#define FROM_FILE "/Users/howardstearns/howardHiFi/Zack.fbx"
//#define FROM_FILE "/Users/howardstearns/howardHiFi/Zack.fbx"
#ifdef FROM_FILE
    QFile file(FROM_FILE);
    QCOMPARE(file.open(QIODevice::ReadOnly), true);
@@ -81,8 +62,7 @@ void RigTests::initTestCase() {
    QCOMPARE(fbxHttpCode, 200);
    FBXGeometry geometry = readFBX(reply->readAll(), QVariantHash());
#endif
    //QCOMPARE(geometry.joints.count(), avatar->getJointNames().count());

    QVector<JointState> jointStates;
    for (int i = 0; i < geometry.joints.size(); ++i) {
        // Note that if the geometry is stack allocated and goes away, so will the joints. Hence the heap copy here.
@@ -97,24 +77,24 @@
    std::cout << "Rig is ready " << geometry.joints.count() << " joints " << std::endl;
}

void reportJoint(int index, JointState joint) { // Handy for debugging
static void reportJoint(int index, JointState joint) { // Handy for debugging
    std::cout << "\n";
    std::cout << index << " " << joint.getFBXJoint().name.toUtf8().data() << "\n";
    std::cout << " pos:" << joint.getPosition() << "/" << joint.getPositionInParentFrame() << " from " << joint.getParentIndex() << "\n";
    std::cout << " rot:" << safeEulerAngles(joint.getRotation()) << "/" << safeEulerAngles(joint.getRotationInParentFrame()) << "/" << safeEulerAngles(joint.getRotationInBindFrame()) << "\n";
    std::cout << "\n";
}
void reportByName(RigPointer rig, const QString& name) {
static void reportByName(RigPointer rig, const QString& name) {
    int jointIndex = rig->indexOfJoint(name);
    reportJoint(jointIndex, rig->getJointState(jointIndex));
}
void reportAll(RigPointer rig) {
static void reportAll(RigPointer rig) {
    for (int i = 0; i < rig->getJointStateCount(); i++) {
        JointState joint = rig->getJointState(i);
        reportJoint(i, joint);
    }
}
void reportSome(RigPointer rig) {
static void reportSome(RigPointer rig) {
    QString names[] = {"Head", "Neck", "RightShoulder", "RightArm", "RightForeArm", "RightHand", "Spine2", "Spine1", "Spine", "Hips", "RightUpLeg", "RightLeg", "RightFoot", "RightToeBase", "RightToe_End"};
    for (auto name : names) {
        reportByName(rig, name);
@@ -86,7 +86,6 @@
    AddressBar,
    AlignForearmsWithWrists,
    AlternateIK,
    AmbientOcclusion,
    Animations,
    Atmosphere,
    Attachments,
@@ -111,6 +110,7 @@
    ControlWithSpeech,
    CopyAddress,
    CopyPath,
    DebugAmbientOcclusion,
    DecreaseAvatarSize,
    DeleteBookmark,
    DisableActivityLogger,