merge rig into cleanup (removal of legacy shapes)

Conflicts:
	interface/src/avatar/Avatar.h
	tests/physics/src/ShapeColliderTests.cpp
	tests/physics/src/ShapeColliderTests.h
Andrew Meadows 2015-07-31 13:40:51 -07:00
commit 661a916ebd
104 changed files with 7188 additions and 1673 deletions


@ -927,9 +927,9 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
const QString USE_STDEV_FOR_DESIRED_CALC_JSON_KEY = "use_stdev_for_desired_calc";
_streamSettings._useStDevForJitterCalc = audioBufferGroupObject[USE_STDEV_FOR_DESIRED_CALC_JSON_KEY].toBool();
if (_streamSettings._useStDevForJitterCalc) {
qDebug() << "Using Philip's stdev method for jitter calc if dynamic jitter buffers enabled";
qDebug() << "Using stdev method for jitter calc if dynamic jitter buffers enabled";
} else {
qDebug() << "Using Fred's max-gap method for jitter calc if dynamic jitter buffers enabled";
qDebug() << "Using max-gap method for jitter calc if dynamic jitter buffers enabled";
}
const QString WINDOW_STARVE_THRESHOLD_JSON_KEY = "window_starve_threshold";


@ -38,5 +38,19 @@ if (WIN32)
find_package_handle_standard_args(GLEW DEFAULT_MSG GLEW_INCLUDE_DIRS GLEW_LIBRARIES GLEW_DLL_PATH)
add_paths_to_fixup_libs(${GLEW_DLL_PATH})
elseif (APPLE)
else ()
find_path(GLEW_INCLUDE_DIR GL/glew.h)
find_library(GLEW_LIBRARY NAMES GLEW glew32 glew glew32s PATH_SUFFIXES lib64)
set(GLEW_INCLUDE_DIRS ${GLEW_INCLUDE_DIR})
set(GLEW_LIBRARIES ${GLEW_LIBRARY})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(GLEW
REQUIRED_VARS GLEW_INCLUDE_DIR GLEW_LIBRARY)
mark_as_advanced(GLEW_INCLUDE_DIR GLEW_LIBRARY)
endif ()


@ -0,0 +1,38 @@
#
# FindconnexionClient.cmake
#
# Once done this will define
#
# 3DCONNEXIONCLIENT_INCLUDE_DIRS
#
# Created on 10/06/2015 by Marcel Verhagen
# Copyright 2015 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
# setup hints for 3DCONNEXIONCLIENT search
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("connexionclient")
if (APPLE)
find_library(3DconnexionClient 3DconnexionClient)
if(EXISTS ${3DconnexionClient})
set(CONNEXIONCLIENT_FOUND true)
set(CONNEXIONCLIENT_INCLUDE_DIR ${3DconnexionClient})
set(CONNEXIONCLIENT_LIBRARY ${3DconnexionClient})
set_target_properties(${TARGET_NAME} PROPERTIES LINK_FLAGS "-weak_framework 3DconnexionClient")
message(STATUS "Found 3Dconnexion")
mark_as_advanced(CONNEXIONCLIENT_INCLUDE_DIR CONNEXIONCLIENT_LIBRARY)
endif()
endif()
if (WIN32)
find_path(CONNEXIONCLIENT_INCLUDE_DIRS I3dMouseParams.h PATH_SUFFIXES Inc HINTS ${CONNEXIONCLIENT_SEARCH_DIRS})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(connexionClient DEFAULT_MSG CONNEXIONCLIENT_INCLUDE_DIRS)
mark_as_advanced(CONNEXIONCLIENT_INCLUDE_DIRS CONNEXIONCLIENT_SEARCH_DIRS)
endif()


@ -291,22 +291,18 @@ var toolBar = (function () {
var RESIZE_TIMEOUT = 120000; // 2 minutes
var RESIZE_MAX_CHECKS = RESIZE_TIMEOUT / RESIZE_INTERVAL;
function addModel(url) {
var position;
var entityID = createNewEntity({
type: "Model",
dimensions: DEFAULT_DIMENSIONS,
modelURL: url
}, false);
position = Vec3.sum(MyAvatar.position, Vec3.multiply(Quat.getFront(MyAvatar.orientation), SPAWN_DISTANCE));
if (position.x > 0 && position.y > 0 && position.z > 0) {
var entityId = Entities.addEntity({
type: "Model",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: DEFAULT_DIMENSIONS,
modelURL: url
});
if (entityID) {
print("Model added: " + url);
var checkCount = 0;
function resize() {
var entityProperties = Entities.getEntityProperties(entityId);
var entityProperties = Entities.getEntityProperties(entityID);
var naturalDimensions = entityProperties.naturalDimensions;
checkCount++;
@ -318,21 +314,41 @@ var toolBar = (function () {
print("Resize failed: timed out waiting for model (" + url + ") to load");
}
} else {
Entities.editEntity(entityId, { dimensions: naturalDimensions });
Entities.editEntity(entityID, { dimensions: naturalDimensions });
// Reset selection so that the selection overlays will be updated
selectionManager.setSelections([entityId]);
selectionManager.setSelections([entityID]);
}
}
selectionManager.setSelections([entityId]);
selectionManager.setSelections([entityID]);
Script.setTimeout(resize, RESIZE_INTERVAL);
} else {
Window.alert("Can't add model: Model would be out of bounds.");
}
}
function createNewEntity(properties, dragOnCreate) {
// Default to true if not passed in
dragOnCreate = dragOnCreate == undefined ? true : dragOnCreate;
var dimensions = properties.dimensions ? properties.dimensions : DEFAULT_DIMENSIONS;
var position = getPositionToCreateEntity();
var entityID = null;
if (position != null) {
position = grid.snapToSurface(grid.snapToGrid(position, false, dimensions), dimensions),
properties.position = position;
entityID = Entities.addEntity(properties);
if (dragOnCreate) {
placingEntityID = entityID;
}
} else {
Window.alert("Can't create " + properties.type + ": " + properties.type + " would be out of bounds.");
}
return entityID;
}
var newModelButtonDown = false;
var browseMarketplaceButtonDown = false;
that.mousePressEvent = function (event) {
@ -363,127 +379,82 @@ var toolBar = (function () {
}
if (newCubeButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Box",
dimensions: DEFAULT_DIMENSIONS,
color: { red: 255, green: 0, blue: 0 }
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Box",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: DEFAULT_DIMENSIONS,
color: { red: 255, green: 0, blue: 0 }
});
} else {
Window.alert("Can't create box: Box would be out of bounds.");
}
return true;
}
if (newSphereButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Sphere",
dimensions: DEFAULT_DIMENSIONS,
color: { red: 255, green: 0, blue: 0 }
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Sphere",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: DEFAULT_DIMENSIONS,
color: { red: 255, green: 0, blue: 0 }
});
} else {
Window.alert("Can't create sphere: Sphere would be out of bounds.");
}
return true;
}
if (newLightButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Light",
dimensions: DEFAULT_LIGHT_DIMENSIONS,
isSpotlight: false,
color: { red: 150, green: 150, blue: 150 },
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Light",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_LIGHT_DIMENSIONS), DEFAULT_LIGHT_DIMENSIONS),
dimensions: DEFAULT_LIGHT_DIMENSIONS,
isSpotlight: false,
color: { red: 150, green: 150, blue: 150 },
constantAttenuation: 1,
linearAttenuation: 0,
quadraticAttenuation: 0,
exponent: 0,
cutoff: 180, // in degrees
});
constantAttenuation: 1,
linearAttenuation: 0,
quadraticAttenuation: 0,
exponent: 0,
cutoff: 180, // in degrees
});
} else {
Window.alert("Can't create Light: Light would be out of bounds.");
}
return true;
}
if (newTextButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Text",
dimensions: { x: 0.65, y: 0.3, z: 0.01 },
backgroundColor: { red: 64, green: 64, blue: 64 },
textColor: { red: 255, green: 255, blue: 255 },
text: "some text",
lineHeight: 0.06
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Text",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: { x: 0.65, y: 0.3, z: 0.01 },
backgroundColor: { red: 64, green: 64, blue: 64 },
textColor: { red: 255, green: 255, blue: 255 },
text: "some text",
lineHeight: 0.06
});
} else {
Window.alert("Can't create box: Text would be out of bounds.");
}
return true;
}
if (newWebButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Web",
dimensions: { x: 1.6, y: 0.9, z: 0.01 },
sourceUrl: "https://highfidelity.com/",
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Web",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: { x: 1.6, y: 0.9, z: 0.01 },
sourceUrl: "https://highfidelity.com/",
});
} else {
Window.alert("Can't create Web Entity: would be out of bounds.");
}
return true;
}
if (newZoneButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "Zone",
dimensions: { x: 10, y: 10, z: 10 },
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "Zone",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
dimensions: { x: 10, y: 10, z: 10 },
});
} else {
Window.alert("Can't create box: Text would be out of bounds.");
}
return true;
}
if (newPolyVoxButton === toolBar.clicked(clickedOverlay)) {
var position = getPositionToCreateEntity();
createNewEntity({
type: "PolyVox",
dimensions: { x: 10, y: 10, z: 10 },
voxelVolumeSize: {x:16, y:16, z:16},
voxelSurfaceStyle: 1
});
if (position.x > 0 && position.y > 0 && position.z > 0) {
placingEntityID = Entities.addEntity({
type: "PolyVox",
position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS),
DEFAULT_DIMENSIONS),
dimensions: { x: 10, y: 10, z: 10 },
voxelVolumeSize: {x:16, y:16, z:16},
voxelSurfaceStyle: 1
});
} else {
Window.alert("Can't create PolyVox: would be out of bounds.");
}
return true;
}
@ -666,7 +637,7 @@ function handleIdleMouse() {
idleMouseTimerId = null;
if (isActive) {
highlightEntityUnderCursor(lastMousePosition, true);
}
}
}
function highlightEntityUnderCursor(position, accurateRay) {
@ -837,15 +808,15 @@ function setupModelMenus() {
}
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Entity List...", shortcutKey: "CTRL+META+L", afterItem: "Models" });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Large Models", shortcutKey: "CTRL+META+L",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Large Models", shortcutKey: "CTRL+META+L",
afterItem: "Entity List...", isCheckable: true, isChecked: true });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Small Models", shortcutKey: "CTRL+META+S",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Small Models", shortcutKey: "CTRL+META+S",
afterItem: "Allow Selecting of Large Models", isCheckable: true, isChecked: true });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Lights", shortcutKey: "CTRL+SHIFT+META+L",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Allow Selecting of Lights", shortcutKey: "CTRL+SHIFT+META+L",
afterItem: "Allow Selecting of Small Models", isCheckable: true });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Select All Entities In Box", shortcutKey: "CTRL+SHIFT+META+A",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Select All Entities In Box", shortcutKey: "CTRL+SHIFT+META+A",
afterItem: "Allow Selecting of Lights" });
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Select All Entities Touching Box", shortcutKey: "CTRL+SHIFT+META+T",
Menu.addMenuItem({ menuName: "Edit", menuItemName: "Select All Entities Touching Box", shortcutKey: "CTRL+SHIFT+META+T",
afterItem: "Select All Entities In Box" });
Menu.addMenuItem({ menuName: "File", menuItemName: "Models", isSeparator: true, beforeItem: "Settings" });
@ -962,7 +933,7 @@ function selectAllEtitiesInCurrentSelectionBox(keepIfTouching) {
entities.splice(i, 1);
--i;
}
}
}
}
selectionManager.setSelections(entities);
}
@ -1038,17 +1009,29 @@ function handeMenuEvent(menuItem) {
tooltip.show(false);
}
// This function tries to find a reasonable position to place a new entity based on the camera
// position. If a reasonable position within the world bounds can't be found, `null` will
// be returned. The returned position will also take into account grid snapping settings.
function getPositionToCreateEntity() {
var distance = cameraManager.enabled ? cameraManager.zoomDistance : DEFAULT_ENTITY_DRAG_DROP_DISTANCE;
var direction = Quat.getFront(Camera.orientation);
var offset = Vec3.multiply(distance, direction);
var position = Vec3.sum(Camera.position, offset);
var placementPosition = Vec3.sum(Camera.position, offset);
position.x = Math.max(0, position.x);
position.y = Math.max(0, position.y);
position.z = Math.max(0, position.z);
var cameraPosition = Camera.position;
return position;
var cameraOutOfBounds = cameraPosition.x < 0 || cameraPosition.y < 0 || cameraPosition.z < 0;
var placementOutOfBounds = placementPosition.x < 0 || placementPosition.y < 0 || placementPosition.z < 0;
if (cameraOutOfBounds && placementOutOfBounds) {
return null;
}
placementPosition.x = Math.max(0, placementPosition.x);
placementPosition.y = Math.max(0, placementPosition.y);
placementPosition.z = Math.max(0, placementPosition.z);
return placementPosition;
}
function importSVO(importURL) {
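As a usage note on the helpers introduced above: createNewEntity() returns null (after alerting the user) when getPositionToCreateEntity() cannot find an in-bounds spot, so callers check the returned ID. A minimal sketch mirroring the button handlers in this file (the property values are illustrative):

var entityID = createNewEntity({
    type: "Box",
    dimensions: DEFAULT_DIMENSIONS,
    color: { red: 255, green: 0, blue: 0 }
}, false);
if (entityID) {
    // Creation succeeded; the position has already been snapped to the grid.
    selectionManager.setSelections([entityID]);
}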
@ -1064,17 +1047,18 @@ function importSVO(importURL) {
if (success) {
var VERY_LARGE = 10000;
var position = { x: 0, y: 0, z: 0};
var position = { x: 0, y: 0, z: 0 };
if (Clipboard.getClipboardContentsLargestDimension() < VERY_LARGE) {
position = getPositionToCreateEntity();
}
if (position.x > 0 && position.y > 0 && position.z > 0) {
if (position != null) {
var pastedEntityIDs = Clipboard.pasteEntities(position);
if (isActive) {
selectionManager.setSelections(pastedEntityIDs);
}
Window.raiseMainWindow();
Window.raiseMainWindow();
} else {
Window.alert("Can't import objects: objects would be out of bounds.");
}
@ -1264,7 +1248,7 @@ PropertiesTool = function(opts) {
if (data.properties.keyLightDirection !== undefined) {
data.properties.keyLightDirection = Vec3.fromPolar(
data.properties.keyLightDirection.x * DEGREES_TO_RADIANS, data.properties.keyLightDirection.y * DEGREES_TO_RADIANS);
}
}
Entities.editEntity(selectionManager.selections[0], data.properties);
if (data.properties.name != undefined) {
entityListTool.sendUpdate();
@ -1360,8 +1344,8 @@ PropertiesTool = function(opts) {
var properties = selectionManager.savedProperties[selectionManager.selections[i]];
if (properties.type == "Zone") {
var centerOfZone = properties.boundingBox.center;
var atmosphereCenter = { x: centerOfZone.x,
y: centerOfZone.y - properties.atmosphere.innerRadius,
var atmosphereCenter = { x: centerOfZone.x,
y: centerOfZone.y - properties.atmosphere.innerRadius,
z: centerOfZone.z };
Entities.editEntity(selectionManager.selections[i], {


@ -0,0 +1,28 @@
//
// hitEffect.js
// examples
//
// Created by Eric Levin on July 20, 2015
// Copyright 2015 High Fidelity, Inc.
//
// An example of how to toggle a screen-space hit effect using the Scene global object.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
var hitEffectEnabled = false;
toggleHitEffect();
function toggleHitEffect() {
Script.setTimeout(function() {
hitEffectEnabled = !hitEffectEnabled;
Scene.setEngineDisplayHitEffect(hitEffectEnabled);
toggleHitEffect();
}, 1000);
}


@ -0,0 +1,288 @@
//
// satellite.js
// games
//
// Created by Bridget Went on 7/1/2015.
// Copyright 2015 High Fidelity, Inc.
//
// A game to bring a satellite model into orbit around an animated earth model.
// - Double click to create a new satellite
// - Click on the satellite, drag a vector arrow to specify initial velocity
// - Release mouse to launch the active satellite
// - Orbital movement is calculated using equations of gravitational physics
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
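A reader's note on the physics used below: launch() derives an effective gravitational constant from Kepler's third law for the configured PERIOD, sets a circular-orbit launch speed, and update() then integrates the inverse-square acceleration each frame. With r the satellite's distance from the planet center, M = LARGE_BODY_MASS, and T = PERIOD:

    G = \frac{4\pi^2 r^3}{M T^2}, \qquad v_0 = \sqrt{\frac{G M}{r}}, \qquad a(r) = -\frac{G M}{r^2}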
Script.include('../utilities/tools/vector.js');
var URL = "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Scripts/planets/";
SatelliteGame = function() {
var MAX_RANGE = 50.0;
var Y_AXIS = {
x: 0,
y: 1,
z: 0
}
var LIFETIME = 6000;
var ERROR_THRESH = 20.0;
// Create the spinning earth model
var EARTH_SIZE = 20.0;
var CLOUDS_OFFSET = 0.5;
var SPIN = 0.1;
var ZONE_DIM = 100.0;
var LIGHT_INTENSITY = 1.5;
Earth = function(position, size) {
this.earth = Entities.addEntity({
type: "Model",
shapeType: 'sphere',
modelURL: URL + "earth.fbx",
position: position,
dimensions: {
x: size,
y: size,
z: size
},
rotation: Quat.angleAxis(180, {
x: 1,
y: 0,
z: 0
}),
angularVelocity: {
x: 0.00,
y: 0.5 * SPIN,
z: 0.00
},
angularDamping: 0.0,
damping: 0.0,
ignoreCollisions: false,
lifetime: 6000,
collisionsWillMove: false,
visible: true
});
this.clouds = Entities.addEntity({
type: "Model",
shapeType: 'sphere',
modelURL: URL + "clouds.fbx?i=2",
position: position,
dimensions: {
x: size + CLOUDS_OFFSET,
y: size + CLOUDS_OFFSET,
z: size + CLOUDS_OFFSET
},
angularVelocity: {
x: 0.00,
y: SPIN,
z: 0.00
},
angularDamping: 0.0,
damping: 0.0,
ignoreCollisions: false,
lifetime: LIFETIME,
collisionsWillMove: false,
visible: true
});
this.zone = Entities.addEntity({
type: "Zone",
position: position,
dimensions: {
x: ZONE_DIM,
y: ZONE_DIM,
z: ZONE_DIM
},
keyLightDirection: Vec3.normalize(Vec3.subtract(position, Camera.getPosition())),
keyLightIntensity: LIGHT_INTENSITY
});
this.cleanup = function() {
Entities.deleteEntity(this.clouds);
Entities.deleteEntity(this.earth);
Entities.deleteEntity(this.zone);
}
}
// Create earth model
var center = Vec3.sum(Camera.getPosition(), Vec3.multiply(MAX_RANGE, Quat.getFront(Camera.getOrientation())));
var distance = Vec3.length(Vec3.subtract(center, Camera.getPosition()));
var earth = new Earth(center, EARTH_SIZE);
var satellites = [];
var SATELLITE_SIZE = 2.0;
var launched = false;
var activeSatellite;
var PERIOD = 4.0;
var LARGE_BODY_MASS = 16000.0;
var SMALL_BODY_MASS = LARGE_BODY_MASS * 0.000000333;
Satellite = function(position, planetCenter) {
// The Satellite class
this.launched = false;
this.startPosition = position;
this.readyToLaunch = false;
this.radius = Vec3.length(Vec3.subtract(position, planetCenter));
this.satellite = Entities.addEntity({
type: "Model",
modelURL: "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Scripts/planets/satellite/satellite.fbx",
position: this.startPosition,
dimensions: {
x: SATELLITE_SIZE,
y: SATELLITE_SIZE,
z: SATELLITE_SIZE
},
angularDamping: 0.0,
damping: 0.0,
ignoreCollisions: false,
lifetime: LIFETIME,
collisionsWillMove: false,
});
this.getProperties = function() {
return Entities.getEntityProperties(this.satellite);
}
this.launch = function() {
var prop = Entities.getEntityProperties(this.satellite);
var between = Vec3.subtract(planetCenter, prop.position);
var radius = Vec3.length(between);
this.gravity = (4.0 * Math.PI * Math.PI * Math.pow(radius, 3.0)) / (LARGE_BODY_MASS * PERIOD * PERIOD);
var initialVelocity = Vec3.normalize(Vec3.cross(between, Y_AXIS));
initialVelocity = Vec3.multiply(Math.sqrt((this.gravity * LARGE_BODY_MASS) / radius), initialVelocity);
initialVelocity = Vec3.multiply(this.arrow.magnitude, initialVelocity);
initialVelocity = Vec3.multiply(Vec3.length(initialVelocity), this.arrow.direction);
Entities.editEntity(this.satellite, {
velocity: initialVelocity
});
this.launched = true;
};
this.update = function(deltaTime) {
var prop = Entities.getEntityProperties(this.satellite);
var between = Vec3.subtract(prop.position, planetCenter);
var radius = Vec3.length(between);
var acceleration = -(this.gravity * LARGE_BODY_MASS) * Math.pow(radius, (-2.0));
var speed = acceleration * deltaTime;
var vel = Vec3.multiply(speed, Vec3.normalize(between));
var newVelocity = Vec3.sum(prop.velocity, vel);
var newPos = Vec3.sum(prop.position, Vec3.multiply(newVelocity, deltaTime));
Entities.editEntity(this.satellite, {
velocity: newVelocity,
position: newPos
});
};
}
function mouseDoublePressEvent(event) {
var pickRay = Camera.computePickRay(event.x, event.y);
var addVector = Vec3.multiply(pickRay.direction, distance);
var point = Vec3.sum(Camera.getPosition(), addVector);
// Create a new satellite
activeSatellite = new Satellite(point, center);
satellites.push(activeSatellite);
}
function mousePressEvent(event) {
if (!activeSatellite) {
return;
}
// Reset label
if (activeSatellite.arrow) {
activeSatellite.arrow.deleteLabel();
}
var statsPosition = Vec3.sum(Camera.getPosition(), Vec3.multiply(MAX_RANGE * 0.4, Quat.getFront(Camera.getOrientation())));
var pickRay = Camera.computePickRay(event.x, event.y)
var rayPickResult = Entities.findRayIntersection(pickRay, true);
if (rayPickResult.entityID === activeSatellite.satellite) {
// Create a draggable vector arrow at satellite position
activeSatellite.arrow = new VectorArrow(distance, true, "INITIAL VELOCITY", statsPosition);
activeSatellite.arrow.onMousePressEvent(event);
activeSatellite.arrow.isDragging = true;
}
}
function mouseMoveEvent(event) {
if (!activeSatellite || !activeSatellite.arrow || !activeSatellite.arrow.isDragging) {
return;
}
activeSatellite.arrow.onMouseMoveEvent(event);
}
function mouseReleaseEvent(event) {
if (!activeSatellite || !activeSatellite.arrow || !activeSatellite.arrow.isDragging) {
return;
}
activeSatellite.arrow.onMouseReleaseEvent(event);
activeSatellite.launch();
activeSatellite.arrow.cleanup();
}
var counter = 0.0;
var CHECK_ENERGY_PERIOD = 500;
function update(deltaTime) {
if (!activeSatellite) {
return;
}
// Update all satellites
for (var i = 0; i < satellites.length; i++) {
if (!satellites[i].launched) {
continue;
}
satellites[i].update(deltaTime);
}
counter++;
if (counter % CHECK_ENERGY_PERIOD == 0) {
var prop = activeSatellite.getProperties();
var error = calcEnergyError(prop.position, Vec3.length(prop.velocity));
if (Math.abs(error) <= ERROR_THRESH) {
activeSatellite.arrow.editLabel("Nice job! The satellite has reached a stable orbit.");
} else {
activeSatellite.arrow.editLabel("Try again! The satellite is in an unstable orbit.");
}
}
}
this.endGame = function() {
for (var i = 0; i < satellites.length; i++) {
Entities.deleteEntity(satellites[i].satellite);
satellites[i].arrow.cleanup();
}
earth.cleanup();
}
function calcEnergyError(pos, vel) {
//Calculate total energy error for active satellite's orbital motion
var radius = activeSatellite.radius;
var gravity = (4.0 * Math.PI * Math.PI * Math.pow(radius, 3.0)) / (LARGE_BODY_MASS * PERIOD * PERIOD);
var initialVelocityCalculated = Math.sqrt((gravity * LARGE_BODY_MASS) / radius);
var totalEnergy = 0.5 * LARGE_BODY_MASS * Math.pow(initialVelocityCalculated, 2.0) - ((gravity * LARGE_BODY_MASS * SMALL_BODY_MASS) / radius);
var measuredEnergy = 0.5 * LARGE_BODY_MASS * Math.pow(vel, 2.0) - ((gravity * LARGE_BODY_MASS * SMALL_BODY_MASS) / Vec3.length(Vec3.subtract(pos, center)));
var error = ((measuredEnergy - totalEnergy) / totalEnergy) * 100;
return error;
}
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseDoublePressEvent.connect(mouseDoublePressEvent);
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Script.update.connect(update);
Script.scriptEnding.connect(this.endGame);
}
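For reference, solarsystem.js (further down in this commit) is the caller of this class; a minimal usage sketch looks like:

Script.include('games/satellite.js');
var game = new SatelliteGame(); // builds the earth, clouds, and zone, and wires the mouse and update handlers
// ... later, when leaving the earth view:
game.endGame();                 // deletes the satellites and the earth models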


@ -0,0 +1,669 @@
//
// solarsystem.js
// games
//
// Created by Bridget Went, 5/28/15.
// Copyright 2015 High Fidelity, Inc.
//
// The start of a project to build a virtual physics classroom to simulate the solar system, gravity, and orbital physics.
// - A sun with orbiting planets is created in front of the user
// - UI elements allow for adjusting the period, gravity, trails, and energy recalculations
// - Click "PAUSE" to pause the animation and show planet labels
// - In this mode, double-click a planet label to zoom in on that planet
// - Double-clicking on the earth label initiates the satellite orbiter game
// - Press "TAB" to toggle back to solar system view
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
Script.include('../utilities/tools/cookies.js');
Script.include('games/satellite.js');
var BASE_URL = "https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Scripts/planets/planets/";
var NUM_PLANETS = 8;
var trailsEnabled = true;
var energyConserved = true;
var planetView = false;
var earthView = false;
var satelliteGame;
var PANEL_X = 850;
var PANEL_Y = 600;
var BUTTON_SIZE = 20;
var PADDING = 20;
var DAMPING = 0.0;
var LIFETIME = 6000;
var ERROR_THRESH = 2.0;
var TIME_STEP = 70.0;
var MAX_POINTS_PER_LINE = 5;
var LINE_DIM = 10;
var LINE_WIDTH = 3.0;
var line;
var planetLines = [];
var trails = [];
var BOUNDS = 200;
// Alert user to move if they are too close to domain bounds
if (MyAvatar.position.x < BOUNDS || MyAvatar.position.x > TREE_SCALE - BOUNDS || MyAvatar.position.y < BOUNDS || MyAvatar.position.y > TREE_SCALE - BOUNDS || MyAvatar.position.z < BOUNDS || MyAvatar.position.z > TREE_SCALE - BOUNDS) {
Window.alert("Please move at least 200m away from domain bounds.");
return;
}
// Save initial avatar and camera position
var startingPosition = MyAvatar.position;
var startFrame = Window.location.href;
// Place the sun
var MAX_RANGE = 80.0;
var SUN_SIZE = 8.0;
var center = Vec3.sum(startingPosition, Vec3.multiply(MAX_RANGE, Quat.getFront(Camera.getOrientation())));
var theSun = Entities.addEntity({
type: "Model",
modelURL: BASE_URL + "sun.fbx",
position: center,
dimensions: {
x: SUN_SIZE,
y: SUN_SIZE,
z: SUN_SIZE
},
angularDamping: DAMPING,
damping: DAMPING,
ignoreCollisions: false,
lifetime: LIFETIME,
collisionsWillMove: false
});
var planets = [];
var planet_properties = [];
// Reference values
var radius = 7.0;
var T_ref = 1.0;
var size = 1.0;
var M = 250.0;
var m = M * 0.000000333;
var G = (Math.pow(radius, 3.0) / Math.pow((T_ref / (2.0 * Math.PI)), 2.0)) / M;
var G_ref = G;
// Adjust size and distance as number of planets increases
var DELTA_RADIUS = 1.8;
var DELTA_SIZE = 0.2;
function initPlanets() {
for (var i = 0; i < NUM_PLANETS; ++i) {
var v0 = Math.sqrt((G * M) / radius);
var T = (2.0 * Math.PI) * Math.sqrt(Math.pow(radius, 3.0) / (G * M));
if (i == 0) {
var color = {
red: 255,
green: 255,
blue: 255
};
} else if (i == 1) {
var color = {
red: 255,
green: 160,
blue: 110
};
} else if (i == 2) {
var color = {
red: 10,
green: 150,
blue: 160
};
} else if (i == 3) {
var color = {
red: 180,
green: 70,
blue: 10
};
} else if (i == 4) {
var color = {
red: 250,
green: 140,
blue: 0
};
} else if (i == 5) {
var color = {
red: 235,
green: 215,
blue: 0
};
} else if (i == 6) {
var color = {
red: 135,
green: 205,
blue: 240
};
} else if (i == 7) {
var color = {
red: 30,
green: 140,
blue: 255
};
}
var prop = {
radius: radius,
position: Vec3.sum(center, {
x: radius,
y: 0.0,
z: 0.0
}),
lineColor: color,
period: T,
dimensions: size,
velocity: Vec3.multiply(v0, Vec3.normalize({
x: 0,
y: -0.2,
z: 0.9
}))
};
planet_properties.push(prop);
planets.push(Entities.addEntity({
type: "Model",
modelURL: BASE_URL + (i + 1) + ".fbx",
position: prop.position,
dimensions: {
x: prop.dimensions,
y: prop.dimensions,
z: prop.dimensions
},
velocity: prop.velocity,
angularDamping: DAMPING,
damping: DAMPING,
ignoreCollisions: false,
lifetime: LIFETIME,
collisionsWillMove: true,
}));
radius *= DELTA_RADIUS;
size += DELTA_SIZE;
}
}
// Initialize planets
initPlanets();
var labels = [];
var labelLines = [];
var labelsShowing = false;
var LABEL_X = 8.0;
var LABEL_Y = 3.0;
var LABEL_Z = 1.0;
var LABEL_DIST = 8.0;
var TEXT_HEIGHT = 1.0;
var sunLabel;
function showLabels() {
labelsShowing = true;
for (var i = 0; i < NUM_PLANETS; i++) {
var properties = planet_properties[i];
var text;
if (i == 0) {
text = "Mercury";
} else if (i == 1) {
text = "Venus";
} else if (i == 2) {
text = "Earth";
} else if (i == 3) {
text = "Mars";
} else if (i == 4) {
text = "Jupiter";
} else if (i == 5) {
text = "Saturn";
} else if (i == 6) {
text = "Uranus";
} else if (i == 7) {
text = "Neptune";
}
text = text + " Speed: " + Vec3.length(properties.velocity).toFixed(2);
var labelPos = Vec3.sum(planet_properties[i].position, {
x: 0.0,
y: LABEL_DIST,
z: LABEL_DIST
});
var linePos = planet_properties[i].position;
labelLines.push(Entities.addEntity({
type: "Line",
position: linePos,
dimensions: {
x: 20,
y: 20,
z: 20
},
lineWidth: 3.0,
color: {
red: 255,
green: 255,
blue: 255
},
linePoints: [{
x: 0,
y: 0,
z: 0
}, computeLocalPoint(linePos, labelPos)]
}));
labels.push(Entities.addEntity({
type: "Text",
text: text,
lineHeight: TEXT_HEIGHT,
dimensions: {
x: LABEL_X,
y: LABEL_Y,
z: LABEL_Z
},
position: labelPos,
backgroundColor: {
red: 10,
green: 10,
blue: 10
},
textColor: {
red: 255,
green: 255,
blue: 255
},
faceCamera: true
}));
}
}
function hideLabels() {
labelsShowing = false;
Entities.deleteEntity(sunLabel);
for (var i = 0; i < NUM_PLANETS; ++i) {
Entities.deleteEntity(labelLines[i]);
Entities.deleteEntity(labels[i]);
}
labels = [];
labelLines = [];
}
var time = 0.0;
var elapsed;
var counter = 0;
var dt = 1.0 / TIME_STEP;
function update(deltaTime) {
if (paused) {
return;
}
deltaTime = dt;
time++;
for (var i = 0; i < NUM_PLANETS; ++i) {
var properties = planet_properties[i];
var between = Vec3.subtract(properties.position, center);
var speed = getAcceleration(properties.radius) * deltaTime;
var vel = Vec3.multiply(speed, Vec3.normalize(between));
// Update velocity and position
properties.velocity = Vec3.sum(properties.velocity, vel);
properties.position = Vec3.sum(properties.position, Vec3.multiply(properties.velocity, deltaTime));
Entities.editEntity(planets[i], {
velocity: properties.velocity,
position: properties.position
});
// Create new or update current trail
if (trailsEnabled) {
var lineStack = planetLines[i];
var point = properties.position;
var prop = Entities.getEntityProperties(lineStack[lineStack.length - 1]);
var linePos = prop.position;
trails[i].push(computeLocalPoint(linePos, point));
Entities.editEntity(lineStack[lineStack.length - 1], {
linePoints: trails[i]
});
if (trails[i].length === MAX_POINTS_PER_LINE) {
trails[i] = newLine(lineStack, point, properties.period, properties.lineColor);
}
}
// Measure total energy every 10 updates, recalibrate velocity if necessary
if (energyConserved) {
if (counter % 10 === 0) {
var error = calcEnergyError(planets[i], properties.radius, properties.v0, properties.velocity, properties.position);
if (Math.abs(error) >= ERROR_THRESH) {
var speed = adjustVelocity(planets[i], properties.position);
properties.velocity = Vec3.multiply(speed, Vec3.normalize(properties.velocity));
}
}
}
}
counter++;
if (time % TIME_STEP == 0) {
elapsed++;
}
}
function computeLocalPoint(linePos, worldPoint) {
var localPoint = Vec3.subtract(worldPoint, linePos);
return localPoint;
}
function getAcceleration(radius) {
var acc = -(G * M) * Math.pow(radius, (-2.0));
return acc;
}
// Create a new trail
function resetTrails(planetIndex) {
elapsed = 0.0;
var properties = planet_properties[planetIndex];
var trail = [];
var lineStack = [];
//add the first line to both the line entity stack and the trail
trails.push(newLine(lineStack, properties.position, properties.period, properties.lineColor));
planetLines.push(lineStack);
}
// Create a new line
function newLine(lineStack, point, period, color) {
if (elapsed < period) {
var line = Entities.addEntity({
position: point,
type: "Line",
color: color,
dimensions: {
x: LINE_DIM,
y: LINE_DIM,
z: LINE_DIM
},
lifetime: LIFETIME,
lineWidth: LINE_WIDTH
});
lineStack.push(line);
} else {
// Begin overwriting first lines after one full revolution (one period)
var firstLine = lineStack.shift();
Entities.editEntity(firstLine, {
position: point,
linePoints: [{
x: 0.0,
y: 0.0,
z: 0.0
}]
});
lineStack.push(firstLine);
}
var points = [];
points.push(computeLocalPoint(point, point));
return points;
}
// Measure energy error, recalculate velocity to return to initial net energy
var totalEnergy;
var measuredEnergy;
var measuredPE;
function calcEnergyError(planet, radius, v0, v, pos) {
totalEnergy = 0.5 * M * Math.pow(v0, 2.0) - ((G * M * m) / radius);
measuredEnergy = 0.5 * M * Math.pow(v, 2.0) - ((G * M * m) / Vec3.length(Vec3.subtract(center, pos)));
var error = ((measuredEnergy - totalEnergy) / totalEnergy) * 100;
return error;
}
function adjustVelocity(planet, pos) {
var measuredPE = -(G * M * m) / Vec3.length(Vec3.subtract(center, pos));
return Math.sqrt(2 * (totalEnergy - measuredPE) / M);
}
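In the notation of the two helpers above, the script keeps a reference energy per planet and rescales the speed whenever the relative error exceeds ERROR_THRESH; the relations implemented by calcEnergyError() and adjustVelocity() are:

    E_0 = \tfrac{1}{2} M v_0^2 - \frac{G M m}{r}, \qquad \mathrm{error} = 100 \, \frac{E - E_0}{E_0}, \qquad v_{\mathrm{new}} = \sqrt{\frac{2 (E_0 - U)}{M}}, \quad U = -\frac{G M m}{d}

where E is the same expression evaluated at the planet's current speed and distance, and d is its current distance from the sun (note the code consistently uses M, not m, in the kinetic term).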
// Allow user to toggle pausing the model, switch to planet view
var pauseButton = Overlays.addOverlay("text", {
backgroundColor: {
red: 200,
green: 200,
blue: 255
},
text: "Pause",
x: PANEL_X,
y: PANEL_Y - 30,
width: 70,
height: 20,
alpha: 1.0,
backgroundAlpha: 0.5,
visible: true
});
var paused = false;
function mousePressEvent(event) {
var clickedOverlay = Overlays.getOverlayAtPoint({
x: event.x,
y: event.y
});
if (clickedOverlay == pauseButton) {
paused = !paused;
for (var i = 0; i < NUM_PLANETS; ++i) {
Entities.editEntity(planets[i], {
velocity: {
x: 0.0,
y: 0.0,
z: 0.0
}
});
}
if (paused && !labelsShowing) {
Overlays.editOverlay(pauseButton, {
text: "Paused",
backgroundColor: {
red: 255,
green: 50,
blue: 50
}
});
showLabels();
}
if (paused == false && labelsShowing) {
Overlays.editOverlay(pauseButton, {
text: "Pause",
backgroundColor: {
red: 200,
green: 200,
blue: 255
}
});
hideLabels();
}
planetView = false;
}
}
function keyPressEvent(event) {
// Jump back to solar system view
if (event.text == "TAB" && planetView) {
if (earthView) {
satelliteGame.endGame();
earthView = false;
}
MyAvatar.position = startingPosition;
}
}
function mouseDoublePressEvent(event) {
if (earthView) {
return;
}
var pickRay = Camera.computePickRay(event.x, event.y)
var rayPickResult = Entities.findRayIntersection(pickRay, true);
for (var i = 0; i < NUM_PLANETS; ++i) {
if (rayPickResult.entityID === labels[i]) {
planetView = true;
if (i == 2) {
MyAvatar.position = Vec3.sum(center, {
x: 200,
y: 200,
z: 200
});
Camera.setPosition(Vec3.sum(center, {
x: 200,
y: 200,
z: 200
}));
earthView = true;
satelliteGame = new SatelliteGame();
} else {
MyAvatar.position = Vec3.sum({
x: 0.0,
y: 0.0,
z: 3.0
}, planet_properties[i].position);
Camera.lookAt(planet_properties[i].position);
}
break;
}
}
}
// Create UI panel
var panel = new Panel(PANEL_X, PANEL_Y);
var panelItems = [];
var g_multiplier = 1.0;
panelItems.push(panel.newSlider("Adjust Gravitational Force: ", 0.1, 5.0,
function(value) {
g_multiplier = value;
G = G_ref * g_multiplier;
},
function() {
return g_multiplier;
},
function(value) {
return value.toFixed(1) + "x";
}));
var period_multiplier = 1.0;
var last_alpha = period_multiplier;
panelItems.push(panel.newSlider("Adjust Orbital Period: ", 0.1, 3.0,
function(value) {
period_multiplier = value;
changePeriod(period_multiplier);
},
function() {
return period_multiplier;
},
function(value) {
return (value).toFixed(2) + "x";
}));
panelItems.push(panel.newCheckbox("Leave Trails: ",
function(value) {
trailsEnabled = value;
if (trailsEnabled) {
for (var i = 0; i < NUM_PLANETS; ++i) {
resetTrails(i);
}
//if trails are off and we've already created trails, remove existing trails
} else if (planetLines.length != 0) {
for (var i = 0; i < NUM_PLANETS; ++i) {
for (var j = 0; j < planetLines[i].length; ++j) {
Entities.deleteEntity(planetLines[i][j]);
}
planetLines[i] = [];
}
}
},
function() {
return trailsEnabled;
},
function(value) {
return value;
}));
panelItems.push(panel.newCheckbox("Energy Error Calculations: ",
function(value) {
energyConserved = value;
},
function() {
return energyConserved;
},
function(value) {
return value;
}));
// Update global G constant, period, poke velocity to new value
function changePeriod(alpha) {
var ratio = last_alpha / alpha;
G = Math.pow(ratio, 2.0) * G;
for (var i = 0; i < NUM_PLANETS; ++i) {
var properties = planet_properties[i];
properties.period = ratio * properties.period;
properties.velocity = Vec3.multiply(ratio, properties.velocity);
}
last_alpha = alpha;
}
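The scaling above follows from holding each orbital radius r fixed; from the relations used in initPlanets(),

    v = \sqrt{\frac{G M}{r}}, \qquad T = 2\pi \sqrt{\frac{r^3}{G M}},

multiplying G by ratio squared and each velocity by ratio rescales the physical period by 1/ratio = alpha / last_alpha, which is what the period slider requests.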
// Clean up models, UI panels, lines, and button overlays
function scriptEnding() {
satelliteGame.endGame();
Entities.deleteEntity(theSun);
for (var i = 0; i < NUM_PLANETS; ++i) {
Entities.deleteEntity(planets[i]);
}
Menu.removeMenu("Developer > Scene");
panel.destroy();
Overlays.deleteOverlay(pauseButton);
var e = Entities.findEntities(MyAvatar.position, 16000);
for (i = 0; i < e.length; i++) {
var props = Entities.getEntityProperties(e[i]);
if (props.type === "Line" || props.type === "Text") {
Entities.deleteEntity(e[i]);
}
}
};
Controller.mouseMoveEvent.connect(function panelMouseMoveEvent(event) {
return panel.mouseMoveEvent(event);
});
Controller.mousePressEvent.connect(function panelMousePressEvent(event) {
return panel.mousePressEvent(event);
});
Controller.mouseDoublePressEvent.connect(function panelMouseDoublePressEvent(event) {
return panel.mouseDoublePressEvent(event);
});
Controller.mouseReleaseEvent.connect(function(event) {
return panel.mouseReleaseEvent(event);
});
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseDoublePressEvent.connect(mouseDoublePressEvent);
Controller.keyPressEvent.connect(keyPressEvent);
Script.scriptEnding.connect(scriptEnding);
Script.update.connect(update);


@ -141,7 +141,7 @@ CameraManager = function() {
// Pick a point INITIAL_ZOOM_DISTANCE in front of the camera to use as a focal point
that.zoomDistance = INITIAL_ZOOM_DISTANCE;
that.targetZoomDistance = that.zoomDistance;
that.targetZoomDistance = that.zoomDistance + 3.0;
var focalPoint = Vec3.sum(Camera.getPosition(),
Vec3.multiply(that.zoomDistance, Quat.getFront(Camera.getOrientation())));
@ -150,6 +150,7 @@ CameraManager = function() {
var xzDist = Math.sqrt(dPos.x * dPos.x + dPos.z * dPos.z);
that.targetPitch = -Math.atan2(dPos.y, xzDist) * 180 / Math.PI;
that.targetPitch += (90 - that.targetPitch) / 3.0; // Swing camera "up" to look down at the focal point
that.targetYaw = Math.atan2(dPos.x, dPos.z) * 180 / Math.PI;
that.pitch = that.targetPitch;
that.yaw = that.targetYaw;

File diff suppressed because it is too large


@ -0,0 +1,186 @@
//
// vector.js
// examples
//
// Created by Bridget Went on 7/1/15.
// Copyright 2015 High Fidelity, Inc.
//
// A template for creating vector arrows using line entities. A VectorArrow object creates a
// draggable vector arrow where the user clicked at a specified distance from the viewer.
// The relative magnitude and direction of the vector may be displayed.
//
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
//
var LINE_DIMENSIONS = 100;
var LIFETIME = 6000;
var RAD_TO_DEG = 180.0 / Math.PI;
var LINE_WIDTH = 4;
var ARROW_WIDTH = 6;
var line, linePosition;
var arrow1, arrow2;
var SCALE = 0.15;
var ANGLE = 150.0;
VectorArrow = function(distance, showStats, statsTitle, statsPosition) {
this.magnitude = 0;
this.direction = {x: 0, y: 0, z: 0};
this.showStats = showStats;
this.isDragging = false;
this.newLine = function(position) {
linePosition = position;
var points = [];
line = Entities.addEntity({
position: linePosition,
type: "Line",
color: {red: 255, green: 255, blue: 255},
dimensions: {
x: LINE_DIMENSIONS,
y: LINE_DIMENSIONS,
z: LINE_DIMENSIONS
},
lineWidth: LINE_WIDTH,
lifetime: LIFETIME,
linePoints: []
});
arrow1 = Entities.addEntity({
position: {x: 0, y: 0, z: 0},
type: "Line",
dimensions: {
x: LINE_DIMENSIONS,
y: LINE_DIMENSIONS,
z: LINE_DIMENSIONS
},
color: {red: 255, green: 255, blue: 255},
lineWidth: ARROW_WIDTH,
linePoints: [],
});
arrow2 = Entities.addEntity({
position: {x: 0, y: 0, z: 0},
type: "Line",
dimensions: {
x: LINE_DIMENSIONS,
y: LINE_DIMENSIONS,
z: LINE_DIMENSIONS
},
color: {red: 255, green: 255, blue: 255},
lineWidth: ARROW_WIDTH,
linePoints: [],
});
}
var STATS_DIMENSIONS = {
x: 4.0,
y: 1.5,
z: 0.1
};
var TEXT_HEIGHT = 0.3;
this.onMousePressEvent = function(event) {
this.newLine(computeWorldPoint(event));
if (this.showStats) {
this.label = Entities.addEntity({
type: "Text",
position: statsPosition,
dimensions: STATS_DIMENSIONS,
lineHeight: TEXT_HEIGHT,
faceCamera: true
});
}
this.isDragging = true;
}
this.onMouseMoveEvent = function(event) {
if (!this.isDragging) {
return;
}
var worldPoint = computeWorldPoint(event);
var localPoint = computeLocalPoint(event, linePosition);
points = [{x: 0, y: 0, z: 0}, localPoint];
Entities.editEntity(line, { linePoints: points });
var nextOffset = Vec3.multiply(SCALE, localPoint);
var normOffset = Vec3.normalize(localPoint);
var axis = Vec3.cross(normOffset, Quat.getFront(Camera.getOrientation()) );
axis = Vec3.cross(axis, normOffset);
var rotate1 = Quat.angleAxis(ANGLE, axis);
var rotate2 = Quat.angleAxis(-ANGLE, axis);
// Rotate arrow head to follow direction of the line
Entities.editEntity(arrow1, {
visible: true,
position: worldPoint,
linePoints: [{x: 0, y: 0, z: 0}, nextOffset],
rotation: rotate1
});
Entities.editEntity(arrow2, {
visible: true,
position: worldPoint,
linePoints: [{x: 0, y: 0, z: 0}, nextOffset],
rotation: rotate2
});
this.magnitude = Vec3.length(localPoint) * 0.1;
this.direction = Vec3.normalize(Vec3.subtract(worldPoint, linePosition));
if (this.showStats) {
this.editLabel(statsTitle + " Magnitude " + this.magnitude.toFixed(2) + ", Direction: " +
this.direction.x.toFixed(2) + ", " + this.direction.y.toFixed(2) + ", " + this.direction.z.toFixed(2));
}
}
this.onMouseReleaseEvent = function() {
this.isDragging = false;
}
this.cleanup = function() {
Entities.deleteEntity(line);
Entities.deleteEntity(arrow1);
Entities.deleteEntity(arrow2);
}
this.deleteLabel = function() {
Entities.deleteEntity(this.label);
}
this.editLabel = function(str) {
if(!this.showStats) {
return;
}
Entities.editEntity(this.label, {
text: str
});
}
function computeWorldPoint(event) {
var pickRay = Camera.computePickRay(event.x, event.y);
var addVector = Vec3.multiply(pickRay.direction, distance);
return Vec3.sum(Camera.getPosition(), addVector);
}
function computeLocalPoint(event, linePosition) {
var localPoint = Vec3.subtract(computeWorldPoint(event), linePosition);
return localPoint;
}
}
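A minimal usage sketch, mirroring how satellite.js drives this class (the distance and stats position below are illustrative):

var statsPosition = Vec3.sum(Camera.getPosition(), Vec3.multiply(5, Quat.getFront(Camera.getOrientation())));
var arrow = new VectorArrow(10.0, true, "INITIAL VELOCITY", statsPosition);

function mousePressEvent(event) {
    arrow.onMousePressEvent(event);   // creates the line entities and the stats label, starts dragging
}
function mouseMoveEvent(event) {
    arrow.onMouseMoveEvent(event);    // updates magnitude, direction, and the label text while dragging
}
function mouseReleaseEvent(event) {
    arrow.onMouseReleaseEvent(event);
    print("magnitude: " + arrow.magnitude + " direction: " + JSON.stringify(arrow.direction));
    arrow.cleanup();                  // deletes the line and arrow-head entities
}
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseMoveEvent.connect(mouseMoveEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);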


@ -2,7 +2,7 @@ set(TARGET_NAME interface)
project(${TARGET_NAME})
# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "Faceshift" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK")
set(OPTIONAL_EXTERNALS "Faceshift" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK" "connexionClient")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)
@ -218,33 +218,13 @@ else (APPLE)
"${PROJECT_SOURCE_DIR}/resources"
$<TARGET_FILE_DIR:${TARGET_NAME}>/resources
)
find_package(OpenGL REQUIRED)
if (${OPENGL_INCLUDE_DIR})
include_directories(SYSTEM "${OPENGL_INCLUDE_DIR}")
endif ()
target_link_libraries(${TARGET_NAME} "${OPENGL_LIBRARY}")
# link target to external libraries
if (WIN32)
add_dependency_external_projects(glew)
find_package(GLEW REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${GLEW_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${GLEW_LIBRARIES} wsock32.lib opengl32.lib Winmm.lib)
if (USE_NSIGHT)
# try to find the Nsight package and add it to the build if we find it
find_package(NSIGHT)
if (NSIGHT_FOUND)
include_directories(${NSIGHT_INCLUDE_DIRS})
add_definitions(-DNSIGHT_FOUND)
target_link_libraries(${TARGET_NAME} "${NSIGHT_LIBRARIES}")
endif ()
endif()
# target_link_libraries(${TARGET_NAME} wsock32.lib Winmm.lib)
target_link_libraries(${TARGET_NAME} wsock32.lib Winmm.lib)
else (WIN32)
# Nothing else required on linux apparently
endif()
endif (APPLE)


@ -0,0 +1,79 @@
//
// 3DConnexion.cpp
// hifi
//
// Created by MarcelEdward Verhagen on 09-06-15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef I3D_MOUSE_PARAMS_H
#define I3D_MOUSE_PARAMS_H
// Parameters for the 3D mouse based on the SDK from 3Dconnexion
class I3dMouseSensor {
public:
enum Speed {
SPEED_LOW = 0,
SPEED_MID,
SPEED_HIGH
};
virtual bool IsPanZoom() const = 0;
virtual bool IsRotate() const = 0;
virtual Speed GetSpeed() const = 0;
virtual void SetPanZoom(bool isPanZoom) = 0;
virtual void SetRotate(bool isRotate) = 0;
virtual void SetSpeed(Speed speed) = 0;
protected:
virtual ~I3dMouseSensor() {}
};
class I3dMouseNavigation {
public:
enum Pivot {
PIVOT_MANUAL = 0,
PIVOT_AUTO,
PIVOT_AUTO_OVERRIDE
};
enum Navigation {
NAVIGATION_OBJECT_MODE = 0,
NAVIGATION_CAMERA_MODE,
NAVIGATION_FLY_MODE,
NAVIGATION_WALK_MODE,
NAVIGATION_HELICOPTER_MODE
};
enum PivotVisibility {
PIVOT_HIDE = 0,
PIVOT_SHOW,
PIVOT_SHOW_MOVING
};
virtual Navigation GetNavigationMode() const = 0;
virtual Pivot GetPivotMode() const = 0;
virtual PivotVisibility GetPivotVisibility() const = 0;
virtual bool IsLockHorizon() const = 0;
virtual void SetLockHorizon(bool bOn) = 0;
virtual void SetNavigationMode(Navigation navigation) = 0;
virtual void SetPivotMode(Pivot pivot) = 0;
virtual void SetPivotVisibility(PivotVisibility visibility) = 0;
protected:
virtual ~I3dMouseNavigation(){}
};
class I3dMouseParam : public I3dMouseSensor, public I3dMouseNavigation {
public:
virtual ~I3dMouseParam() {}
};
#endif

View file

@ -0,0 +1,4 @@
The Mac version does not require any files here; the 3Dconnexion driver should be installed from
http://www.3dconnexion.eu/service/drivers.html
For Windows, a header file is required: Inc/I3dMouseParams.h


@ -115,6 +115,7 @@
#include "devices/MIDIManager.h"
#include "devices/OculusManager.h"
#include "devices/TV3DManager.h"
#include "devices/3Dconnexion.h"
#include "scripting/AccountScriptingInterface.h"
#include "scripting/AudioDeviceScriptingInterface.h"
@ -639,6 +640,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
connect(applicationUpdater.data(), &AutoUpdater::newVersionIsAvailable, dialogsManager.data(), &DialogsManager::showUpdateDialog);
applicationUpdater->checkForUpdate();
// the 3Dconnexion device wants to be initialized after a window is displayed.
ConnexionClient::init();
auto& packetReceiver = nodeList->getPacketReceiver();
packetReceiver.registerListener(PacketType::DomainConnectionDenied, this, "handleDomainConnectionDeniedPacket");
}
@ -750,6 +754,7 @@ Application::~Application() {
Leapmotion::destroy();
RealSense::destroy();
ConnexionClient::destroy();
qInstallMessageHandler(NULL); // NOTE: Do this as late as possible so we continue to get our log messages
}
@ -767,8 +772,9 @@ void Application::initializeGL() {
}
#endif
// Where the gpuContext is created and where the TRUE Backend is created and assigned
_gpuContext = std::make_shared<gpu::Context>(new gpu::GLBackend());
// Where the gpuContext is initialized and where the TRUE Backend is created and assigned
gpu::Context::init<gpu::GLBackend>();
_gpuContext = std::make_shared<gpu::Context>();
initDisplay();
qCDebug(interfaceapp, "Initialized Display.");
@ -995,7 +1001,8 @@ void Application::paintGL() {
_compositor.displayOverlayTexture(&renderArgs);
}
if (!OculusManager::isConnected() || OculusManager::allowSwap()) {
PROFILE_RANGE(__FUNCTION__ "/bufferSwap");
_glWidget->swapBuffers();
@ -1007,6 +1014,13 @@ void Application::paintGL() {
_frameCount++;
_numFramesSinceLastResize++;
Stats::getInstance()->setRenderDetails(renderArgs._details);
// Reset the gpu::Context Stages
// Back to the default framebuffer;
gpu::Batch batch;
batch.resetStages();
renderArgs._context->render(batch);
}
void Application::runTests() {
@ -1480,6 +1494,7 @@ void Application::focusOutEvent(QFocusEvent* event) {
_keyboardMouseDevice.focusOutEvent(event);
SixenseManager::getInstance().focusOutEvent();
SDL2Manager::getInstance()->focusOutEvent();
ConnexionData::getInstance().focusOutEvent();
// synthesize events for keys currently pressed, since we may not get their release events
foreach (int key, _keysPressed) {
@ -3264,6 +3279,7 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
renderContext._maxDrawnOverlay3DItems = sceneInterface->getEngineMaxDrawnOverlay3DItems();
renderContext._drawItemStatus = sceneInterface->doEngineDisplayItemStatus();
renderContext._drawHitEffect = sceneInterface->doEngineDisplayHitEffect();
renderContext._occlusionStatus = Menu::getInstance()->isOptionChecked(MenuOption::DebugAmbientOcclusion);
@ -3381,7 +3397,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
// This was removed in commit 71e59cfa88c6563749594e25494102fe01db38e9 but could be further
// investigated in order to adapt the technique while fixing the head rendering issue,
// but the complexity of the hack suggests that a better approach
_mirrorCamera.setPosition(_myAvatar->getHead()->getEyePosition() +
_mirrorCamera.setPosition(_myAvatar->getDefaultEyePosition() +
_myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_REARVIEW_DISTANCE * _myAvatar->getScale());
}
_mirrorCamera.setProjection(glm::perspective(glm::radians(fov), aspect, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP));


@ -28,7 +28,7 @@ enum CameraMode
};
Q_DECLARE_METATYPE(CameraMode);
// static int cameraModeId = qRegisterMetaType<CameraMode>();
static int cameraModeId = qRegisterMetaType<CameraMode>();
class Camera : public QObject {
Q_OBJECT


@ -13,7 +13,6 @@
#define hifi_GLCanvas_h
#include <QDebug>
#include <gpu/GPUConfig.h>
#include <QGLWidget>
#include <QTimer>


@ -29,6 +29,7 @@
#include "devices/Faceshift.h"
#include "devices/RealSense.h"
#include "devices/SixenseManager.h"
#include "devices/3Dconnexion.h"
#include "MainWindow.h"
#include "scripting/MenuScriptingInterface.h"
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
@ -248,8 +249,6 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::BlueSpeechSphere, 0, true);
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::EnableCharacterController, 0, true,
avatar, SLOT(updateMotionBehavior()));
addCheckableActionToQMenuAndActionHash(avatarMenu, MenuOption::ShiftHipsForIdleAnimations, 0, false,
avatar, SLOT(updateMotionBehavior()));
MenuWrapper* viewMenu = addMenu("View");
@ -447,6 +446,11 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtVectors, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderFocusIndicator, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowWhosLookingAtMe, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu,
MenuOption::Connexion,
0, false,
&ConnexionClient::getInstance(),
SLOT(toggleConnexion(bool)));
MenuWrapper* handOptionsMenu = developerMenu->addMenu("Hands");
addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::AlignForearmsWithWrists, 0, false);


@ -160,6 +160,7 @@ namespace MenuOption {
const QString CenterPlayerInView = "Center Player In View";
const QString Chat = "Chat...";
const QString Collisions = "Collisions";
const QString Connexion = "Activate 3D Connexion Devices";
const QString Console = "Console...";
const QString ControlWithSpeech = "Control With Speech";
const QString CopyAddress = "Copy Address to Clipboard";
@ -272,7 +273,6 @@ namespace MenuOption {
const QString SimpleShadows = "Simple";
const QString SixenseEnabled = "Enable Hydra Support";
const QString SixenseMouseInput = "Enable Sixense Mouse Input";
const QString ShiftHipsForIdleAnimations = "Shift hips for idle animations";
const QString Stars = "Stars";
const QString Stats = "Stats";
const QString StopAllScripts = "Stop All Scripts";


@ -106,7 +106,7 @@ bool ModelPackager::loadModel() {
}
qCDebug(interfaceapp) << "Reading FBX file : " << _fbxInfo.filePath();
QByteArray fbxContents = fbx.readAll();
_geometry = readFBX(fbxContents, QVariantHash());
_geometry = readFBX(fbxContents, QVariantHash(), _fbxInfo.filePath());
// make sure we have some basic mappings
populateBasicMapping(_mapping, _fbxInfo.filePath(), _geometry);


@ -14,8 +14,6 @@
#include <mutex>
#include <QElapsedTimer>
#include <gpu/GPUConfig.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <NumericalConstants.h>
#include <DependencyManager.h>
@ -154,6 +152,7 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
auto state = gpu::StatePointer(new gpu::State());
// enable decal blend
state->setDepthTest(gpu::State::DepthTest(false));
state->setAntialiasedLineEnable(true); // line smoothing also smooth points
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
_starsPipeline.reset(gpu::Pipeline::create(program, state));
@ -207,8 +206,6 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
batch._glUniform1f(_timeSlot, secs);
geometryCache->renderUnitCube(batch);
glHint(GL_POINT_SMOOTH_HINT, GL_NICEST);
static const size_t VERTEX_STRIDE = sizeof(StarVertex);
size_t offset = offsetof(StarVertex, position);
gpu::BufferView posView(vertexBuffer, offset, vertexBuffer->getSize(), VERTEX_STRIDE, positionElement);
@ -217,14 +214,11 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
// Render the stars
batch.setPipeline(_starsPipeline);
batch._glEnable(GL_PROGRAM_POINT_SIZE_EXT);
batch._glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);
batch._glEnable(GL_POINT_SMOOTH);
batch.setInputFormat(streamFormat);
batch.setInputBuffer(VERTICES_SLOT, posView);
batch.setInputBuffer(COLOR_SLOT, colView);
batch.draw(gpu::Primitive::POINTS, STARFIELD_NUM_STARS);
renderArgs->_context->render(batch);
}


@ -166,7 +166,8 @@ public:
virtual void computeShapeInfo(ShapeInfo& shapeInfo);
friend class AvatarManager;
void setMotionState(AvatarMotionState* motionState) { _motionState = motionState; }
AvatarMotionState* getMotionState() { return _motionState; }
protected:
SkeletonModel _skeletonModel;
@ -195,7 +196,7 @@ protected:
glm::vec3 _worldUpDirection;
float _stringLength;
bool _moving; ///< set when position is changing
// protected methods...
glm::vec3 getBodyRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
glm::vec3 getBodyUpDirection() const { return getOrientation() * IDENTITY_UP; }
@ -220,7 +221,7 @@ protected:
virtual void updateJointMappings();
render::ItemID _renderItemID;
private:
bool _initialized;
NetworkTexturePointer _billboardTexture;
@ -228,9 +229,9 @@ private:
bool _isLookAtTarget;
void renderBillboard(RenderArgs* renderArgs);
float getBillboardSize() const;
static int _jointConesID;
int _voiceSphereID;


@ -179,11 +179,11 @@ AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWe
// protected
void AvatarManager::removeAvatarMotionState(AvatarSharedPointer avatar) {
auto rawPointer = std::static_pointer_cast<Avatar>(avatar);
AvatarMotionState* motionState= rawPointer->_motionState;
AvatarMotionState* motionState = rawPointer->getMotionState();
if (motionState) {
// clean up physics stuff
motionState->clearObjectBackPointer();
rawPointer->_motionState = nullptr;
rawPointer->setMotionState(nullptr);
_avatarMotionStates.remove(motionState);
_motionStatesToAdd.remove(motionState);
_motionStatesToDelete.push_back(motionState);
@ -307,7 +307,7 @@ void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
AvatarHash::iterator avatarItr = _avatarHash.find(id);
if (avatarItr != _avatarHash.end()) {
auto avatar = std::static_pointer_cast<Avatar>(avatarItr.value());
AvatarMotionState* motionState = avatar->_motionState;
AvatarMotionState* motionState = avatar->getMotionState();
if (motionState) {
motionState->addDirtyFlags(EntityItem::DIRTY_SHAPE);
} else {
@ -316,7 +316,7 @@ void AvatarManager::updateAvatarPhysicsShape(const QUuid& id) {
btCollisionShape* shape = ObjectMotionState::getShapeManager()->getShape(shapeInfo);
if (shape) {
AvatarMotionState* motionState = new AvatarMotionState(avatar.get(), shape);
avatar->_motionState = motionState;
avatar->setMotionState(motionState);
_motionStatesToAdd.insert(motionState);
_avatarMotionStates.insert(motionState);
}


@ -25,6 +25,7 @@ FaceModel::FaceModel(Head* owningHead, RigPointer rig) :
void FaceModel::simulate(float deltaTime, bool fullUpdate) {
updateGeometry();
Avatar* owningAvatar = static_cast<Avatar*>(_owningHead->_owningAvatar);
glm::vec3 neckPosition;
if (!owningAvatar->getSkeletonModel().getNeckPosition(neckPosition)) {
@ -37,15 +38,20 @@ void FaceModel::simulate(float deltaTime, bool fullUpdate) {
}
setRotation(neckParentRotation);
setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningHead->getScale());
setPupilDilation(_owningHead->getPupilDilation());
setBlendshapeCoefficients(_owningHead->getBlendshapeCoefficients());
// FIXME - this is very expensive, we shouldn't do it if we don't have to
//invalidCalculatedMeshBoxes();
if (isActive()) {
setOffset(-_geometry->getFBXGeometry().neckPivot);
for (int i = 0; i < _rig->getJointStateCount(); i++) {
maybeUpdateNeckAndEyeRotation(i);
}
Model::simulateInternal(deltaTime);
}
}
@ -84,7 +90,7 @@ void FaceModel::maybeUpdateEyeRotation(Model* model, const JointState& parentSta
joint.rotation, DEFAULT_PRIORITY);
}
void FaceModel::updateJointState(int index) {
void FaceModel::maybeUpdateNeckAndEyeRotation(int index) {
const JointState& state = _rig->getJointState(index);
const FBXJoint& joint = state.getFBXJoint();
const FBXGeometry& geometry = _geometry->getFBXGeometry();
@ -99,9 +105,6 @@ void FaceModel::updateJointState(int index) {
maybeUpdateEyeRotation(this, parentState, joint, index);
}
}
glm::mat4 parentTransform = glm::scale(_scale) * glm::translate(_offset) * geometry.offset;
_rig->updateFaceJointState(index, parentTransform);
}
bool FaceModel::getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const {

View file

@ -19,23 +19,23 @@ class Head;
/// A face formed from a linear mix of blendshapes according to a set of coefficients.
class FaceModel : public Model {
Q_OBJECT
public:
FaceModel(Head* owningHead, RigPointer rig);
virtual void simulate(float deltaTime, bool fullUpdate = true);
virtual void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, int index);
virtual void maybeUpdateEyeRotation(Model* model, const JointState& parentState, const FBXJoint& joint, int index);
virtual void updateJointState(int index);
void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, int index);
void maybeUpdateEyeRotation(Model* model, const JointState& parentState, const FBXJoint& joint, int index);
void maybeUpdateNeckAndEyeRotation(int index);
/// Retrieve the positions of up to two eye meshes.
/// \return whether or not both eye meshes were found
bool getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
private:
Head* _owningHead;
};

View file

@ -97,7 +97,6 @@ MyAvatar::MyAvatar(RigPointer rig) :
_lookAtTargetAvatar(),
_shouldRender(true),
_billboardValid(false),
_feetTouchFloor(true),
_eyeContactTarget(LEFT_EYE),
_realWorldFieldOfView("realWorldFieldOfView",
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
@ -159,9 +158,6 @@ void MyAvatar::update(float deltaTime) {
head->setAudioAverageLoudness(audio->getAudioAverageInputLoudness());
simulate(deltaTime);
if (_feetTouchFloor) {
_skeletonModel.updateStandingFoot();
}
}
void MyAvatar::simulate(float deltaTime) {
@ -600,7 +596,6 @@ void MyAvatar::saveData() {
for (int i = 0; i < animationHandles.size(); i++) {
settings.setArrayIndex(i);
const AnimationHandlePointer& pointer = animationHandles.at(i);
qCDebug(interfaceapp) << "Save animation" << pointer->getURL().toString();
settings.setValue("role", pointer->getRole());
settings.setValue("url", pointer->getURL());
settings.setValue("fps", pointer->getFPS());
@ -908,15 +903,15 @@ void MyAvatar::setJointRotations(QVector<glm::quat> jointRotations) {
void MyAvatar::setJointData(int index, const glm::quat& rotation) {
if (QThread::currentThread() == thread()) {
// HACK: ATM only JS scripts call setJointData() on MyAvatar so we hardcode the priority
_skeletonModel.setJointState(index, true, rotation, SCRIPT_PRIORITY);
_rig->setJointState(index, true, rotation, SCRIPT_PRIORITY);
}
}
void MyAvatar::clearJointData(int index) {
if (QThread::currentThread() == thread()) {
// HACK: ATM only JS scripts call clearJointData() on MyAvatar so we hardcode the priority
_skeletonModel.setJointState(index, false, glm::quat(), 0.0f);
_skeletonModel.clearJointAnimationPriority(index);
_rig->setJointState(index, false, glm::quat(), 0.0f);
_rig->clearJointAnimationPriority(index);
}
}
@ -927,7 +922,7 @@ void MyAvatar::clearJointsData() {
void MyAvatar::clearJointAnimationPriorities() {
int numStates = _skeletonModel.getJointStateCount();
for (int i = 0; i < numStates; ++i) {
_skeletonModel.clearJointAnimationPriority(i);
_rig->clearJointAnimationPriority(i);
}
}
@ -967,12 +962,8 @@ void MyAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
Avatar::setSkeletonModelURL(skeletonModelURL);
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
_billboardValid = false;
if (_useFullAvatar) {
_skeletonModel.setVisibleInScene(_prevShouldDrawHead, scene);
} else {
_skeletonModel.setVisibleInScene(true, scene);
}
_skeletonModel.setVisibleInScene(true, scene);
_headBoneSet.clear();
}
void MyAvatar::useFullAvatarURL(const QUrl& fullAvatarURL, const QString& modelName) {
@ -1076,11 +1067,7 @@ glm::vec3 MyAvatar::getSkeletonPosition() const {
// The avatar is rotated PI about the yAxis, so we have to correct for it
// to get the skeleton offset contribution in the world-frame.
const glm::quat FLIP = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
glm::vec3 skeletonOffset = _skeletonOffset;
if (_feetTouchFloor) {
skeletonOffset += _skeletonModel.getStandingOffset();
}
return _position + getOrientation() * FLIP * skeletonOffset;
return _position + getOrientation() * FLIP * _skeletonOffset;
}
return Avatar::getPosition();
}
@ -1182,17 +1169,45 @@ void MyAvatar::setVisibleInSceneIfReady(Model* model, render::ScenePointer scene
}
}
void MyAvatar::initHeadBones() {
int neckJointIndex = -1;
if (_skeletonModel.getGeometry()) {
neckJointIndex = _skeletonModel.getGeometry()->getFBXGeometry().neckJointIndex;
}
if (neckJointIndex == -1) {
return;
}
_headBoneSet.clear();
std::queue<int> q;
q.push(neckJointIndex);
_headBoneSet.insert(neckJointIndex);
// fbxJoints only hold links to parents not children, so we have to do a bit of extra work here.
while (q.size() > 0) {
int jointIndex = q.front();
for (int i = 0; i < _skeletonModel.getJointStateCount(); i++) {
if (jointIndex == _skeletonModel.getParentJointIndex(i)) {
_headBoneSet.insert(i);
q.push(i);
}
}
q.pop();
}
}
void MyAvatar::preRender(RenderArgs* renderArgs) {
render::ScenePointer scene = Application::getInstance()->getMain3DScene();
const bool shouldDrawHead = shouldRenderHead(renderArgs);
_skeletonModel.initWhenReady(scene);
if (_skeletonModel.initWhenReady(scene)) {
initHeadBones();
_skeletonModel.setCauterizeBoneSet(_headBoneSet);
}
if (shouldDrawHead != _prevShouldDrawHead) {
if (_useFullAvatar) {
_skeletonModel.setVisibleInScene(true, scene);
_rig->setFirstPerson(!shouldDrawHead);
_skeletonModel.setCauterizeBones(!shouldDrawHead);
} else {
getHead()->getFaceModel().setVisibleInScene(shouldDrawHead, scene);
}
@ -1548,7 +1563,6 @@ void MyAvatar::updateMotionBehavior() {
_motionBehaviors &= ~AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED;
}
_characterController.setEnabled(menu->isOptionChecked(MenuOption::EnableCharacterController));
_feetTouchFloor = menu->isOptionChecked(MenuOption::ShiftHipsForIdleAnimations);
}
//Renders sixense laser pointers for UI selection with controllers
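Illustration (not part of the commit): initHeadBones() above has to walk the skeleton top-down even though FBX joints only store a parent index, so it scans the whole joint list breadth-first for children. A minimal standalone sketch of that traversal, using a plain parent-index vector in place of the engine's joint states:
#include <queue>
#include <unordered_set>
#include <vector>
std::unordered_set<int> collectHeadBones(const std::vector<int>& parentIndex, int neckJointIndex) {
    std::unordered_set<int> headBones;
    if (neckJointIndex < 0) {
        return headBones;           // no neck joint, nothing to cauterize
    }
    std::queue<int> pending;
    pending.push(neckJointIndex);
    headBones.insert(neckJointIndex);
    // joints only know their parent, so scan the whole joint list for children of each queued joint
    while (!pending.empty()) {
        int current = pending.front();
        pending.pop();
        for (int i = 0; i < (int)parentIndex.size(); ++i) {
            if (parentIndex[i] == current && headBones.insert(i).second) {
                pending.push(i);
            }
        }
    }
    return headBones;
}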

View file

@ -39,51 +39,40 @@ public:
MyAvatar(RigPointer rig);
~MyAvatar();
QByteArray toByteArray();
void reset();
void update(float deltaTime);
void simulate(float deltaTime);
void preRender(RenderArgs* renderArgs);
void updateFromTrackers(float deltaTime);
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) override;
virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, float glowLevel = 0.0f) override;
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const override;
// setters
void setLeanScale(float scale) { _leanScale = scale; }
void setShouldRenderLocally(bool shouldRender) { _shouldRender = shouldRender; }
void setRealWorldFieldOfView(float realWorldFov) { _realWorldFieldOfView.set(realWorldFov); }
// getters
float getLeanScale() const { return _leanScale; }
Q_INVOKABLE glm::vec3 getDefaultEyePosition() const;
bool getShouldRenderLocally() const { return _shouldRender; }
float getRealWorldFieldOfView() { return _realWorldFieldOfView.get(); }
const QList<AnimationHandlePointer>& getAnimationHandles() const { return _rig->getAnimationHandles(); }
AnimationHandlePointer addAnimationHandle() { return _rig->createAnimationHandle(); }
void removeAnimationHandle(const AnimationHandlePointer& handle) { _rig->removeAnimationHandle(handle); }
/// Allows scripts to run animations.
Q_INVOKABLE void startAnimation(const QString& url, float fps = 30.0f, float priority = 1.0f, bool loop = false,
bool hold = false, float firstFrame = 0.0f, float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());
bool hold = false, float firstFrame = 0.0f,
float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());
/// Stops an animation as identified by a URL.
Q_INVOKABLE void stopAnimation(const QString& url);
/// Starts an animation by its role, using the provided URL and parameters if the avatar doesn't have a custom
/// animation for the role.
Q_INVOKABLE void startAnimationByRole(const QString& role, const QString& url = QString(), float fps = 30.0f,
float priority = 1.0f, bool loop = false, bool hold = false, float firstFrame = 0.0f,
float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());
float priority = 1.0f, bool loop = false, bool hold = false, float firstFrame = 0.0f,
float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());
/// Stops an animation identified by its role.
Q_INVOKABLE void stopAnimationByRole(const QString& role);
Q_INVOKABLE AnimationDetails getAnimationDetailsByRole(const QString& role);
Q_INVOKABLE AnimationDetails getAnimationDetails(const QString& url);
void clearJointAnimationPriorities();
// get/set avatar data
void saveData();
void loadData();
@ -94,32 +83,27 @@ public:
// Set what driving keys are being pressed to control thrust levels
void clearDriveKeys();
void setDriveKeys(int key, float val) { _driveKeys[key] = val; };
bool getDriveKeys(int key) { return _driveKeys[key] != 0.0f; };
void relayDriveKeysToCharacterController();
bool isMyAvatar() const { return true; }
eyeContactTarget getEyeContactTarget();
virtual int parseDataFromBuffer(const QByteArray& buffer);
static void sendKillAvatar();
Q_INVOKABLE glm::vec3 getTrackedHeadPosition() const { return _trackedHeadPosition; }
Q_INVOKABLE glm::vec3 getHeadPosition() const { return getHead()->getPosition(); }
Q_INVOKABLE float getHeadFinalYaw() const { return getHead()->getFinalYaw(); }
Q_INVOKABLE float getHeadFinalRoll() const { return getHead()->getFinalRoll(); }
Q_INVOKABLE float getHeadFinalPitch() const { return getHead()->getFinalPitch(); }
Q_INVOKABLE float getHeadDeltaPitch() const { return getHead()->getDeltaPitch(); }
Q_INVOKABLE glm::vec3 getEyePosition() const { return getHead()->getEyePosition(); }
Q_INVOKABLE glm::vec3 getTargetAvatarPosition() const { return _targetAvatarPosition; }
AvatarWeakPointer getLookAtTargetAvatar() const { return _lookAtTargetAvatar; }
void updateLookAtTargetAvatar();
void clearLookAtTargetAvatar();
virtual void setJointRotations(QVector<glm::quat> jointRotations);
virtual void setJointData(int index, const glm::quat& rotation);
virtual void clearJointData(int index);
@ -128,7 +112,8 @@ public:
Q_INVOKABLE void useFullAvatarURL(const QUrl& fullAvatarURL, const QString& modelName = QString());
Q_INVOKABLE void useHeadURL(const QUrl& headURL, const QString& modelName = QString());
Q_INVOKABLE void useBodyURL(const QUrl& bodyURL, const QString& modelName = QString());
Q_INVOKABLE void useHeadAndBodyURLs(const QUrl& headURL, const QUrl& bodyURL, const QString& headName = QString(), const QString& bodyName = QString());
Q_INVOKABLE void useHeadAndBodyURLs(const QUrl& headURL, const QUrl& bodyURL,
const QString& headName = QString(), const QString& bodyName = QString());
Q_INVOKABLE bool getUseFullAvatar() const { return _useFullAvatar; }
Q_INVOKABLE const QUrl& getFullAvatarURLFromPreferences() const { return _fullAvatarURLFromPreferences; }
@ -143,48 +128,30 @@ public:
virtual void setAttachmentData(const QVector<AttachmentData>& attachmentData);
virtual glm::vec3 getSkeletonPosition() const;
void updateLocalAABox();
DynamicCharacterController* getCharacterController() { return &_characterController; }
void clearJointAnimationPriorities();
glm::vec3 getScriptedMotorVelocity() const { return _scriptedMotorVelocity; }
float getScriptedMotorTimescale() const { return _scriptedMotorTimescale; }
QString getScriptedMotorFrame() const;
void setScriptedMotorVelocity(const glm::vec3& velocity);
void setScriptedMotorTimescale(float timescale);
void setScriptedMotorFrame(QString frame);
const QString& getCollisionSoundURL() {return _collisionSoundURL; }
void setCollisionSoundURL(const QString& url);
void clearScriptableSettings();
virtual void attach(const QString& modelURL, const QString& jointName = QString(),
const glm::vec3& translation = glm::vec3(), const glm::quat& rotation = glm::quat(), float scale = 1.0f,
bool allowDuplicates = false, bool useSaved = true);
/// Renders a laser pointer for UI picking
void renderLaserPointers(gpu::Batch& batch);
glm::vec3 getLaserPointerTipPosition(const PalmData* palm);
const RecorderPointer getRecorder() const { return _recorder; }
const PlayerPointer getPlayer() const { return _player; }
float getBoomLength() const { return _boomLength; }
void setBoomLength(float boomLength) { _boomLength = boomLength; }
static const float ZOOM_MIN;
static const float ZOOM_MAX;
static const float ZOOM_DEFAULT;
public slots:
void increaseSize();
void decreaseSize();
void resetSize();
void goToLocation(const glm::vec3& newPosition,
bool hasOrientation = false, const glm::quat& newOrientation = glm::quat(),
bool shouldFaceLocation = false);
@ -204,7 +171,7 @@ public slots:
void clearReferential();
bool setModelReferential(const QUuid& id);
bool setJointReferential(const QUuid& id, int jointIndex);
bool isRecording();
qint64 recorderElapsed();
void startRecording();
@ -213,7 +180,7 @@ public slots:
void loadLastRecording();
virtual void rebuildSkeletonBody();
signals:
void transformChanged();
void newCollisionSoundURL(const QUrl& url);
@ -221,6 +188,33 @@ signals:
private:
QByteArray toByteArray();
void simulate(float deltaTime);
void updateFromTrackers(float deltaTime);
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) override;
virtual void renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, float glowLevel = 0.0f) override;
virtual bool shouldRenderHead(const RenderArgs* renderArgs) const override;
void setShouldRenderLocally(bool shouldRender) { _shouldRender = shouldRender; }
bool getShouldRenderLocally() const { return _shouldRender; }
bool getDriveKeys(int key) { return _driveKeys[key] != 0.0f; };
bool isMyAvatar() const { return true; }
virtual int parseDataFromBuffer(const QByteArray& buffer);
virtual glm::vec3 getSkeletonPosition() const;
glm::vec3 getScriptedMotorVelocity() const { return _scriptedMotorVelocity; }
float getScriptedMotorTimescale() const { return _scriptedMotorTimescale; }
QString getScriptedMotorFrame() const;
void setScriptedMotorVelocity(const glm::vec3& velocity);
void setScriptedMotorTimescale(float timescale);
void setScriptedMotorFrame(QString frame);
virtual void attach(const QString& modelURL, const QString& jointName = QString(),
const glm::vec3& translation = glm::vec3(), const glm::quat& rotation = glm::quat(), float scale = 1.0f,
bool allowDuplicates = false, bool useSaved = true);
void renderLaserPointers(gpu::Batch& batch);
const RecorderPointer getRecorder() const { return _recorder; }
const PlayerPointer getPlayer() const { return _player; }
bool cameraInsideHead() const;
// These are made private for MyAvatar so that you will use the "use" methods instead
@ -235,7 +229,7 @@ private:
bool _wasPushing;
bool _isPushing;
bool _isBraking;
float _boomLength;
float _trapDuration; // seconds that avatar has been trapped by collisions
@ -256,36 +250,37 @@ private:
bool _shouldRender;
bool _billboardValid;
float _oculusYawOffset;
bool _feetTouchFloor;
eyeContactTarget _eyeContactTarget;
RecorderPointer _recorder;
glm::vec3 _trackedHeadPosition;
Setting::Handle<float> _realWorldFieldOfView;
// private methods
// private methods
void updateOrientation(float deltaTime);
glm::vec3 applyKeyboardMotor(float deltaTime, const glm::vec3& velocity, bool isHovering);
glm::vec3 applyScriptedMotor(float deltaTime, const glm::vec3& velocity);
void updatePosition(float deltaTime);
void updateCollisionSound(const glm::vec3& penetration, float deltaTime, float frequency);
void maybeUpdateBillboard();
void initHeadBones();
// Avatar Preferences
bool _useFullAvatar = false;
QUrl _fullAvatarURLFromPreferences;
QUrl _headURLFromPreferences;
QUrl _skeletonURLFromPreferences;
QString _headModelName;
QString _bodyModelName;
QString _fullAvatarModelName;
RigPointer _rig;
bool _prevShouldDrawHead;
std::unordered_set<int> _headBoneSet;
};
#endif // hifi_MyAvatar_h

View file

@ -22,12 +22,6 @@
#include "Util.h"
#include "InterfaceLogging.h"
enum StandingFootState {
LEFT_FOOT,
RIGHT_FOOT,
NO_FOOT
};
SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent, RigPointer rig) :
Model(rig, parent),
_triangleFanID(DependencyManager::get<GeometryCache>()->allocateID()),
@ -36,9 +30,6 @@ SkeletonModel::SkeletonModel(Avatar* owningAvatar, QObject* parent, RigPointer r
_boundingCapsuleRadius(0.0f),
_boundingCapsuleHeight(0.0f),
_defaultEyeModelPosition(glm::vec3(0.0f, 0.0f, 0.0f)),
_standingFoot(NO_FOOT),
_standingOffset(0.0f),
_clampedFootPosition(0.0f),
_headClipDistance(DEFAULT_NEAR_CLIP)
{
assert(_rig);
@ -51,7 +42,7 @@ SkeletonModel::~SkeletonModel() {
void SkeletonModel::initJointStates(QVector<JointState> states) {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
glm::mat4 parentTransform = glm::scale(_scale) * glm::translate(_offset) * geometry.offset;
_boundingRadius = _rig->initJointStates(states, parentTransform, geometry.neckJointIndex);
_boundingRadius = _rig->initJointStates(states, parentTransform);
// Determine the default eye position for avatar scale = 1.0
int headJointIndex = _geometry->getFBXGeometry().headJointIndex;
@ -92,32 +83,27 @@ void SkeletonModel::initJointStates(QVector<JointState> states) {
const float PALM_PRIORITY = DEFAULT_PRIORITY;
const float LEAN_PRIORITY = DEFAULT_PRIORITY;
void SkeletonModel::updateClusterMatrices() {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
glm::mat4 modelToWorld = glm::mat4_cast(_rotation);
for (int i = 0; i < _meshStates.size(); i++) {
MeshState& state = _meshStates[i];
const FBXMesh& mesh = geometry.meshes.at(i);
if (_showTrueJointTransforms) {
for (int j = 0; j < mesh.clusters.size(); j++) {
const FBXCluster& cluster = mesh.clusters.at(j);
state.clusterMatrices[j] =
modelToWorld * _rig->getJointTransform(cluster.jointIndex) * cluster.inverseBindMatrix;
}
} else {
for (int j = 0; j < mesh.clusters.size(); j++) {
const FBXCluster& cluster = mesh.clusters.at(j);
state.clusterMatrices[j] =
modelToWorld * _rig->getJointVisibleTransform(cluster.jointIndex) * cluster.inverseBindMatrix;
}
}
}
}
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
_rig->computeMotionAnimationState(deltaTime, _owningAvatar->getPosition(), _owningAvatar->getVelocity(), _owningAvatar->getOrientation());
Model::updateRig(deltaTime, parentTransform);
if (_owningAvatar->isMyAvatar()) {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
Rig::HeadParameters params;
params.leanSideways = _owningAvatar->getHead()->getFinalLeanSideways();
params.leanForward = _owningAvatar->getHead()->getFinalLeanForward();
params.torsoTwist = _owningAvatar->getHead()->getTorsoTwist();
params.localHeadOrientation = _owningAvatar->getHead()->getFinalOrientationInLocalFrame();
params.worldHeadOrientation = _owningAvatar->getHead()->getFinalOrientationInWorldFrame();
params.eyeLookAt = _owningAvatar->getHead()->getCorrectedLookAtPosition();
params.eyeSaccade = _owningAvatar->getHead()->getSaccade();
params.leanJointIndex = geometry.leanJointIndex;
params.neckJointIndex = geometry.neckJointIndex;
params.leftEyeJointIndex = geometry.leftEyeJointIndex;
params.rightEyeJointIndex = geometry.rightEyeJointIndex;
_rig->updateFromHeadParameters(params);
}
}
void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
@ -175,14 +161,6 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
restoreRightHandPosition(HAND_RESTORATION_RATE, PALM_PRIORITY);
}
}
// if (_isFirstPerson) {
// cauterizeHead();
// updateClusterMatrices();
// }
if (_rig->getJointsAreDirty()) {
updateClusterMatrices();
}
}
void SkeletonModel::renderIKConstraints(gpu::Batch& batch) {
@ -263,55 +241,6 @@ void SkeletonModel::applyPalmData(int jointIndex, PalmData& palm) {
}
}
void SkeletonModel::updateJointState(int index) {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
glm::mat4 parentTransform = glm::scale(_scale) * glm::translate(_offset) * geometry.offset;
const JointState joint = _rig->getJointState(index);
if (joint.getParentIndex() >= 0 && joint.getParentIndex() < _rig->getJointStateCount()) {
const JointState parentState = _rig->getJointState(joint.getParentIndex());
if (index == geometry.leanJointIndex) {
maybeUpdateLeanRotation(parentState, index);
} else if (index == geometry.neckJointIndex) {
maybeUpdateNeckRotation(parentState, joint.getFBXJoint(), index);
} else if (index == geometry.leftEyeJointIndex || index == geometry.rightEyeJointIndex) {
maybeUpdateEyeRotation(parentState, joint.getFBXJoint(), index);
}
}
_rig->updateJointState(index, parentTransform);
if (index == _geometry->getFBXGeometry().rootJointIndex) {
_rig->clearJointTransformTranslation(index);
}
}
void SkeletonModel::maybeUpdateLeanRotation(const JointState& parentState, int index) {
if (!_owningAvatar->isMyAvatar()) {
return;
}
// get the rotation axes in joint space and use them to adjust the rotation
glm::vec3 xAxis(1.0f, 0.0f, 0.0f);
glm::vec3 yAxis(0.0f, 1.0f, 0.0f);
glm::vec3 zAxis(0.0f, 0.0f, 1.0f);
glm::quat inverse = glm::inverse(parentState.getRotation() * _rig->getJointDefaultRotationInParentFrame(index));
_rig->setJointRotationInConstrainedFrame(index,
glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanSideways(), inverse * zAxis)
* glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanForward(), inverse * xAxis)
* glm::angleAxis(RADIANS_PER_DEGREE * _owningAvatar->getHead()->getTorsoTwist(), inverse * yAxis)
* _rig->getJointState(index).getFBXJoint().rotation, LEAN_PRIORITY);
}
void SkeletonModel::maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, int index) {
_owningAvatar->getHead()->getFaceModel().maybeUpdateNeckRotation(parentState, joint, index);
}
void SkeletonModel::maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, int index) {
_owningAvatar->getHead()->getFaceModel().maybeUpdateEyeRotation(this, parentState, joint, index);
}
void SkeletonModel::renderJointConstraints(gpu::Batch& batch, int jointIndex) {
if (jointIndex == -1 || jointIndex >= _rig->getJointStateCount()) {
return;
@ -563,65 +492,6 @@ glm::vec3 SkeletonModel::getDefaultEyeModelPosition() const {
return _owningAvatar->getScale() * _defaultEyeModelPosition;
}
/// \return offset of hips after foot animation
void SkeletonModel::updateStandingFoot() {
if (_geometry == NULL) {
return;
}
glm::vec3 offset(0.0f);
int leftFootIndex = _geometry->getFBXGeometry().leftToeJointIndex;
int rightFootIndex = _geometry->getFBXGeometry().rightToeJointIndex;
if (leftFootIndex != -1 && rightFootIndex != -1) {
glm::vec3 leftPosition, rightPosition;
getJointPosition(leftFootIndex, leftPosition);
getJointPosition(rightFootIndex, rightPosition);
int lowestFoot = (leftPosition.y < rightPosition.y) ? LEFT_FOOT : RIGHT_FOOT;
const float MIN_STEP_HEIGHT_THRESHOLD = 0.05f;
bool oneFoot = fabsf(leftPosition.y - rightPosition.y) > MIN_STEP_HEIGHT_THRESHOLD;
int currentFoot = oneFoot ? lowestFoot : _standingFoot;
if (_standingFoot == NO_FOOT) {
currentFoot = lowestFoot;
}
if (currentFoot != _standingFoot) {
if (_standingFoot == NO_FOOT) {
// pick the lowest foot
glm::vec3 lowestPosition = (currentFoot == LEFT_FOOT) ? leftPosition : rightPosition;
// we ignore zero length positions which can happen for a few frames until skeleton is fully loaded
if (glm::length(lowestPosition) > 0.0f) {
_standingFoot = currentFoot;
_clampedFootPosition = lowestPosition;
}
} else {
// swap feet
_standingFoot = currentFoot;
glm::vec3 nextPosition = leftPosition;
glm::vec3 prevPosition = rightPosition;
if (_standingFoot == RIGHT_FOOT) {
nextPosition = rightPosition;
prevPosition = leftPosition;
}
glm::vec3 oldOffset = _clampedFootPosition - prevPosition;
_clampedFootPosition = oldOffset + nextPosition;
offset = _clampedFootPosition - nextPosition;
}
} else {
glm::vec3 nextPosition = (_standingFoot == LEFT_FOOT) ? leftPosition : rightPosition;
offset = _clampedFootPosition - nextPosition;
}
// clamp the offset to not exceed some max distance
const float MAX_STEP_OFFSET = 1.0f;
float stepDistance = glm::length(offset);
if (stepDistance > MAX_STEP_OFFSET) {
offset *= (MAX_STEP_OFFSET / stepDistance);
}
}
_standingOffset = offset;
}
float DENSITY_OF_WATER = 1000.0f; // kg/m^3
float MIN_JOINT_MASS = 1.0f;
float VERY_BIG_MASS = 1.0e6f;
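Aside (a sketch, not engine code): the skinning matrices built in updateClusterMatrices() above compose three pieces, bind space to joint space (inverseBindMatrix), joint space to model space (the rig's joint transform), and the model's world rotation on the outside. The same composition in isolation, with placeholder names:
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
glm::mat4 clusterMatrix(const glm::quat& modelRotation,
                        const glm::mat4& jointTransform,
                        const glm::mat4& inverseBindMatrix) {
    glm::mat4 modelToWorld = glm::mat4_cast(modelRotation);   // rotation only, as in the diff
    return modelToWorld * jointTransform * inverseBindMatrix;
}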

View file

@ -21,69 +21,69 @@ class MuscleConstraint;
/// A skeleton loaded from a model.
class SkeletonModel : public Model {
Q_OBJECT
public:
SkeletonModel(Avatar* owningAvatar, QObject* parent = nullptr, RigPointer rig = nullptr);
~SkeletonModel();
virtual void initJointStates(QVector<JointState> states);
virtual void simulate(float deltaTime, bool fullUpdate = true);
virtual void updateRig(float deltaTime, glm::mat4 parentTransform);
void renderIKConstraints(gpu::Batch& batch);
/// Returns the index of the left hand joint, or -1 if not found.
int getLeftHandJointIndex() const { return isActive() ? _geometry->getFBXGeometry().leftHandJointIndex : -1; }
/// Returns the index of the right hand joint, or -1 if not found.
int getRightHandJointIndex() const { return isActive() ? _geometry->getFBXGeometry().rightHandJointIndex : -1; }
/// Retrieve the position of the left hand
/// \return whether or not the position was found
bool getLeftHandPosition(glm::vec3& position) const;
/// Retrieve the position of the right hand
/// \return whether or not the position was found
bool getRightHandPosition(glm::vec3& position) const;
/// Restores some fraction of the default position of the left hand.
/// \param fraction the fraction of the default position to restore
/// \return whether or not the left hand joint was found
bool restoreLeftHandPosition(float fraction = 1.0f, float priority = 1.0f);
/// Gets the position of the left shoulder.
/// \return whether or not the left shoulder joint was found
bool getLeftShoulderPosition(glm::vec3& position) const;
/// Returns the extended length from the left hand to its last free ancestor.
float getLeftArmLength() const;
/// Restores some fraction of the default position of the right hand.
/// \param fraction the fraction of the default position to restore
/// \return whether or not the right hand joint was found
bool restoreRightHandPosition(float fraction = 1.0f, float priority = 1.0f);
/// Gets the position of the right shoulder.
/// \return whether or not the right shoulder joint was found
bool getRightShoulderPosition(glm::vec3& position) const;
/// Returns the extended length from the right hand to its first free ancestor.
float getRightArmLength() const;
/// Returns the position of the head joint.
/// \return whether or not the head was found
bool getHeadPosition(glm::vec3& headPosition) const;
/// Returns the position of the neck joint.
/// \return whether or not the neck was found
bool getNeckPosition(glm::vec3& neckPosition) const;
/// Returns the rotation of the neck joint's parent from default orientation
/// \return whether or not the neck was found
bool getNeckParentRotationFromDefaultOrientation(glm::quat& neckParentRotation) const;
/// Retrieve the positions of up to two eye meshes.
/// \return whether or not both eye meshes were found
bool getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
@ -92,10 +92,6 @@ public:
/// \return whether or not the head was found.
glm::vec3 getDefaultEyeModelPosition() const;
/// skeleton offset caused by moving feet
void updateStandingFoot();
const glm::vec3& getStandingOffset() const { return _standingOffset; }
void computeBoundingShape(const FBXGeometry& geometry);
void renderBoundingCollisionShapes(gpu::Batch& batch, float alpha);
float getBoundingCapsuleRadius() const { return _boundingCapsuleRadius; }
@ -119,27 +115,14 @@ protected:
/// \param jointIndex index of joint in model
/// \param position position of joint in model-frame
void applyHandPosition(int jointIndex, const glm::vec3& position);
void applyPalmData(int jointIndex, PalmData& palm);
/// Updates the state of the joint at the specified index.
virtual void updateJointState(int index);
void maybeUpdateLeanRotation(const JointState& parentState, int index);
void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, int index);
void maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, int index);
void updateClusterMatrices();
void cauterizeHead();
void initHeadBones();
void invalidateHeadBones();
private:
void renderJointConstraints(gpu::Batch& batch, int jointIndex);
void renderOrientationDirections(gpu::Batch& batch, int jointIndex,
void renderOrientationDirections(gpu::Batch& batch, int jointIndex,
glm::vec3 position, const glm::quat& orientation, float size);
struct OrientationLineIDs {
int _up;
int _front;
@ -154,7 +137,7 @@ private:
void setHandPosition(int jointIndex, const glm::vec3& position, const glm::quat& rotation);
bool getEyeModelPositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const;
Avatar* _owningAvatar;
glm::vec3 _boundingCapsuleLocalOffset;
@ -162,9 +145,6 @@ private:
float _boundingCapsuleHeight;
glm::vec3 _defaultEyeModelPosition;
int _standingFoot;
glm::vec3 _standingOffset;
glm::vec3 _clampedFootPosition;
float _headClipDistance; // Near clip distance to use if no separate head model
};

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,244 @@
// 3DConnexion.h
// hifi
//
// Created by Marcel Verhagen on 09-06-15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ConnexionClient_h
#define hifi_ConnexionClient_h
#include <qobject.h>
#include <qlibrary.h>
#include "InterfaceLogging.h"
#include "Application.h"
#include "ui/UserInputMapper.h"
#ifndef HAVE_CONNEXIONCLIENT
class ConnexionClient : public QObject {
Q_OBJECT
public:
static ConnexionClient& getInstance();
static void init() {};
static void destroy() {};
static bool Is3dmouseAttached() { return false; };
public slots:
void toggleConnexion(bool shouldEnable) {};
};
#endif // NOT_HAVE_CONNEXIONCLIENT
#ifdef HAVE_CONNEXIONCLIENT
// the windows connexion rawinput
#ifdef _WIN32
#include "I3dMouseParams.h"
#include <QAbstractNativeEventFilter>
#include <QAbstractEventDispatcher>
#include <Winsock2.h>
#include <windows.h>
// windows rawinput parameters
class MouseParameters : public I3dMouseParam {
public:
MouseParameters();
~MouseParameters();
// I3dmouseSensor interface
bool IsPanZoom() const;
bool IsRotate() const;
Speed GetSpeed() const;
void SetPanZoom(bool isPanZoom);
void SetRotate(bool isRotate);
void SetSpeed(Speed speed);
// I3dmouseNavigation interface
Navigation GetNavigationMode() const;
Pivot GetPivotMode() const;
PivotVisibility GetPivotVisibility() const;
bool IsLockHorizon() const;
void SetLockHorizon(bool bOn);
void SetNavigationMode(Navigation navigation);
void SetPivotMode(Pivot pivot);
void SetPivotVisibility(PivotVisibility visibility);
static bool Is3dmouseAttached();
private:
MouseParameters(const MouseParameters&);
const MouseParameters& operator = (const MouseParameters&);
Navigation fNavigation;
Pivot fPivot;
PivotVisibility fPivotVisibility;
bool fIsLockHorizon;
bool fIsPanZoom;
bool fIsRotate;
Speed fSpeed;
};
class ConnexionClient : public QObject, public QAbstractNativeEventFilter {
Q_OBJECT
public:
ConnexionClient();
~ConnexionClient();
static ConnexionClient& getInstance();
ConnexionClient* client;
static void init();
static void destroy();
static bool Is3dmouseAttached();
I3dMouseParam& MouseParams();
const I3dMouseParam& MouseParams() const;
virtual void Move3d(HANDLE device, std::vector<float>& motionData);
virtual void On3dmouseKeyDown(HANDLE device, int virtualKeyCode);
virtual void On3dmouseKeyUp(HANDLE device, int virtualKeyCode);
virtual bool nativeEventFilter(const QByteArray& eventType, void* message, long* result) Q_DECL_OVERRIDE
{
MSG* msg = static_cast< MSG * >(message);
return ConnexionClient::RawInputEventFilter(message, result);
}
public slots:
void toggleConnexion(bool shouldEnable);
signals:
void Move3d(std::vector<float>& motionData);
void On3dmouseKeyDown(int virtualKeyCode);
void On3dmouseKeyUp(int virtualKeyCode);
private:
bool InitializeRawInput(HWND hwndTarget);
static bool RawInputEventFilter(void* msg, long* result);
void OnRawInput(UINT nInputCode, HRAWINPUT hRawInput);
UINT GetRawInputBuffer(PRAWINPUT pData, PUINT pcbSize, UINT cbSizeHeader);
bool TranslateRawInputData(UINT nInputCode, PRAWINPUT pRawInput);
void On3dmouseInput();
class TInputData {
public:
TInputData() : fAxes(6) {}
bool IsZero() {
return (0.0f == fAxes[0] && 0.0f == fAxes[1] && 0.0f == fAxes[2] &&
0.0f == fAxes[3] && 0.0f == fAxes[4] && 0.0f == fAxes[5]);
}
int fTimeToLive; // For telling if the device was unplugged while sending data
bool fIsDirty;
std::vector<float> fAxes;
};
HWND fWindow;
// Data cache to handle multiple rawinput devices
std::map< HANDLE, TInputData> fDevice2Data;
std::map< HANDLE, unsigned long> fDevice2Keystate;
// 3dmouse parameters
MouseParameters f3dMouseParams; // Rotate, Pan Zoom etc.
// use to calculate distance traveled since last event
DWORD fLast3dmouseInputTime;
};
// the osx connexion api
#else
#include <glm/glm.hpp>
#include "3DconnexionClient/ConnexionClientAPI.h"
class ConnexionClient : public QObject {
Q_OBJECT
public:
static ConnexionClient& getInstance();
static bool Is3dmouseAttached();
static void init();
static void destroy();
public slots:
void toggleConnexion(bool shouldEnable);
};
#endif // __APPLE__
#endif // HAVE_CONNEXIONCLIENT
// connects to the UserInputMapper
class ConnexionData : public QObject {
Q_OBJECT
public:
static ConnexionData& getInstance();
ConnexionData();
enum PositionChannel {
POSITION_AXIS_X_POS = 1,
POSITION_AXIS_X_NEG = 2,
POSITION_AXIS_Y_POS = 3,
POSITION_AXIS_Y_NEG = 4,
POSITION_AXIS_Z_POS = 5,
POSITION_AXIS_Z_NEG = 6,
ROTATION_AXIS_X_POS = 7,
ROTATION_AXIS_X_NEG = 8,
ROTATION_AXIS_Y_POS = 9,
ROTATION_AXIS_Y_NEG = 10,
ROTATION_AXIS_Z_POS = 11,
ROTATION_AXIS_Z_NEG = 12
};
enum ButtonChannel {
BUTTON_1 = 1,
BUTTON_2 = 2,
BUTTON_3 = 3
};
typedef std::unordered_set<int> ButtonPressedMap;
typedef std::map<int, float> AxisStateMap;
float getButton(int channel) const;
float getAxis(int channel) const;
UserInputMapper::Input makeInput(ConnexionData::PositionChannel axis);
UserInputMapper::Input makeInput(ConnexionData::ButtonChannel button);
void registerToUserInputMapper(UserInputMapper& mapper);
void assignDefaultInputMapping(UserInputMapper& mapper);
void update();
void focusOutEvent();
int getDeviceID() { return _deviceID; }
void setDeviceID(int deviceID) { _deviceID = deviceID; }
QString _name;
glm::vec3 cc_position;
glm::vec3 cc_rotation;
int clientId;
void setButton(int lastButtonState);
void handleAxisEvent();
protected:
int _deviceID = 0;
ButtonPressedMap _buttonPressedMap;
AxisStateMap _axisStateMap;
};
#endif // defined(hifi_ConnexionClient_h)
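Aside (assumption, not taken from this commit): ConnexionData splits every translation and rotation axis into a positive and a negative channel, so a signed device reading presumably gets folded into such a pair inside handleAxisEvent(), which is not shown here. A sketch of that folding with placeholder names:
#include <algorithm>
#include <map>
void splitAxis(std::map<int, float>& axisState, int positiveChannel, int negativeChannel, float value) {
    // positive readings drive the *_POS channel, negative readings the *_NEG channel
    axisState[positiveChannel] = std::max(value, 0.0f);
    axisState[negativeChannel] = std::max(-value, 0.0f);
}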

View file

@ -11,16 +11,16 @@
//
#include "OculusManager.h"
#include <gpu/GPUConfig.h>
#include <glm/glm.hpp>
#include <QDesktopWidget>
#include <QGuiApplication>
#include <gpu/GPUConfig.h>
#include <QScreen>
#include <CursorManager.h>
#include <QOpenGLTimerQuery>
#include <QGLWidget>
#include <CursorManager.h>
#include <glm/glm.hpp>
#include <avatar/AvatarManager.h>
#include <avatar/MyAvatar.h>

View file

@ -9,13 +9,14 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "TV3DManager.h"
#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>
#include "gpu/GLBackend.h"
#include "Application.h"
#include <RenderArgs.h>
#include "TV3DManager.h"
#include "Application.h"
#include "Menu.h"
int TV3DManager::_screenWidth = 1;
@ -63,6 +64,7 @@ void TV3DManager::setFrustum(Camera& whichCamera) {
}
void TV3DManager::configureCamera(Camera& whichCamera, int screenWidth, int screenHeight) {
#ifdef THIS_CURRENTLY_BROKEN_WAITING_FOR_DISPLAY_PLUGINS
if (screenHeight == 0) {
screenHeight = 1; // prevent divide by 0
}
@ -72,6 +74,7 @@ void TV3DManager::configureCamera(Camera& whichCamera, int screenWidth, int scre
setFrustum(whichCamera);
glViewport (0, 0, _screenWidth, _screenHeight); // sets drawing viewport
#endif
}
void TV3DManager::display(RenderArgs* renderArgs, Camera& whichCamera) {

View file

@ -17,6 +17,7 @@
#include <glm/glm.hpp>
class Camera;
class RenderArgs;
struct eyeFrustum {
double left;

View file

@ -117,7 +117,7 @@ void ApplicationOverlay::renderQmlUi(RenderArgs* renderArgs) {
batch.setProjectionTransform(mat4());
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
batch._glBindTexture(GL_TEXTURE_2D, _uiTexture);
batch._glActiveBindTexture(GL_TEXTURE0, GL_TEXTURE_2D, _uiTexture);
geometryCache->renderUnitQuad(batch, glm::vec4(1));
}

View file

@ -62,7 +62,7 @@ void AnimationReader::run() {
QSharedPointer<Resource> animation = _animation.toStrongRef();
if (!animation.isNull()) {
QMetaObject::invokeMethod(animation.data(), "setGeometry",
Q_ARG(const FBXGeometry&, readFBX(_reply->readAll(), QVariantHash())));
Q_ARG(const FBXGeometry&, readFBX(_reply->readAll(), QVariantHash(), _reply->property("url").toString())));
}
_reply->deleteLater();
}

View file

@ -176,7 +176,9 @@ void AnimationHandle::applyFrame(float frameIndex) {
safeMix(floorFrame.rotations.at(i),
ceilFrame.rotations.at(i),
frameFraction),
_priority);
_priority,
false,
_mix);
}
}
}
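Aside (illustration only): the new _mix argument pairs with Rig::updateAnimations(), which hands the k-th running animation a mix of 1/k. Blending each incoming rotation into an accumulator with weight 1/k produces an equal-weight average of all running animations. A standalone sketch, with glm::slerp standing in for safeMix():
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <vector>
glm::quat blendEqually(const std::vector<glm::quat>& rotations) {
    glm::quat accumulated = glm::quat(1.0f, 0.0f, 0.0f, 0.0f);  // identity (w, x, y, z)
    int count = 0;
    for (const glm::quat& rotation : rotations) {
        float mix = 1.0f / ++count;                          // 1, 1/2, 1/3, ...
        accumulated = glm::slerp(accumulated, rotation, mix); // first pass copies rotation verbatim
    }
    return accumulated;
}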

View file

@ -63,6 +63,7 @@ public:
void setPriority(float priority);
float getPriority() const { return _priority; }
void setMix(float mix) { _mix = mix; }
void setMaskedJoints(const QStringList& maskedJoints);
const QStringList& getMaskedJoints() const { return _maskedJoints; }
@ -119,6 +120,7 @@ private:
QString _role;
QUrl _url;
float _priority;
float _mix;
QStringList _maskedJoints;
QVector<int> _jointMappings;

View file

@ -23,24 +23,7 @@ void AvatarRig::updateJointState(int index, glm::mat4 parentTransform) {
int parentIndex = joint.parentIndex;
if (parentIndex == -1) {
state.computeTransform(parentTransform);
} else {
// guard against out-of-bounds access to _jointStates
if (joint.parentIndex >= 0 && joint.parentIndex < _jointStates.size()) {
const JointState& parentState = _jointStates.at(parentIndex);
state.computeTransform(parentState.getTransform(), parentState.getTransformChanged());
}
}
}
void AvatarRig::updateFaceJointState(int index, glm::mat4 parentTransform) {
JointState& state = _jointStates[index];
const FBXJoint& joint = state.getFBXJoint();
// compute model transforms
int parentIndex = joint.parentIndex;
if (parentIndex == -1) {
state.computeTransform(parentTransform);
clearJointTransformTranslation(index);
} else {
// guard against out-of-bounds access to _jointStates
if (joint.parentIndex >= 0 && joint.parentIndex < _jointStates.size()) {

View file

@ -22,7 +22,6 @@ class AvatarRig : public Rig {
public:
~AvatarRig() {}
virtual void updateJointState(int index, glm::mat4 parentTransform);
virtual void updateFaceJointState(int index, glm::mat4 parentTransform);
};
#endif // hifi_AvatarRig_h

View file

@ -22,7 +22,6 @@ class EntityRig : public Rig {
public:
~EntityRig() {}
virtual void updateJointState(int index, glm::mat4 parentTransform);
virtual void updateFaceJointState(int index, glm::mat4 parentTransform) { }
};
#endif // hifi_EntityRig_h

View file

@ -232,12 +232,13 @@ glm::quat JointState::computeVisibleParentRotation() const {
return _visibleRotation * glm::inverse(_fbxJoint->preRotation * _visibleRotationInConstrainedFrame * _fbxJoint->postRotation);
}
void JointState::setRotationInConstrainedFrame(glm::quat targetRotation, float priority, bool constrain) {
void JointState::setRotationInConstrainedFrame(glm::quat targetRotation, float priority, bool constrain, float mix) {
if (priority >= _animationPriority || _animationPriority == 0.0f) {
if (constrain && _constraint) {
_constraint->softClamp(targetRotation, _rotationInConstrainedFrame, 0.5f);
}
setRotationInConstrainedFrameInternal(targetRotation);
auto rotation = (mix == 1.0f) ? targetRotation : safeMix(getRotationInConstrainedFrame(), targetRotation, mix);
setRotationInConstrainedFrameInternal(rotation);
_animationPriority = priority;
}
}

View file

@ -84,7 +84,7 @@ public:
/// NOTE: the JointState's model-frame transform/rotation are NOT updated!
void setRotationInBindFrame(const glm::quat& rotation, float priority, bool constrain = false);
void setRotationInConstrainedFrame(glm::quat targetRotation, float priority, bool constrain = false);
void setRotationInConstrainedFrame(glm::quat targetRotation, float priority, bool constrain = false, float mix = 1.0f);
void setVisibleRotationInConstrainedFrame(const glm::quat& targetRotation);
const glm::quat& getRotationInConstrainedFrame() const { return _rotationInConstrainedFrame; }
const glm::quat& getVisibleRotationInConstrainedFrame() const { return _visibleRotationInConstrainedFrame; }

View file

@ -16,6 +16,25 @@
#include "AnimationLogging.h"
#include "Rig.h"
void Rig::HeadParameters::dump() const {
qCDebug(animation, "HeadParameters =");
qCDebug(animation, " leanSideways = %0.5f", leanSideways);
qCDebug(animation, " leanForward = %0.5f", leanForward);
qCDebug(animation, " torsoTwist = %0.5f", torsoTwist);
glm::vec3 axis = glm::axis(localHeadOrientation);
float theta = glm::angle(localHeadOrientation);
qCDebug(animation, " localHeadOrientation axis = (%.5f, %.5f, %.5f), theta = %0.5f", axis.x, axis.y, axis.z, theta);
axis = glm::axis(worldHeadOrientation);
theta = glm::angle(worldHeadOrientation);
qCDebug(animation, " worldHeadOrientation axis = (%.5f, %.5f, %.5f), theta = %0.5f", axis.x, axis.y, axis.z, theta);
qCDebug(animation, " eyeLookAt = (%.5f, %.5f, %.5f)", eyeLookAt.x, eyeLookAt.y, eyeLookAt.z);
qCDebug(animation, " eyeSaccade = (%.5f, %.5f, %.5f)", eyeSaccade.x, eyeSaccade.y, eyeSaccade.z);
qCDebug(animation, " leanJointIndex = %.d", leanJointIndex);
qCDebug(animation, " neckJointIndex = %.d", neckJointIndex);
qCDebug(animation, " leftEyeJointIndex = %.d", leftEyeJointIndex);
qCDebug(animation, " rightEyeJointIndex = %.d", rightEyeJointIndex);
}
void insertSorted(QList<AnimationHandlePointer>& handles, const AnimationHandlePointer& handle) {
for (QList<AnimationHandlePointer>::iterator it = handles.begin(); it != handles.end(); it++) {
if (handle->getPriority() > (*it)->getPriority()) {
@ -124,9 +143,8 @@ void Rig::deleteAnimations() {
_animationHandles.clear();
}
float Rig::initJointStates(QVector<JointState> states, glm::mat4 parentTransform, int neckJointIndex) {
float Rig::initJointStates(QVector<JointState> states, glm::mat4 parentTransform) {
_jointStates = states;
_neckJointIndex = neckJointIndex;
initJointTransforms(parentTransform);
int numStates = _jointStates.size();
@ -142,8 +160,6 @@ float Rig::initJointStates(QVector<JointState> states, glm::mat4 parentTransform
_jointStates[i].slaveVisibleTransform();
}
initHeadBones();
return radius;
}
@ -195,8 +211,7 @@ JointState Rig::getJointState(int jointIndex) const {
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
return JointState();
}
// return _jointStates[jointIndex];
return maybeCauterizeHead(jointIndex);
return _jointStates[jointIndex];
}
bool Rig::getJointStateRotation(int index, glm::quat& rotation) const {
@ -270,8 +285,7 @@ bool Rig::getJointPositionInWorldFrame(int jointIndex, glm::vec3& position,
return false;
}
// position is in world-frame
// position = translation + rotation * _jointStates[jointIndex].getPosition();
position = translation + rotation * maybeCauterizeHead(jointIndex).getPosition();
position = translation + rotation * _jointStates[jointIndex].getPosition();
return true;
}
@ -280,7 +294,7 @@ bool Rig::getJointPosition(int jointIndex, glm::vec3& position) const {
return false;
}
// position is in model-frame
position = extractTranslation(maybeCauterizeHead(jointIndex).getTransform());
position = extractTranslation(_jointStates[jointIndex].getTransform());
return true;
}
@ -288,7 +302,7 @@ bool Rig::getJointRotationInWorldFrame(int jointIndex, glm::quat& result, const
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
return false;
}
result = rotation * maybeCauterizeHead(jointIndex).getRotation();
result = rotation * _jointStates[jointIndex].getRotation();
return true;
}
@ -296,7 +310,7 @@ bool Rig::getJointRotation(int jointIndex, glm::quat& rotation) const {
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
return false;
}
rotation = maybeCauterizeHead(jointIndex).getRotation();
rotation = _jointStates[jointIndex].getRotation();
return true;
}
@ -304,7 +318,7 @@ bool Rig::getJointCombinedRotation(int jointIndex, glm::quat& result, const glm:
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
return false;
}
result = rotation * maybeCauterizeHead(jointIndex).getRotation();
result = rotation * _jointStates[jointIndex].getRotation();
return true;
}
@ -315,7 +329,7 @@ bool Rig::getVisibleJointPositionInWorldFrame(int jointIndex, glm::vec3& positio
return false;
}
// position is in world-frame
position = translation + rotation * maybeCauterizeHead(jointIndex).getVisiblePosition();
position = translation + rotation * _jointStates[jointIndex].getVisiblePosition();
return true;
}
@ -323,7 +337,7 @@ bool Rig::getVisibleJointRotationInWorldFrame(int jointIndex, glm::quat& result,
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
return false;
}
result = rotation * maybeCauterizeHead(jointIndex).getVisibleRotation();
result = rotation * _jointStates[jointIndex].getVisibleRotation();
return true;
}
@ -331,24 +345,23 @@ glm::mat4 Rig::getJointTransform(int jointIndex) const {
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
return glm::mat4();
}
return maybeCauterizeHead(jointIndex).getTransform();
return _jointStates[jointIndex].getTransform();
}
glm::mat4 Rig::getJointVisibleTransform(int jointIndex) const {
if (jointIndex == -1 || jointIndex >= _jointStates.size()) {
return glm::mat4();
}
return maybeCauterizeHead(jointIndex).getVisibleTransform();
return _jointStates[jointIndex].getVisibleTransform();
}
void Rig::computeMotionAnimationState(float deltaTime, const glm::vec3& worldPosition, const glm::vec3& worldVelocity, const glm::quat& worldRotation) {
if (_enableRig) {
glm::vec3 front = worldRotation * IDENTITY_FRONT;
float forwardSpeed = glm::dot(worldVelocity, front);
float rotationalSpeed = glm::angle(front, _lastFront) / deltaTime;
bool isWalking = std::abs(forwardSpeed) > 0.01;
bool isTurning = std::abs(rotationalSpeed) > 0.5;
bool isWalking = std::abs(forwardSpeed) > 0.01f;
bool isTurning = std::abs(rotationalSpeed) > 0.5f;
// Crude, until we have blending:
isTurning = isTurning && !isWalking; // Only one of walk/turn, walk wins.
@ -367,7 +380,7 @@ void Rig::computeMotionAnimationState(float deltaTime, const glm::vec3& worldPos
startAnimationByRole(newRole);
qCDebug(animation) << deltaTime << ":" << worldVelocity << "." << front << "=> " << forwardSpeed << newRole;
}
_lastPosition = worldPosition;
_lastFront = front;
_isWalking = isWalking;
@ -377,7 +390,10 @@ void Rig::computeMotionAnimationState(float deltaTime, const glm::vec3& worldPos
}
void Rig::updateAnimations(float deltaTime, glm::mat4 parentTransform) {
int nAnimationsSoFar = 0;
foreach (const AnimationHandlePointer& handle, _runningAnimations) {
handle->setMix(1.0f / ++nAnimationsSoFar);
handle->setPriority(1.0);
handle->simulate(deltaTime);
}
@ -624,16 +640,16 @@ glm::vec3 Rig::getJointDefaultTranslationInConstrainedFrame(int jointIndex) {
if (jointIndex == -1 || _jointStates.isEmpty()) {
return glm::vec3();
}
return maybeCauterizeHead(jointIndex).getDefaultTranslationInConstrainedFrame();
return _jointStates[jointIndex].getDefaultTranslationInConstrainedFrame();
}
glm::quat Rig::setJointRotationInConstrainedFrame(int jointIndex, glm::quat targetRotation, float priority, bool constrain) {
glm::quat Rig::setJointRotationInConstrainedFrame(int jointIndex, glm::quat targetRotation, float priority, bool constrain, float mix) {
glm::quat endRotation;
if (jointIndex == -1 || _jointStates.isEmpty()) {
return endRotation;
}
JointState& state = _jointStates[jointIndex];
state.setRotationInConstrainedFrame(targetRotation, priority, constrain);
state.setRotationInConstrainedFrame(targetRotation, priority, constrain, mix);
endRotation = state.getRotationInConstrainedFrame();
return endRotation;
}
@ -669,53 +685,70 @@ glm::quat Rig::getJointDefaultRotationInParentFrame(int jointIndex) {
if (jointIndex == -1 || _jointStates.isEmpty()) {
return glm::quat();
}
return maybeCauterizeHead(jointIndex).getDefaultRotationInParentFrame();
return _jointStates[jointIndex].getDefaultRotationInParentFrame();
}
void Rig::initHeadBones() {
if (_neckJointIndex == -1) {
return;
}
_headBones.clear();
std::queue<int> q;
q.push(_neckJointIndex);
_headBones.push_back(_neckJointIndex);
void Rig::updateFromHeadParameters(const HeadParameters& params) {
updateLeanJoint(params.leanJointIndex, params.leanSideways, params.leanForward, params.torsoTwist);
updateNeckJoint(params.neckJointIndex, params.localHeadOrientation, params.leanSideways, params.leanForward, params.torsoTwist);
updateEyeJoint(params.leftEyeJointIndex, params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
updateEyeJoint(params.rightEyeJointIndex, params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
}
// fbxJoints only hold links to parents not children, so we have to do a bit of extra work here.
while (q.size() > 0) {
int jointIndex = q.front();
for (int i = 0; i < _jointStates.size(); i++) {
const FBXJoint& fbxJoint = _jointStates[i].getFBXJoint();
if (jointIndex == fbxJoint.parentIndex) {
_headBones.push_back(i);
q.push(i);
}
}
q.pop();
void Rig::updateLeanJoint(int index, float leanSideways, float leanForward, float torsoTwist) {
if (index >= 0 && _jointStates[index].getParentIndex() >= 0) {
auto& parentState = _jointStates[_jointStates[index].getParentIndex()];
// get the rotation axes in joint space and use them to adjust the rotation
glm::vec3 xAxis(1.0f, 0.0f, 0.0f);
glm::vec3 yAxis(0.0f, 1.0f, 0.0f);
glm::vec3 zAxis(0.0f, 0.0f, 1.0f);
glm::quat inverse = glm::inverse(parentState.getRotation() * getJointDefaultRotationInParentFrame(index));
setJointRotationInConstrainedFrame(index,
glm::angleAxis(- RADIANS_PER_DEGREE * leanSideways, inverse * zAxis) *
glm::angleAxis(- RADIANS_PER_DEGREE * leanForward, inverse * xAxis) *
glm::angleAxis(RADIANS_PER_DEGREE * torsoTwist, inverse * yAxis) *
getJointState(index).getFBXJoint().rotation, DEFAULT_PRIORITY);
}
}
JointState Rig::maybeCauterizeHead(int jointIndex) const {
// if (_headBones.contains(jointIndex)) {
// XXX fix this... make _headBones a hash? add a flag to JointState?
if (_neckJointIndex != -1 &&
_isFirstPerson &&
std::find(_headBones.begin(), _headBones.end(), jointIndex) != _headBones.end()) {
glm::vec4 trans = _jointStates[jointIndex].getTransform()[3];
glm::vec4 zero(0, 0, 0, 0);
glm::mat4 newXform(zero, zero, zero, trans);
JointState jointStateCopy = _jointStates[jointIndex];
jointStateCopy.setTransform(newXform);
jointStateCopy.setVisibleTransform(newXform);
return jointStateCopy;
} else {
return _jointStates[jointIndex];
void Rig::updateNeckJoint(int index, const glm::quat& localHeadOrientation, float leanSideways, float leanForward, float torsoTwist) {
if (index >= 0 && _jointStates[index].getParentIndex() >= 0) {
auto& parentState = _jointStates[_jointStates[index].getParentIndex()];
auto joint = _jointStates[index].getFBXJoint();
// get the rotation axes in joint space and use them to adjust the rotation
glm::mat3 axes = glm::mat3_cast(glm::quat());
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.getTransform() *
glm::translate(getJointDefaultTranslationInConstrainedFrame(index)) *
joint.preTransform * glm::mat4_cast(joint.preRotation)));
glm::vec3 pitchYawRoll = safeEulerAngles(localHeadOrientation);
glm::vec3 lean = glm::radians(glm::vec3(leanForward, torsoTwist, leanSideways));
pitchYawRoll -= lean;
setJointRotationInConstrainedFrame(index,
glm::angleAxis(-pitchYawRoll.z, glm::normalize(inverse * axes[2])) *
glm::angleAxis(pitchYawRoll.y, glm::normalize(inverse * axes[1])) *
glm::angleAxis(-pitchYawRoll.x, glm::normalize(inverse * axes[0])) *
joint.rotation, DEFAULT_PRIORITY);
}
}
void Rig::setFirstPerson(bool isFirstPerson) {
if (_isFirstPerson != isFirstPerson) {
_isFirstPerson = isFirstPerson;
_jointsAreDirty = true;
void Rig::updateEyeJoint(int index, const glm::quat& worldHeadOrientation, const glm::vec3& lookAt, const glm::vec3& saccade) {
if (index >= 0 && _jointStates[index].getParentIndex() >= 0) {
auto& parentState = _jointStates[_jointStates[index].getParentIndex()];
auto joint = _jointStates[index].getFBXJoint();
// NOTE: at the moment we do the math in the world-frame, hence the inverse transform is more complex than usual.
glm::mat4 inverse = glm::inverse(parentState.getTransform() *
glm::translate(getJointDefaultTranslationInConstrainedFrame(index)) *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation));
glm::vec3 front = glm::vec3(inverse * glm::vec4(worldHeadOrientation * IDENTITY_FRONT, 0.0f));
glm::vec3 lookAtDelta = lookAt;
glm::vec3 lookAt = glm::vec3(inverse * glm::vec4(lookAtDelta + glm::length(lookAtDelta) * saccade, 1.0f));
glm::quat between = rotationBetween(front, lookAt);
const float MAX_ANGLE = 30.0f * RADIANS_PER_DEGREE;
float angle = glm::clamp(glm::angle(between), -MAX_ANGLE, MAX_ANGLE);
glm::quat rot = glm::angleAxis(angle, glm::axis(between));
setJointRotationInConstrainedFrame(index, rot * joint.rotation, DEFAULT_PRIORITY);
}
}
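Aside (sketch under stated assumptions): updateEyeJoint() above limits how far an eye may rotate away from straight ahead. The core of that clamp in isolation, with glm::rotation() standing in for rotationBetween() and placeholder argument names:
#include <algorithm>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/quaternion.hpp>
glm::quat clampedLookAtRotation(const glm::vec3& front, const glm::vec3& lookAt, float maxAngle) {
    glm::quat between = glm::rotation(glm::normalize(front), glm::normalize(lookAt));
    // glm::angle() is never negative, so only the upper bound can actually clip
    float angle = std::min(glm::angle(between), maxAngle);
    return glm::angleAxis(angle, glm::axis(between));
}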

View file

@ -47,11 +47,26 @@ typedef std::shared_ptr<AnimationHandle> AnimationHandlePointer;
class Rig;
typedef std::shared_ptr<Rig> RigPointer;
class Rig : public QObject, public std::enable_shared_from_this<Rig> {
public:
struct HeadParameters {
float leanSideways = 0.0f; // degrees
float leanForward = 0.0f; // degrees
float torsoTwist = 0.0f; // degrees
glm::quat localHeadOrientation = glm::quat();
glm::quat worldHeadOrientation = glm::quat();
glm::vec3 eyeLookAt = glm::vec3(); // world space
glm::vec3 eyeSaccade = glm::vec3(); // world space
int leanJointIndex = -1;
int neckJointIndex = -1;
int leftEyeJointIndex = -1;
int rightEyeJointIndex = -1;
void dump() const;
};
virtual ~Rig() {}
RigPointer getRigPointer() { return shared_from_this(); }
@ -75,7 +90,7 @@ public:
float priority = 1.0f, bool loop = false, bool hold = false, float firstFrame = 0.0f,
float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList(), bool startAutomatically = false);
float initJointStates(QVector<JointState> states, glm::mat4 parentTransform, int neckJointIndex);
float initJointStates(QVector<JointState> states, glm::mat4 parentTransform);
bool jointStatesEmpty() { return _jointStates.isEmpty(); };
int getJointStateCount() const { return _jointStates.size(); }
int indexOfJoint(const QString& jointName) ;
@ -124,32 +139,27 @@ public:
glm::quat setJointRotationInBindFrame(int jointIndex, const glm::quat& rotation, float priority, bool constrain = false);
glm::vec3 getJointDefaultTranslationInConstrainedFrame(int jointIndex);
glm::quat setJointRotationInConstrainedFrame(int jointIndex, glm::quat targetRotation,
float priority, bool constrain = false);
float priority, bool constrain = false, float mix = 1.0f);
glm::quat getJointDefaultRotationInParentFrame(int jointIndex);
void updateVisibleJointStates();
virtual void updateJointState(int index, glm::mat4 parentTransform) = 0;
virtual void updateFaceJointState(int index, glm::mat4 parentTransform) = 0;
virtual void setFirstPerson(bool isFirstPerson);
virtual bool getIsFirstPerson() const { return _isFirstPerson; }
bool getJointsAreDirty() { return _jointsAreDirty; }
void setEnableRig(bool isEnabled) { _enableRig = isEnabled; }
void updateFromHeadParameters(const HeadParameters& params);
protected:
void updateLeanJoint(int index, float leanSideways, float leanForward, float torsoTwist);
void updateNeckJoint(int index, const glm::quat& localHeadOrientation, float leanSideways, float leanForward, float torsoTwist);
void updateEyeJoint(int index, const glm::quat& worldHeadOrientation, const glm::vec3& lookAt, const glm::vec3& saccade);
QVector<JointState> _jointStates;
QList<AnimationHandlePointer> _animationHandles;
QList<AnimationHandlePointer> _runningAnimations;
JointState maybeCauterizeHead(int jointIndex) const;
void initHeadBones();
bool _isFirstPerson = false;
std::vector<int> _headBones;
bool _jointsAreDirty = false;
int _neckJointIndex = -1;
bool _enableRig;
bool _isWalking;
bool _isTurning;
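
For reference, a hypothetical caller-side sketch of the new head-parameter path (not part of this commit; the joint indices and orientations are assumed to come from the loaded skeleton and from head tracking):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include "Rig.h"

// Hypothetical helper: fills the new Rig::HeadParameters struct and hands it to the rig.
void updateHeadFromTracking(RigPointer rig,
                            const glm::quat& localHeadRotation, const glm::quat& worldHeadRotation,
                            const glm::vec3& lookAtPosition, const glm::vec3& saccade,
                            int leanJoint, int neckJoint, int leftEyeJoint, int rightEyeJoint) {
    Rig::HeadParameters params;
    params.localHeadOrientation = localHeadRotation;
    params.worldHeadOrientation = worldHeadRotation;
    params.eyeLookAt = lookAtPosition;   // world space
    params.eyeSaccade = saccade;         // world space
    params.leanJointIndex = leanJoint;
    params.neckJointIndex = neckJoint;
    params.leftEyeJointIndex = leftEyeJoint;
    params.rightEyeJointIndex = rightEyeJoint;
    rig->updateFromHeadParameters(params);
}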

View file

@ -35,7 +35,7 @@
#include <PolyVoxCore/Material.h>
#include "model/Geometry.h"
#include "gpu/GLBackend.h"
#include "gpu/Context.h"
#include "EntityTreeRenderer.h"
#include "RenderablePolyVoxEntityItem.h"

View file

@ -8,7 +8,6 @@
#include "RenderableWebEntityItem.h"
#include <gpu/GPUConfig.h>
#include <QMouseEvent>
#include <QQuickItem>
#include <QQuickWindow>
@ -24,7 +23,7 @@
#include <GLMHelpers.h>
#include <PathUtils.h>
#include <TextureCache.h>
#include <gpu/GLBackend.h>
#include <gpu/Context.h>
#include "EntityTreeRenderer.h"
@ -178,8 +177,7 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
batch.setModelTransform(getTransformToCenter());
bool textured = false, culled = false, emissive = false;
if (_texture) {
batch._glActiveTexture(GL_TEXTURE0);
batch._glBindTexture(GL_TEXTURE_2D, _texture);
batch._glActiveBindTexture(GL_TEXTURE0, GL_TEXTURE_2D, _texture);
textured = emissive = true;
}

View file

@ -155,9 +155,9 @@ void ParticleEffectEntityItem::computeAndUpdateDimensions() {
float yMin = std::min(yApex, yEnd);
// times 2 because dimensions are diameters not radii.
glm::vec3 dims(2.0f * std::max(fabs(xMin), fabs(xMax)),
2.0f * std::max(fabs(yMin), fabs(yMax)),
2.0f * std::max(fabs(zMin), fabs(zMax)));
glm::vec3 dims(2.0f * std::max(fabsf(xMin), fabsf(xMax)),
2.0f * std::max(fabsf(yMin), fabsf(yMax)),
2.0f * std::max(fabsf(zMin), fabsf(zMax)));
EntityItem::setDimensions(dims);
}

View file

@ -17,6 +17,7 @@
#include <QTextStream>
#include <QtDebug>
#include <QtEndian>
#include <QFileInfo>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtx/quaternion.hpp>
@ -1452,9 +1453,21 @@ void buildModelMesh(ExtractedMesh& extracted) {
}
#endif // USE_MODEL_MESH
QByteArray fileOnUrl(const QByteArray& filenameString, const QString& url) {
QString path = QFileInfo(url).path();
QByteArray filename = filenameString;
QFileInfo checkFile(path + "/" + filename.replace('\\', '/'));
// check if the file exists at the RelativeFilename path
if (checkFile.exists() && checkFile.isFile()) {
filename = filename.replace('\\', '/');
} else {
// the texture was not found at the path relative to the FBX file, so strip the path and assume it sits directly in the FBX directory.
filename = filename.mid(qMax(filename.lastIndexOf('\\'), filename.lastIndexOf('/')) + 1);
}
return filename;
}
FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping, bool loadLightmaps, float lightmapLevel) {
FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
QHash<QString, ExtractedMesh> meshes;
QHash<QString, QString> modelIDsToNames;
QHash<QString, int> meshIDsToMeshIndices;
@ -1779,9 +1792,8 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
TextureParam tex;
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == "RelativeFilename") {
// trim off any path information
QByteArray filename = subobject.properties.at(0).toByteArray();
filename = filename.mid(qMax(filename.lastIndexOf('\\'), filename.lastIndexOf('/')) + 1);
filename = fileOnUrl(filename, url);
textureFilenames.insert(getID(object.properties), filename);
} else if (subobject.name == "TextureName") {
// trim the name from the timestamp
@ -1855,7 +1867,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
foreach (const FBXNode& subobject, object.children) {
if (subobject.name == "RelativeFilename") {
filename = subobject.properties.at(0).toByteArray();
filename = filename.mid(qMax(filename.lastIndexOf('\\'), filename.lastIndexOf('/')) + 1);
filename = fileOnUrl(filename, url);
} else if (subobject.name == "Content" && !subobject.properties.isEmpty()) {
content = subobject.properties.at(0).toByteArray();
@ -2686,12 +2698,12 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping,
return geometry;
}
FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping, bool loadLightmaps, float lightmapLevel) {
FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
QBuffer buffer(const_cast<QByteArray*>(&model));
buffer.open(QIODevice::ReadOnly);
return readFBX(&buffer, mapping, loadLightmaps, lightmapLevel);
return readFBX(&buffer, mapping, url, loadLightmaps, lightmapLevel);
}
FBXGeometry readFBX(QIODevice* device, const QVariantHash& mapping, bool loadLightmaps, float lightmapLevel) {
return extractFBXGeometry(parseFBX(device), mapping, loadLightmaps, lightmapLevel);
FBXGeometry readFBX(QIODevice* device, const QVariantHash& mapping, const QString& url, bool loadLightmaps, float lightmapLevel) {
return extractFBXGeometry(parseFBX(device), mapping, url, loadLightmaps, lightmapLevel);
}

View file

@ -267,10 +267,10 @@ Q_DECLARE_METATYPE(FBXGeometry)
/// Reads FBX geometry from the supplied model and mapping data.
/// \exception QString if an error occurs in parsing
FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping, bool loadLightmaps = true, float lightmapLevel = 1.0f);
FBXGeometry readFBX(const QByteArray& model, const QVariantHash& mapping, const QString& url = "", bool loadLightmaps = true, float lightmapLevel = 1.0f);
/// Reads FBX geometry from the supplied model and mapping data.
/// \exception QString if an error occurs in parsing
FBXGeometry readFBX(QIODevice* device, const QVariantHash& mapping, bool loadLightmaps = true, float lightmapLevel = 1.0f);
FBXGeometry readFBX(QIODevice* device, const QVariantHash& mapping, const QString& url = "", bool loadLightmaps = true, float lightmapLevel = 1.0f);
#endif // hifi_FBXReader_h
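
A brief usage sketch of the new url parameter (hypothetical call site, not from this commit): passing the model's URL path lets RelativeFilename textures resolve next to the FBX instead of always being stripped to a bare filename.

#include <QByteArray>
#include <QUrl>
#include <QVariant>
#include "FBXReader.h"

// Hypothetical loader: forwards the source URL so fileOnUrl() can probe paths
// relative to the FBX location before falling back to the bare filename.
FBXGeometry loadGeometryFromData(const QByteArray& data, const QVariantHash& mapping, const QUrl& sourceUrl) {
    return readFBX(data, mapping, sourceUrl.path());
}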

View file

@ -305,7 +305,8 @@ bool OBJReader::parseOBJGroup(OBJTokenizer& tokenizer, const QVariantHash& mappi
}
QByteArray token = tokenizer.getDatum();
//qCDebug(modelformat) << token;
if (token == "g") {
// we don't support separate objects in the same file, so treat "o" the same as "g".
if (token == "g" || token == "o") {
if (sawG) {
// we've encountered the beginning of the next group.
tokenizer.pushBackToken(OBJTokenizer::DATUM_TOKEN);

View file

@ -21,23 +21,26 @@ elseif (WIN32)
if (USE_NSIGHT)
# try to find the Nsight package and add it to the build if we find it
# note that this will also enable NSIGHT profilers in all the projects linking gpu
find_package(NSIGHT)
if (NSIGHT_FOUND)
include_directories(${NSIGHT_INCLUDE_DIRS})
add_definitions(-DNSIGHT_FOUND)
target_include_directories(${TARGET_NAME} PUBLIC ${NSIGHT_INCLUDE_DIRS})
target_compile_definitions(${TARGET_NAME} PUBLIC NSIGHT_FOUND)
target_link_libraries(${TARGET_NAME} "${NSIGHT_LIBRARIES}")
endif ()
endif()
elseif (ANDROID)
target_link_libraries(${TARGET_NAME} "-lGLESv3" "-lEGL")
else ()
find_package(GLEW REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${GLEW_INCLUDE_DIRS})
find_package(OpenGL REQUIRED)
if (${OPENGL_INCLUDE_DIR})
include_directories(SYSTEM "${OPENGL_INCLUDE_DIR}")
endif ()
target_link_libraries(${TARGET_NAME} "${OPENGL_LIBRARY}")
target_include_directories(${TARGET_NAME} PUBLIC ${OPENGL_INCLUDE_DIR})
target_link_libraries(${TARGET_NAME} "${GLEW_LIBRARIES}" "${OPENGL_LIBRARY}")
endif (APPLE)

View file

@ -8,13 +8,9 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <string.h>
#include "Batch.h"
#include "GPUConfig.h"
#include <QDebug>
#include <GLMHelpers.h>
#if defined(NSIGHT_FOUND)
#include "nvToolsExt.h"
@ -288,15 +284,7 @@ void Batch::getQuery(const QueryPointer& query) {
_params.push_back(_queries.cache(query));
}
void push_back(Batch::Params& params, const vec3& v) {
params.push_back(v.x);
params.push_back(v.y);
params.push_back(v.z);
void Batch::resetStages() {
ADD_COMMAND(resetStages);
}
void push_back(Batch::Params& params, const vec4& v) {
params.push_back(v.x);
params.push_back(v.y);
params.push_back(v.z);
params.push_back(v.a);
}

View file

@ -94,7 +94,7 @@ public:
void setResourceTexture(uint32 slot, const TexturePointer& view);
void setResourceTexture(uint32 slot, const TextureView& view); // not a command, just a shortcut from a TextureView
// Framebuffer Stage
// Output Stage
void setFramebuffer(const FramebufferPointer& framebuffer);
// Clear framebuffer layers
@ -113,34 +113,17 @@ public:
void endQuery(const QueryPointer& query);
void getQuery(const QueryPointer& query);
// Reset the stage caches and states
void resetStages();
// TODO: As long as we have gl calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API.
// For now, instead of calling the raw gl call, use the equivalent call on the batch so the call is being recorded.
// The implementation of these functions is in GLBackend.cpp.
void _glEnable(unsigned int cap);
void _glDisable(unsigned int cap);
void _glActiveBindTexture(unsigned int unit, unsigned int target, unsigned int texture);
void _glEnableClientState(unsigned int array);
void _glDisableClientState(unsigned int array);
void _glCullFace(unsigned int mode);
void _glAlphaFunc(unsigned int func, float ref);
void _glDepthFunc(unsigned int func);
void _glDepthMask(unsigned char flag);
void _glDepthRange(float zNear, float zFar);
void _glBindBuffer(unsigned int target, unsigned int buffer);
void _glBindTexture(unsigned int target, unsigned int texture);
void _glActiveTexture(unsigned int texture);
void _glTexParameteri(unsigned int target, unsigned int pname, int param);
void _glDrawBuffers(int n, const unsigned int* bufs);
void _glUseProgram(unsigned int program);
void _glUniform1i(int location, int v0);
void _glUniform1f(int location, float v0);
void _glUniform2f(int location, float v0, float v1);
@ -150,9 +133,6 @@ public:
void _glUniform4iv(int location, int count, const int* value);
void _glUniformMatrix4fv(int location, int count, unsigned char transpose, const float* value);
void _glEnableVertexAttribArray(int location);
void _glDisableVertexAttribArray(int location);
void _glColor4f(float red, float green, float blue, float alpha);
void _glLineWidth(float width);
@ -186,31 +166,13 @@ public:
COMMAND_endQuery,
COMMAND_getQuery,
COMMAND_resetStages,
// TODO: As long as we have gl calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
COMMAND_glEnable,
COMMAND_glDisable,
COMMAND_glActiveBindTexture,
COMMAND_glEnableClientState,
COMMAND_glDisableClientState,
COMMAND_glCullFace,
COMMAND_glAlphaFunc,
COMMAND_glDepthFunc,
COMMAND_glDepthMask,
COMMAND_glDepthRange,
COMMAND_glBindBuffer,
COMMAND_glBindTexture,
COMMAND_glActiveTexture,
COMMAND_glTexParameteri,
COMMAND_glDrawBuffers,
COMMAND_glUseProgram,
COMMAND_glUniform1i,
COMMAND_glUniform1f,
COMMAND_glUniform2f,
@ -220,9 +182,6 @@ public:
COMMAND_glUniform4iv,
COMMAND_glUniformMatrix4fv,
COMMAND_glEnableVertexAttribArray,
COMMAND_glDisableVertexAttribArray,
COMMAND_glColor4f,
COMMAND_glLineWidth,
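
For orientation, a hedged recording-side sketch (hypothetical code; assumes a raw GL texture id obtained elsewhere): the former _glActiveTexture/_glBindTexture pair is now captured as one _glActiveBindTexture command, and resetStages() drops the backend's cached bindings when the batch executes.

#include <gpu/GPUConfig.h>   // brings in the GL_* enums
#include <gpu/Batch.h>

// Hypothetical example: record a raw texture bind through the batch, draw, then reset.
void recordLegacyTextureBind(gpu::Batch& batch, unsigned int glTextureId) {
    batch._glActiveBindTexture(GL_TEXTURE0, GL_TEXTURE_2D, glTextureId);
    // ... record draw commands here ...
    batch.resetStages();   // return all stage caches and state to their defaults
}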

View file

@ -21,10 +21,9 @@
<@def VERSION_HEADER #version 120
#extension GL_EXT_gpu_shader4 : enable@>
<@else@>
<@def GPU_FEATURE_PROFILE GPU_LEGACY@>
<@def GPU_TRANSFORM_PROFILE GPU_LEGACY@>
<@def VERSION_HEADER #version 120
#extension GL_EXT_gpu_shader4 : enable@>
<@def GPU_FEATURE_PROFILE GPU_CORE@>
<@def GPU_TRANSFORM_PROFILE GPU_CORE@>
<@def VERSION_HEADER #version 430 compatibility@>
<@endif@>
<@endif@>

View file

@ -10,13 +10,16 @@
//
#include "Context.h"
// this include should disappear as soon as the gpu::Context is in place
#include "GLBackend.h"
using namespace gpu;
Context::Context(Backend* backend) :
_backend(backend) {
Context::CreateBackend Context::_createBackendCallback = nullptr;
Context::MakeProgram Context::_makeProgramCallback = nullptr;
std::once_flag Context::_initialized;
Context::Context() {
if (_createBackendCallback) {
_backend.reset(_createBackendCallback());
}
}
Context::Context(const Context& context) {
@ -26,8 +29,8 @@ Context::~Context() {
}
bool Context::makeProgram(Shader& shader, const Shader::BindingSet& bindings) {
if (shader.isProgram()) {
return GLBackend::makeProgram(shader, bindings);
if (shader.isProgram() && _makeProgramCallback) {
return _makeProgramCallback(shader, bindings);
}
return false;
}
@ -45,3 +48,4 @@ void Context::syncCache() {
void Context::downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage) {
_backend->downloadFramebuffer(srcFramebuffer, region, destImage);
}

View file

@ -12,6 +12,7 @@
#define hifi_gpu_Context_h
#include <assert.h>
#include <mutex>
#include "Batch.h"
@ -26,13 +27,12 @@ namespace gpu {
class Backend {
public:
virtual~ Backend() {};
virtual void render(Batch& batch) = 0;
virtual void syncCache() = 0;
virtual void downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage) = 0;
class TransformObject {
public:
Mat4 _model;
@ -118,7 +118,21 @@ protected:
class Context {
public:
Context(Backend* backend);
typedef Backend* (*CreateBackend)();
typedef bool (*MakeProgram)(Shader& shader, const Shader::BindingSet& bindings);
// This one call must happen before any context is created or used (Shader::MakeProgram) in order to set up the Backend and any singleton data needed
template <class T>
static void init() {
std::call_once(_initialized, [] {
_createBackendCallback = T::createBackend;
_makeProgramCallback = T::makeProgram;
T::init();
});
}
Context();
~Context();
void render(Batch& batch);
@ -132,13 +146,17 @@ public:
protected:
Context(const Context& context);
std::unique_ptr<Backend> _backend;
// This function can only be called by "static Shader::makeProgram()"
// makeProgram(...) makes a program shader ready to be used in a Batch:
// it compiles the sub shaders, links them and defines the Slots and their bindings.
// If the shader passed is not a program, nothing happens.
static bool makeProgram(Shader& shader, const Shader::BindingSet& bindings = Shader::BindingSet());
static bool makeProgram(Shader& shader, const Shader::BindingSet& bindings);
std::unique_ptr<Backend> _backend;
static CreateBackend _createBackendCallback;
static MakeProgram _makeProgramCallback;
static std::once_flag _initialized;
friend class Shader;
};
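
A minimal start-up sketch under the new registration scheme (hypothetical application code; assumes the GL context already exists, as before):

#include <memory>
#include <gpu/Context.h>
#include <gpu/GLBackend.h>

void setupGpuContext() {
    // Register GLBackend::createBackend / makeProgram / init exactly once.
    gpu::Context::init<gpu::GLBackend>();
    // Every Context constructed afterwards instantiates the registered backend.
    auto gpuContext = std::make_shared<gpu::Context>();
    gpu::Batch batch;
    // ... record commands ...
    gpuContext->render(batch);
}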

View file

@ -8,9 +8,9 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <mutex>
#include "GPULogging.h"
#include "GLBackendShared.h"
#include <mutex>
#include <glm/gtc/type_ptr.hpp>
using namespace gpu;
@ -46,28 +46,10 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::GLBackend::do_endQuery),
(&::gpu::GLBackend::do_getQuery),
(&::gpu::GLBackend::do_glEnable),
(&::gpu::GLBackend::do_glDisable),
(&::gpu::GLBackend::do_resetStages),
(&::gpu::GLBackend::do_glEnableClientState),
(&::gpu::GLBackend::do_glDisableClientState),
(&::gpu::GLBackend::do_glActiveBindTexture),
(&::gpu::GLBackend::do_glCullFace),
(&::gpu::GLBackend::do_glAlphaFunc),
(&::gpu::GLBackend::do_glDepthFunc),
(&::gpu::GLBackend::do_glDepthMask),
(&::gpu::GLBackend::do_glDepthRange),
(&::gpu::GLBackend::do_glBindBuffer),
(&::gpu::GLBackend::do_glBindTexture),
(&::gpu::GLBackend::do_glActiveTexture),
(&::gpu::GLBackend::do_glTexParameteri),
(&::gpu::GLBackend::do_glDrawBuffers),
(&::gpu::GLBackend::do_glUseProgram),
(&::gpu::GLBackend::do_glUniform1i),
(&::gpu::GLBackend::do_glUniform1f),
(&::gpu::GLBackend::do_glUniform2f),
@ -77,19 +59,11 @@ GLBackend::CommandCall GLBackend::_commandCalls[Batch::NUM_COMMANDS] =
(&::gpu::GLBackend::do_glUniform4iv),
(&::gpu::GLBackend::do_glUniformMatrix4fv),
(&::gpu::GLBackend::do_glEnableVertexAttribArray),
(&::gpu::GLBackend::do_glDisableVertexAttribArray),
(&::gpu::GLBackend::do_glColor4f),
(&::gpu::GLBackend::do_glLineWidth),
};
GLBackend::GLBackend() :
_input(),
_transform(),
_pipeline(),
_output()
{
void GLBackend::init() {
static std::once_flag once;
std::call_once(once, [] {
qCDebug(gpulogging) << "GL Version: " << QString((const char*) glGetString(GL_VERSION));
@ -115,6 +89,13 @@ GLBackend::GLBackend() :
#endif
#if defined(Q_OS_LINUX)
GLenum err = glewInit();
if (GLEW_OK != err) {
/* Problem: glewInit failed, something is seriously wrong. */
qCDebug(gpulogging, "Error: %s\n", glewGetErrorString(err));
}
qCDebug(gpulogging, "Status: Using GLEW %s\n", glewGetString(GLEW_VERSION));
// TODO: Write the correct code for Linux...
/* if (wglewGetExtension("WGL_EXT_swap_control")) {
int swapInterval = wglGetSwapIntervalEXT();
@ -122,12 +103,25 @@ GLBackend::GLBackend() :
}*/
#endif
});
}
Backend* GLBackend::createBackend() {
return new GLBackend();
}
GLBackend::GLBackend() :
_input(),
_transform(),
_pipeline(),
_output()
{
initInput();
initTransform();
}
GLBackend::~GLBackend() {
resetStages();
killInput();
killTransform();
}
@ -246,6 +240,22 @@ void GLBackend::do_drawIndexedInstanced(Batch& batch, uint32 paramOffset) {
(void) CHECK_GL_ERROR();
}
void GLBackend::do_resetStages(Batch& batch, uint32 paramOffset) {
resetStages();
}
void GLBackend::resetStages() {
resetInputStage();
resetPipelineStage();
resetTransformStage();
resetUniformStage();
resetResourceStage();
resetOutputStage();
resetQueryStage();
(void) CHECK_GL_ERROR();
}
// TODO: As long as we have gl calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
@ -255,211 +265,24 @@ void GLBackend::do_drawIndexedInstanced(Batch& batch, uint32 paramOffset) {
//#define DO_IT_NOW(call, offset) runLastCommand();
#define DO_IT_NOW(call, offset)
void Batch::_glEnable(GLenum cap) {
ADD_COMMAND_GL(glEnable);
_params.push_back(cap);
DO_IT_NOW(_glEnable, 1);
}
void GLBackend::do_glEnable(Batch& batch, uint32 paramOffset) {
glEnable(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDisable(GLenum cap) {
ADD_COMMAND_GL(glDisable);
_params.push_back(cap);
DO_IT_NOW(_glDisable, 1);
}
void GLBackend::do_glDisable(Batch& batch, uint32 paramOffset) {
glDisable(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glEnableClientState(GLenum array) {
ADD_COMMAND_GL(glEnableClientState);
_params.push_back(array);
DO_IT_NOW(_glEnableClientState, 1);
}
void GLBackend::do_glEnableClientState(Batch& batch, uint32 paramOffset) {
glEnableClientState(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDisableClientState(GLenum array) {
ADD_COMMAND_GL(glDisableClientState);
_params.push_back(array);
DO_IT_NOW(_glDisableClientState, 1);
}
void GLBackend::do_glDisableClientState(Batch& batch, uint32 paramOffset) {
glDisableClientState(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glCullFace(GLenum mode) {
ADD_COMMAND_GL(glCullFace);
_params.push_back(mode);
DO_IT_NOW(_glCullFace, 1);
}
void GLBackend::do_glCullFace(Batch& batch, uint32 paramOffset) {
glCullFace(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glAlphaFunc(GLenum func, GLclampf ref) {
ADD_COMMAND_GL(glAlphaFunc);
_params.push_back(ref);
_params.push_back(func);
DO_IT_NOW(_glAlphaFunc, 2);
}
void GLBackend::do_glAlphaFunc(Batch& batch, uint32 paramOffset) {
glAlphaFunc(
batch._params[paramOffset + 1]._uint,
batch._params[paramOffset + 0]._float);
(void) CHECK_GL_ERROR();
}
void Batch::_glDepthFunc(GLenum func) {
ADD_COMMAND_GL(glDepthFunc);
_params.push_back(func);
DO_IT_NOW(_glDepthFunc, 1);
}
void GLBackend::do_glDepthFunc(Batch& batch, uint32 paramOffset) {
glDepthFunc(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDepthMask(GLboolean flag) {
ADD_COMMAND_GL(glDepthMask);
_params.push_back(flag);
DO_IT_NOW(_glDepthMask, 1);
}
void GLBackend::do_glDepthMask(Batch& batch, uint32 paramOffset) {
glDepthMask(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDepthRange(GLfloat zNear, GLfloat zFar) {
ADD_COMMAND_GL(glDepthRange);
_params.push_back(zFar);
_params.push_back(zNear);
DO_IT_NOW(_glDepthRange, 2);
}
void GLBackend::do_glDepthRange(Batch& batch, uint32 paramOffset) {
glDepthRange(
batch._params[paramOffset + 1]._float,
batch._params[paramOffset + 0]._float);
(void) CHECK_GL_ERROR();
}
void Batch::_glBindBuffer(GLenum target, GLuint buffer) {
ADD_COMMAND_GL(glBindBuffer);
_params.push_back(buffer);
_params.push_back(target);
DO_IT_NOW(_glBindBuffer, 2);
}
void GLBackend::do_glBindBuffer(Batch& batch, uint32 paramOffset) {
glBindBuffer(
batch._params[paramOffset + 1]._uint,
batch._params[paramOffset + 0]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glBindTexture(GLenum target, GLuint texture) {
ADD_COMMAND_GL(glBindTexture);
void Batch::_glActiveBindTexture(GLenum unit, GLenum target, GLuint texture) {
ADD_COMMAND_GL(glActiveBindTexture);
_params.push_back(texture);
_params.push_back(target);
_params.push_back(unit);
DO_IT_NOW(_glBindTexture, 2);
DO_IT_NOW(_glActiveBindTexture, 3);
}
void GLBackend::do_glBindTexture(Batch& batch, uint32 paramOffset) {
void GLBackend::do_glActiveBindTexture(Batch& batch, uint32 paramOffset) {
glActiveTexture(batch._params[paramOffset + 2]._uint);
glBindTexture(
batch._params[paramOffset + 1]._uint,
batch._params[paramOffset + 0]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glActiveTexture(GLenum texture) {
ADD_COMMAND_GL(glActiveTexture);
_params.push_back(texture);
DO_IT_NOW(_glActiveTexture, 1);
}
void GLBackend::do_glActiveTexture(Batch& batch, uint32 paramOffset) {
glActiveTexture(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glTexParameteri(GLenum target, GLenum pname, GLint param) {
ADD_COMMAND_GL(glTexParameteri);
_params.push_back(param);
_params.push_back(pname);
_params.push_back(target);
DO_IT_NOW(glTexParameteri, 3);
}
void GLBackend::do_glTexParameteri(Batch& batch, uint32 paramOffset) {
glTexParameteri(batch._params[paramOffset + 2]._uint,
batch._params[paramOffset + 1]._uint,
batch._params[paramOffset + 0]._int);
(void) CHECK_GL_ERROR();
}
void Batch::_glDrawBuffers(GLsizei n, const GLenum* bufs) {
ADD_COMMAND_GL(glDrawBuffers);
_params.push_back(cacheData(n * sizeof(GLenum), bufs));
_params.push_back(n);
DO_IT_NOW(_glDrawBuffers, 2);
}
void GLBackend::do_glDrawBuffers(Batch& batch, uint32 paramOffset) {
glDrawBuffers(
batch._params[paramOffset + 1]._uint,
(const GLenum*)batch.editData(batch._params[paramOffset + 0]._uint));
(void) CHECK_GL_ERROR();
}
void Batch::_glUseProgram(GLuint program) {
ADD_COMMAND_GL(glUseProgram);
_params.push_back(program);
DO_IT_NOW(_glUseProgram, 1);
}
void GLBackend::do_glUseProgram(Batch& batch, uint32 paramOffset) {
_pipeline._program = batch._params[paramOffset]._uint;
// for this call we still want to execute the glUseProgram in the order of the gl commands to avoid any issue
_pipeline._invalidProgram = false;
glUseProgram(_pipeline._program);
(void) CHECK_GL_ERROR();
}
void Batch::_glUniform1i(GLint location, GLint v0) {
if (location < 0) {
return;
@ -660,30 +483,6 @@ void GLBackend::do_glUniformMatrix4fv(Batch& batch, uint32 paramOffset) {
(void) CHECK_GL_ERROR();
}
void Batch::_glEnableVertexAttribArray(GLint location) {
ADD_COMMAND_GL(glEnableVertexAttribArray);
_params.push_back(location);
DO_IT_NOW(_glEnableVertexAttribArray, 1);
}
void GLBackend::do_glEnableVertexAttribArray(Batch& batch, uint32 paramOffset) {
glEnableVertexAttribArray(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glDisableVertexAttribArray(GLint location) {
ADD_COMMAND_GL(glDisableVertexAttribArray);
_params.push_back(location);
DO_IT_NOW(_glDisableVertexAttribArray, 1);
}
void GLBackend::do_glDisableVertexAttribArray(Batch& batch, uint32 paramOffset) {
glDisableVertexAttribArray(batch._params[paramOffset]._uint);
(void) CHECK_GL_ERROR();
}
void Batch::_glColor4f(GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha) {
ADD_COMMAND_GL(glColor4f);

View file

@ -18,16 +18,21 @@
#include "GPUConfig.h"
#include "Context.h"
#include "Batch.h"
namespace gpu {
class GLBackend : public Backend {
public:
// Context Backend static interface required
friend class Context;
static void init();
static Backend* createBackend();
static bool makeProgram(Shader& shader, const Shader::BindingSet& bindings);
explicit GLBackend(bool syncCache);
GLBackend();
public:
virtual ~GLBackend();
virtual void render(Batch& batch);
@ -49,7 +54,6 @@ public:
static void checkGLStackStable(std::function<void()> f);
static bool makeProgram(Shader& shader, const Shader::BindingSet& bindings = Shader::BindingSet());
class GLBuffer : public GPUObject {
@ -91,9 +95,9 @@ public:
#if (GPU_TRANSFORM_PROFILE == GPU_CORE)
#else
GLuint _transformObject_model = -1;
GLuint _transformCamera_viewInverse = -1;
GLuint _transformCamera_viewport = -1;
GLint _transformObject_model = -1;
GLint _transformCamera_viewInverse = -1;
GLint _transformCamera_viewport = -1;
#endif
GLShader();
@ -195,8 +199,17 @@ public:
static const int MAX_NUM_ATTRIBUTES = Stream::NUM_INPUT_SLOTS;
static const int MAX_NUM_INPUT_BUFFERS = 16;
uint32 getNumInputBuffers() const { return _input._buffersState.size(); }
uint32 getNumInputBuffers() const { return _input._invalidBuffers.size(); }
// this is the maximum per shader stage on the low end apple
// TODO make it platform dependent at init time
static const int MAX_NUM_UNIFORM_BUFFERS = 12;
uint32 getMaxNumUniformBuffers() const { return MAX_NUM_UNIFORM_BUFFERS; }
// this is the maximum per shader stage on the low end apple
// TODO make it platform dependent at init time
static const int MAX_NUM_RESOURCE_TEXTURES = 16;
uint32 getMaxNumResourceTextures() const { return MAX_NUM_RESOURCE_TEXTURES; }
// The State setters called by the GLState::Commands when a new state is assigned
void do_setStateFillMode(int32 mode);
@ -250,12 +263,16 @@ protected:
void killInput();
void syncInputStateCache();
void updateInput();
void resetInputStage();
struct InputStageState {
bool _invalidFormat = true;
Stream::FormatPointer _format;
typedef std::bitset<MAX_NUM_ATTRIBUTES> ActivationCache;
ActivationCache _attributeActivation;
typedef std::bitset<MAX_NUM_INPUT_BUFFERS> BuffersState;
BuffersState _buffersState;
BuffersState _invalidBuffers;
Buffers _buffers;
Offsets _bufferOffsets;
@ -266,23 +283,20 @@ protected:
Offset _indexBufferOffset;
Type _indexBufferType;
typedef std::bitset<MAX_NUM_ATTRIBUTES> ActivationCache;
ActivationCache _attributeActivation;
GLuint _defaultVAO;
InputStageState() :
_invalidFormat(true),
_format(0),
_buffersState(0),
_buffers(_buffersState.size(), BufferPointer(0)),
_bufferOffsets(_buffersState.size(), 0),
_bufferStrides(_buffersState.size(), 0),
_bufferVBOs(_buffersState.size(), 0),
_attributeActivation(0),
_invalidBuffers(0),
_buffers(_invalidBuffers.size(), BufferPointer(0)),
_bufferOffsets(_invalidBuffers.size(), 0),
_bufferStrides(_invalidBuffers.size(), 0),
_bufferVBOs(_invalidBuffers.size(), 0),
_indexBuffer(0),
_indexBufferOffset(0),
_indexBufferType(UINT32),
_attributeActivation(0),
_defaultVAO(0)
{}
} _input;
@ -298,6 +312,7 @@ protected:
// Synchronize the state cache of this Backend with the actual real state of the GL Context
void syncTransformStateCache();
void updateTransform();
void resetTransformStage();
struct TransformStageState {
TransformObject _transformObject;
TransformCamera _transformCamera;
@ -330,12 +345,31 @@ protected:
// Uniform Stage
void do_setUniformBuffer(Batch& batch, uint32 paramOffset);
void releaseUniformBuffer(int slot);
void resetUniformStage();
struct UniformStageState {
Buffers _buffers;
UniformStageState():
_buffers(MAX_NUM_UNIFORM_BUFFERS, nullptr)
{}
} _uniform;
// Resource Stage
void do_setResourceTexture(Batch& batch, uint32 paramOffset);
struct UniformStageState {
};
void releaseResourceTexture(int slot);
void resetResourceStage();
struct ResourceStageState {
Textures _textures;
ResourceStageState():
_textures(MAX_NUM_RESOURCE_TEXTURES, nullptr)
{}
} _resource;
// Pipeline Stage
void do_setPipeline(Batch& batch, uint32 paramOffset);
void do_setStateBlendFactor(Batch& batch, uint32 paramOffset);
@ -349,6 +383,7 @@ protected:
void syncPipelineStateCache();
// Grab the actual gl state into its gpu::State equivalent. This is used by the above call syncPipelineStateCache()
void getCurrentGLState(State::Data& state);
void resetPipelineStage();
struct PipelineStageState {
@ -388,6 +423,7 @@ protected:
// Synchronize the state cache of this Backend with the actual real state of the GL Context
void syncOutputStateCache();
void resetOutputStage();
struct OutputStageState {
@ -402,31 +438,20 @@ protected:
void do_endQuery(Batch& batch, uint32 paramOffset);
void do_getQuery(Batch& batch, uint32 paramOffset);
void resetQueryStage();
struct QueryStageState {
};
// Reset stages
void do_resetStages(Batch& batch, uint32 paramOffset);
void resetStages();
// TODO: As long as we have gl calls explicitly issued from interface
// code, we need to be able to record and batch these calls. The long
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
void do_glEnable(Batch& batch, uint32 paramOffset);
void do_glDisable(Batch& batch, uint32 paramOffset);
void do_glActiveBindTexture(Batch& batch, uint32 paramOffset);
void do_glEnableClientState(Batch& batch, uint32 paramOffset);
void do_glDisableClientState(Batch& batch, uint32 paramOffset);
void do_glCullFace(Batch& batch, uint32 paramOffset);
void do_glAlphaFunc(Batch& batch, uint32 paramOffset);
void do_glDepthFunc(Batch& batch, uint32 paramOffset);
void do_glDepthMask(Batch& batch, uint32 paramOffset);
void do_glDepthRange(Batch& batch, uint32 paramOffset);
void do_glBindBuffer(Batch& batch, uint32 paramOffset);
void do_glBindTexture(Batch& batch, uint32 paramOffset);
void do_glActiveTexture(Batch& batch, uint32 paramOffset);
void do_glTexParameteri(Batch& batch, uint32 paramOffset);
void do_glDrawBuffers(Batch& batch, uint32 paramOffset);
void do_glUseProgram(Batch& batch, uint32 paramOffset);
void do_glUniform1i(Batch& batch, uint32 paramOffset);
void do_glUniform1f(Batch& batch, uint32 paramOffset);
void do_glUniform2f(Batch& batch, uint32 paramOffset);
@ -436,9 +461,6 @@ protected:
void do_glUniform4iv(Batch& batch, uint32 paramOffset);
void do_glUniformMatrix4fv(Batch& batch, uint32 paramOffset);
void do_glEnableVertexAttribArray(Batch& batch, uint32 paramOffset);
void do_glDisableVertexAttribArray(Batch& batch, uint32 paramOffset);
void do_glColor4f(Batch& batch, uint32 paramOffset);
void do_glLineWidth(Batch& batch, uint32 paramOffset);

View file

@ -52,7 +52,7 @@ void GLBackend::do_setInputBuffer(Batch& batch, uint32 paramOffset) {
}
if (isModified) {
_input._buffersState.set(channel);
_input._invalidBuffers.set(channel);
}
}
}
@ -154,7 +154,7 @@ void GLBackend::updateInput() {
_stats._ISNumFormatChanges++;
}
if (_input._buffersState.any()) {
if (_input._invalidBuffers.any()) {
int numBuffers = _input._buffers.size();
auto buffer = _input._buffers.data();
auto vbo = _input._bufferVBOs.data();
@ -162,7 +162,7 @@ void GLBackend::updateInput() {
auto stride = _input._bufferStrides.data();
for (int bufferNum = 0; bufferNum < numBuffers; bufferNum++) {
if (_input._buffersState.test(bufferNum)) {
if (_input._invalidBuffers.test(bufferNum)) {
glBindVertexBuffer(bufferNum, (*vbo), (*offset), (*stride));
}
buffer++;
@ -170,11 +170,11 @@ void GLBackend::updateInput() {
offset++;
stride++;
}
_input._buffersState.reset();
_input._invalidBuffers.reset();
(void) CHECK_GL_ERROR();
}
#else
if (_input._invalidFormat || _input._buffersState.any()) {
if (_input._invalidFormat || _input._invalidBuffers.any()) {
if (_input._invalidFormat) {
InputStageState::ActivationCache newActivation;
@ -232,7 +232,7 @@ void GLBackend::updateInput() {
if ((channelIt).first < buffers.size()) {
int bufferNum = (channelIt).first;
if (_input._buffersState.test(bufferNum) || _input._invalidFormat) {
if (_input._invalidBuffers.test(bufferNum) || _input._invalidFormat) {
// GLuint vbo = gpu::GLBackend::getBufferID((*buffers[bufferNum]));
GLuint vbo = _input._bufferVBOs[bufferNum];
if (boundVBO != vbo) {
@ -240,7 +240,7 @@ void GLBackend::updateInput() {
(void) CHECK_GL_ERROR();
boundVBO = vbo;
}
_input._buffersState[bufferNum] = false;
_input._invalidBuffers[bufferNum] = false;
for (unsigned int i = 0; i < channel._slots.size(); i++) {
const Stream::Attribute& attrib = attributes.at(channel._slots[i]);
@ -285,6 +285,51 @@ void GLBackend::updateInput() {
#endif
}
void GLBackend::resetInputStage() {
// Reset index buffer
_input._indexBufferType = UINT32;
_input._indexBufferOffset = 0;
_input._indexBuffer.reset();
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
(void) CHECK_GL_ERROR();
#if defined(SUPPORT_VAO)
// TODO
#else
glBindBuffer(GL_ARRAY_BUFFER, 0);
size_t i = 0;
#if defined(SUPPORT_LEGACY_OPENGL)
for (; i < NUM_CLASSIC_ATTRIBS; i++) {
glDisableClientState(attributeSlotToClassicAttribName[i]);
}
glVertexPointer(4, GL_FLOAT, 0, 0);
glNormalPointer(GL_FLOAT, 0, 0);
glColorPointer(4, GL_FLOAT, 0, 0);
glTexCoordPointer(4, GL_FLOAT, 0, 0);
#endif
for (; i < _input._attributeActivation.size(); i++) {
glDisableVertexAttribArray(i);
glVertexAttribPointer(i, 4, GL_FLOAT, GL_FALSE, 0, 0);
}
#endif
// Reset vertex buffer and format
_input._format.reset();
_input._invalidFormat = false;
_input._attributeActivation.reset();
for (int i = 0; i < _input._buffers.size(); i++) {
_input._buffers[i].reset();
_input._bufferOffsets[i] = 0;
_input._bufferStrides[i] = 0;
_input._bufferVBOs[i] = 0;
}
_input._invalidBuffers.reset();
}
void GLBackend::do_setIndexBuffer(Batch& batch, uint32 paramOffset) {
_input._indexBufferType = (Type) batch._params[paramOffset + 2]._uint;

View file

@ -177,6 +177,13 @@ void GLBackend::syncOutputStateCache() {
_output._framebuffer.reset();
}
void GLBackend::resetOutputStage() {
if (_output._framebuffer) {
_output._framebuffer.reset();
_output._drawFBO = 0;
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}
}
void GLBackend::do_setFramebuffer(Batch& batch, uint32 paramOffset) {
auto framebuffer = batch._framebuffers.get(batch._params[paramOffset]._uint);
@ -206,12 +213,21 @@ void GLBackend::do_clearFramebuffer(Batch& batch, uint32 paramOffset) {
if (masks & Framebuffer::BUFFER_STENCIL) {
glClearStencil(stencil);
glmask |= GL_STENCIL_BUFFER_BIT;
// TODO: we will probably need to also check the stencil write mask like we do
// for the depth buffer, but as a famous Fez owner would say, "We'll cross that bridge when we come to it"
}
bool restoreDepthMask = false;
if (masks & Framebuffer::BUFFER_DEPTH) {
glClearDepth(depth);
glmask |= GL_DEPTH_BUFFER_BIT;
}
bool cacheDepthMask = _pipeline._stateCache.depthTest.getWriteMask();
if (!cacheDepthMask) {
restoreDepthMask = true;
glDepthMask(GL_TRUE);
}
}
std::vector<GLenum> drawBuffers;
if (masks & Framebuffer::BUFFER_COLORS) {
@ -245,6 +261,11 @@ void GLBackend::do_clearFramebuffer(Batch& batch, uint32 paramOffset) {
glDisable(GL_SCISSOR_TEST);
}
// Restore the depth write mask, i.e. turn it back off
if (restoreDepthMask) {
glDepthMask(GL_FALSE);
}
// Restore the color draw buffers only if a framebuffer is bound
if (_output._framebuffer && !drawBuffers.empty()) {
auto glFramebuffer = syncGPUObject(*_output._framebuffer);
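
As a standalone illustration of why the depth write mask is forced on around the clear (plain GL, hypothetical helper, not engine code): glClear honors glDepthMask, so clearing depth while the mask is off would silently do nothing.

#include <gpu/GPUConfig.h>   // brings in the GL headers

// Hypothetical helper mirroring the fix above: force the depth write mask on
// for the clear, then restore it to its previous value.
void clearDepthRespectingMask(float clearDepth) {
    GLboolean previousMask = GL_TRUE;
    glGetBooleanv(GL_DEPTH_WRITEMASK, &previousMask);
    if (!previousMask) {
        glDepthMask(GL_TRUE);
    }
    glClearDepth(clearDepth);
    glClear(GL_DEPTH_BUFFER_BIT);
    if (!previousMask) {
        glDepthMask(GL_FALSE);
    }
}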

View file

@ -164,15 +164,74 @@ void GLBackend::updatePipeline() {
#endif
}
void GLBackend::resetPipelineStage() {
// First reset State to default
State::Signature resetSignature(0);
resetPipelineState(resetSignature);
_pipeline._state = nullptr;
_pipeline._invalidState = false;
// Second the shader side
_pipeline._invalidProgram = false;
_pipeline._program = 0;
_pipeline._pipeline.reset();
glUseProgram(0);
}
void GLBackend::releaseUniformBuffer(int slot) {
#if (GPU_FEATURE_PROFILE == GPU_CORE)
auto& buf = _uniform._buffers[slot];
if (buf) {
auto* object = Backend::getGPUObject<GLBackend::GLBuffer>(*buf);
if (object) {
GLuint bo = object->_buffer;
glBindBufferBase(GL_UNIFORM_BUFFER, slot, 0); // RELEASE
(void) CHECK_GL_ERROR();
}
buf.reset();
}
#endif
}
void GLBackend::resetUniformStage() {
for (int i = 0; i < _uniform._buffers.size(); i++) {
releaseUniformBuffer(i);
}
}
void GLBackend::do_setUniformBuffer(Batch& batch, uint32 paramOffset) {
GLuint slot = batch._params[paramOffset + 3]._uint;
BufferPointer uniformBuffer = batch._buffers.get(batch._params[paramOffset + 2]._uint);
GLintptr rangeStart = batch._params[paramOffset + 1]._uint;
GLsizeiptr rangeSize = batch._params[paramOffset + 0]._uint;
#if (GPU_FEATURE_PROFILE == GPU_CORE)
GLuint bo = getBufferID(*uniformBuffer);
glBindBufferRange(GL_UNIFORM_BUFFER, slot, bo, rangeStart, rangeSize);
if (!uniformBuffer) {
releaseUniformBuffer(slot);
return;
}
// check cache before thinking
if (_uniform._buffers[slot] == uniformBuffer) {
return;
}
// Sync BufferObject
auto* object = GLBackend::syncGPUObject(*uniformBuffer);
if (object) {
glBindBufferRange(GL_UNIFORM_BUFFER, slot, object->_buffer, rangeStart, rangeSize);
_uniform._buffers[slot] = uniformBuffer;
(void) CHECK_GL_ERROR();
} else {
releaseResourceTexture(slot);
return;
}
#else
// because we rely on the program uniform mechanism we need to have
// the program bound, thank you MacOSX Legacy profile.
@ -184,19 +243,49 @@ void GLBackend::do_setUniformBuffer(Batch& batch, uint32 paramOffset) {
// NOT working, so we'll stick to the uniform float array until we move to core profile
// GLuint bo = getBufferID(*uniformBuffer);
//glUniformBufferEXT(_shader._program, slot, bo);
#endif
(void) CHECK_GL_ERROR();
#endif
}
void GLBackend::releaseResourceTexture(int slot) {
auto& tex = _resource._textures[slot];
if (tex) {
auto* object = Backend::getGPUObject<GLBackend::GLTexture>(*tex);
if (object) {
GLuint to = object->_texture;
GLuint target = object->_target;
glActiveTexture(GL_TEXTURE0 + slot);
glBindTexture(target, 0); // RELEASE
(void) CHECK_GL_ERROR();
}
tex.reset();
}
}
void GLBackend::resetResourceStage() {
for (int i = 0; i < _resource._textures.size(); i++) {
releaseResourceTexture(i);
}
}
void GLBackend::do_setResourceTexture(Batch& batch, uint32 paramOffset) {
GLuint slot = batch._params[paramOffset + 1]._uint;
TexturePointer uniformTexture = batch._textures.get(batch._params[paramOffset + 0]._uint);
TexturePointer resourceTexture = batch._textures.get(batch._params[paramOffset + 0]._uint);
if (!uniformTexture) {
if (!resourceTexture) {
releaseResourceTexture(slot);
return;
}
// check cache before thinking
if (_resource._textures[slot] == resourceTexture) {
return;
}
GLTexture* object = GLBackend::syncGPUObject(*uniformTexture);
// Always make sure the GLObject is in sync
GLTexture* object = GLBackend::syncGPUObject(*resourceTexture);
if (object) {
GLuint to = object->_texture;
GLuint target = object->_target;
@ -205,7 +294,10 @@ void GLBackend::do_setResourceTexture(Batch& batch, uint32 paramOffset) {
(void) CHECK_GL_ERROR();
_resource._textures[slot] = resourceTexture;
} else {
releaseResourceTexture(slot);
return;
}
}
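
A short caller-side sketch of the new slot caches (hypothetical material-setup code): re-binding the same texture is skipped at execution time, and binding a null pointer releases the slot.

#include <gpu/Batch.h>

// Hypothetical example: exercise the per-slot resource-texture cache.
void bindAlbedo(gpu::Batch& batch, const gpu::TexturePointer& albedo, unsigned int slot) {
    batch.setResourceTexture(slot, albedo);   // synced and cached in _resource._textures[slot]
    batch.setResourceTexture(slot, albedo);   // cache hit: the backend skips the rebind
    batch.setResourceTexture(slot, nullptr);  // releases whatever is bound on that slot
}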

View file

@ -8,7 +8,6 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GPULogging.h"
#include "GLBackendShared.h"
@ -104,3 +103,7 @@ void GLBackend::do_getQuery(Batch& batch, uint32 paramOffset) {
(void)CHECK_GL_ERROR();
}
}
void GLBackend::resetQueryStage() {
}

View file

@ -8,7 +8,6 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "GPULogging.h"
#include "GLBackendShared.h"
#include "Format.h"
@ -542,7 +541,12 @@ ElementResource getFormatFromGLUniform(GLenum gltype) {
};
int makeUniformSlots(GLuint glprogram, const Shader::BindingSet& slotBindings, Shader::SlotSet& uniforms, Shader::SlotSet& textures, Shader::SlotSet& samplers) {
int makeUniformSlots(GLuint glprogram, const Shader::BindingSet& slotBindings,
Shader::SlotSet& uniforms, Shader::SlotSet& textures, Shader::SlotSet& samplers
#if (GPU_FEATURE_PROFILE == GPU_LEGACY)
, Shader::SlotSet& fakeBuffers
#endif
) {
GLint uniformsCount = 0;
#if (GPU_FEATURE_PROFILE == GPU_LEGACY)
@ -583,6 +587,15 @@ int makeUniformSlots(GLuint glprogram, const Shader::BindingSet& slotBindings, S
}
if (elementResource._resource == Resource::BUFFER) {
#if (GPU_FEATURE_PROFILE == GPU_LEGACY)
// if in legacy profile, we fake the uniform buffer with an array
// this is where we detect it, assuming it's explicitly assigned a binding
auto requestedBinding = slotBindings.find(std::string(sname));
if (requestedBinding != slotBindings.end()) {
// found one buffer!
fakeBuffers.insert(Shader::Slot(sname, location, elementResource._element, elementResource._resource));
}
#endif
uniforms.insert(Shader::Slot(sname, location, elementResource._element, elementResource._resource));
} else {
// For texture/Sampler, the location is the actual binding value
@ -640,14 +653,13 @@ int makeUniformBlockSlots(GLuint glprogram, const Shader::BindingSet& slotBindin
GLchar name[NAME_LENGTH];
GLint length = 0;
GLint size = 0;
GLenum type = 0;
GLint binding = -1;
glGetActiveUniformBlockiv(glprogram, i, GL_UNIFORM_BLOCK_NAME_LENGTH, &length);
glGetActiveUniformBlockName(glprogram, i, NAME_LENGTH, &length, name);
glGetActiveUniformBlockiv(glprogram, i, GL_UNIFORM_BLOCK_BINDING, &binding);
glGetActiveUniformBlockiv(glprogram, i, GL_UNIFORM_BLOCK_DATA_SIZE, &size);
GLuint blockIndex = glGetUniformBlockIndex(glprogram, name);
// Check if there is a requested binding for this block
@ -738,8 +750,12 @@ bool GLBackend::makeProgram(Shader& shader, const Shader::BindingSet& slotBindin
Shader::SlotSet uniforms;
Shader::SlotSet textures;
Shader::SlotSet samplers;
#if (GPU_FEATURE_PROFILE == GPU_CORE)
makeUniformSlots(object->_program, slotBindings, uniforms, textures, samplers);
#else
makeUniformSlots(object->_program, slotBindings, uniforms, textures, samplers, buffers);
#endif
Shader::SlotSet inputs;
makeInputSlots(object->_program, slotBindings, inputs);

View file

@ -11,11 +11,10 @@
#ifndef hifi_gpu_GLBackend_Shared_h
#define hifi_gpu_GLBackend_Shared_h
#include "GLBackend.h"
#include <QDebug>
#include "Batch.h"
#include "GPULogging.h"
#include "GLBackend.h"
static const GLenum _primitiveToGLmode[gpu::NUM_PRIMITIVES] = {
GL_POINTS,

View file

@ -482,9 +482,15 @@ void GLBackend::syncPipelineStateCache() {
State::Data state;
glEnable(GL_TEXTURE_CUBE_MAP_SEAMLESS);
// Point size is always on
glHint(GL_POINT_SMOOTH_HINT, GL_NICEST);
glEnable(GL_PROGRAM_POINT_SIZE_EXT);
glEnable(GL_VERTEX_PROGRAM_POINT_SIZE);
getCurrentGLState(state);
State::Signature signature = State::evalSignature(state);
_pipeline._stateCache = state;
_pipeline._stateSignatureCache = signature;
}

View file

@ -184,4 +184,6 @@ void GLBackend::updateTransform() {
_transform._invalidView = _transform._invalidProj = _transform._invalidModel = _transform._invalidViewport = false;
}
void GLBackend::resetTransformStage() {
}

View file

@ -34,11 +34,11 @@
#elif defined(ANDROID)
#else
#include <GL/gl.h>
#include <GL/glext.h>
#define GPU_FEATURE_PROFILE GPU_LEGACY
#define GPU_TRANSFORM_PROFILE GPU_LEGACY
#include <GL/glew.h>
#define GPU_FEATURE_PROFILE GPU_CORE
#define GPU_TRANSFORM_PROFILE GPU_CORE
#endif

View file

@ -13,13 +13,13 @@
#include <bitset>
#include <map>
#include <qurl.h>
#include <glm/glm.hpp>
#include "gpu/Resource.h"
#include "gpu/Texture.h"
#include <qurl.h>
namespace model {

View file

@ -9,15 +9,12 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QOpenGLFramebufferObject, which includes an earlier version of OpenGL
#include <gpu/GPUConfig.h>
#include <gpu/GLBackend.h>
#include <glm/gtc/random.hpp>
#include <PathUtils.h>
#include <SharedUtil.h>
#include <gpu/Context.h>
#include "gpu/StandardShaderLib.h"
#include "AmbientOcclusionEffect.h"
@ -164,7 +161,7 @@ const gpu::PipelinePointer& AmbientOcclusion::getBlendPipeline() {
// Blend on transparent
state->setBlendFunction(true,
gpu::State::SRC_COLOR, gpu::State::BLEND_OP_ADD, gpu::State::DEST_COLOR);
gpu::State::INV_SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::SRC_ALPHA);
// Good to go add the brand new pipeline
_blendPipeline.reset(gpu::Pipeline::create(program, state));
@ -176,7 +173,6 @@ void AmbientOcclusion::run(const render::SceneContextPointer& sceneContext, cons
assert(renderContext->args);
assert(renderContext->args->_viewFrustum);
RenderArgs* args = renderContext->args;
auto& scene = sceneContext->_scene;
gpu::Batch batch;

View file

@ -12,12 +12,12 @@
#include "DeferredLightingEffect.h"
#include <GLMHelpers.h>
#include <gpu/GPUConfig.h>
#include <PathUtils.h>
#include <ViewFrustum.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <gpu/StandardShaderLib.h>
#include <PathUtils.h>
#include <ViewFrustum.h>
#include "AbstractViewStateInterface.h"
#include "GeometryCache.h"
@ -291,7 +291,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
locations = &_directionalAmbientSphereLightCascadedShadowMapLocations;
}
batch.setPipeline(program);
batch._glUniform3fv(locations->shadowDistances, 1, (const GLfloat*) &_viewState->getShadowDistances());
batch._glUniform3fv(locations->shadowDistances, 1, (const float*) &_viewState->getShadowDistances());
} else {
if (useSkyboxCubemap) {
@ -325,7 +325,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
sh = (*_skybox->getCubemap()->getIrradiance());
}
for (int i =0; i <gpu::SphericalHarmonics::NUM_COEFFICIENTS; i++) {
batch._glUniform4fv(locations->ambientSphere + i, 1, (const GLfloat*) (&sh) + i * 4);
batch._glUniform4fv(locations->ambientSphere + i, 1, (const float*) (&sh) + i * 4);
}
}
@ -340,7 +340,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
if (_atmosphere && (locations->atmosphereBufferUnit >= 0)) {
batch.setUniformBuffer(locations->atmosphereBufferUnit, _atmosphere->getDataBuffer());
}
batch._glUniformMatrix4fv(locations->invViewMat, 1, false, reinterpret_cast< const GLfloat* >(&invViewMat));
batch._glUniformMatrix4fv(locations->invViewMat, 1, false, reinterpret_cast< const float* >(&invViewMat));
}
float left, right, bottom, top, nearVal, farVal;
@ -419,9 +419,9 @@ void DeferredLightingEffect::render(RenderArgs* args) {
batch._glUniform2f(_pointLightLocations.depthTexCoordOffset, depthTexCoordOffsetS, depthTexCoordOffsetT);
batch._glUniform2f(_pointLightLocations.depthTexCoordScale, depthTexCoordScaleS, depthTexCoordScaleT);
batch._glUniformMatrix4fv(_pointLightLocations.invViewMat, 1, false, reinterpret_cast< const GLfloat* >(&invViewMat));
batch._glUniformMatrix4fv(_pointLightLocations.invViewMat, 1, false, reinterpret_cast< const float* >(&invViewMat));
batch._glUniformMatrix4fv(_pointLightLocations.texcoordMat, 1, false, reinterpret_cast< const GLfloat* >(&texcoordMat));
batch._glUniformMatrix4fv(_pointLightLocations.texcoordMat, 1, false, reinterpret_cast< const float* >(&texcoordMat));
for (auto lightID : _pointLights) {
auto& light = _allocatedLights[lightID];
@ -467,9 +467,9 @@ void DeferredLightingEffect::render(RenderArgs* args) {
batch._glUniform2f(_spotLightLocations.depthTexCoordOffset, depthTexCoordOffsetS, depthTexCoordOffsetT);
batch._glUniform2f(_spotLightLocations.depthTexCoordScale, depthTexCoordScaleS, depthTexCoordScaleT);
batch._glUniformMatrix4fv(_spotLightLocations.invViewMat, 1, false, reinterpret_cast< const GLfloat* >(&invViewMat));
batch._glUniformMatrix4fv(_spotLightLocations.invViewMat, 1, false, reinterpret_cast< const float* >(&invViewMat));
batch._glUniformMatrix4fv(_spotLightLocations.texcoordMat, 1, false, reinterpret_cast< const GLfloat* >(&texcoordMat));
batch._glUniformMatrix4fv(_spotLightLocations.texcoordMat, 1, false, reinterpret_cast< const float* >(&texcoordMat));
for (auto lightID : _spotLights) {
auto light = _allocatedLights[lightID];
@ -489,7 +489,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
if ((eyeHalfPlaneDistance > -nearRadius) &&
(glm::distance(eyePoint, glm::vec3(light->getPosition())) < expandedRadius + nearRadius)) {
coneParam.w = 0.0f;
batch._glUniform4fv(_spotLightLocations.coneParam, 1, reinterpret_cast< const GLfloat* >(&coneParam));
batch._glUniform4fv(_spotLightLocations.coneParam, 1, reinterpret_cast< const float* >(&coneParam));
Transform model;
model.setTranslation(glm::vec3(0.0f, 0.0f, -1.0f));
@ -509,7 +509,7 @@ void DeferredLightingEffect::render(RenderArgs* args) {
batch.setViewTransform(viewMat);
} else {
coneParam.w = 1.0f;
batch._glUniform4fv(_spotLightLocations.coneParam, 1, reinterpret_cast< const GLfloat* >(&coneParam));
batch._glUniform4fv(_spotLightLocations.coneParam, 1, reinterpret_cast< const float* >(&coneParam));
Transform model;
model.setTranslation(light->getPosition());
@ -595,9 +595,9 @@ void DeferredLightingEffect::loadLightProgram(const char* vertSource, const char
slotBindings.insert(gpu::Shader::Binding(std::string("depthMap"), 3));
slotBindings.insert(gpu::Shader::Binding(std::string("shadowMap"), 4));
slotBindings.insert(gpu::Shader::Binding(std::string("skyboxMap"), 5));
const GLint LIGHT_GPU_SLOT = 3;
const int LIGHT_GPU_SLOT = 3;
slotBindings.insert(gpu::Shader::Binding(std::string("lightBuffer"), LIGHT_GPU_SLOT));
const GLint ATMOSPHERE_GPU_SLOT = 4;
const int ATMOSPHERE_GPU_SLOT = 4;
slotBindings.insert(gpu::Shader::Binding(std::string("atmosphereBufferUnit"), ATMOSPHERE_GPU_SLOT));
gpu::Shader::makeProgram(*program, slotBindings);
@ -614,13 +614,8 @@ void DeferredLightingEffect::loadLightProgram(const char* vertSource, const char
locations.texcoordMat = program->getUniforms().findLocation("texcoordMat");
locations.coneParam = program->getUniforms().findLocation("coneParam");
#if (GPU_FEATURE_PROFILE == GPU_CORE)
locations.lightBufferUnit = program->getBuffers().findLocation("lightBuffer");
locations.atmosphereBufferUnit = program->getBuffers().findLocation("atmosphereBufferUnit");
#else
locations.lightBufferUnit = program->getUniforms().findLocation("lightBuffer");
locations.atmosphereBufferUnit = program->getUniforms().findLocation("atmosphereBufferUnit");
#endif
auto state = std::make_shared<gpu::State>();
if (lightVolume) {
@ -677,10 +672,10 @@ model::MeshPointer DeferredLightingEffect::getSpotLightMesh() {
int ringFloatOffset = slices * 3;
GLfloat* vertexData = new GLfloat[verticesSize];
GLfloat* vertexRing0 = vertexData;
GLfloat* vertexRing1 = vertexRing0 + ringFloatOffset;
GLfloat* vertexRing2 = vertexRing1 + ringFloatOffset;
float* vertexData = new float[verticesSize];
float* vertexRing0 = vertexData;
float* vertexRing1 = vertexRing0 + ringFloatOffset;
float* vertexRing2 = vertexRing1 + ringFloatOffset;
for (int i = 0; i < slices; i++) {
float theta = TWO_PI * i / slices;
@ -746,7 +741,7 @@ model::MeshPointer DeferredLightingEffect::getSpotLightMesh() {
*(index++) = capVertex;
}
_spotLightMesh->setIndexBuffer(gpu::BufferView(new gpu::Buffer(sizeof(GLushort) * indices, (gpu::Byte*) indexData), gpu::Element::INDEX_UINT16));
_spotLightMesh->setIndexBuffer(gpu::BufferView(new gpu::Buffer(sizeof(unsigned short) * indices, (gpu::Byte*) indexData), gpu::Element::INDEX_UINT16));
delete[] indexData;
model::Mesh::Part part(0, indices, 0, model::Mesh::TRIANGLES);

View file

@ -18,7 +18,6 @@
#include <QMap>
#include <QQueue>
#include <gpu/Batch.h>
#include <gpu/GPUConfig.h>
#include "RenderUtilsLogging.h"
static QQueue<gpu::FramebufferPointer> _cachedFramebuffers;

View file

@ -9,22 +9,22 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QOpenGLBuffer, which includes an earlier version of OpenGL
#include "GeometryCache.h"
#include <cmath>
#include <QNetworkReply>
#include <QRunnable>
#include <QThreadPool>
#include <gpu/Batch.h>
#include <gpu/GLBackend.h>
#include <FSTReader.h>
#include <NumericalConstants.h>
#include <gpu/Batch.h>
#include <gpu/GLBackend.h>
#include "TextureCache.h"
#include "RenderUtilsLogging.h"
#include "GeometryCache.h"
#include "standardTransformPNTC_vert.h"
#include "standardDrawTexture_frag.h"
@ -1998,7 +1998,7 @@ void GeometryReader::run() {
} else if (_url.path().toLower().endsWith("palaceoforinthilian4.fbx")) {
lightmapLevel = 3.5f;
}
fbxgeo = readFBX(_reply, _mapping, grabLightmaps, lightmapLevel);
fbxgeo = readFBX(_reply, _mapping, _url.path(), grabLightmaps, lightmapLevel);
} else if (_url.path().toLower().endsWith(".obj")) {
fbxgeo = OBJReader().readOBJ(_reply, _mapping, &_url);
}

View file

@ -0,0 +1,86 @@
//
// HitEffect.cpp
// interface/src/renderer
//
// Created by Andrzej Kapolka on 7/14/13.
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// include this before QOpenGLFramebufferObject, which includes an earlier version of OpenGL
#include <glm/gtc/random.hpp>
#include <PathUtils.h>
#include <SharedUtil.h>
#include "AbstractViewStateInterface.h"
#include "HitEffect.h"
#include "TextureCache.h"
#include "DependencyManager.h"
#include "ViewFrustum.h"
#include "GeometryCache.h"
#include <gpu/Context.h>
#include "hit_effect_vert.h"
#include "hit_effect_frag.h"
HitEffect::HitEffect() {
}
const gpu::PipelinePointer& HitEffect::getHitEffectPipeline() {
if (!_hitEffectPipeline) {
auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(hit_effect_vert)));
auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(hit_effect_frag)));
gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));
gpu::Shader::BindingSet slotBindings;
gpu::Shader::makeProgram(*program, slotBindings);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(false, false, gpu::LESS_EQUAL);
// Blend on transparent
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
// Good to go add the brand new pipeline
_hitEffectPipeline.reset(gpu::Pipeline::create(program, state));
}
return _hitEffectPipeline;
}
void HitEffect::run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext) {
assert(renderContext->args);
assert(renderContext->args->_viewFrustum);
RenderArgs* args = renderContext->args;
gpu::Batch batch;
glm::mat4 projMat;
Transform viewMat;
args->_viewFrustum->evalProjectionMatrix(projMat);
args->_viewFrustum->evalViewTransform(viewMat);
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
batch.setModelTransform(Transform());
batch.setPipeline(getHitEffectPipeline());
glm::vec4 color(0.0f, 0.0f, 0.0f, 1.0f);
glm::vec2 bottomLeft(-1.0f, -1.0f);
glm::vec2 topRight(1.0f, 1.0f);
DependencyManager::get<GeometryCache>()->renderQuad(batch, bottomLeft, topRight, color);
// Ready to render
args->_context->render((batch));
}

View file

@ -0,0 +1,33 @@
//
// hitEffect.h
// hifi
//
// Created by eric levin on 7/17/15.
//
//
#ifndef hifi_hitEffect_h
#define hifi_hitEffect_h
#include <DependencyManager.h>
#include "render/DrawTask.h"
class AbstractViewStateInterface;
class ProgramObject;
class HitEffect {
public:
HitEffect();
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
typedef render::Job::Model<HitEffect> JobModel;
const gpu::PipelinePointer& getHitEffectPipeline();
private:
gpu::PipelinePointer _hitEffectPipeline;
};
#endif

View file

@ -17,18 +17,16 @@
#include <glm/gtx/norm.hpp>
#include <GeometryUtil.h>
#include <gpu/Batch.h>
#include <gpu/GLBackend.h>
#include <PathUtils.h>
#include <PerfStat.h>
#include "PhysicsEntity.h"
#include <ViewFrustum.h>
#include <render/Scene.h>
#include <gpu/Batch.h>
#include "AbstractViewStateInterface.h"
#include "AnimationHandle.h"
#include "DeferredLightingEffect.h"
#include "Model.h"
#include "RenderUtilsLogging.h"
#include "model_vert.h"
#include "model_shadow_vert.h"
@ -66,6 +64,7 @@ Model::Model(RigPointer rig, QObject* parent) :
_snapModelToRegistrationPoint(false),
_snappedToRegistrationPoint(false),
_showTrueJointTransforms(true),
_cauterizeBones(false),
_lodDistance(0.0f),
_pupilDilation(0.0f),
_url("http://invalid.com"),
@ -93,7 +92,7 @@ Model::~Model() {
}
Model::RenderPipelineLib Model::_renderPipelineLib;
const GLint MATERIAL_GPU_SLOT = 3;
const int MATERIAL_GPU_SLOT = 3;
void Model::RenderPipelineLib::addRenderPipeline(Model::RenderKey key,
gpu::ShaderPointer& vertexShader,
@ -186,13 +185,9 @@ void Model::RenderPipelineLib::initLocations(gpu::ShaderPointer& program, Model:
locations.specularTextureUnit = program->getTextures().findLocation("specularMap");
locations.emissiveTextureUnit = program->getTextures().findLocation("emissiveMap");
#if (GPU_FEATURE_PROFILE == GPU_CORE)
locations.materialBufferUnit = program->getBuffers().findLocation("materialBuffer");
locations.lightBufferUnit = program->getBuffers().findLocation("lightBuffer");
#else
locations.materialBufferUnit = program->getUniforms().findLocation("materialBuffer");
locations.lightBufferUnit = program->getUniforms().findLocation("lightBuffer");
#endif
locations.clusterMatrices = program->getUniforms().findLocation("clusterMatrices");
locations.clusterIndices = program->getInputs().findLocation("clusterIndices");
@ -451,6 +446,7 @@ bool Model::updateGeometry() {
foreach (const FBXMesh& mesh, fbxGeometry.meshes) {
MeshState state;
state.clusterMatrices.resize(mesh.clusters.size());
state.cauterizedClusterMatrices.resize(mesh.clusters.size());
_meshStates.append(state);
auto buffer = std::make_shared<gpu::Buffer>();
@ -471,7 +467,7 @@ bool Model::updateGeometry() {
void Model::initJointStates(QVector<JointState> states) {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
glm::mat4 parentTransform = glm::scale(_scale) * glm::translate(_offset) * geometry.offset;
_boundingRadius = _rig->initJointStates(states, parentTransform, geometry.neckJointIndex);
_boundingRadius = _rig->initJointStates(states, parentTransform);
}
bool Model::findRayIntersectionAgainstSubMeshes(const glm::vec3& origin, const glm::vec3& direction, float& distance,
@ -1045,10 +1041,6 @@ void Model::clearJointState(int index) {
_rig->clearJointState(index);
}
void Model::clearJointAnimationPriority(int index) {
_rig->clearJointAnimationPriority(index);
}
void Model::setJointState(int index, bool valid, const glm::quat& rotation, float priority) {
_rig->setJointState(index, valid, rotation, priority);
}
@ -1336,6 +1328,12 @@ void Model::simulateInternal(float deltaTime) {
glm::mat4 parentTransform = glm::scale(_scale) * glm::translate(_offset) * geometry.offset;
updateRig(deltaTime, parentTransform);
glm::mat4 zeroScale(glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
auto cauterizeMatrix = _rig->getJointTransform(geometry.neckJointIndex) * zeroScale;
glm::mat4 modelToWorld = glm::mat4_cast(_rotation);
for (int i = 0; i < _meshStates.size(); i++) {
MeshState& state = _meshStates[i];
@ -1343,18 +1341,34 @@ void Model::simulateInternal(float deltaTime) {
if (_showTrueJointTransforms) {
for (int j = 0; j < mesh.clusters.size(); j++) {
const FBXCluster& cluster = mesh.clusters.at(j);
state.clusterMatrices[j] =
modelToWorld * _rig->getJointTransform(cluster.jointIndex) * cluster.inverseBindMatrix;
auto jointMatrix =_rig->getJointTransform(cluster.jointIndex);
state.clusterMatrices[j] = modelToWorld * jointMatrix * cluster.inverseBindMatrix;
// as an optimization, don't build cauterizedClusterMatrices if the boneSet is empty.
if (!_cauterizeBoneSet.empty()) {
if (_cauterizeBoneSet.find(cluster.jointIndex) != _cauterizeBoneSet.end()) {
jointMatrix = cauterizeMatrix;
}
state.cauterizedClusterMatrices[j] = modelToWorld * jointMatrix * cluster.inverseBindMatrix;
}
}
} else {
for (int j = 0; j < mesh.clusters.size(); j++) {
const FBXCluster& cluster = mesh.clusters.at(j);
state.clusterMatrices[j] =
modelToWorld * _rig->getJointVisibleTransform(cluster.jointIndex) * cluster.inverseBindMatrix;
auto jointMatrix = _rig->getJointVisibleTransform(cluster.jointIndex);
state.clusterMatrices[j] = modelToWorld * jointMatrix * cluster.inverseBindMatrix;
// as an optimization, don't build cauterizedClusterMatrices if the boneSet is empty.
if (!_cauterizeBoneSet.empty()) {
if (_cauterizeBoneSet.find(cluster.jointIndex) != _cauterizeBoneSet.end()) {
jointMatrix = cauterizeMatrix;
}
state.cauterizedClusterMatrices[j] = modelToWorld * jointMatrix * cluster.inverseBindMatrix;
}
}
}
}
// post the blender if we're not currently waiting for one to finish
if (geometry.hasBlendedMeshes() && _blendshapeCoefficients != _blendedBlendshapeCoefficients) {
_blendedBlendshapeCoefficients = _blendshapeCoefficients;
@ -1362,12 +1376,6 @@ void Model::simulateInternal(float deltaTime) {
}
}
void Model::updateJointState(int index) {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
glm::mat4 parentTransform = glm::scale(_scale) * glm::translate(_offset) * geometry.offset;
_rig->updateJointState(index, parentTransform);
}
bool Model::setJointPosition(int jointIndex, const glm::vec3& position, const glm::quat& rotation, bool useRotation,
int lastFreeIndex, bool allIntermediatesFree, const glm::vec3& alignment, float priority) {
const FBXGeometry& geometry = _geometry->getFBXGeometry();
@ -1610,12 +1618,21 @@ void Model::renderPart(RenderArgs* args, int meshIndex, int partIndex, bool tran
}
if (isSkinned) {
batch._glUniformMatrix4fv(locations->clusterMatrices, state.clusterMatrices.size(), false,
(const float*)state.clusterMatrices.constData());
const float* bones;
if (_cauterizeBones) {
bones = (const float*)state.cauterizedClusterMatrices.constData();
} else {
bones = (const float*)state.clusterMatrices.constData();
}
batch._glUniformMatrix4fv(locations->clusterMatrices, state.clusterMatrices.size(), false, bones);
_transforms[0] = Transform();
_transforms[0].preTranslate(_translation);
} else {
_transforms[0] = Transform(state.clusterMatrices[0]);
if (_cauterizeBones) {
_transforms[0] = Transform(state.cauterizedClusterMatrices[0]);
} else {
_transforms[0] = Transform(state.clusterMatrices[0]);
}
_transforms[0].preTranslate(_translation);
}
batch.setModelTransform(_transforms[0]);
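The cauterization path above hinges on the zeroScale matrix: post-multiplying the neck joint's transform by a matrix whose first three columns are zero throws away its rotation and scale and keeps only the translation column, so every cluster whose joint is in _cauterizeBoneSet gets skinned with a zero-size transform parked at the neck. A small standalone glm sketch of that collapse (the joint values are invented for illustration):

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <cstdio>

int main() {
    // Hypothetical neck joint transform: a rotation plus a translation.
    glm::mat4 neck = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 1.6f, 0.1f)) *
                     glm::rotate(glm::mat4(1.0f), glm::radians(30.0f), glm::vec3(0.0f, 1.0f, 0.0f));

    // Same zeroScale matrix as simulateInternal(): three zero columns, then (0, 0, 0, 1).
    glm::mat4 zeroScale(glm::vec4(0.0f), glm::vec4(0.0f), glm::vec4(0.0f),
                        glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));

    // Post-multiplying wipes the rotation/scale part and keeps only the translation,
    // which is exactly what the cauterized clusters are skinned with.
    glm::mat4 cauterize = neck * zeroScale;
    for (int c = 0; c < 4; ++c) {
        printf("col %d: %5.2f %5.2f %5.2f %5.2f\n", c,
               cauterize[c].x, cauterize[c].y, cauterize[c].z, cauterize[c].w);
    }
    // Expected: three zero columns, then roughly (0.00, 1.60, 0.10, 1.00).
    return 0;
}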


@ -18,10 +18,10 @@
#include <QMutex>
#include <unordered_map>
#include <unordered_set>
#include <functional>
#include <AABox.h>
#include <AnimationCache.h>
#include <DependencyManager.h>
#include <GeometryUtil.h>
#include <gpu/Stream.h>
@ -148,10 +148,6 @@ public:
/// Sets the distance parameter used for LOD computations.
void setLODDistance(float distance) { _lodDistance = distance; }
const QList<AnimationHandlePointer>& getRunningAnimations() const { return _rig->getRunningAnimations(); }
/// Clear the joint animation priority
void clearJointAnimationPriority(int index);
void setScaleToFit(bool scaleToFit, float largestDimension = 0.0f, bool forceRescale = false);
bool getScaleToFit() const { return _scaleToFit; } /// is scale to fit enabled
@ -172,6 +168,9 @@ public:
/// \return true if joint exists
bool getJointRotation(int jointIndex, glm::quat& rotation) const;
/// Returns the index of the parent of the indexed joint, or -1 if not found.
int getParentJointIndex(int jointIndex) const;
void inverseKinematics(int jointIndex, glm::vec3 position, const glm::quat& rotation, float priority);
/// Returns the extents of the model in its bind pose.
@ -187,6 +186,12 @@ public:
bool getIsScaledToFit() const { return _scaledToFit; } /// is model scaled to fit
const glm::vec3& getScaleToFitDimensions() const { return _scaleToFitDimensions; } /// the dimensions model is scaled to
void setCauterizeBones(bool flag) { _cauterizeBones = flag; }
bool getCauterizeBones() const { return _cauterizeBones; }
const std::unordered_set<int>& getCauterizeBoneSet() const { return _cauterizeBoneSet; }
void setCauterizeBoneSet(const std::unordered_set<int>& boneSet) { _cauterizeBoneSet = boneSet; }
protected:
void setPupilDilation(float dilation) { _pupilDilation = dilation; }
@ -218,9 +223,6 @@ protected:
/// Clear the joint states
void clearJointState(int index);
/// Returns the index of the parent of the indexed joint, or -1 if not found.
int getParentJointIndex(int jointIndex) const;
/// Returns the index of the last free ancestor of the indexed joint, or -1 if not found.
int getLastFreeJointIndex(int jointIndex) const;
@ -255,9 +257,12 @@ protected:
class MeshState {
public:
QVector<glm::mat4> clusterMatrices;
QVector<glm::mat4> cauterizedClusterMatrices;
};
QVector<MeshState> _meshStates;
std::unordered_set<int> _cauterizeBoneSet;
bool _cauterizeBones;
// returns 'true' if needs fullUpdate after geometry change
bool updateGeometry();
@ -271,9 +276,6 @@ protected:
void simulateInternal(float deltaTime);
virtual void updateRig(float deltaTime, glm::mat4 parentTransform);
/// Updates the state of the joint at the specified index.
virtual void updateJointState(int index);
/// \param jointIndex index of joint in model structure
/// \param position position of joint in model-frame
/// \param rotation rotation of joint in model-frame
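The cauterize accessors added to Model.h above are only the storage half of the feature; a caller still has to decide which joints get cauterized. A plausible pattern (hypothetical, not code from this commit) is to gather the neck joint and all of its descendants from the skeleton's parent-index table once, hand the set to the model, and toggle the flag per frame, e.g. only while drawing the local avatar's head in first person:

#include <cstdio>
#include <unordered_set>
#include <vector>

// Hypothetical helper: collect a joint and every descendant, given a parent
// index per joint (-1 for the root), the same shape of data FBXGeometry holds.
std::unordered_set<int> collectDescendants(const std::vector<int>& parents, int root) {
    std::unordered_set<int> result;
    result.insert(root);
    bool grew = true;
    while (grew) {                       // fixed-point pass; skeletons are small
        grew = false;
        for (int j = 0; j < (int)parents.size(); ++j) {
            if (!result.count(j) && parents[j] >= 0 && result.count(parents[j])) {
                result.insert(j);
                grew = true;
            }
        }
    }
    return result;
}

int main() {
    // Toy skeleton: 0 = hips, 1 = spine, 2 = neck, 3 = head, 4 = eye.
    std::vector<int> parents = { -1, 0, 1, 2, 3 };
    std::unordered_set<int> cauterized = collectDescendants(parents, 2);
    printf("%d joints cauterized\n", (int)cauterized.size()); // neck, head, eye -> 3
    // In the engine this set would feed Model::setCauterizeBoneSet() and
    // Model::setCauterizeBones(true) while rendering the owner's own head.
    return 0;
}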


@ -1,3 +1,4 @@
//
// RenderDeferredTask.cpp
// render-utils/src/
@ -10,16 +11,15 @@
//
#include "RenderDeferredTask.h"
#include <gpu/GPUConfig.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <PerfStat.h>
#include <RenderArgs.h>
#include <ViewFrustum.h>
#include <gpu/Context.h>
#include "FramebufferCache.h"
#include "DeferredLightingEffect.h"
#include "TextureCache.h"
#include "HitEffect.h"
#include "render/DrawStatus.h"
#include "AmbientOcclusionEffect.h"
@ -78,6 +78,7 @@ RenderDeferredTask::RenderDeferredTask() : Task() {
_jobs.push_back(Job(new DepthSortItems::JobModel("DepthSortOpaque", _jobs.back().getOutput())));
auto& renderedOpaques = _jobs.back().getOutput();
_jobs.push_back(Job(new DrawOpaqueDeferred::JobModel("DrawOpaqueDeferred", _jobs.back().getOutput())));
_jobs.push_back(Job(new DrawLight::JobModel("DrawLight")));
_jobs.push_back(Job(new RenderDeferred::JobModel("RenderDeferred")));
_jobs.push_back(Job(new ResolveDeferred::JobModel("ResolveDeferred")));
@ -105,8 +106,10 @@ RenderDeferredTask::RenderDeferredTask() : Task() {
_drawStatusJobIndex = _jobs.size() - 1;
_jobs.push_back(Job(new DrawOverlay3D::JobModel("DrawOverlay3D")));
_jobs.push_back(Job(new HitEffect::JobModel("HitEffect")));
_jobs.back().setEnabled(false);
_drawHitEffectJobIndex = _jobs.size() -1;
_jobs.push_back(Job(new ResetGLState::JobModel()));
// Give ourselves 3 frames of timer queries
_timerQueries.push_back(std::make_shared<gpu::Query>());
@ -133,6 +136,10 @@ void RenderDeferredTask::run(const SceneContextPointer& sceneContext, const Rend
// Make sure we turn the displayItemStatus on/off
setDrawItemStatus(renderContext->_drawItemStatus);
// Make sure we display the hit effect on screen, as desired from a script
setDrawHitEffect(renderContext->_drawHitEffect);
// TODO: turn on/off AO through menu item
setOcclusionStatus(renderContext->_occlusionStatus);


@ -78,9 +78,13 @@ public:
render::Jobs _jobs;
int _drawStatusJobIndex = -1;
int _drawHitEffectJobIndex = -1;
void setDrawItemStatus(bool draw) { if (_drawStatusJobIndex >= 0) { _jobs[_drawStatusJobIndex].setEnabled(draw); } }
bool doDrawItemStatus() const { if (_drawStatusJobIndex >= 0) { return _jobs[_drawStatusJobIndex].isEnabled(); } else { return false; } }
void setDrawHitEffect(bool draw) { if (_drawHitEffectJobIndex >= 0) { _jobs[_drawHitEffectJobIndex].setEnabled(draw); } }
bool doDrawHitEffect() const { if (_drawHitEffectJobIndex >=0) { return _jobs[_drawHitEffectJobIndex].isEnabled(); } else { return false; } }
int _occlusionJobIndex = -1;
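The hit-effect toggle reuses the pattern already used for the item-status job: the task records the index of the job it just pushed and, on every run(), copies a flag from the RenderContext into Job::setEnabled(). A stripped-down sketch of that flow with all the engine types reduced to stand-ins (these names are placeholders, not the real render:: API):

#include <cstdio>
#include <string>
#include <vector>

struct Job {
    std::string name;
    bool enabled;
    void setEnabled(bool on) { enabled = on; }
    void run() const { if (enabled) printf("running %s\n", name.c_str()); }
};

struct RenderContext { bool drawHitEffect = false; };

struct Task {
    std::vector<Job> jobs;
    int hitEffectJobIndex = -1;

    Task() {
        jobs.push_back({"DrawOpaqueDeferred", true});
        jobs.push_back({"HitEffect", false});       // pushed disabled, like the commit
        hitEffectJobIndex = (int)jobs.size() - 1;
        jobs.push_back({"ResolveDeferred", true});
    }

    void run(const RenderContext& ctx) {
        // Same idea as setDrawHitEffect(renderContext->_drawHitEffect) in RenderDeferredTask::run().
        if (hitEffectJobIndex >= 0) {
            jobs[hitEffectJobIndex].setEnabled(ctx.drawHitEffect);
        }
        for (const Job& job : jobs) { job.run(); }
    }
};

int main() {
    Task task;
    RenderContext ctx;
    task.run(ctx);              // HitEffect is skipped
    ctx.drawHitEffect = true;   // e.g. after a script called setEngineDisplayHitEffect(true)
    task.run(ctx);              // HitEffect now runs
    return 0;
}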


@ -16,16 +16,15 @@
#include <glm/glm.hpp>
#include <glm/gtc/random.hpp>
#include <gpu/Batch.h>
#include <gpu/GLBackend.h>
#include <gpu/GPUConfig.h>
#include <QNetworkReply>
#include <QPainter>
#include <QRunnable>
#include <QThreadPool>
#include <qimagereader.h>
#include <gpu/Batch.h>
#include "RenderUtilsLogging.h"


@ -18,6 +18,9 @@
<$declareStandardTransform()$>
// Based on NVidia HBAO implementation in D3D11
// http://www.nvidia.co.uk/object/siggraph-2008-HBAO.html
varying vec2 varTexcoord;
uniform sampler2D depthTexture;
@ -30,80 +33,213 @@ uniform float g_intensity;
uniform float bufferWidth;
uniform float bufferHeight;
#define SAMPLE_COUNT 4
const float PI = 3.14159265;
float getRandom(vec2 uv) {
const vec2 FocalLen = vec2(1.0, 1.0);
const vec2 LinMAD = vec2(0.1-10.0, 0.1+10.0) / (2.0*0.1*10.0);
const vec2 AORes = vec2(1024.0, 768.0);
const vec2 InvAORes = vec2(1.0/1024.0, 1.0/768.0);
const vec2 NoiseScale = vec2(1024.0, 768.0) / 4.0;
const float AOStrength = 1.9;
const float R = 0.3;
const float R2 = 0.3*0.3;
const float NegInvR2 = - 1.0 / (0.3*0.3);
const float TanBias = tan(30.0 * PI / 180.0);
const float MaxRadiusPixels = 50.0;
const int NumDirections = 6;
const int NumSamples = 4;
float ViewSpaceZFromDepth(float d){
// [0,1] -> [-1,1] clip space
d = d * 2.0 - 1.0;
// Get view space Z
return -1.0 / (LinMAD.x * d + LinMAD.y);
}
vec3 UVToViewSpace(vec2 uv, float z){
//uv = UVToViewA * uv + UVToViewB;
return vec3(uv * z, z);
}
vec3 GetViewPos(vec2 uv){
float z = ViewSpaceZFromDepth(texture2D(depthTexture, uv).r);
return UVToViewSpace(uv, z);
}
vec3 GetViewPosPoint(ivec2 uv){
vec2 coord = vec2(gl_FragCoord.xy) + uv;
//float z = texelFetch(texture0, coord, 0).r;
float z = texture2D(depthTexture, uv).r;
return UVToViewSpace(uv, z);
}
float TanToSin(float x){
return x * inversesqrt(x*x + 1.0);
}
float InvLength(vec2 V){
return inversesqrt(dot(V,V));
}
float Tangent(vec3 V){
return V.z * InvLength(V.xy);
}
float BiasedTangent(vec3 V){
return V.z * InvLength(V.xy) + TanBias;
}
float Tangent(vec3 P, vec3 S){
return -(P.z - S.z) * InvLength(S.xy - P.xy);
}
float Length2(vec3 V){
return dot(V,V);
}
vec3 MinDiff(vec3 P, vec3 Pr, vec3 Pl){
vec3 V1 = Pr - P;
vec3 V2 = P - Pl;
return (Length2(V1) < Length2(V2)) ? V1 : V2;
}
vec2 SnapUVOffset(vec2 uv){
return round(uv * AORes) * InvAORes;
}
float Falloff(float d2){
return d2 * NegInvR2 + 1.0f;
}
float HorizonOcclusion( vec2 deltaUV, vec3 P, vec3 dPdu, vec3 dPdv, float randstep, float numSamples){
float ao = 0;
// Offset the first coord with some noise
vec2 uv = varTexcoord + SnapUVOffset(randstep*deltaUV);
deltaUV = SnapUVOffset( deltaUV );
// Calculate the tangent vector
vec3 T = deltaUV.x * dPdu + deltaUV.y * dPdv;
// Get the angle of the tangent vector from the viewspace axis
float tanH = BiasedTangent(T);
float sinH = TanToSin(tanH);
float tanS;
float d2;
vec3 S;
// Sample to find the maximum angle
for(float s = 1; s <= numSamples; ++s){
uv += deltaUV;
S = GetViewPos(uv);
tanS = Tangent(P, S);
d2 = Length2(S - P);
// Is the sample within the radius and the angle greater?
if(d2 < R2 && tanS > tanH)
{
float sinS = TanToSin(tanS);
// Apply falloff based on the distance
ao += Falloff(d2) * (sinS - sinH);
tanH = tanS;
sinH = sinS;
}
}
return ao;
}
vec2 RotateDirections(vec2 Dir, vec2 CosSin){
return vec2(Dir.x*CosSin.x - Dir.y*CosSin.y, Dir.x*CosSin.y + Dir.y*CosSin.x);
}
void ComputeSteps(inout vec2 stepSizeUv, inout float numSteps, float rayRadiusPix, float rand){
// Avoid oversampling if numSteps is greater than the kernel radius in pixels
numSteps = min(NumSamples, rayRadiusPix);
// Divide by Ns+1 so that the farthest samples are not fully attenuated
float stepSizePix = rayRadiusPix / (numSteps + 1);
// Clamp numSteps if it is greater than the max kernel footprint
float maxNumSteps = MaxRadiusPixels / stepSizePix;
if (maxNumSteps < numSteps)
{
// Use dithering to avoid AO discontinuities
numSteps = floor(maxNumSteps + rand);
numSteps = max(numSteps, 1);
stepSizePix = MaxRadiusPixels / numSteps;
}
// Step size in uv space
stepSizeUv = stepSizePix * InvAORes;
}
float getRandom(vec2 uv){
return fract(sin(dot(uv.xy ,vec2(12.9898,78.233))) * 43758.5453);
}
void main(void) {
vec3 sampleKernel[4] = { vec3(0.2, 0.0, 0.0),
vec3(0.0, 0.2, 0.0),
vec3(0.0, 0.0, 0.2),
vec3(0.2, 0.2, 0.2) };
void main(void){
float numDirections = NumDirections;
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
vec3 P, Pr, Pl, Pt, Pb;
P = GetViewPos(varTexcoord);
vec3 eyeDir = vec3(0.0, 0.0, -3.0);
vec3 cameraPositionWorldSpace;
<$transformEyeToWorldDir(cam, eyeDir, cameraPositionWorldSpace)$>
// Sample neighboring pixels
Pr = GetViewPos(varTexcoord + vec2( InvAORes.x, 0));
Pl = GetViewPos(varTexcoord + vec2(-InvAORes.x, 0));
Pt = GetViewPos(varTexcoord + vec2( 0, InvAORes.y));
Pb = GetViewPos(varTexcoord + vec2( 0,-InvAORes.y));
vec4 depthColor = texture2D(depthTexture, varTexcoord);
// Calculate tangent basis vectors using the minimum difference
vec3 dPdu = MinDiff(P, Pr, Pl);
vec3 dPdv = MinDiff(P, Pt, Pb) * (AORes.y * InvAORes.x);
// z in non linear range [0,1]
float depthVal = depthColor.r;
// conversion into NDC [-1,1]
float zNDC = depthVal * 2.0 - 1.0;
float n = 1.0; // the near plane
float f = 30.0; // the far plane
float l = -1.0; // left
float r = 1.0; // right
float b = -1.0; // bottom
float t = 1.0; // top
// conversion into eye space
float zEye = 2*f*n / (zNDC*(f-n)-(f+n));
// Converting from pixel coordinates to NDC
float xNDC = gl_FragCoord.x/bufferWidth * 2.0 - 1.0;
float yNDC = gl_FragCoord.y/bufferHeight * 2.0 - 1.0;
// Unprojecting X and Y from NDC to eye space
float xEye = -zEye*(xNDC*(r-l)+(r+l))/(2.0*n);
float yEye = -zEye*(yNDC*(t-b)+(t+b))/(2.0*n);
vec3 currentFragEyeSpace = vec3(xEye, yEye, zEye);
vec3 currentFragWorldSpace;
<$transformEyeToWorldDir(cam, currentFragEyeSpace, currentFragWorldSpace)$>
vec3 cameraToPositionRay = normalize(currentFragWorldSpace - cameraPositionWorldSpace);
vec3 origin = cameraToPositionRay * depthVal + cameraPositionWorldSpace;
vec3 normal = normalize(texture2D(normalTexture, varTexcoord).xyz);
//normal = normalize(normal * normalMatrix);
vec3 rvec = vec3(getRandom(varTexcoord.xy), getRandom(varTexcoord.yx), getRandom(varTexcoord.xx)) * 2.0 - 1.0;
vec3 tangent = normalize(rvec - normal * dot(rvec, normal));
vec3 bitangent = cross(normal, tangent);
mat3 tbn = mat3(tangent, bitangent, normal);
float occlusion = 0.0;
for (int i = 0; i < SAMPLE_COUNT; ++i) {
vec3 samplePos = origin + (tbn * sampleKernel[i]) * g_sample_rad;
vec4 offset = cam._projectionViewUntranslated * vec4(samplePos, 1.0);
offset.xy = (offset.xy / offset.w) * 0.5 + 0.5;
float depth = length(samplePos - cameraPositionWorldSpace);
float sampleDepthVal = texture2D(depthTexture, offset.xy).r;
float rangeDelta = abs(depthVal - sampleDepthVal);
float rangeCheck = smoothstep(0.0, 1.0, g_sample_rad / rangeDelta);
occlusion += rangeCheck * step(sampleDepthVal, depth);
}
occlusion = 1.0 - occlusion / float(SAMPLE_COUNT);
occlusion = clamp(pow(occlusion, g_intensity), 0.0, 1.0);
gl_FragColor = vec4(vec3(occlusion), 1.0);
}
// Get the random samples from the noise function
vec3 random = vec3(getRandom(varTexcoord.xy), getRandom(varTexcoord.yx), getRandom(varTexcoord.xx));
// Calculate the projected size of the hemisphere
vec2 rayRadiusUV = 0.5 * R * FocalLen / -P.z;
float rayRadiusPix = rayRadiusUV.x * AORes.x;
float ao = 1.0;
// Make sure the radius of the evaluated hemisphere is more than a pixel
if(rayRadiusPix > 1.0){
ao = 0.0;
float numSteps;
vec2 stepSizeUV;
// Compute the number of steps
ComputeSteps(stepSizeUV, numSteps, rayRadiusPix, random.z);
float alpha = 2.0 * PI / numDirections;
// Calculate the horizon occlusion of each direction
for(float d = 0; d < numDirections; ++d){
float theta = alpha * d;
// Apply noise to the direction
vec2 dir = RotateDirections(vec2(cos(theta), sin(theta)), random.xy);
vec2 deltaUV = dir * stepSizeUV;
// Sample the pixels along the direction
ao += HorizonOcclusion( deltaUV,
P,
dPdu,
dPdv,
random.z,
numSteps);
}
// Average the results and produce the final AO
ao = 1.0 - ao / numDirections * AOStrength;
}
gl_FragColor = vec4(vec3(ao), 1.0);
}
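The rewritten shader is essentially NVIDIA's horizon-based AO: LinMAD packs the near/far pair (0.1, 10.0) so ViewSpaceZFromDepth() can linearize a [0,1] depth with one multiply-add, and then for each of NumDirections screen directions the shader marches numSteps samples, keeps the highest horizon angle seen so far, and adds Falloff(d2) * (sin(h_new) - sin(h_old)) whenever a sample inside radius R raises the horizon. A compact CPU-side sketch of that inner accumulation, with the depth-texture fetches replaced by made-up sample values:

#include <cmath>
#include <cstdio>
#include <utility>
#include <vector>

int main() {
    const float PI = 3.14159265f;
    const float R2 = 0.3f * 0.3f;            // same radius as the shader
    const float negInvR2 = -1.0f / R2;
    auto falloff  = [&](float d2) { return d2 * negInvR2 + 1.0f; };
    auto tanToSin = [](float t)   { return t / std::sqrt(t * t + 1.0f); };

    // Horizon starts at the biased surface tangent (TanBias = tan(30 degrees)).
    float tanH = std::tan(30.0f * PI / 180.0f);
    float sinH = tanToSin(tanH);

    // Fabricated samples along one march direction: (squared distance, tangent angle).
    std::vector<std::pair<float, float>> samples = {
        {0.01f, 0.4f}, {0.04f, 0.9f}, {0.08f, 0.7f}, {0.12f, 1.5f}
    };

    float ao = 0.0f;
    for (const auto& s : samples) {
        float d2 = s.first, tanS = s.second;
        if (d2 < R2 && tanS > tanH) {          // inside the radius and raises the horizon
            float sinS = tanToSin(tanS);
            ao += falloff(d2) * (sinS - sinH); // distance-attenuated slice of occlusion
            tanH = tanS;
            sinH = sinS;
        }
    }
    printf("occlusion from this direction: %.3f\n", ao); // ~0.09 for these samples
    // The shader repeats this per direction and finishes with
    // ao = 1.0 - ao / numDirections * AOStrength.
    return 0;
}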


@ -0,0 +1,24 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// hit_effect.frag
// fragment shader
//
// Created by Eric Levin on 7/20
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include DeferredBufferWrite.slh@>
varying vec2 varQuadPosition;
void main(void) {
vec2 center = vec2(0.0, 0.0);
float distFromCenter = distance( vec2(0.0, 0.0), varQuadPosition);
float alpha = mix(0.0, 0.5, pow(distFromCenter,5.));
gl_FragColor = vec4(1.0, 0.0, 0.0, alpha);
}
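Since the pipeline blends with SRC_ALPHA / INV_SRC_ALPHA, this alpha curve is what shapes the effect: mix(0.0, 0.5, pow(dist, 5.0)) is just 0.5 * dist^5, so the red tint is effectively invisible near the centre of the screen and ramps up sharply toward the edges; in the corners, where the quad position reaches length ~1.41, the value exceeds 1 and is clamped when written to a normalized colour target. A quick evaluation of the curve:

#include <cmath>
#include <cstdio>

int main() {
    // hit_effect.frag: alpha = mix(0.0, 0.5, pow(distFromCenter, 5.0)) == 0.5 * dist^5
    const float dists[] = { 0.0f, 0.25f, 0.5f, 0.75f, 1.0f, 1.4142f };
    for (float dist : dists) {
        float alpha = 0.5f * std::pow(dist, 5.0f);
        printf("dist %.4f -> alpha %.4f%s\n", dist, alpha,
               alpha > 1.0f ? "  (clamped to 1.0 on write)" : "");
    }
    return 0;
}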


@ -0,0 +1,24 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// hit_effect.vert
// vertex shader
//
// Created by Eric Levin on 7/20/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
varying vec2 varQuadPosition;
void main(void) {
varQuadPosition = gl_Vertex.xy;
gl_Position = gl_Vertex;
}


@ -21,9 +21,6 @@ uniform sampler2D blurredOcclusionTexture;
void main(void) {
vec4 occlusionColor = texture2D(blurredOcclusionTexture, varTexcoord);
if(occlusionColor.r > 0.8 && occlusionColor.r <= 1.0) {
gl_FragColor = vec4(vec3(0.0), 0.0);
} else {
gl_FragColor = vec4(vec3(occlusionColor.r), 1.0);
}
gl_FragColor = vec4(vec3(0.0), occlusionColor.r);
}


@ -15,15 +15,11 @@
#include <assert.h>
#include <PerfStat.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <gpu/GPUConfig.h>
#include <gpu/GPULogging.h>
#include <ViewFrustum.h>
#include <RenderArgs.h>
#include <gpu/Context.h>
#include "drawItemBounds_vert.h"
#include "drawItemBounds_frag.h"
#include "drawItemStatus_vert.h"
@ -152,17 +148,17 @@ void DrawStatus::run(const SceneContextPointer& sceneContext, const RenderContex
const unsigned int VEC3_ADRESS_OFFSET = 3;
for (int i = 0; i < nbItems; i++) {
batch._glUniform3fv(_drawItemBoundPosLoc, 1, (const GLfloat*) (itemAABox + i));
batch._glUniform3fv(_drawItemBoundDimLoc, 1, ((const GLfloat*) (itemAABox + i)) + VEC3_ADRESS_OFFSET);
batch._glUniform3fv(_drawItemBoundPosLoc, 1, (const float*) (itemAABox + i));
batch._glUniform3fv(_drawItemBoundDimLoc, 1, ((const float*) (itemAABox + i)) + VEC3_ADRESS_OFFSET);
batch.draw(gpu::LINES, 24, 0);
}
batch.setPipeline(getDrawItemStatusPipeline());
for (int i = 0; i < nbItems; i++) {
batch._glUniform3fv(_drawItemStatusPosLoc, 1, (const GLfloat*) (itemAABox + i));
batch._glUniform3fv(_drawItemStatusDimLoc, 1, ((const GLfloat*) (itemAABox + i)) + VEC3_ADRESS_OFFSET);
batch._glUniform4iv(_drawItemStatusValueLoc, 1, (const GLint*) (itemStatus + i));
batch._glUniform3fv(_drawItemStatusPosLoc, 1, (const float*) (itemAABox + i));
batch._glUniform3fv(_drawItemStatusDimLoc, 1, ((const float*) (itemAABox + i)) + VEC3_ADRESS_OFFSET);
batch._glUniform4iv(_drawItemStatusValueLoc, 1, (const int*) (itemStatus + i));
batch.draw(gpu::TRIANGLES, 24, 0);
}
@ -171,4 +167,4 @@ void DrawStatus::run(const SceneContextPointer& sceneContext, const RenderContex
args->_context->syncCache();
renderContext->args->_context->syncCache();
args->_context->render((batch));
}
}


@ -14,12 +14,10 @@
#include <algorithm>
#include <assert.h>
#include <gpu/Batch.h>
#include <gpu/Context.h>
#include <gpu/GPUConfig.h>
#include <PerfStat.h>
#include <RenderArgs.h>
#include <ViewFrustum.h>
#include <gpu/Context.h>
using namespace render;
@ -216,46 +214,6 @@ void render::renderItems(const SceneContextPointer& sceneContext, const RenderCo
}
}
void addClearStateCommands(gpu::Batch& batch) {
batch._glDepthMask(true);
batch._glDepthFunc(GL_LESS);
batch._glDisable(GL_CULL_FACE);
batch._glActiveTexture(GL_TEXTURE0 + 1);
batch._glBindTexture(GL_TEXTURE_2D, 0);
batch._glActiveTexture(GL_TEXTURE0 + 2);
batch._glBindTexture(GL_TEXTURE_2D, 0);
batch._glActiveTexture(GL_TEXTURE0 + 3);
batch._glBindTexture(GL_TEXTURE_2D, 0);
batch._glActiveTexture(GL_TEXTURE0);
batch._glBindTexture(GL_TEXTURE_2D, 0);
// deactivate vertex arrays after drawing
batch._glDisableClientState(GL_NORMAL_ARRAY);
batch._glDisableClientState(GL_VERTEX_ARRAY);
batch._glDisableClientState(GL_TEXTURE_COORD_ARRAY);
batch._glDisableClientState(GL_COLOR_ARRAY);
batch._glDisableVertexAttribArray(gpu::Stream::TANGENT);
batch._glDisableVertexAttribArray(gpu::Stream::SKIN_CLUSTER_INDEX);
batch._glDisableVertexAttribArray(gpu::Stream::SKIN_CLUSTER_WEIGHT);
// bind with 0 to switch back to normal operation
batch._glBindBuffer(GL_ARRAY_BUFFER, 0);
batch._glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
batch._glBindTexture(GL_TEXTURE_2D, 0);
// Back to no program
batch._glUseProgram(0);
}
void ResetGLState::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
gpu::Batch theBatch;
addClearStateCommands(theBatch);
assert(renderContext->args);
renderContext->args->_context->render(theBatch);
}
void DrawLight::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
assert(renderContext->args);
assert(renderContext->args->_viewFrustum);


@ -260,14 +260,6 @@ public:
typedef Job::Model<DrawBackground> JobModel;
};
class ResetGLState {
public:
void run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext);
typedef Job::Model<ResetGLState> JobModel;
};
class DrawSceneTask : public Task {
public:


@ -50,6 +50,7 @@ public:
int _maxDrawnOverlay3DItems = -1;
bool _drawItemStatus = false;
bool _drawHitEffect = false;
bool _occlusionStatus = false;


@ -109,6 +109,9 @@ public:
Q_INVOKABLE void setEngineDisplayItemStatus(bool display) { _drawItemStatus = display; }
Q_INVOKABLE bool doEngineDisplayItemStatus() { return _drawItemStatus; }
Q_INVOKABLE void setEngineDisplayHitEffect(bool display) { _drawHitEffect = display; }
Q_INVOKABLE bool doEngineDisplayHitEffect() { return _drawHitEffect; }
signals:
void shouldRenderAvatarsChanged(bool shouldRenderAvatars);
@ -141,6 +144,8 @@ protected:
int _maxDrawnOverlay3DItems = -1;
bool _drawItemStatus = false;
bool _drawHitEffect = false;
};


@ -73,7 +73,7 @@ void RigTests::initTestCase() {
}
_rig = std::make_shared<AvatarRig>();
_rig->initJointStates(jointStates, glm::mat4(), geometry.neckJointIndex);
_rig->initJointStates(jointStates, glm::mat4());
std::cout << "Rig is ready " << geometry.joints.count() << " joints " << std::endl;
}


@ -9,13 +9,16 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "BulletUtilTests.h"
#include <iostream>
//#include "PhysicsTestUtil.h"
#include <BulletUtil.h>
#include <NumericalConstants.h>
#include "BulletUtilTests.h"
// Add additional qtest functionality (the include order is important!)
#include "GlmTestUtils.h"
#include "../QTestExtensions.h"
// Constants
const glm::vec3 origin(0.0f);


@ -13,9 +13,6 @@
#define hifi_BulletUtilTests_h
#include <QtTest/QtTest>
// Add additional qtest functionality (the include order is important!)
#include "GlmTestUtils.h"
#include "../QTestExtensions.h"
class BulletUtilTests : public QObject {
Q_OBJECT


@ -9,11 +9,15 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "MeshMassPropertiesTests.h"
#include <iostream>
#include <string>
#include <MeshMassProperties.h>
#include "MeshMassPropertiesTests.h"
// Add additional qtest functionality (the include order is important!)
#include "BulletTestUtils.h"
#include "GlmTestUtils.h"
#include "../QTestExtensions.h"
const btScalar acceptableRelativeError(1.0e-5f);
const btScalar acceptableAbsoluteError(1.0e-4f);


@ -13,12 +13,6 @@
#define hifi_MeshMassPropertiesTests_h
#include <QtTest/QtTest>
#include <QtGlobal>
// Add additional qtest functionality (the include order is important!)
#include "BulletTestUtils.h"
#include "GlmTestUtils.h"
#include "../QTestExtensions.h"
// Relative error macro (see errorTest in BulletTestUtils.h)
#define QCOMPARE_WITH_RELATIVE_ERROR(actual, expected, relativeError) \

File diff suppressed because it is too large


@ -0,0 +1,53 @@
//
// ShapeColliderTests.h
// tests/physics/src
//
// Created by Andrew Meadows on 02/21/2014.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_ShapeColliderTests_h
#define hifi_ShapeColliderTests_h
#include <QtTest/QtTest>
class ShapeColliderTests : public QObject {
Q_OBJECT
private slots:
void initTestCase();
void sphereMissesSphere();
void sphereTouchesSphere();
void sphereMissesCapsule();
void sphereTouchesCapsule();
void capsuleMissesCapsule();
void capsuleTouchesCapsule();
void sphereTouchesAACubeFaces();
void sphereTouchesAACubeEdges();
void sphereTouchesAACubeCorners();
void sphereMissesAACube();
void capsuleMissesAACube();
void capsuleTouchesAACube();
void rayHitsSphere();
void rayBarelyHitsSphere();
void rayBarelyMissesSphere();
void rayHitsCapsule();
void rayMissesCapsule();
void rayHitsPlane();
void rayMissesPlane();
void rayHitsAACube();
void rayMissesAACube();
void measureTimeOfCollisionDispatch();
};
#endif // hifi_ShapeColliderTests_h


@ -8,30 +8,35 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <gpu/GPUConfig.h>
#include <QWindow>
#include <QFile>
#include <QTime>
#include <QImage>
#include <QTimer>
#include <QElapsedTimer>
#include <QOpenGLContext>
#include <QOpenGLBuffer>
#include <QOpenGLShaderProgram>
#include <QResizeEvent>
#include <QLoggingCategory>
#include <QOpenGLTexture>
#include <QOpenGLVertexArrayObject>
#include <QApplication>
#include <QOpenGLDebugLogger>
#include <unordered_map>
#include <memory>
#include <glm/glm.hpp>
#include <PathUtils.h>
#include <QApplication>
#include <QDir>
#include <QElapsedTimer>
#include <QFile>
#include <QImage>
#include <QLoggingCategory>
#include <gpu/Context.h>
#include <gpu/GLBackend.h>
#include <QOpenGLBuffer>
#include <QOpenGLContext>
#include <QOpenGLDebugLogger>
#include <QOpenGLShaderProgram>
#include <QOpenGLTexture>
#include <QOpenGLVertexArrayObject>
#include <QResizeEvent>
#include <QTime>
#include <QTimer>
#include <QWindow>
#include <PathUtils.h>
class RateCounter {
std::vector<float> times;
@ -119,6 +124,10 @@ public:
show();
makeCurrent();
gpu::Context::init<gpu::GLBackend>();
{
QOpenGLDebugLogger* logger = new QOpenGLDebugLogger(this);
logger->initialize(); // initializes in the current context, i.e. ctx
@ -130,23 +139,6 @@ public:
}
qDebug() << (const char*)glGetString(GL_VERSION);
#ifdef WIN32
glewExperimental = true;
GLenum err = glewInit();
if (GLEW_OK != err) {
/* Problem: glewInit failed, something is seriously wrong. */
const GLubyte * errStr = glewGetErrorString(err);
qDebug("Error: %s\n", errStr);
}
qDebug("Status: Using GLEW %s\n", glewGetString(GLEW_VERSION));
if (wglewGetExtension("WGL_EXT_swap_control")) {
int swapInterval = wglGetSwapIntervalEXT();
qDebug("V-Sync is %s\n", (swapInterval > 0 ? "ON" : "OFF"));
}
glGetError();
#endif
//_textRenderer[0] = TextRenderer::getInstance(SANS_FONT_FAMILY, 12, false);
//_textRenderer[1] = TextRenderer::getInstance(SERIF_FONT_FAMILY, 12, false,
// TextRenderer::SHADOW_EFFECT);
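With the GLEW/WGL block gone, the only diagnostics hook left in this test is the QOpenGLDebugLogger created above; by itself initialize() does nothing visible until the logger is asked to emit messages. A minimal sketch of the usual Qt hook-up (standard Qt 5 API, separate from this commit's code):

#include <QGuiApplication>
#include <QOpenGLContext>
#include <QOpenGLDebugLogger>
#include <QOpenGLDebugMessage>
#include <QSurfaceFormat>
#include <QWindow>
#include <QDebug>

int main(int argc, char** argv) {
    QGuiApplication app(argc, argv);

    QSurfaceFormat format;
    format.setOption(QSurfaceFormat::DebugContext);   // the logger needs a debug context

    QWindow window;
    window.setSurfaceType(QSurface::OpenGLSurface);
    window.setFormat(format);
    window.create();

    QOpenGLContext context;
    context.setFormat(format);
    context.create();
    context.makeCurrent(&window);

    QOpenGLDebugLogger logger;
    if (logger.initialize()) {                        // requires GL_KHR_debug
        QObject::connect(&logger, &QOpenGLDebugLogger::messageLogged,
                         [](const QOpenGLDebugMessage& message) { qDebug() << message; });
        logger.startLogging(QOpenGLDebugLogger::SynchronousLogging);
    }
    // ... issue GL work here; driver warnings and errors arrive through the lambda ...
    return 0;
}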


@ -16,6 +16,7 @@
#include <StreamUtils.h>
#include "AngularConstraintTests.h"
#include "../QTestExtensions.h"
// Computes the error value between two quaternions (using glm::dot)
float getErrorDifference(const glm::quat& a, const glm::quat& b) {


@ -12,6 +12,7 @@
#ifndef hifi_AngularConstraintTests_h
#define hifi_AngularConstraintTests_h
#include <glm/glm.hpp>
#include <QtTest/QtTest>
class AngularConstraintTests : public QObject {
@ -21,10 +22,6 @@ private slots:
void testConeRollerConstraint();
};
// Use QCOMPARE_WITH_ABS_ERROR and define it for glm::quat
#include <glm/glm.hpp>
float getErrorDifference (const glm::quat& a, const glm::quat& b);
QTextStream & operator << (QTextStream& stream, const glm::quat& q);
#include "../QTestExtensions.h"
float getErrorDifference(const glm::quat& a, const glm::quat& b);
#endif // hifi_AngularConstraintTests_h


@ -16,6 +16,7 @@
#include <limits>
#include <qqueue.h>
#include <../QTestExtensions.h>
QTEST_MAIN(MovingPercentileTests)


@ -13,7 +13,6 @@
#define hifi_MovingPercentileTests_h
#include <QtTest/QtTest>
#include <../QTestExtensions.h>
class MovingPercentileTests : public QObject {
Q_OBJECT

Some files were not shown because too many files have changed in this diff