Merge branch 'master' of https://github.com/highfidelity/hifi into rig

Howard Stearns 2015-07-24 14:08:51 -07:00
commit 1663d095a1
44 changed files with 518 additions and 229 deletions

View file

@ -35,8 +35,32 @@
function onRowClicked(e) {
var id = this.dataset.entityId;
var selection = [this.dataset.entityId];
if (e.shiftKey) {
if (e.ctrlKey) {
selection = selection.concat(selectedEntities);
} else if (e.shiftKey && selectedEntities.length > 0) {
var previousItemFound = -1;
var clickedItemFound = -1;
for (var i in entityList.visibleItems) {
if (clickedItemFound === -1 && this.dataset.entityId == entityList.visibleItems[i].values().id) {
clickedItemFound = i;
} else if(previousItemFound === -1 && selectedEntities[0] == entityList.visibleItems[i].values().id) {
previousItemFound = i;
}
}
if (previousItemFound !== -1 && clickedItemFound !== -1) {
var betweenItems = [];
var toItem = Math.max(previousItemFound, clickedItemFound);
// skip the first and last items in this loop; we add them to the selection after the loop
for (var i = (Math.min(previousItemFound, clickedItemFound) + 1); i < toItem; i++) {
entityList.visibleItems[i].elm.className = 'selected';
betweenItems.push(entityList.visibleItems[i].values().id);
}
if (previousItemFound > clickedItemFound) {
// always make sure that we add the items in the right order
betweenItems.reverse();
}
selection = selection.concat(betweenItems, selectedEntities);
}
}
selectedEntities = selection;
@ -151,6 +175,17 @@
refreshEntities();
}
document.addEventListener("keydown", function (e) {
if (e.target.nodeName === "INPUT") {
return;
}
var keyCode = e.keyCode;
if (keyCode === 46) {
EventBridge.emitWebEvent(JSON.stringify({ type: 'delete' }));
refreshEntities();
}
}, false);
if (window.EventBridge !== undefined) {
EventBridge.scriptEventReceived.connect(function(data) {
data = JSON.parse(data);

View file

@ -875,6 +875,30 @@ void Application::paintGL() {
PerformanceWarning warn(showWarnings, "Application::paintGL()");
resizeGL();
// Before anything else, sync up the gpuContext with the true GL context in use, in case anything has changed
renderArgs._context->syncCache();
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
auto primaryFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebufferDepthColor();
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
renderRearViewMirror(&renderArgs, _mirrorViewRect);
renderArgs._renderMode = RenderArgs::DEFAULT_RENDER_MODE;
{
float ratio = ((float)QApplication::desktop()->windowHandle()->devicePixelRatio() * getRenderResolutionScale());
auto mirrorViewport = glm::ivec4(0, 0, _mirrorViewRect.width() * ratio, _mirrorViewRect.height() * ratio);
auto mirrorViewportDest = mirrorViewport;
auto selfieFbo = DependencyManager::get<FramebufferCache>()->getSelfieFramebuffer();
gpu::Batch batch;
batch.setFramebuffer(selfieFbo);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0f, 0.0f, 0.0f, 0.0f));
batch.blit(primaryFbo, mirrorViewport, selfieFbo, mirrorViewportDest);
batch.setFramebuffer(nullptr);
renderArgs._context->render(batch);
}
}
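A quick worked example of the viewport scaling above (the numbers are illustrative, not taken from the diff):
// On a 2x (retina) display with a render resolution scale of 0.75, a 200x150
// logical mirror rect maps to a 300x225 pixel region of the selfie framebuffer.
float ratio = 2.0f * 0.75f;                                          // devicePixelRatio * renderResolutionScale
glm::ivec4 mirrorViewport(0, 0, int(200 * ratio), int(150 * ratio)); // (0, 0, 300, 225)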
{
PerformanceTimer perfTimer("renderOverlay");
@ -892,6 +916,9 @@ void Application::paintGL() {
Application::getInstance()->cameraMenuChanged();
}
// The render mode is DEFAULT, or MIRROR if the camera is in mirror mode; it is assigned further below
renderArgs._renderMode = RenderArgs::DEFAULT_RENDER_MODE;
if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {
// Always use the default eye position, not the actual head eye position.
// Using the latter will cause the camera to wobble with idle animations,
@ -928,6 +955,7 @@ void Application::paintGL() {
glm::vec3(0, _raiseMirror * _myAvatar->getScale(), 0) +
(_myAvatar->getOrientation() * glm::quat(glm::vec3(0.0f, _rotateMirror, 0.0f))) *
glm::vec3(0.0f, 0.0f, -1.0f) * MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
}
// Update camera position
@ -935,12 +963,6 @@ void Application::paintGL() {
_myCamera.update(1.0f / _fps);
}
// Sync up the View Frustum with the camera
loadViewFrustum(_myCamera, _viewFrustum);
renderArgs._renderMode = RenderArgs::DEFAULT_RENDER_MODE;
if (OculusManager::isConnected()) {
//When in mirror mode, use camera rotation. Otherwise, use body rotation
if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
@ -952,47 +974,28 @@ void Application::paintGL() {
TV3DManager::display(&renderArgs, _myCamera);
} else {
PROFILE_RANGE(__FUNCTION__ "/mainRender");
// Viewport is assigned to the size of the framebuffer
QSize size = DependencyManager::get<FramebufferCache>()->getFrameBufferSize();
renderArgs._viewport = glm::ivec4(0, 0, size.width(), size.height());
{
gpu::Batch batch;
auto primaryFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer();
batch.setFramebuffer(primaryFbo);
// clear the normal and specular buffers
batch.clearFramebuffer(
gpu::Framebuffer::BUFFER_COLOR0 |
gpu::Framebuffer::BUFFER_COLOR1 |
gpu::Framebuffer::BUFFER_COLOR2 |
gpu::Framebuffer::BUFFER_DEPTH,
vec4(vec3(0), 1), 1.0, 0.0);
// Viewport is assigned to the size of the framebuffer
QSize size = DependencyManager::get<FramebufferCache>()->getFrameBufferSize();
renderArgs._viewport = glm::ivec4(0, 0, size.width(), size.height());
batch.setViewportTransform(renderArgs._viewport);
renderArgs._context->render(batch);
}
displaySide(&renderArgs, _myCamera);
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
renderArgs._renderMode = RenderArgs::MIRROR_RENDER_MODE;
renderRearViewMirror(&renderArgs, _mirrorViewRect);
renderArgs._renderMode = RenderArgs::NORMAL_RENDER_MODE;
}
{
auto geometryCache = DependencyManager::get<GeometryCache>();
auto primaryFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer();
auto primaryFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebufferDepthColor();
gpu::Batch batch;
batch.blit(primaryFbo, glm::ivec4(0, 0, _renderResolution.x, _renderResolution.y),
nullptr, glm::ivec4(0, 0, _glWidget->getDeviceSize().width(), _glWidget->getDeviceSize().height()));
batch.setFramebuffer(nullptr);
renderArgs._context->render(batch);
}
_compositor.displayOverlayTexture(&renderArgs);
}
if (!OculusManager::isConnected() || OculusManager::allowSwap()) {
PROFILE_RANGE(__FUNCTION__ "/bufferSwap");
_glWidget->swapBuffers();
@ -1002,6 +1005,7 @@ void Application::paintGL() {
OculusManager::endFrameTiming();
}
_frameCount++;
_numFramesSinceLastResize++;
Stats::getInstance()->setRenderDetails(renderArgs._details);
}
@ -1055,6 +1059,7 @@ void Application::resizeGL() {
}
if (_renderResolution != toGlm(renderSize)) {
_numFramesSinceLastResize = 0;
_renderResolution = toGlm(renderSize);
DependencyManager::get<FramebufferCache>()->setFrameBufferSize(renderSize);
@ -1068,7 +1073,6 @@ void Application::resizeGL() {
auto canvasSize = _glWidget->size();
offscreenUi->resize(canvasSize);
_glWidget->makeCurrent();
}
bool Application::importSVOFromURL(const QString& urlString) {
@ -1798,13 +1802,6 @@ void Application::idle() {
}
double timeSinceLastUpdate = (double)_lastTimeUpdated.nsecsElapsed() / 1000000.0;
if (timeSinceLastUpdate > targetFramePeriod) {
{
static const int IDLE_EVENT_PROCESS_MAX_TIME_MS = 2;
PerformanceTimer perfTimer("processEvents");
processEvents(QEventLoop::AllEvents, IDLE_EVENT_PROCESS_MAX_TIME_MS);
}
_lastTimeUpdated.start();
{
PerformanceTimer perfTimer("update");
@ -1836,12 +1833,8 @@ void Application::idle() {
// Once rendering is off on another thread we should be able to have Application::idle run at start(0) in
// perpetuity and not expect events to get backed up.
static const int IDLE_TIMER_DELAY_MS = 0;
int desiredInterval = _glWidget->isThrottleRendering() ? THROTTLED_IDLE_TIMER_DELAY : IDLE_TIMER_DELAY_MS;
if (idleTimer->interval() != desiredInterval) {
idleTimer->start(desiredInterval);
}
static const int IDLE_TIMER_DELAY_MS = 2;
idleTimer->start(_glWidget->isThrottleRendering() ? THROTTLED_IDLE_TIMER_DELAY : IDLE_TIMER_DELAY_MS);
}
// check for any requested background downloads.
@ -2318,23 +2311,21 @@ void Application::updateMyAvatarLookAtPosition() {
lookAtSpot = _myAvatar->getHead()->getEyePosition() +
(_myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
}
}
// Deflect the eyes a bit to match the detected gaze from Faceshift if active.
// DDE doesn't track eyes.
if (tracker && typeid(*tracker) == typeid(Faceshift) && !tracker->isMuted()) {
float eyePitch = tracker->getEstimatedEyePitch();
float eyeYaw = tracker->getEstimatedEyeYaw();
const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
glm::vec3 origin = _myAvatar->getHead()->getEyePosition();
float pitchSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? -1.0f : 1.0f;
float deflection = DependencyManager::get<Faceshift>()->getEyeDeflection();
if (isLookingAtSomeone) {
deflection *= GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT;
}
lookAtSpot = origin + _myCamera.getRotation() * glm::quat(glm::radians(glm::vec3(
eyePitch * pitchSign * deflection, eyeYaw * deflection, 0.0f))) *
// Deflect the eyes a bit to match the detected gaze from the face tracker if active.
if (tracker && !tracker->isMuted()) {
float eyePitch = tracker->getEstimatedEyePitch();
float eyeYaw = tracker->getEstimatedEyeYaw();
const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
glm::vec3 origin = _myAvatar->getHead()->getEyePosition();
float deflection = tracker->getEyeDeflection();
if (isLookingAtSomeone) {
deflection *= GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT;
}
lookAtSpot = origin + _myCamera.getRotation() * glm::quat(glm::radians(glm::vec3(
eyePitch * deflection, eyeYaw * deflection, 0.0f))) *
glm::inverse(_myCamera.getRotation()) * (lookAtSpot - origin);
}
}
_myAvatar->getHead()->setLookAtPosition(lookAtSpot);
@ -2495,10 +2486,18 @@ void Application::update(float deltaTime) {
_myAvatar->setDriveKeys(DOWN, _userInputMapper.getActionState(UserInputMapper::VERTICAL_DOWN));
_myAvatar->setDriveKeys(LEFT, _userInputMapper.getActionState(UserInputMapper::LATERAL_LEFT));
_myAvatar->setDriveKeys(RIGHT, _userInputMapper.getActionState(UserInputMapper::LATERAL_RIGHT));
_myAvatar->setDriveKeys(ROT_UP, _userInputMapper.getActionState(UserInputMapper::PITCH_UP));
_myAvatar->setDriveKeys(ROT_DOWN, _userInputMapper.getActionState(UserInputMapper::PITCH_DOWN));
_myAvatar->setDriveKeys(ROT_LEFT, _userInputMapper.getActionState(UserInputMapper::YAW_LEFT));
_myAvatar->setDriveKeys(ROT_RIGHT, _userInputMapper.getActionState(UserInputMapper::YAW_RIGHT));
if (deltaTime > FLT_EPSILON) {
// For rotations what we really want are measures of "angles per second" (in order to prevent
// fps-dependent spin rates) so we need to scale the units of the controller contribution.
// (TODO?: maybe we should similarly scale ALL action state info, or change the expected behavior
// of controllers to provide a delta_per_second value rather than a raw delta.)
const float EXPECTED_FRAME_RATE = 60.0f;
float timeFactor = EXPECTED_FRAME_RATE * deltaTime;
_myAvatar->setDriveKeys(ROT_UP, _userInputMapper.getActionState(UserInputMapper::PITCH_UP) / timeFactor);
_myAvatar->setDriveKeys(ROT_DOWN, _userInputMapper.getActionState(UserInputMapper::PITCH_DOWN) / timeFactor);
_myAvatar->setDriveKeys(ROT_LEFT, _userInputMapper.getActionState(UserInputMapper::YAW_LEFT) / timeFactor);
_myAvatar->setDriveKeys(ROT_RIGHT, _userInputMapper.getActionState(UserInputMapper::YAW_RIGHT) / timeFactor);
}
}
_myAvatar->setDriveKeys(BOOM_IN, _userInputMapper.getActionState(UserInputMapper::BOOM_IN));
_myAvatar->setDriveKeys(BOOM_OUT, _userInputMapper.getActionState(UserInputMapper::BOOM_OUT));
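A minimal sketch of the normalization introduced above (the helper and its name are hypothetical; EXPECTED_FRAME_RATE matches the constant in the diff):
// The input mapper reports a raw per-frame delta for pitch/yaw, so the same delta
// would produce a faster spin at lower frame rates. Dividing by timeFactor rescales
// the contribution so the effective rotation rate no longer depends on fps.
float normalizeRotationDelta(float rawDeltaPerFrame, float deltaTime) {
    const float EXPECTED_FRAME_RATE = 60.0f;             // rate the raw delta is tuned for
    float timeFactor = EXPECTED_FRAME_RATE * deltaTime;  // 1.0 at 60 fps, 2.0 at 30 fps
    return rawDeltaPerFrame / timeFactor;
}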
@ -2963,35 +2962,25 @@ PickRay Application::computePickRay(float x, float y) const {
if (isHMDMode()) {
getApplicationCompositor().computeHmdPickRay(glm::vec2(x, y), result.origin, result.direction);
} else {
if (QThread::currentThread() == activeRenderingThread) {
getDisplayViewFrustum()->computePickRay(x, y, result.origin, result.direction);
} else {
getViewFrustum()->computePickRay(x, y, result.origin, result.direction);
}
getViewFrustum()->computePickRay(x, y, result.origin, result.direction);
}
return result;
}
QImage Application::renderAvatarBillboard(RenderArgs* renderArgs) {
auto primaryFramebuffer = DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer();
glBindFramebuffer(GL_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(primaryFramebuffer));
// clear the alpha channel so the background is transparent
glColorMask(GL_FALSE, GL_FALSE, GL_FALSE, GL_TRUE);
glClearColor(0.0, 0.0, 0.0, 0.0);
glClear(GL_COLOR_BUFFER_BIT);
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_FALSE);
const int BILLBOARD_SIZE = 64;
// TODO: Pass a RenderArgs to renderAvatarBillboard
renderRearViewMirror(renderArgs, QRect(0, _glWidget->getDeviceHeight() - BILLBOARD_SIZE,
BILLBOARD_SIZE, BILLBOARD_SIZE),
true);
QImage image(BILLBOARD_SIZE, BILLBOARD_SIZE, QImage::Format_ARGB32);
glReadPixels(0, 0, BILLBOARD_SIZE, BILLBOARD_SIZE, GL_BGRA, GL_UNSIGNED_BYTE, image.bits());
glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
glBindFramebuffer(GL_FRAMEBUFFER, 0);
// Need to make sure the gl context is current here
_glWidget->makeCurrent();
renderArgs->_renderMode = RenderArgs::DEFAULT_RENDER_MODE;
renderRearViewMirror(renderArgs, QRect(0, 0, BILLBOARD_SIZE, BILLBOARD_SIZE), true);
auto primaryFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebufferDepthColor();
QImage image(BILLBOARD_SIZE, BILLBOARD_SIZE, QImage::Format_ARGB32);
renderArgs->_context->downloadFramebuffer(primaryFbo, glm::ivec4(0, 0, BILLBOARD_SIZE, BILLBOARD_SIZE), image);
return image;
}
@ -3295,10 +3284,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
sceneInterface->setEngineFeedOverlay3DItems(engineRC->_numFeedOverlay3DItems);
sceneInterface->setEngineDrawnOverlay3DItems(engineRC->_numDrawnOverlay3DItems);
}
//Render the sixense lasers
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLasers)) {
_myAvatar->renderLaserPointers(*renderArgs->_batch);
}
if (!selfAvatarOnly) {
// give external parties a chance to hook in
@ -3405,35 +3390,23 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
gpu::Vec4i viewport;
if (billboard) {
QSize size = DependencyManager::get<FramebufferCache>()->getFrameBufferSize();
viewport = gpu::Vec4i(region.x(), size.height() - region.y() - region.height(), region.width(), region.height());
viewport = gpu::Vec4i(0, 0, region.width(), region.height());
} else {
// if not rendering the billboard, the region is in device independent coordinates; must convert to device
QSize size = DependencyManager::get<FramebufferCache>()->getFrameBufferSize();
float ratio = (float)QApplication::desktop()->windowHandle()->devicePixelRatio() * getRenderResolutionScale();
int x = region.x() * ratio, y = region.y() * ratio, width = region.width() * ratio, height = region.height() * ratio;
viewport = gpu::Vec4i(x, size.height() - y - height, width, height);
int x = region.x() * ratio;
int y = region.y() * ratio;
int width = region.width() * ratio;
int height = region.height() * ratio;
viewport = gpu::Vec4i(0, 0, width, height);
}
renderArgs->_viewport = viewport;
{
gpu::Batch batch;
batch.setViewportTransform(viewport);
batch.setStateScissorRect(viewport);
batch.clearFramebuffer(
gpu::Framebuffer::BUFFER_COLOR0 |
gpu::Framebuffer::BUFFER_COLOR1 |
gpu::Framebuffer::BUFFER_COLOR2 |
gpu::Framebuffer::BUFFER_DEPTH,
vec4(vec3(0), 1), 1.0, 0.0, true);
// Viewport is assigned to the size of the framebuffer
renderArgs->_context->render(batch);
}
// render rear mirror view
displaySide(renderArgs, _mirrorCamera, true, billboard);
renderArgs->_viewport = originalViewport;
}
void Application::resetSensors() {

View file

@ -328,6 +328,8 @@ public:
gpu::ContextPointer getGPUContext() const { return _gpuContext; }
const QRect& getMirrorViewRect() const { return _mirrorViewRect; }
signals:
/// Fired when we're simulating; allows external parties to hook in.
@ -640,6 +642,7 @@ private:
Overlays _overlays;
ApplicationOverlay _applicationOverlay;
ApplicationCompositor _compositor;
int _numFramesSinceLastResize = 0;
};
#endif // hifi_Application_h

View file

@ -478,7 +478,6 @@ Menu::Menu() {
qApp,
SLOT(setLowVelocityFilter(bool)));
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseMouseInput, 0, true);
addCheckableActionToQMenuAndActionHash(sixenseOptionsMenu, MenuOption::SixenseLasers, 0, false);
MenuWrapper* leapOptionsMenu = handOptionsMenu->addMenu("Leap Motion");
addCheckableActionToQMenuAndActionHash(leapOptionsMenu, MenuOption::LeapMotionOnHMD, 0, false);

View file

@ -272,7 +272,6 @@ namespace MenuOption {
const QString SimpleShadows = "Simple";
const QString SixenseEnabled = "Enable Hydra Support";
const QString SixenseMouseInput = "Enable Sixense Mouse Input";
const QString SixenseLasers = "Enable Sixense UI Lasers";
const QString ShiftHipsForIdleAnimations = "Shift hips for idle animations";
const QString Stars = "Stars";
const QString Stats = "Stats";

View file

@ -75,8 +75,8 @@ void FaceModel::maybeUpdateEyeRotation(Model* model, const JointState& parentSta
glm::translate(_rig->getJointDefaultTranslationInConstrainedFrame(index)) *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation));
glm::vec3 front = glm::vec3(inverse * glm::vec4(_owningHead->getFinalOrientationInWorldFrame() * IDENTITY_FRONT, 0.0f));
glm::vec3 lookAt = glm::vec3(inverse * glm::vec4(_owningHead->getCorrectedLookAtPosition() +
_owningHead->getSaccade() - model->getTranslation(), 1.0f));
glm::vec3 lookAtDelta = _owningHead->getCorrectedLookAtPosition() - model->getTranslation();
glm::vec3 lookAt = glm::vec3(inverse * glm::vec4(lookAtDelta + glm::length(lookAtDelta) * _owningHead->getSaccade(), 1.0f));
glm::quat between = rotationBetween(front, lookAt);
const float MAX_ANGLE = 30.0f * RADIANS_PER_DEGREE;
_rig->setJointRotationInConstrainedFrame(index, glm::angleAxis(glm::clamp(glm::angle(between),

View file

@ -129,13 +129,14 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
const float MICROSACCADE_MAGNITUDE = 0.002f;
const float SACCADE_MAGNITUDE = 0.04f;
const float NOMINAL_FRAME_RATE = 60.0f;
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
}
_saccade += (_saccadeTarget - _saccade) * 0.5f;
_saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
// Detect transition from talking to not; force blink after that and a delay
bool forceBlink = false;
@ -342,7 +343,8 @@ glm::quat Head::getCameraOrientation() const {
glm::quat Head::getEyeRotation(const glm::vec3& eyePosition) const {
glm::quat orientation = getOrientation();
return rotationBetween(orientation * IDENTITY_FRONT, _lookAtPosition + _saccade - eyePosition) * orientation;
glm::vec3 lookAtDelta = _lookAtPosition - eyePosition;
return rotationBetween(orientation * IDENTITY_FRONT, lookAtDelta + glm::length(lookAtDelta) * _saccade) * orientation;
}
glm::vec3 Head::getScalePivot() const {

View file

@ -1531,6 +1531,9 @@ void MyAvatar::maybeUpdateBillboard() {
QBuffer buffer(&_billboard);
buffer.open(QIODevice::WriteOnly);
image.save(&buffer, "PNG");
#ifdef DEBUG
image.save("billboard.png", "PNG");
#endif
_billboardValid = true;
sendBillboardPacket();

View file

@ -157,6 +157,10 @@ DdeFaceTracker::DdeFaceTracker(const QHostAddress& host, quint16 serverPort, qui
_reset(false),
_leftBlinkIndex(0), // see http://support.faceshift.com/support/articles/35129-export-of-blendshapes
_rightBlinkIndex(1),
_leftEyeDownIndex(4),
_rightEyeDownIndex(5),
_leftEyeInIndex(6),
_rightEyeInIndex(7),
_leftEyeOpenIndex(8),
_rightEyeOpenIndex(9),
_browDownLeftIndex(14),
@ -173,6 +177,14 @@ DdeFaceTracker::DdeFaceTracker(const QHostAddress& host, quint16 serverPort, qui
_filteredHeadTranslation(glm::vec3(0.0f)),
_lastBrowUp(0.0f),
_filteredBrowUp(0.0f),
_eyePitch(0.0f),
_eyeYaw(0.0f),
_lastEyePitch(0.0f),
_lastEyeYaw(0.0f),
_filteredEyePitch(0.0f),
_filteredEyeYaw(0.0f),
_longTermAverageEyePitch(0.0f),
_longTermAverageEyeYaw(0.0f),
_lastEyeBlinks(),
_filteredEyeBlinks(),
_lastEyeCoefficients(),
@ -282,6 +294,17 @@ void DdeFaceTracker::reset() {
}
}
void DdeFaceTracker::update(float deltaTime) {
if (!isActive()) {
return;
}
FaceTracker::update(deltaTime);
glm::vec3 headEulers = glm::degrees(glm::eulerAngles(_headRotation));
_estimatedEyePitch = _eyePitch - headEulers.x;
_estimatedEyeYaw = _eyeYaw - headEulers.y;
}
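A quick illustration of the subtraction above (numbers and sign conventions are illustrative assumptions, not from the diff):
// DDE reports eye pitch/yaw relative to the screen; subtracting the head's own
// rotation leaves the gaze relative to the head. E.g. eyes 10 degrees down on
// screen with the head pitched 4 degrees down leaves 6 degrees relative to the head.
float eyePitchOnScreen = 10.0f;
float headPitch = 4.0f;
float estimatedEyePitch = eyePitchOnScreen - headPitch;  // 6.0f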
bool DdeFaceTracker::isActive() const {
return (_ddeProcess != NULL);
}
@ -436,6 +459,28 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
_coefficients[_mouthSmileLeftIndex] = _coefficients[_mouthSmileLeftIndex] - SMILE_THRESHOLD;
_coefficients[_mouthSmileRightIndex] = _coefficients[_mouthSmileRightIndex] - SMILE_THRESHOLD;
// Eye pitch and yaw
// EyeDown coefficients work better over both +ve and -ve values than EyeUp values.
// EyeIn coefficients work better over both +ve and -ve values than EyeOut values.
// Pitch and yaw values are relative to the screen.
const float EYE_PITCH_SCALE = -1500.0f; // Sign, scale, and average to be similar to Faceshift values.
_eyePitch = EYE_PITCH_SCALE * (_coefficients[_leftEyeDownIndex] + _coefficients[_rightEyeDownIndex]);
const float EYE_YAW_SCALE = 2000.0f; // Scale and average to be similar to Faceshift values.
_eyeYaw = EYE_YAW_SCALE * (_coefficients[_leftEyeInIndex] + _coefficients[_rightEyeInIndex]);
if (isFiltering) {
const float EYE_VELOCITY_FILTER_STRENGTH = 0.005f;
float pitchVelocity = fabsf(_eyePitch - _lastEyePitch) / _averageMessageTime;
float pitchVelocityFilter = glm::clamp(pitchVelocity * EYE_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
_filteredEyePitch = pitchVelocityFilter * _eyePitch + (1.0f - pitchVelocityFilter) * _filteredEyePitch;
_lastEyePitch = _eyePitch;
_eyePitch = _filteredEyePitch;
float yawVelocity = fabsf(_eyeYaw - _lastEyeYaw) / _averageMessageTime;
float yawVelocityFilter = glm::clamp(yawVelocity * EYE_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
_filteredEyeYaw = yawVelocityFilter * _eyeYaw + (1.0f - yawVelocityFilter) * _filteredEyeYaw;
_lastEyeYaw = _eyeYaw;
_eyeYaw = _filteredEyeYaw;
}
// Velocity filter EyeBlink values
const float DDE_EYEBLINK_SCALE = 3.0f;
float eyeBlinks[] = { DDE_EYEBLINK_SCALE * _coefficients[_leftBlinkIndex],

View file

@ -31,6 +31,7 @@ class DdeFaceTracker : public FaceTracker, public Dependency {
public:
virtual void init();
virtual void reset();
virtual void update(float deltaTime);
virtual bool isActive() const;
virtual bool isTracking() const;
@ -93,6 +94,11 @@ private:
int _leftEyeOpenIndex;
int _rightEyeOpenIndex;
int _leftEyeDownIndex;
int _rightEyeDownIndex;
int _leftEyeInIndex;
int _rightEyeInIndex;
int _browDownLeftIndex;
int _browDownRightIndex;
int _browUpCenterIndex;
@ -115,6 +121,16 @@ private:
float _lastBrowUp;
float _filteredBrowUp;
float _eyePitch; // Degrees, relative to screen
float _eyeYaw;
float _lastEyePitch;
float _lastEyeYaw;
float _filteredEyePitch;
float _filteredEyeYaw;
float _longTermAverageEyePitch = 0.0f;
float _longTermAverageEyeYaw = 0.0f;
bool _longTermAverageInitialized = false;
enum EyeState {
EYE_UNCONTROLLED,
EYE_OPEN,

View file

@ -20,6 +20,9 @@
const int FPS_TIMER_DELAY = 2000; // ms
const int FPS_TIMER_DURATION = 2000; // ms
const float DEFAULT_EYE_DEFLECTION = 0.25f;
Setting::Handle<float> FaceTracker::_eyeDeflection("faceshiftEyeDeflection", DEFAULT_EYE_DEFLECTION);
void FaceTracker::init() {
_isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
_isInitialized = true; // FaceTracker can be used now
@ -106,3 +109,7 @@ void FaceTracker::toggleMute() {
_isMuted = !_isMuted;
emit muteToggled();
}
void FaceTracker::setEyeDeflection(float eyeDeflection) {
_eyeDeflection.set(eyeDeflection);
}
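A minimal usage sketch of the shared setting introduced here (the accessors are from this commit; the calling code is hypothetical and includes are omitted):
// Eye deflection now lives in a static Setting::Handle on FaceTracker
// ("faceshiftEyeDeflection", default 0.25), so callers no longer need a
// Faceshift instance to read or persist it.
float deflection = FaceTracker::getEyeDeflection();  // read the persisted value
FaceTracker::setEyeDeflection(deflection);           // write it back through the handle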

View file

@ -18,6 +18,8 @@
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <SettingHandle.h>
/// Base class for face trackers (Faceshift, DDE).
class FaceTracker : public QObject {
Q_OBJECT
@ -47,6 +49,9 @@ public:
void setIsMuted(bool isMuted) { _isMuted = isMuted; }
void toggleMute();
static float getEyeDeflection() { return _eyeDeflection.get(); }
static void setEyeDeflection(float eyeDeflection);
signals:
void muteToggled();
@ -77,6 +82,8 @@ private slots:
private:
bool _isCalculatingFPS = false;
int _frameCount = 0;
static Setting::Handle<float> _eyeDeflection;
};
#endif // hifi_FaceTracker_h

View file

@ -28,10 +28,8 @@ using namespace std;
const QString DEFAULT_FACESHIFT_HOSTNAME = "localhost";
const quint16 FACESHIFT_PORT = 33433;
const float DEFAULT_FACESHIFT_EYE_DEFLECTION = 0.25f;
Faceshift::Faceshift() :
_eyeDeflection("faceshiftEyeDeflection", DEFAULT_FACESHIFT_EYE_DEFLECTION),
_hostname("faceshiftHostname", DEFAULT_FACESHIFT_HOSTNAME)
{
#ifdef HAVE_FACESHIFT
@ -306,10 +304,6 @@ void Faceshift::receive(const QByteArray& buffer) {
FaceTracker::countFrame();
}
void Faceshift::setEyeDeflection(float faceshiftEyeDeflection) {
_eyeDeflection.set(faceshiftEyeDeflection);
}
void Faceshift::setHostname(const QString& hostname) {
_hostname.set(hostname);
}

View file

@ -68,9 +68,6 @@ public:
float getMouthSmileLeft() const { return getBlendshapeCoefficient(_mouthSmileLeftIndex); }
float getMouthSmileRight() const { return getBlendshapeCoefficient(_mouthSmileRightIndex); }
float getEyeDeflection() { return _eyeDeflection.get(); }
void setEyeDeflection(float faceshiftEyeDeflection);
QString getHostname() { return _hostname.get(); }
void setHostname(const QString& hostname);
@ -134,7 +131,6 @@ private:
float _longTermAverageEyeYaw = 0.0f;
bool _longTermAverageInitialized = false;
Setting::Handle<float> _eyeDeflection;
Setting::Handle<QString> _hostname;
// see http://support.faceshift.com/support/articles/35129-export-of-blendshapes

View file

@ -520,8 +520,7 @@ void SixenseManager::emulateMouse(PalmData* palm, int index) {
triggerButton = Qt::LeftButton;
}
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLasers)
|| Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode)) {
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode)) {
pos = qApp->getApplicationCompositor().getPalmClickLocation(palm);
} else {
// Get direction relative to avatar orientation

View file

@ -142,3 +142,7 @@ QScriptValue WebWindowClass::constructor(QScriptContext* context, QScriptEngine*
return engine->newQObject(retVal);
}
void WebWindowClass::setTitle(const QString& title) {
_windowWidget->setWindowTitle(title);
}

View file

@ -48,6 +48,7 @@ public slots:
void raise();
ScriptEventBridge* getEventBridge() const { return _eventBridge; }
void addEventBridgeToWindowObject();
void setTitle(const QString& title);
signals:
void closed();

View file

@ -491,24 +491,18 @@ void ApplicationCompositor::renderControllerPointers(gpu::Batch& batch) {
auto canvasSize = qApp->getCanvasSize();
int mouseX, mouseY;
if (Menu::getInstance()->isOptionChecked(MenuOption::SixenseLasers)) {
QPoint res = getPalmClickLocation(palmData);
mouseX = res.x();
mouseY = res.y();
} else {
// Get direction relative to avatar orientation
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * palmData->getFingerDirection();
// Get direction relative to avatar orientation
glm::vec3 direction = glm::inverse(myAvatar->getOrientation()) * palmData->getFingerDirection();
// Get the angles, scaled between (-0.5,0.5)
float xAngle = (atan2(direction.z, direction.x) + PI_OVER_TWO);
float yAngle = 0.5f - ((atan2f(direction.z, direction.y) + (float)PI_OVER_TWO));
// Get the angles, scaled between (-0.5,0.5)
float xAngle = (atan2(direction.z, direction.x) + PI_OVER_TWO);
float yAngle = 0.5f - ((atan2f(direction.z, direction.y) + (float)PI_OVER_TWO));
// Get the pixel range over which the xAngle and yAngle are scaled
float cursorRange = canvasSize.x * SixenseManager::getInstance().getCursorPixelRangeMult();
// Get the pixel range over which the xAngle and yAngle are scaled
float cursorRange = canvasSize.x * SixenseManager::getInstance().getCursorPixelRangeMult();
mouseX = (canvasSize.x / 2.0f + cursorRange * xAngle);
mouseY = (canvasSize.y / 2.0f + cursorRange * yAngle);
}
mouseX = (canvasSize.x / 2.0f + cursorRange * xAngle);
mouseY = (canvasSize.y / 2.0f + cursorRange * yAngle);
//If the cursor is out of the screen then don't render it
if (mouseX < 0 || mouseX >= (int)canvasSize.x || mouseY < 0 || mouseY >= (int)canvasSize.y) {

View file

@ -16,6 +16,7 @@
#include <GLMHelpers.h>
#include <gpu/GLBackend.h>
#include <gpu/GLBackendShared.h>
#include <FramebufferCache.h>
#include <GLMHelpers.h>
#include <OffscreenUi.h>
#include <CursorManager.h>
@ -93,6 +94,7 @@ void ApplicationOverlay::renderOverlay(RenderArgs* renderArgs) {
// Now render the overlay components together into a single texture
renderDomainConnectionStatusBorder(renderArgs); // renders the connected domain line
renderAudioScope(renderArgs); // audio scope in the very back
renderRearView(renderArgs); // renders the mirror view selfie
renderQmlUi(renderArgs); // renders a unit quad with the QML UI texture, and the text overlays from scripts
renderOverlays(renderArgs); // renders Scripts Overlay and AudioScope
renderStatsAndLogs(renderArgs); // currently renders nothing
@ -167,6 +169,39 @@ void ApplicationOverlay::renderRearViewToFbo(RenderArgs* renderArgs) {
}
void ApplicationOverlay::renderRearView(RenderArgs* renderArgs) {
if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
gpu::Batch& batch = *renderArgs->_batch;
auto geometryCache = DependencyManager::get<GeometryCache>();
auto framebuffer = DependencyManager::get<FramebufferCache>();
auto selfieTexture = framebuffer->getSelfieFramebuffer()->getRenderBuffer(0);
int width = renderArgs->_viewport.z;
int height = renderArgs->_viewport.w;
mat4 legacyProjection = glm::ortho<float>(0, width, height, 0, ORTHO_NEAR_CLIP, ORTHO_FAR_CLIP);
batch.setProjectionTransform(legacyProjection);
batch.setModelTransform(Transform());
batch.setViewTransform(Transform());
float screenRatio = ((float)qApp->getDevicePixelRatio());
float renderRatio = ((float)screenRatio * qApp->getRenderResolutionScale());
auto viewport = qApp->getMirrorViewRect();
glm::vec2 bottomLeft(viewport.left(), viewport.top() + viewport.height());
glm::vec2 topRight(viewport.left() + viewport.width(), viewport.top());
bottomLeft *= screenRatio;
topRight *= screenRatio;
glm::vec2 texCoordMinCorner(0.0f, 0.0f);
glm::vec2 texCoordMaxCorner(viewport.width() * renderRatio / float(selfieTexture->getWidth()), viewport.height() * renderRatio / float(selfieTexture->getHeight()));
geometryCache->useSimpleDrawPipeline(batch, true);
batch.setResourceTexture(0, selfieTexture);
geometryCache->renderQuad(batch, bottomLeft, topRight, texCoordMinCorner, texCoordMaxCorner, glm::vec4(1.0f, 1.0f, 1.0f, 1.0f));
batch.setResourceTexture(0, renderArgs->_whiteTexture);
geometryCache->useSimpleDrawPipeline(batch, false);
}
}
void ApplicationOverlay::renderStatsAndLogs(RenderArgs* renderArgs) {

View file

@ -142,10 +142,10 @@ void PreferencesDialog::loadPreferences() {
ui.ddeEyeClosingThresholdSlider->setValue(dde->getEyeClosingThreshold() *
ui.ddeEyeClosingThresholdSlider->maximum());
auto faceshift = DependencyManager::get<Faceshift>();
ui.faceshiftEyeDeflectionSider->setValue(faceshift->getEyeDeflection() *
ui.faceshiftEyeDeflectionSider->maximum());
ui.faceTrackerEyeDeflectionSider->setValue(FaceTracker::getEyeDeflection() *
ui.faceTrackerEyeDeflectionSider->maximum());
auto faceshift = DependencyManager::get<Faceshift>();
ui.faceshiftHostnameEdit->setText(faceshift->getHostname());
auto audio = DependencyManager::get<AudioClient>();
@ -233,10 +233,10 @@ void PreferencesDialog::savePreferences() {
dde->setEyeClosingThreshold(ui.ddeEyeClosingThresholdSlider->value() /
(float)ui.ddeEyeClosingThresholdSlider->maximum());
auto faceshift = DependencyManager::get<Faceshift>();
faceshift->setEyeDeflection(ui.faceshiftEyeDeflectionSider->value() /
(float)ui.faceshiftEyeDeflectionSider->maximum());
FaceTracker::setEyeDeflection(ui.faceTrackerEyeDeflectionSider->value() /
(float)ui.faceTrackerEyeDeflectionSider->maximum());
auto faceshift = DependencyManager::get<Faceshift>();
faceshift->setHostname(ui.faceshiftHostnameEdit->text());
qApp->setMaxOctreePacketsPerSecond(ui.maxOctreePPSSpin->value());

View file

@ -1468,13 +1468,13 @@
</font>
</property>
<property name="text">
<string>Faceshift eye deflection</string>
<string>Face tracker eye deflection</string>
</property>
<property name="indent">
<number>0</number>
</property>
<property name="buddy">
<cstring>faceshiftEyeDeflectionSider</cstring>
<cstring>faceTrackerEyeDeflectionSider</cstring>
</property>
</widget>
</item>
@ -1497,7 +1497,7 @@
</spacer>
</item>
<item>
<widget class="QSlider" name="faceshiftEyeDeflectionSider">
<widget class="QSlider" name="faceTrackerEyeDeflectionSider">
<property name="sizePolicy">
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
<horstretch>0</horstretch>

View file

@ -1115,11 +1115,15 @@ void AvatarData::sendIdentityPacket() {
void AvatarData::sendBillboardPacket() {
if (!_billboard.isEmpty()) {
auto nodeList = DependencyManager::get<NodeList>();
auto billboardPacket = NLPacket::create(PacketType::AvatarBillboard, _billboard.size());
billboardPacket->write(_billboard);
nodeList->broadcastToNodes(std::move(billboardPacket), NodeSet() << NodeType::AvatarMixer);
// This makes sure the billboard won't be too large to send.
// Once more protocol changes are done and we can send blocks of data we can support sending > MTU sized billboards.
if (_billboard.size() <= NLPacket::maxPayloadSize(PacketType::AvatarBillboard)) {
auto billboardPacket = NLPacket::create(PacketType::AvatarBillboard, _billboard.size());
billboardPacket->write(_billboard);
nodeList->broadcastToNodes(std::move(billboardPacket), NodeSet() << NodeType::AvatarMixer);
}
}
}

View file

@ -220,10 +220,7 @@ void Batch::setStateBlendFactor(const Vec4& factor) {
void Batch::setStateScissorRect(const Vec4i& rect) {
ADD_COMMAND(setStateScissorRect);
_params.push_back(rect.x);
_params.push_back(rect.y);
_params.push_back(rect.z);
_params.push_back(rect.w);
_params.push_back(cacheData(sizeof(Vec4i), &rect));
}
void Batch::setUniformBuffer(uint32 slot, const BufferPointer& buffer, Offset offset, Offset size) {

View file

@ -108,7 +108,6 @@ public:
void blit(const FramebufferPointer& src, const Vec4i& srcViewport,
const FramebufferPointer& dst, const Vec4i& dstViewport);
// Query Section
void beginQuery(const QueryPointer& query);
void endQuery(const QueryPointer& query);

View file

@ -40,4 +40,8 @@ void Context::render(Batch& batch) {
void Context::syncCache() {
PROFILE_RANGE(__FUNCTION__);
_backend->syncCache();
}
}
void Context::downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage) {
_backend->downloadFramebuffer(srcFramebuffer, region, destImage);
}

View file

@ -20,6 +20,8 @@
#include "Pipeline.h"
#include "Framebuffer.h"
class QImage;
namespace gpu {
class Backend {
@ -28,6 +30,8 @@ public:
virtual~ Backend() {};
virtual void render(Batch& batch) = 0;
virtual void syncCache() = 0;
virtual void downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage) = 0;
class TransformObject {
public:
@ -121,6 +125,10 @@ public:
void syncCache();
// Downloading the Framebuffer is a synchronous action that is not efficient.
// It's here for convenience to easily capture a snapshot
void downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage);
protected:
Context(const Context& context);

View file

@ -0,0 +1,22 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// Draw texture 0 fetched at texcoord.xy
// Alpha is 1
//
// Created by Sam Gateau on 6/22/2015
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
uniform sampler2D colorMap;
varying vec2 varTexcoord;
void main(void) {
gl_FragColor = vec4(texture2D(colorMap, varTexcoord).xyz, 1.0);
}

View file

@ -14,8 +14,6 @@
#include "Texture.h"
#include <memory>
class QImage;
namespace gpu {
typedef Element Format;
@ -134,8 +132,6 @@ public:
static const uint32 MAX_NUM_RENDER_BUFFERS = 8;
static uint32 getMaxNumRenderBuffers() { return MAX_NUM_RENDER_BUFFERS; }
void getImage(QImage* result) const;
protected:
SwapchainPointer _swapchain;

View file

@ -192,6 +192,7 @@ void GLBackend::syncCache() {
syncTransformStateCache();
syncPipelineStateCache();
syncInputStateCache();
syncOutputStateCache();
glEnable(GL_LINE_SMOOTH);
}
@ -281,6 +282,9 @@ void GLBackend::do_clearFramebuffer(Batch& batch, uint32 paramOffset) {
glClearColor(color.x, color.y, color.z, color.w);
glmask |= GL_COLOR_BUFFER_BIT;
}
// Force the color mask cache to WRITE_ALL if it is not already
do_setStateColorWriteMask(State::ColorMask::WRITE_ALL);
}
// Apply scissor if needed and if not already on

View file

@ -38,6 +38,10 @@ public:
// Let's try to avoid doing that as much as possible!
virtual void syncCache();
// This is the ugly "download the pixels to sysmem for taking a snapshot"
// Just avoid using it, it's ugly and will hurt performance
virtual void downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage);
static bool checkGLError(const char* name = nullptr);
// Only checks in debug builds
@ -383,11 +387,14 @@ protected:
void do_setFramebuffer(Batch& batch, uint32 paramOffset);
void do_blit(Batch& batch, uint32 paramOffset);
// Synchronize the state cache of this Backend with the actual state of the GL Context
void syncOutputStateCache();
struct OutputStageState {
FramebufferPointer _framebuffer = nullptr;
GLuint _drawFBO = 0;
OutputStageState() {}
} _output;

View file

@ -8,9 +8,12 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <qimage.h>
#include "GPULogging.h"
#include "GLBackendShared.h"
using namespace gpu;
GLBackend::GLFramebuffer::GLFramebuffer() {}
@ -34,6 +37,9 @@ GLBackend::GLFramebuffer* GLBackend::syncGPUObject(const Framebuffer& framebuffe
// need to have a gpu object?
if (!object) {
GLint currentFBO;
glGetIntegerv(GL_DRAW_FRAMEBUFFER_BINDING, &currentFBO);
GLuint fbo;
glGenFramebuffers(1, &fbo);
(void) CHECK_GL_ERROR();
@ -84,6 +90,8 @@ GLBackend::GLFramebuffer* GLBackend::syncGPUObject(const Framebuffer& framebuffe
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderBuffer);
(void) CHECK_GL_ERROR();
}
// glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT | GL_STENCIL_BUFFER_BIT);
#endif
@ -139,6 +147,9 @@ GLBackend::GLFramebuffer* GLBackend::syncGPUObject(const Framebuffer& framebuffe
object->_fbo = fbo;
object->_colorBuffers = colorBuffers;
Backend::setGPUObject(framebuffer, object);
// restore the current framebuffer
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, currentFBO);
}
return object;
@ -158,11 +169,24 @@ GLuint GLBackend::getFramebufferID(const FramebufferPointer& framebuffer) {
}
}
void GLBackend::syncOutputStateCache() {
GLint currentFBO;
glGetIntegerv(GL_DRAW_FRAMEBUFFER_BINDING, &currentFBO);
_output._drawFBO = currentFBO;
_output._framebuffer.reset();
}
void GLBackend::do_setFramebuffer(Batch& batch, uint32 paramOffset) {
auto framebuffer = batch._framebuffers.get(batch._params[paramOffset]._uint);
if (_output._framebuffer != framebuffer) {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, getFramebufferID(framebuffer));
auto newFBO = getFramebufferID(framebuffer);
if (_output._drawFBO != newFBO) {
_output._drawFBO = newFBO;
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, newFBO);
}
_output._framebuffer = framebuffer;
}
}
@ -184,4 +208,38 @@ void GLBackend::do_blit(Batch& batch, uint32 paramOffset) {
glBlitFramebuffer(srcvp.x, srcvp.y, srcvp.z, srcvp.w,
dstvp.x, dstvp.y, dstvp.z, dstvp.w,
GL_COLOR_BUFFER_BIT, GL_LINEAR);
(void) CHECK_GL_ERROR();
if (_output._framebuffer) {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, getFramebufferID(_output._framebuffer));
}
}
void GLBackend::downloadFramebuffer(const FramebufferPointer& srcFramebuffer, const Vec4i& region, QImage& destImage) {
auto readFBO = gpu::GLBackend::getFramebufferID(srcFramebuffer);
if (srcFramebuffer && readFBO) {
if ((srcFramebuffer->getWidth() < (region.x + region.z)) || (srcFramebuffer->getHeight() < (region.y + region.w))) {
qCDebug(gpulogging) << "GLBackend::downloadFramebuffer : srcFramebuffer is too small to provide the region queried";
return;
}
}
if ((destImage.width() < region.z) || (destImage.height() < region.w)) {
qCDebug(gpulogging) << "GLBackend::downloadFramebuffer : destImage is too small to receive the region of the framebuffer";
return;
}
GLenum format = GL_BGRA;
if (destImage.format() != QImage::Format_ARGB32) {
qCDebug(gpulogging) << "GLBackend::downloadFramebuffer : destImage format must be FORMAT_ARGB32 to receive the region of the framebuffer";
return;
}
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(srcFramebuffer));
glReadPixels(region.x, region.y, region.z, region.w, format, GL_UNSIGNED_BYTE, destImage.bits());
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
(void) CHECK_GL_ERROR();
}
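A rough usage sketch (the context and framebuffer handles are hypothetical; the format and size requirements follow the checks above):
// Synchronous readback intended for snapshots/debugging only; destImage must be
// Format_ARGB32 and at least as large as the requested region.
const int SNAPSHOT_SIZE = 64;
QImage snapshot(SNAPSHOT_SIZE, SNAPSHOT_SIZE, QImage::Format_ARGB32);
gpuContext->downloadFramebuffer(primaryFbo, glm::ivec4(0, 0, SNAPSHOT_SIZE, SNAPSHOT_SIZE), snapshot);
snapshot.save("snapshot.png", "PNG");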

View file

@ -757,11 +757,8 @@ void GLBackend::do_setStateBlendFactor(Batch& batch, uint32 paramOffset) {
}
void GLBackend::do_setStateScissorRect(Batch& batch, uint32 paramOffset) {
Vec4 rect(batch._params[paramOffset + 0]._float,
batch._params[paramOffset + 1]._float,
batch._params[paramOffset + 2]._float,
batch._params[paramOffset + 3]._float);
Vec4i rect;
memcpy(&rect, batch.editData(batch._params[paramOffset]._uint), sizeof(Vec4i));
glScissor(rect.x, rect.y, rect.z, rect.w);
(void) CHECK_GL_ERROR();

View file

@ -16,6 +16,7 @@
#include "DrawTexcoordRectTransformUnitQuad_vert.h"
#include "DrawViewportQuadTransformTexcoord_vert.h"
#include "DrawTexture_frag.h"
#include "DrawTextureOpaque_frag.h"
#include "DrawColoredTexture_frag.h"
using namespace gpu;
@ -24,6 +25,7 @@ ShaderPointer StandardShaderLib::_drawTransformUnitQuadVS;
ShaderPointer StandardShaderLib::_drawTexcoordRectTransformUnitQuadVS;
ShaderPointer StandardShaderLib::_drawViewportQuadTransformTexcoordVS;
ShaderPointer StandardShaderLib::_drawTexturePS;
ShaderPointer StandardShaderLib::_drawTextureOpaquePS;
ShaderPointer StandardShaderLib::_drawColoredTexturePS;
StandardShaderLib::ProgramMap StandardShaderLib::_programs;
@ -82,6 +84,15 @@ ShaderPointer StandardShaderLib::getDrawTexturePS() {
return _drawTexturePS;
}
ShaderPointer StandardShaderLib::getDrawTextureOpaquePS() {
if (!_drawTextureOpaquePS) {
_drawTextureOpaquePS = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(DrawTextureOpaque_frag)));
}
return _drawTextureOpaquePS;
}
ShaderPointer StandardShaderLib::getDrawColoredTexturePS() {
if (!_drawColoredTexturePS) {
_drawColoredTexturePS = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(DrawColoredTexture_frag)));

View file

@ -35,6 +35,7 @@ public:
static ShaderPointer getDrawViewportQuadTransformTexcoordVS();
static ShaderPointer getDrawTexturePS();
static ShaderPointer getDrawTextureOpaquePS();
static ShaderPointer getDrawColoredTexturePS();
// The shader program combining the shaders available above, so they are unique
@ -47,6 +48,7 @@ protected:
static ShaderPointer _drawTexcoordRectTransformUnitQuadVS;
static ShaderPointer _drawViewportQuadTransformTexcoordVS;
static ShaderPointer _drawTexturePS;
static ShaderPointer _drawTextureOpaquePS;
static ShaderPointer _drawColoredTexturePS;
typedef std::map<std::pair<GetShader, GetShader>, ShaderPointer> ProgramMap;

View file

@ -110,7 +110,7 @@ void Skybox::render(gpu::Batch& batch, const ViewFrustum& viewFrustum, const Sky
} else {
// skybox has no cubemap, just clear the color buffer
auto color = skybox.getColor();
batch.clearFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(color, 0.0f), 0.0f, 0);
batch.clearFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(color, 0.0f), 0.0f, 0, true);
}
}

View file

@ -280,7 +280,7 @@ void PacketReceiver::processDatagrams() {
auto it = _packetListenerMap.find(packet->getType());
if (it != _packetListenerMap.end()) {
if (it != _packetListenerMap.end() && it->second.isValid()) {
auto listener = it.value();
@ -367,10 +367,12 @@ void PacketReceiver::processDatagrams() {
}
} else {
qWarning() << "No listener found for packet type " << nameForPacketType(packet->getType());
// insert a dummy listener so we don't print this again
_packetListenerMap.insert(packet->getType(), { nullptr, QMetaMethod() });
if (it == _packetListenerMap.end()) {
qWarning() << "No listener found for packet type " << nameForPacketType(packet->getType());
// insert a dummy listener so we don't print this again
_packetListenerMap.insert(packet->getType(), { nullptr, QMetaMethod() });
}
}
_packetListenerLock.unlock();

View file

@ -220,13 +220,17 @@ void DeferredLightingEffect::addSpotLight(const glm::vec3& position, float radiu
void DeferredLightingEffect::prepare(RenderArgs* args) {
gpu::Batch batch;
// clear the normal and specular buffers
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR1, glm::vec4(0.0f, 0.0f, 0.0f, 0.0f));
const float MAX_SPECULAR_EXPONENT = 128.0f;
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR2, glm::vec4(0.0f, 0.0f, 0.0f, 1.0f / MAX_SPECULAR_EXPONENT));
args->_context->syncCache();
batch.setStateScissorRect(args->_viewport);
auto primaryFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebuffer();
batch.setFramebuffer(primaryFbo);
// clear the normal and specular buffers
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR1, glm::vec4(0.0f, 0.0f, 0.0f, 0.0f), true);
const float MAX_SPECULAR_EXPONENT = 128.0f;
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR2, glm::vec4(0.0f, 0.0f, 0.0f, 1.0f / MAX_SPECULAR_EXPONENT), true);
args->_context->render(batch);
}
@ -245,8 +249,9 @@ void DeferredLightingEffect::render(RenderArgs* args) {
batch.setFramebuffer(_copyFBO);
batch.setViewportTransform(args->_viewport);
batch.setStateScissorRect(args->_viewport);
batch.clearColorFramebuffer(_copyFBO->getBufferMask(), glm::vec4(0.0f, 0.0f, 0.0f, 0.0f));
batch.clearColorFramebuffer(_copyFBO->getBufferMask(), glm::vec4(0.0f, 0.0f, 0.0f, 0.0f), true);
batch.setResourceTexture(0, framebufferCache->getPrimaryColorTexture());
@ -533,7 +538,6 @@ void DeferredLightingEffect::render(RenderArgs* args) {
batch.setResourceTexture(2, nullptr);
batch.setResourceTexture(3, nullptr);
args->_context->syncCache();
args->_context->render(batch);
// End of the Lighting pass
@ -546,7 +550,8 @@ void DeferredLightingEffect::copyBack(RenderArgs* args) {
QSize framebufferSize = framebufferCache->getFrameBufferSize();
// TODO why doesn't this blit work? It only seems to affect a small area below the rear view mirror.
auto destFbo = framebufferCache->getPrimaryFramebuffer();
// auto destFbo = framebufferCache->getPrimaryFramebuffer();
auto destFbo = framebufferCache->getPrimaryFramebufferDepthColor();
// gpu::Vec4i vp = args->_viewport;
// batch.blit(_copyFBO, vp, framebufferCache->getPrimaryFramebuffer(), vp);
batch.setFramebuffer(destFbo);
@ -565,11 +570,6 @@ void DeferredLightingEffect::copyBack(RenderArgs* args) {
batch.setModelTransform(model);
}
GLenum buffers[3];
int bufferCount = 0;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT0;
batch._glDrawBuffers(bufferCount, buffers);
batch.setResourceTexture(0, _copyFBO->getRenderBuffer(0));
batch.draw(gpu::TRIANGLE_STRIP, 4);

View file

@ -34,17 +34,20 @@ void FramebufferCache::setFrameBufferSize(QSize frameBufferSize) {
//If the size changed, we need to delete our FBOs
if (_frameBufferSize != frameBufferSize) {
_frameBufferSize = frameBufferSize;
_primaryFramebuffer.reset();
_primaryFramebufferFull.reset();
_primaryFramebufferDepthColor.reset();
_primaryDepthTexture.reset();
_primaryColorTexture.reset();
_primaryNormalTexture.reset();
_primarySpecularTexture.reset();
_selfieFramebuffer.reset();
_cachedFramebuffers.clear();
}
}
void FramebufferCache::createPrimaryFramebuffer() {
_primaryFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
_primaryFramebufferFull = gpu::FramebufferPointer(gpu::Framebuffer::create());
_primaryFramebufferDepthColor = gpu::FramebufferPointer(gpu::Framebuffer::create());
auto colorFormat = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
auto width = _frameBufferSize.width();
@ -55,24 +58,37 @@ void FramebufferCache::createPrimaryFramebuffer() {
_primaryNormalTexture = gpu::TexturePointer(gpu::Texture::create2D(colorFormat, width, height, defaultSampler));
_primarySpecularTexture = gpu::TexturePointer(gpu::Texture::create2D(colorFormat, width, height, defaultSampler));
_primaryFramebuffer->setRenderBuffer(0, _primaryColorTexture);
_primaryFramebuffer->setRenderBuffer(1, _primaryNormalTexture);
_primaryFramebuffer->setRenderBuffer(2, _primarySpecularTexture);
_primaryFramebufferFull->setRenderBuffer(0, _primaryColorTexture);
_primaryFramebufferFull->setRenderBuffer(1, _primaryNormalTexture);
_primaryFramebufferFull->setRenderBuffer(2, _primarySpecularTexture);
_primaryFramebufferDepthColor->setRenderBuffer(0, _primaryColorTexture);
auto depthFormat = gpu::Element(gpu::SCALAR, gpu::FLOAT, gpu::DEPTH);
_primaryDepthTexture = gpu::TexturePointer(gpu::Texture::create2D(depthFormat, width, height, defaultSampler));
_primaryFramebuffer->setDepthStencilBuffer(_primaryDepthTexture, depthFormat);
_primaryFramebufferFull->setDepthStencilBuffer(_primaryDepthTexture, depthFormat);
_primaryFramebufferDepthColor->setDepthStencilBuffer(_primaryDepthTexture, depthFormat);
_selfieFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
auto tex = gpu::TexturePointer(gpu::Texture::create2D(colorFormat, width * 0.5, height * 0.5, defaultSampler));
_selfieFramebuffer->setRenderBuffer(0, tex);
}
gpu::FramebufferPointer FramebufferCache::getPrimaryFramebuffer() {
if (!_primaryFramebuffer) {
if (!_primaryFramebufferFull) {
createPrimaryFramebuffer();
}
return _primaryFramebuffer;
return _primaryFramebufferFull;
}
gpu::FramebufferPointer FramebufferCache::getPrimaryFramebufferDepthColor() {
if (!_primaryFramebufferDepthColor) {
createPrimaryFramebuffer();
}
return _primaryFramebufferDepthColor;
}
gpu::TexturePointer FramebufferCache::getPrimaryDepthTexture() {
@ -112,7 +128,6 @@ gpu::FramebufferPointer FramebufferCache::getFramebuffer() {
return result;
}
void FramebufferCache::releaseFramebuffer(const gpu::FramebufferPointer& framebuffer) {
if (QSize(framebuffer->getSize().x, framebuffer->getSize().y) == _frameBufferSize) {
_cachedFramebuffers.push_back(framebuffer);
@ -126,3 +141,10 @@ gpu::FramebufferPointer FramebufferCache::getShadowFramebuffer() {
}
return _shadowFramebuffer;
}
gpu::FramebufferPointer FramebufferCache::getSelfieFramebuffer() {
if (!_selfieFramebuffer) {
createPrimaryFramebuffer();
}
return _selfieFramebuffer;
}

View file

@ -30,6 +30,7 @@ public:
/// Returns a pointer to the primary framebuffer object. This render target includes a depth component, and is
/// used for scene rendering.
gpu::FramebufferPointer getPrimaryFramebuffer();
gpu::FramebufferPointer getPrimaryFramebufferDepthColor();
gpu::TexturePointer getPrimaryDepthTexture();
gpu::TexturePointer getPrimaryColorTexture();
@ -39,8 +40,12 @@ public:
/// Returns the framebuffer object used to render shadow maps;
gpu::FramebufferPointer getShadowFramebuffer();
/// Returns the framebuffer object used to render selfie maps;
gpu::FramebufferPointer getSelfieFramebuffer();
/// Returns a free framebuffer with a single color attachment for temp or intra-frame operations
gpu::FramebufferPointer getFramebuffer();
// TODO add sync functionality to the release, so we don't reuse a framebuffer being read from
/// Releases a free framebuffer back for reuse
void releaseFramebuffer(const gpu::FramebufferPointer& framebuffer);
@ -51,13 +56,17 @@ private:
void createPrimaryFramebuffer();
gpu::FramebufferPointer _primaryFramebuffer;
gpu::FramebufferPointer _primaryFramebufferFull;
gpu::FramebufferPointer _primaryFramebufferDepthColor;
gpu::TexturePointer _primaryDepthTexture;
gpu::TexturePointer _primaryColorTexture;
gpu::TexturePointer _primaryNormalTexture;
gpu::TexturePointer _primarySpecularTexture;
gpu::FramebufferPointer _shadowFramebuffer;
gpu::FramebufferPointer _selfieFramebuffer;
QSize _frameBufferSize{ 100, 100 };
};

View file

@ -29,6 +29,8 @@
#include "standardTransformPNTC_vert.h"
#include "standardDrawTexture_frag.h"
#include "gpu/StandardShaderLib.h"
//#define WANT_DEBUG
const int GeometryCache::UNKNOWN_ID = -1;
@ -1651,7 +1653,7 @@ QSharedPointer<Resource> GeometryCache::createResource(const QUrl& url, const QS
return geometry.staticCast<Resource>();
}
void GeometryCache::useSimpleDrawPipeline(gpu::Batch& batch) {
void GeometryCache::useSimpleDrawPipeline(gpu::Batch& batch, bool noBlend) {
if (!_standardDrawPipeline) {
auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(standardTransformPNTC_vert)));
auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(standardDrawTexture_frag)));
@ -1660,12 +1662,24 @@ void GeometryCache::useSimpleDrawPipeline(gpu::Batch& batch) {
auto state = std::make_shared<gpu::State>();
// enable decal blend
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
_standardDrawPipeline.reset(gpu::Pipeline::create(program, state));
auto stateNoBlend = std::make_shared<gpu::State>();
auto noBlendPS = gpu::StandardShaderLib::getDrawTextureOpaquePS();
auto programNoBlend = gpu::ShaderPointer(gpu::Shader::createProgram(vs, noBlendPS));
gpu::Shader::makeProgram((*programNoBlend));
_standardDrawPipelineNoBlend.reset(gpu::Pipeline::create(programNoBlend, stateNoBlend));
}
if (noBlend) {
batch.setPipeline(_standardDrawPipelineNoBlend);
} else {
batch.setPipeline(_standardDrawPipeline);
}
batch.setPipeline(_standardDrawPipeline);
}
const float NetworkGeometry::NO_HYSTERESIS = -1.0f;

View file

@ -206,7 +206,7 @@ public:
QSharedPointer<NetworkGeometry> getGeometry(const QUrl& url, const QUrl& fallback = QUrl(), bool delayLoad = false);
/// Set a batch to the simple pipeline, returning the previous pipeline
void useSimpleDrawPipeline(gpu::Batch& batch);
void useSimpleDrawPipeline(gpu::Batch& batch, bool noBlend = false);
protected:
@ -221,6 +221,7 @@ private:
typedef QPair<unsigned int, unsigned int> VerticesIndices;
gpu::PipelinePointer _standardDrawPipeline;
gpu::PipelinePointer _standardDrawPipelineNoBlend;
QHash<float, gpu::BufferPointer> _cubeVerticies;
QHash<Vec2Pair, gpu::BufferPointer> _cubeColors;
gpu::BufferPointer _wireCubeIndexBuffer;

View file

@ -17,6 +17,7 @@
#include <RenderArgs.h>
#include <ViewFrustum.h>
#include "FramebufferCache.h"
#include "DeferredLightingEffect.h"
#include "TextureCache.h"
@ -27,6 +28,26 @@
using namespace render;
void SetupDeferred::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
RenderArgs* args = renderContext->args;
auto primaryFbo = DependencyManager::get<FramebufferCache>()->getPrimaryFramebufferDepthColor();
gpu::Batch batch;
batch.setFramebuffer(nullptr);
batch.setFramebuffer(primaryFbo);
batch.setViewportTransform(args->_viewport);
batch.setStateScissorRect(args->_viewport);
batch.clearFramebuffer(
gpu::Framebuffer::BUFFER_COLOR0 |
gpu::Framebuffer::BUFFER_DEPTH,
vec4(vec3(0), 1), 1.0, 0.0, true);
args->_context->render(batch);
}
void PrepareDeferred::run(const SceneContextPointer& sceneContext, const RenderContextPointer& renderContext) {
DependencyManager::get<DeferredLightingEffect>()->prepare(renderContext->args);
}
@ -41,6 +62,7 @@ void ResolveDeferred::run(const SceneContextPointer& sceneContext, const RenderC
}
RenderDeferredTask::RenderDeferredTask() : Task() {
_jobs.push_back(Job(new SetupDeferred::JobModel("SetupFramebuffer")));
_jobs.push_back(Job(new DrawBackground::JobModel("DrawBackground")));
_jobs.push_back(Job(new PrepareDeferred::JobModel("PrepareDeferred")));
@ -56,7 +78,6 @@ RenderDeferredTask::RenderDeferredTask() : Task() {
auto& renderedOpaques = _jobs.back().getOutput();
_jobs.push_back(Job(new DrawOpaqueDeferred::JobModel("DrawOpaqueDeferred", _jobs.back().getOutput())));
_jobs.push_back(Job(new DrawLight::JobModel("DrawLight")));
_jobs.push_back(Job(new ResetGLState::JobModel()));
_jobs.push_back(Job(new RenderDeferred::JobModel("RenderDeferred")));
_jobs.push_back(Job(new ResolveDeferred::JobModel("ResolveDeferred")));
_jobs.push_back(Job(new FetchItems::JobModel("FetchTransparent",
@ -133,21 +154,12 @@ void DrawOpaqueDeferred::run(const SceneContextPointer& sceneContext, const Rend
batch.setViewTransform(viewMat);
{
GLenum buffers[3];
int bufferCount = 0;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT0;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT1;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT2;
batch._glDrawBuffers(bufferCount, buffers);
const float OPAQUE_ALPHA_THRESHOLD = 0.5f;
args->_alphaThreshold = OPAQUE_ALPHA_THRESHOLD;
}
renderItems(sceneContext, renderContext, inItems, renderContext->_maxDrawnOpaqueItems);
// Before rendering the batch make sure we're in sync with GL state
args->_context->syncCache();
renderContext->args->_context->syncCache();
args->_context->render((*args->_batch));
args->_batch = nullptr;
}
@ -171,21 +183,15 @@ void DrawTransparentDeferred::run(const SceneContextPointer& sceneContext, const
}
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
const float TRANSPARENT_ALPHA_THRESHOLD = 0.0f;
{
GLenum buffers[3];
int bufferCount = 0;
buffers[bufferCount++] = GL_COLOR_ATTACHMENT0;
batch._glDrawBuffers(bufferCount, buffers);
const float TRANSPARENT_ALPHA_THRESHOLD = 0.0f;
args->_alphaThreshold = TRANSPARENT_ALPHA_THRESHOLD;
}
renderItems(sceneContext, renderContext, inItems, renderContext->_maxDrawnTransparentItems);
// Before rendering the batch make sure we're in sync with GL state
args->_context->syncCache();
args->_context->render((*args->_batch));
args->_batch = nullptr;
}
@ -239,17 +245,17 @@ void DrawOverlay3D::run(const SceneContextPointer& sceneContext, const RenderCon
}
batch.setProjectionTransform(projMat);
batch.setViewTransform(viewMat);
batch.setViewportTransform(args->_viewport);
batch.setStateScissorRect(args->_viewport);
batch.setPipeline(getOpaquePipeline());
batch.setResourceTexture(0, args->_whiteTexture);
if (!inItems.empty()) {
batch.clearFramebuffer(gpu::Framebuffer::BUFFER_DEPTH, glm::vec4(), 1.f, 0);
batch.clearFramebuffer(gpu::Framebuffer::BUFFER_DEPTH, glm::vec4(), 1.f, 0, true);
renderItems(sceneContext, renderContext, inItems, renderContext->_maxDrawnOverlay3DItems);
}
// Before rendering the batch make sure we're in sync with GL state
args->_context->syncCache();
args->_context->render((*args->_batch));
args->_batch = nullptr;
args->_whiteTexture.reset();

View file

@ -16,6 +16,13 @@
#include "gpu/Pipeline.h"
class SetupDeferred {
public:
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);
typedef render::Job::Model<SetupDeferred> JobModel;
};
class PrepareDeferred {
public:
void run(const render::SceneContextPointer& sceneContext, const render::RenderContextPointer& renderContext);

View file

@ -120,6 +120,13 @@ void vhacd::VHACDUtil::fattenMeshes(const FBXMesh& mesh, FBXMesh& result,
glm::vec3 p2 = result.vertices[index2];
glm::vec3 av = (p0 + p1 + p2) / 3.0f; // center of the triangular face
glm::vec3 normal = glm::normalize(glm::cross(p1 - p0, p2 - p0));
float threshold = 1.0f / sqrtf(3.0f);
if (normal.y > -threshold && normal.y < threshold) {
// this triangle is more of a wall than a floor; skip it.
continue;
}
float dropAmount = 0;
dropAmount = glm::max(glm::length(p1 - p0), dropAmount);
dropAmount = glm::max(glm::length(p2 - p1), dropAmount);
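For reference, a small sketch of the wall test above (the helper name is hypothetical; the angle reading is my interpretation of the 1/sqrt(3) threshold):
// A triangle counts as "more wall than floor" when the y component of its unit
// normal is within 1/sqrt(3) (~0.577) of zero, i.e. the face is tilted more than
// roughly 54.7 degrees away from horizontal.
bool isWallTriangle(const glm::vec3& unitNormal) {
    const float threshold = 1.0f / sqrtf(3.0f);
    return unitNormal.y > -threshold && unitNormal.y < threshold;
}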