Merge branch 'master' of https://github.com/worklist/hifi into occlusion_culling

ZappoMan 2013-07-10 16:28:51 -07:00
commit 4afda27ee0
28 changed files with 2235 additions and 452 deletions


@ -0,0 +1,44 @@
# Find the OpenNI library
#
# You must provide an OPENNI_ROOT_DIR which contains lib and include directories
#
# Once done this will define
#
# OPENNI_FOUND - system found OpenNI
# OPENNI_INCLUDE_DIRS - the OpenNI include directory
# OPENNI_LIBRARIES - Link this to use OpenNI
#
# Created on 6/28/2013 by Andrzej Kapolka
# Copyright (c) 2013 High Fidelity
#
if (OPENNI_LIBRARIES AND OPENNI_INCLUDE_DIRS)
# in cache already
set(OPENNI_FOUND TRUE)
else (OPENNI_LIBRARIES AND OPENNI_INCLUDE_DIRS)
find_path(OPENNI_INCLUDE_DIRS XnOpenNI.h /usr/include/ni)
if (APPLE)
find_library(OPENNI_LIBRARIES libOpenNI.dylib /usr/lib)
elseif (UNIX)
find_library(OPENNI_LIBRARIES libOpenNI.so /usr/lib)
endif ()
if (OPENNI_INCLUDE_DIRS AND OPENNI_LIBRARIES)
set(OPENNI_FOUND TRUE)
endif (OPENNI_INCLUDE_DIRS AND OPENNI_LIBRARIES)
if (OPENNI_FOUND)
if (NOT OPENNI_FIND_QUIETLY)
message(STATUS "Found OpenNI: ${OPENNI_LIBRARIES}")
endif (NOT OPENNI_FIND_QUIETLY)
else (OPENNI_FOUND)
if (OPENNI_FIND_REQUIRED)
message(FATAL_ERROR "Could not find OpenNI")
endif (OPENNI_FIND_REQUIRED)
endif (OPENNI_FOUND)
# show the OPENNI_INCLUDE_DIRS and OPENNI_LIBRARIES variables only in the advanced view
mark_as_advanced(OPENNI_INCLUDE_DIRS OPENNI_LIBRARIES)
endif (OPENNI_LIBRARIES AND OPENNI_INCLUDE_DIRS)
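
For reference, a minimal sketch of how a project could consume this find module, assuming FindOpenNI.cmake is placed somewhere on the CMAKE_MODULE_PATH; the "example" target and the cmake/modules path are illustrative only (the interface target's actual wiring appears in the CMakeLists.txt hunk below):

    cmake_minimum_required(VERSION 2.8)
    project(example)

    # make the custom find module visible to find_package()
    set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules")

    find_package(OpenNI)

    add_executable(example main.cpp)

    if (OPENNI_FOUND)
        # tell the source that OpenNI is available and link against it
        add_definitions(-DHAVE_OPENNI)
        include_directories(SYSTEM ${OPENNI_INCLUDE_DIRS})
        target_link_libraries(example ${OPENNI_LIBRARIES})
    endif (OPENNI_FOUND)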


@ -65,7 +65,7 @@ if (APPLE)
endif (APPLE)
find_package(Qt4 REQUIRED QtCore QtGui QtNetwork QtOpenGL QtWebKit)
find_package(Qt4 REQUIRED QtCore QtGui QtNetwork QtOpenGL QtWebKit QtSvg)
include(${QT_USE_FILE})
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -isystem ${QT_QTGUI_INCLUDE_DIR}")
@ -95,6 +95,15 @@ find_package(Leap)
find_package(OpenCV)
find_package(ZLIB)
find_package(UVCCameraControl)
find_package(OpenNI)
# let the source know that we have OpenNI/NITE for Kinect
if (OPENNI_FOUND)
add_definitions(-DHAVE_OPENNI)
include_directories(SYSTEM ${OPENNI_INCLUDE_DIRS})
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -isystem ${OPENNI_INCLUDE_DIRS}")
target_link_libraries(${TARGET_NAME} ${OPENNI_LIBRARIES})
endif (OPENNI_FOUND)
# include headers for interface and InterfaceConfig.
include_directories(


@ -0,0 +1,177 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 17.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="124px" height="400px" viewBox="-0.5 0.5 124 400" enable-background="new -0.5 0.5 124 400" xml:space="preserve">
<g>
<g>
<rect x="-0.5" y="120.382" width="62" height="40"/>
</g>
<g>
<g>
<path fill="#999999" d="M13.77,149.864c0.296,0.296,0.665,0.102,0.665,0.102s0.527-0.615,1.101-0.328
c0.811,0.405,1.315-0.113,1.315-0.113l10.611-10.611l-1.419-1.419l-1.161-1.161l-10.61,10.61c0,0-0.575,0.449-0.169,1.261
c0.287,0.573-0.384,1.045-0.384,1.045S13.474,149.569,13.77,149.864"/>
</g>
<g>
<path fill="#CCCCCC" d="M25.913,135.042l2.173-2.173c0,0,1.898-1.905,3.256-0.547c0.68,0.68,1.083,1.627-0.546,3.257
c-1.549,1.548-2.173,2.172-2.173,2.172L25.913,135.042z"/>
</g>
<g>
<rect x="22.984" y="135.651" transform="matrix(-0.7071 -0.7071 0.7071 -0.7071 -52.0828 253.0143)" fill="#CCCCCC" width="6.751" height="3.285"/>
</g>
</g>
<g>
<path fill="#FFFFFF" d="M46.482,138.915h3.697v4.617c-1.004,0.344-2.107,0.516-3.311,0.516c-1.32,0-2.341-0.383-3.062-1.148
s-1.081-1.857-1.081-3.275c0-1.383,0.395-2.459,1.184-3.229s1.895-1.154,3.316-1.154c0.539,0,1.048,0.051,1.526,0.152
s0.896,0.23,1.251,0.387l-0.732,1.816c-0.617-0.305-1.295-0.457-2.033-0.457c-0.676,0-1.198,0.22-1.567,0.659
s-0.554,1.067-0.554,1.884c0,0.801,0.167,1.411,0.501,1.831s0.815,0.63,1.444,0.63c0.344,0,0.66-0.033,0.949-0.1v-1.342h-1.529
L46.482,138.915L46.482,138.915z"/>
</g>
</g>
<g>
<g>
<rect x="-0.5" y="80.5" width="62" height="40"/>
</g>
<g>
<path fill="#FFFFFF" d="M43.84,96.434h2.988c1.164,0,2.028,0.173,2.593,0.519s0.847,0.884,0.847,1.614
c0,0.48-0.123,0.891-0.369,1.23s-0.57,0.559-0.973,0.656v0.059c0.531,0.141,0.916,0.375,1.154,0.703s0.357,0.754,0.357,1.277
c0,0.777-0.292,1.39-0.876,1.837S48.182,105,47.174,105H43.84V96.434z M46.154,99.721h0.697c0.332,0,0.589-0.068,0.771-0.205
s0.272-0.34,0.272-0.609c0-0.48-0.363-0.721-1.09-0.721h-0.65C46.154,98.186,46.154,99.721,46.154,99.721z M46.154,101.414v1.799
h0.814c0.723,0,1.084-0.305,1.084-0.914c0-0.285-0.097-0.504-0.29-0.656s-0.474-0.229-0.841-0.229
C46.921,101.414,46.154,101.414,46.154,101.414z"/>
</g>
<g>
<path fill="#FFFFFF" d="M18,106.06c-3,1-3,3-7,3c2,1,8,4,9,0C20.47,107.181,18,106.06,18,106.06z"/>
<path fill="#CCCCCC" d="M19,105.06l2,2c0,0,3.952-4.712,7-9c3.048-4.287,7.32-10.785,3-7C28.399,93.338,22.048,100.772,19,105.06z
"/>
</g>
</g>
<g>
<g>
<rect x="-0.5" y="40.5" width="62" height="40"/>
</g>
<g>
<g>
<g>
<polygon fill="#A5A5A5" points="26,58.63 39,53.893 39,67.213 26,72.394 "/>
</g>
<g>
<g>
<path fill="#333333" d="M38,54.5v12.59l-12,5.032V58.757l12.204-4.468 M39,53.63l-13,4.873v14.162l13-5.33V53.63L39,53.63z"/>
</g>
</g>
</g>
<g>
<g>
<polygon fill="#BFBFBF" points="13,67.213 13,53.893 25,58.63 25,72.394 "/>
</g>
<g>
<path fill="#BFBFBF" d="M12.887,54.289L25,58.757v13.365L13,67.09V54.5 M13,53.63v13.705l13,5.33V58.503L13,53.63L13,53.63z"/>
</g>
</g>
<g>
<g>
<polygon fill="#FFFFFF" points="13.173,53.63 25.746,48.952 38.318,53.63 25.746,58.309 "/>
</g>
<g>
<path fill="#333333" d="M25.746,49.146l12.049,4.483l-12.049,4.483L13.697,53.63L25.746,49.146 M25.746,48.758L12.65,53.63
l13.096,4.873l13.096-4.873L25.746,48.758L25.746,48.758z"/>
</g>
</g>
</g>
<g>
<path fill="#FFFFFF" d="M46.119,62.383V65.5h-2.314v-8.566h2.807c2.328,0,3.492,0.844,3.492,2.531c0,0.992-0.484,1.76-1.453,2.303
l2.495,3.732h-2.625l-1.816-3.117H46.119z M46.119,60.643h0.434c0.809,0,1.213-0.357,1.213-1.072c0-0.59-0.396-0.885-1.189-0.885
H46.12L46.119,60.643L46.119,60.643z"/>
</g>
<g>
<polygon fill="#333333" points="7,59.5 9,59.5 10,59.5 15,59.5 16,59.5 18,59.5 18,64.5 16,64.5 15,64.5 10,64.5 9,64.5 7,64.5
"/>
<g>
<g>
<rect x="8" y="60.5" fill="#FFFFFF" width="9" height="3"/>
</g>
</g>
</g>
</g>
<g>
<rect x="-0.5" y="0.5" width="62" height="40"/>
</g>
<g>
<g>
<g>
<polygon fill="#A5A5A5" points="26,18.63 39,13.893 39,27.213 26,32.394 "/>
</g>
<g>
<g>
<path fill="#333333" d="M38,14.5v12.59l-12,5.032V18.757l12.204-4.468 M39,13.63l-13,4.873v14.162l13-5.33V13.63L39,13.63z"/>
</g>
</g>
</g>
<g>
<g>
<polygon fill="#BFBFBF" points="13,27.213 13,13.893 25,18.63 25,32.394 "/>
</g>
<g>
<path fill="#BFBFBF" d="M12.887,14.289L25,18.757v13.365L13,27.09V14.5 M13,13.63v13.705l13,5.33V18.503L13,13.63L13,13.63z"/>
</g>
</g>
<g>
<g>
<polygon fill="#FFFFFF" points="13.173,13.63 25.746,8.952 38.318,13.63 25.746,18.309 "/>
</g>
<g>
<path fill="#333333" d="M25.746,9.146l12.049,4.483l-12.049,4.483L13.697,13.63L25.746,9.146 M25.746,8.758L12.65,13.63
l13.096,4.873l13.096-4.873L25.746,8.758L25.746,8.758z"/>
</g>
</g>
</g>
<g>
<path fill="#FFFFFF" d="M48.41,16.934H51L48.193,25.5h-2.725l-2.795-8.566h2.602l1.166,4.342c0.242,0.965,0.375,1.637,0.398,2.016
c0.027-0.273,0.082-0.615,0.164-1.025s0.154-0.732,0.217-0.967L48.41,16.934z"/>
</g>
<g>
<polygon fill="#333333" points="7,19.5 10,19.5 10,16.5 15,16.5 15,19.5 18,19.5 18,24.5 15,24.5 15,27.5 10,27.5 10,24.5 7,24.5
"/>
<g>
<g>
<rect x="11" y="17.5" fill="#FFFFFF" width="3" height="9"/>
</g>
<g>
<rect x="8" y="20.5" fill="#FFFFFF" width="9" height="3"/>
</g>
</g>
</g>
<g>
<g>
<rect x="-0.5" y="160.5" width="62" height="40"/>
</g>
<g>
<path fill="#FFFFFF" d="M50.379,179.205c0,1.441-0.354,2.537-1.061,3.287s-1.742,1.125-3.105,1.125
c-1.344,0-2.374-0.377-3.091-1.131s-1.075-1.852-1.075-3.293c0-1.426,0.356-2.515,1.069-3.267s1.749-1.128,3.108-1.128
c1.363,0,2.396,0.373,3.1,1.119S50.379,177.76,50.379,179.205z M44.484,179.205c0,1.656,0.576,2.484,1.729,2.484
c0.586,0,1.021-0.201,1.304-0.604s0.425-1.029,0.425-1.881c0-0.855-0.144-1.487-0.431-1.896s-0.716-0.612-1.286-0.612
C45.064,176.697,44.484,177.533,44.484,179.205z"/>
</g>
<g>
<g>
<polyline fill="none" stroke="#CCCCCC" stroke-miterlimit="10" points="19.944,187.389 19.944,188.389 19.031,187.981 "/>
<line fill="none" stroke="#CCCCCC" stroke-miterlimit="10" stroke-dasharray="1.9973,1.9973" x1="17.208" y1="187.166" x2="10.825" y2="184.315"/>
<polyline fill="none" stroke="#CCCCCC" stroke-miterlimit="10" points="9.913,183.908 9,183.5 9,182.5 "/>
<line fill="none" stroke="#CCCCCC" stroke-miterlimit="10" stroke-dasharray="2,2" x1="8.5" y1="180.5" x2="8.5" y2="173.5"/>
<polyline fill="none" stroke="#CCCCCC" stroke-miterlimit="10" points="9,172.5 9,171.5 9.927,171.876 "/>
<line fill="none" stroke="#CCCCCC" stroke-miterlimit="10" stroke-dasharray="1.9625,1.9625" x1="11.745" y1="172.615" x2="18.109" y2="175.199"/>
<polyline fill="none" stroke="#CCCCCC" stroke-miterlimit="10" points="19.018,175.568 19.944,175.944 19.944,176.944 "/>
<line fill="none" stroke="#CCCCCC" stroke-miterlimit="10" stroke-dasharray="2,2" x1="19.5" y1="179.5" x2="19.5" y2="186.5"/>
</g>
</g>
<polyline fill="none" stroke="#CCCCCC" stroke-miterlimit="10" stroke-dasharray="2,2" points="19.944,175.944 30,171.5 20,168.5
9,171.5 "/>
<polyline fill="none" stroke="#CCCCCC" stroke-miterlimit="10" stroke-dasharray="2,2" points="20.333,188.611 29.5,183.5
29.5,171.5 "/>
</g>
</svg>



@ -8,6 +8,7 @@
#include <sstream>
#include <stdlib.h>
#include <cmath>
#ifdef _WIN32
#include "Syssocket.h"
@ -63,6 +64,7 @@
#include "Util.h"
#include "renderer/ProgramObject.h"
#include "ui/TextRenderer.h"
#include "Swatch.h"
#include "fvupdater.h"
using namespace std;
@ -198,7 +200,8 @@ Application::Application(int& argc, char** argv, timeval &startup_time) :
_packetCount(0),
_packetsPerSecond(0),
_bytesPerSecond(0),
_bytesCount(0)
_bytesCount(0),
_swatch(NULL)
{
_applicationStartupTime = startup_time;
_window->setWindowTitle("Interface");
@ -274,7 +277,7 @@ Application::Application(int& argc, char** argv, timeval &startup_time) :
FvUpdater::sharedUpdater()->SetFeedURL("https://s3-us-west-1.amazonaws.com/highfidelity/appcast.xml");
FvUpdater::sharedUpdater()->CheckForUpdatesSilent();
#endif
initMenu();
QRect available = desktop()->availableGeometry();
@ -483,30 +486,41 @@ void Application::resizeGL(int width, int height) {
glLoadIdentity();
}
void Application::broadcastToNodes(unsigned char* data, size_t bytes, const char type) {
void Application::controlledBroadcastToNodes(unsigned char* broadcastData, size_t dataBytes,
const char* nodeTypes, int numNodeTypes) {
Application* self = getInstance();
for (int i = 0; i < numNodeTypes; ++i) {
int n = NodeList::getInstance()->broadcastToNodes(data, bytes, &type, 1);
// Intercept data to voxel server when voxels are disabled
if (nodeTypes[i] == NODE_TYPE_VOXEL_SERVER && ! self->_renderVoxels->isChecked()) {
continue;
}
BandwidthMeter::ChannelIndex channel;
switch (type) {
case NODE_TYPE_AGENT:
case NODE_TYPE_AVATAR_MIXER:
channel = BandwidthMeter::AVATARS;
break;
case NODE_TYPE_VOXEL_SERVER:
channel = BandwidthMeter::VOXELS;
break;
default:
return;
// Perform the broadcast for one type
int nReceivingNodes = NodeList::getInstance()->broadcastToNodes(broadcastData, dataBytes, & nodeTypes[i], 1);
// Feed the number of bytes to the corresponding bandwidth meter channel, if one exists (otherwise we're done with this type)
BandwidthMeter::ChannelIndex channel;
switch (nodeTypes[i]) {
case NODE_TYPE_AGENT:
case NODE_TYPE_AVATAR_MIXER:
channel = BandwidthMeter::AVATARS;
break;
case NODE_TYPE_VOXEL_SERVER:
channel = BandwidthMeter::VOXELS;
break;
default:
continue;
}
self->_bandwidthMeter.outputStream(channel).updateValue(nReceivingNodes * dataBytes);
}
getInstance()->_bandwidthMeter.outputStream(channel).updateValue(n * bytes);
}
void Application::sendVoxelServerAddScene() {
char message[100];
sprintf(message,"%c%s",'Z',"add scene");
int messageSize = strlen(message) + 1;
broadcastToNodes((unsigned char*)message, messageSize, NODE_TYPE_VOXEL_SERVER);
controlledBroadcastToNodes((unsigned char*)message, messageSize, & NODE_TYPE_VOXEL_SERVER, 1);
}
void Application::keyPressEvent(QKeyEvent* event) {
@ -716,7 +730,16 @@ void Application::keyPressEvent(QKeyEvent* event) {
deleteVoxelUnderCursor();
}
break;
case Qt::Key_1:
case Qt::Key_2:
case Qt::Key_3:
case Qt::Key_4:
case Qt::Key_5:
case Qt::Key_6:
case Qt::Key_7:
case Qt::Key_8:
_swatch.handleEvent(event->key(), _eyedropperMode->isChecked());
break;
default:
event->ignore();
break;
@ -877,16 +900,16 @@ void Application::wheelEvent(QWheelEvent* event) {
}
}
void sendPingPackets() {
void Application::sendPingPackets() {
char nodeTypesOfInterest[] = {NODE_TYPE_VOXEL_SERVER, NODE_TYPE_AUDIO_MIXER, NODE_TYPE_AVATAR_MIXER};
long long currentTime = usecTimestampNow();
char pingPacket[1 + sizeof(currentTime)];
unsigned char pingPacket[1 + sizeof(currentTime)];
pingPacket[0] = PACKET_HEADER_PING;
memcpy(&pingPacket[1], &currentTime, sizeof(currentTime));
NodeList::getInstance()->broadcastToNodes((unsigned char*)pingPacket, 1 + sizeof(currentTime), nodeTypesOfInterest, 3);
getInstance()->controlledBroadcastToNodes(pingPacket, 1 + sizeof(currentTime),
nodeTypesOfInterest, sizeof(nodeTypesOfInterest));
}
// Every second, check the frame rates and other stuff
@ -944,14 +967,17 @@ void Application::idle() {
// Only run simulation code if more than IDLE_SIMULATE_MSECS have passed since last time we ran
if (diffclock(&_lastTimeIdle, &check) > IDLE_SIMULATE_MSECS) {
// We call processEvents() here because the idle timer takes priority over
// event handling in Qt, so when the framerate gets low events will pile up
// unless we handle them here.
// NOTE - this is commented out for now - causing event processing issues reported by Philip and Ryan
// birarda - July 3rd
// processEvents();
// If we're using multi-touch look, immediately process any
// touch events, and no other events.
// This is necessary because if the idle() call takes longer than the
// interval between idle() calls, the event loop never gets to run,
// and touch events get delayed.
if (_touchLook->isChecked()) {
sendPostedEvents(NULL, QEvent::TouchBegin);
sendPostedEvents(NULL, QEvent::TouchUpdate);
sendPostedEvents(NULL, QEvent::TouchEnd);
}
update(1.0f / _fps);
@ -987,7 +1013,7 @@ void Application::sendAvatarVoxelURLMessage(const QUrl& url) {
message.append((const char*)&ownerID, sizeof(ownerID));
message.append(url.toEncoded());
broadcastToNodes((unsigned char*)message.data(), message.size(), NODE_TYPE_AVATAR_MIXER);
controlledBroadcastToNodes((unsigned char*)message.data(), message.size(), & NODE_TYPE_AVATAR_MIXER, 1);
}
void Application::processAvatarVoxelURLMessage(unsigned char *packetData, size_t dataBytes) {
@ -1224,7 +1250,7 @@ void Application::sendVoxelEditMessage(PACKET_HEADER header, VoxelDetail& detail
int sizeOut;
if (createVoxelEditMessage(header, 0, 1, &detail, bufferOut, sizeOut)){
Application::broadcastToNodes(bufferOut, sizeOut, NODE_TYPE_VOXEL_SERVER);
Application::controlledBroadcastToNodes(bufferOut, sizeOut, & NODE_TYPE_VOXEL_SERVER, 1);
delete[] bufferOut;
}
}
@ -1242,7 +1268,11 @@ void Application::decreaseVoxelSize() {
void Application::increaseVoxelSize() {
_mouseVoxelScale *= 2;
}
void Application::resetSwatchColors() {
_swatch.reset();
}
static QIcon createSwatchIcon(const QColor& color) {
QPixmap map(16, 16);
map.fill(color);
@ -1305,7 +1335,7 @@ bool Application::sendVoxelsOperation(VoxelNode* node, int level, void* extraDat
// if we don't have room in the buffer, then send the previously generated message first
if (args->bufferInUse + codeAndColorLength > MAXIMUM_EDIT_VOXEL_MESSAGE_SIZE) {
broadcastToNodes(args->messageBuffer, args->bufferInUse, NODE_TYPE_VOXEL_SERVER);
controlledBroadcastToNodes(args->messageBuffer, args->bufferInUse, & NODE_TYPE_VOXEL_SERVER, 1);
args->bufferInUse = sizeof(PACKET_HEADER_SET_VOXEL_DESTRUCTIVE) + sizeof(unsigned short int); // reset
}
@ -1337,8 +1367,10 @@ void Application::exportVoxels() {
void Application::importVoxels() {
QString desktopLocation = QDesktopServices::storageLocation(QDesktopServices::DesktopLocation);
QString fileNameString = QFileDialog::getOpenFileName(_glWidget, tr("Import Voxels"), desktopLocation,
tr("Sparse Voxel Octree Files, Square PNG (*.svo *.png)"));
QString fileNameString = QFileDialog::getOpenFileName(
_glWidget, tr("Import Voxels"), desktopLocation,
tr("Sparse Voxel Octree Files, Square PNG, Schematic Files (*.svo *.png *.schematic)"));
QByteArray fileNameAscii = fileNameString.toAscii();
const char* fileName = fileNameAscii.data();
@ -1359,8 +1391,10 @@ void Application::importVoxels() {
}
importVoxels.readFromSquareARGB32Pixels(pixels, pngImage.height());
} else {
} else if (fileNameString.endsWith(".svo", Qt::CaseInsensitive)) {
importVoxels.readFromSVOFile(fileName);
} else {
importVoxels.readFromSchematicFile(fileName);
}
VoxelNode* selectedNode = _voxels.getVoxelAt(_mouseVoxel.x, _mouseVoxel.y, _mouseVoxel.z, _mouseVoxel.s);
@ -1386,7 +1420,7 @@ void Application::importVoxels() {
// If we have voxels left in the packet, then send the packet
if (args.bufferInUse > (sizeof(PACKET_HEADER_SET_VOXEL_DESTRUCTIVE) + sizeof(unsigned short int))) {
broadcastToNodes(args.messageBuffer, args.bufferInUse, NODE_TYPE_VOXEL_SERVER);
controlledBroadcastToNodes(args.messageBuffer, args.bufferInUse, & NODE_TYPE_VOXEL_SERVER, 1);
}
if (calculatedOctCode) {
@ -1438,7 +1472,7 @@ void Application::pasteVoxels() {
// If we have voxels left in the packet, then send the packet
if (args.bufferInUse > (sizeof(PACKET_HEADER_SET_VOXEL_DESTRUCTIVE) + sizeof(unsigned short int))) {
broadcastToNodes(args.messageBuffer, args.bufferInUse, NODE_TYPE_VOXEL_SERVER);
controlledBroadcastToNodes(args.messageBuffer, args.bufferInUse, & NODE_TYPE_VOXEL_SERVER, 1);
}
if (calculatedOctCode) {
@ -1480,12 +1514,13 @@ void Application::initMenu() {
(_testPing = optionsMenu->addAction("Test Ping"))->setCheckable(true);
_testPing->setChecked(true);
(_fullScreenMode = optionsMenu->addAction("Fullscreen", this, SLOT(setFullscreen(bool)), Qt::Key_F))->setCheckable(true);
optionsMenu->addAction("Webcam", &_webcam, SLOT(setEnabled(bool)))->setCheckable(true);
optionsMenu->addAction("Webcam", &_webcam, SLOT(setEnabled(bool)))->setCheckable(true);
optionsMenu->addAction("Go Home", this, SLOT(goHome()));
QMenu* renderMenu = menuBar->addMenu("Render");
(_renderVoxels = renderMenu->addAction("Voxels"))->setCheckable(true);
_renderVoxels->setChecked(true);
_renderVoxels->setShortcut(Qt::Key_V);
_renderVoxels->setShortcut(Qt::SHIFT | Qt::Key_V);
(_renderVoxelTextures = renderMenu->addAction("Voxel Textures"))->setCheckable(true);
(_renderStarsOn = renderMenu->addAction("Stars"))->setCheckable(true);
_renderStarsOn->setChecked(true);
@ -1515,6 +1550,8 @@ void Application::initMenu() {
(_logOn = toolsMenu->addAction("Log"))->setCheckable(true);
_logOn->setChecked(false);
_logOn->setShortcut(Qt::CTRL | Qt::Key_L);
(_oscilloscopeOn = toolsMenu->addAction("Audio Oscilloscope"))->setCheckable(true);
_oscilloscopeOn->setChecked(true);
(_bandwidthDisplayOn = toolsMenu->addAction("Bandwidth Display"))->setCheckable(true);
_bandwidthDisplayOn->setChecked(true);
toolsMenu->addAction("Bandwidth Details", this, SLOT(bandwidthDetails()));
@ -1525,36 +1562,39 @@ void Application::initMenu() {
_voxelModeActions->setExclusive(false); // exclusivity implies one is always checked
(_addVoxelMode = voxelMenu->addAction(
"Add Voxel Mode", this, SLOT(updateVoxelModeActions()), Qt::CTRL | Qt::Key_A))->setCheckable(true);
"Add Voxel Mode", this, SLOT(updateVoxelModeActions()), Qt::Key_V))->setCheckable(true);
_voxelModeActions->addAction(_addVoxelMode);
(_deleteVoxelMode = voxelMenu->addAction(
"Delete Voxel Mode", this, SLOT(updateVoxelModeActions()), Qt::CTRL | Qt::Key_D))->setCheckable(true);
"Delete Voxel Mode", this, SLOT(updateVoxelModeActions()), Qt::Key_R))->setCheckable(true);
_voxelModeActions->addAction(_deleteVoxelMode);
(_colorVoxelMode = voxelMenu->addAction(
"Color Voxel Mode", this, SLOT(updateVoxelModeActions()), Qt::CTRL | Qt::Key_B))->setCheckable(true);
"Color Voxel Mode", this, SLOT(updateVoxelModeActions()), Qt::Key_B))->setCheckable(true);
_voxelModeActions->addAction(_colorVoxelMode);
(_selectVoxelMode = voxelMenu->addAction(
"Select Voxel Mode", this, SLOT(updateVoxelModeActions()), Qt::CTRL | Qt::Key_S))->setCheckable(true);
"Select Voxel Mode", this, SLOT(updateVoxelModeActions()), Qt::Key_O))->setCheckable(true);
_voxelModeActions->addAction(_selectVoxelMode);
(_eyedropperMode = voxelMenu->addAction(
"Get Color Mode", this, SLOT(updateVoxelModeActions()), Qt::CTRL | Qt::Key_G))->setCheckable(true);
"Get Color Mode", this, SLOT(updateVoxelModeActions()), Qt::Key_G))->setCheckable(true);
_voxelModeActions->addAction(_eyedropperMode);
voxelMenu->addAction("Decrease Voxel Size", this, SLOT(decreaseVoxelSize()), QKeySequence::ZoomOut);
voxelMenu->addAction("Increase Voxel Size", this, SLOT(increaseVoxelSize()), QKeySequence::ZoomIn);
voxelMenu->addAction("Reset Swatch Colors", this, SLOT(resetSwatchColors()));
_voxelPaintColor = voxelMenu->addAction("Voxel Paint Color", this,
SLOT(chooseVoxelPaintColor()), Qt::META | Qt::Key_C);
_swatch.setAction(_voxelPaintColor);
QColor paintColor(128, 128, 128);
_voxelPaintColor->setData(paintColor);
_voxelPaintColor->setIcon(createSwatchIcon(paintColor));
(_destructiveAddVoxel = voxelMenu->addAction("Create Voxel is Destructive"))->setCheckable(true);
voxelMenu->addAction("Export Voxels", this, SLOT(exportVoxels()), Qt::CTRL | Qt::Key_E);
voxelMenu->addAction("Import Voxels", this, SLOT(importVoxels()), Qt::CTRL | Qt::Key_I);
voxelMenu->addAction("Cut Voxels", this, SLOT(cutVoxels()), Qt::CTRL | Qt::Key_X);
voxelMenu->addAction("Copy Voxels", this, SLOT(copyVoxels()), Qt::CTRL | Qt::Key_C);
voxelMenu->addAction("Paste Voxels", this, SLOT(pasteVoxels()), Qt::CTRL | Qt::Key_V);
voxelMenu->addAction("Export Voxels", this, SLOT(exportVoxels()), Qt::CTRL | Qt::Key_E);
voxelMenu->addAction("Import Voxels", this, SLOT(importVoxels()), Qt::CTRL | Qt::Key_I);
voxelMenu->addAction("Cut Voxels", this, SLOT(cutVoxels()), Qt::CTRL | Qt::Key_X);
voxelMenu->addAction("Copy Voxels", this, SLOT(copyVoxels()), Qt::CTRL | Qt::Key_C);
voxelMenu->addAction("Paste Voxels", this, SLOT(pasteVoxels()), Qt::CTRL | Qt::Key_V);
QMenu* debugMenu = menuBar->addMenu("Debug");
@ -1680,6 +1720,14 @@ void Application::init() {
printLog("Loaded settings.\n");
sendAvatarVoxelURLMessage(_myAvatar.getVoxels()->getVoxelURL());
_palette.init(_glWidget->width(), _glWidget->height());
_palette.addAction(_addVoxelMode, 0, 0);
_palette.addAction(_deleteVoxelMode, 0, 1);
_palette.addTool(&_swatch);
_palette.addAction(_colorVoxelMode, 0, 2);
_palette.addAction(_eyedropperMode, 0, 3);
_palette.addAction(_selectVoxelMode, 0, 4);
}
const float MAX_AVATAR_EDIT_VELOCITY = 1.0f;
@ -1912,7 +1960,7 @@ void Application::update(float deltaTime) {
if (_bandwidthDialog) {
_bandwidthDialog->update();
}
// Update audio stats for procedural sounds
#ifndef _WIN32
_audio.setLastAcceleration(_myAvatar.getThrust());
@ -1923,11 +1971,11 @@ void Application::update(float deltaTime) {
void Application::updateAvatar(float deltaTime) {
// Update my avatar's head position from gyros and/or webcam
_myAvatar.updateHeadFromGyrosAndOrWebcam(_gyroLook->isChecked(),
glm::vec3(_headCameraPitchYawScale,
_headCameraPitchYawScale,
_headCameraPitchYawScale));
// Update my avatar's state from gyros and/or webcam
_myAvatar.updateFromGyrosAndOrWebcam(_gyroLook->isChecked(),
glm::vec3(_headCameraPitchYawScale,
_headCameraPitchYawScale,
_headCameraPitchYawScale));
if (_serialHeadSensor.isActive()) {
@ -1989,9 +2037,10 @@ void Application::updateAvatar(float deltaTime) {
endOfBroadcastStringWrite += packNodeId(endOfBroadcastStringWrite, nodeList->getOwnerID());
endOfBroadcastStringWrite += _myAvatar.getBroadcastData(endOfBroadcastStringWrite);
broadcastToNodes(broadcastString, endOfBroadcastStringWrite - broadcastString, NODE_TYPE_VOXEL_SERVER);
broadcastToNodes(broadcastString, endOfBroadcastStringWrite - broadcastString, NODE_TYPE_AVATAR_MIXER);
const char nodeTypesOfInterest[] = { NODE_TYPE_VOXEL_SERVER, NODE_TYPE_AVATAR_MIXER };
controlledBroadcastToNodes(broadcastString, endOfBroadcastStringWrite - broadcastString,
nodeTypesOfInterest, sizeof(nodeTypesOfInterest));
// once in a while, send my voxel url
const float AVATAR_VOXEL_URL_SEND_INTERVAL = 1.0f; // seconds
@ -2350,8 +2399,9 @@ void Application::displayOverlay() {
#ifndef _WIN32
_audio.render(_glWidget->width(), _glWidget->height());
_audioScope.render(20, _glWidget->height() - 200);
//_audio.renderEchoCompare(); // PER: Will turn back on to further test echo
if (_oscilloscopeOn->isChecked()) {
_audioScope.render(20, _glWidget->height() - 200);
}
#endif
//noiseTest(_glWidget->width(), _glWidget->height());
@ -2431,7 +2481,52 @@ void Application::displayOverlay() {
// render the webcam input frame
_webcam.renderPreview(_glWidget->width(), _glWidget->height());
_palette.render(_glWidget->width(), _glWidget->height());
if (_eyedropperMode->isChecked() && _voxelPaintColor->data().value<QColor>() != _swatch.getColor()) {
QColor color = _voxelPaintColor->data().value<QColor>();
TextRenderer textRenderer(SANS_FONT_FAMILY, 11, 50);
const char line1[] = "Assign this color to a swatch";
const char line2[] = "by choosing a key from 1 to 8.";
int left = (_glWidget->width() - POPUP_WIDTH - 2 * POPUP_MARGIN) / 2;
int top = _glWidget->height() / 40;
glBegin(GL_POLYGON);
glColor3f(0.0f, 0.0f, 0.0f);
for (double a = M_PI; a < 1.5f * M_PI; a += POPUP_STEP) {
glVertex2f(left + POPUP_MARGIN * cos(a) , top + POPUP_MARGIN * sin(a));
}
for (double a = 1.5f * M_PI; a < 2.0f * M_PI; a += POPUP_STEP) {
glVertex2f(left + POPUP_WIDTH + POPUP_MARGIN * cos(a), top + POPUP_MARGIN * sin(a));
}
for (double a = 0.0f; a < 0.5f * M_PI; a += POPUP_STEP) {
glVertex2f(left + POPUP_WIDTH + POPUP_MARGIN * cos(a), top + POPUP_HEIGHT + POPUP_MARGIN * sin(a));
}
for (double a = 0.5f * M_PI; a < 1.0f * M_PI; a += POPUP_STEP) {
glVertex2f(left + POPUP_MARGIN * cos(a) , top + POPUP_HEIGHT + POPUP_MARGIN * sin(a));
}
glEnd();
glBegin(GL_QUADS);
glColor3f(color.redF(),
color.greenF(),
color.blueF());
glVertex2f(left , top);
glVertex2f(left + SWATCH_WIDTH, top);
glVertex2f(left + SWATCH_WIDTH, top + SWATCH_HEIGHT);
glVertex2f(left , top + SWATCH_HEIGHT);
glEnd();
glColor3f(1.0f, 1.0f, 1.0f);
textRenderer.draw(left + SWATCH_WIDTH + POPUP_MARGIN, top + FIRST_LINE_OFFSET , line1);
textRenderer.draw(left + SWATCH_WIDTH + POPUP_MARGIN, top + SECOND_LINE_OFFSET, line2);
}
else {
_swatch.checkColor();
}
glPopMatrix();
}
@ -2953,6 +3048,7 @@ void Application::eyedropperVoxelUnderCursor() {
}
void Application::goHome() {
printLog("Going Home!\n");
_myAvatar.setPosition(START_LOCATION);
}
@ -3127,7 +3223,8 @@ void Application::loadSettings(QSettings* settings) {
settings->endGroup();
scanMenuBar(&Application::loadAction, settings);
getAvatar()->loadData(settings);
getAvatar()->loadData(settings);
_swatch.loadData(settings);
}
@ -3149,6 +3246,7 @@ void Application::saveSettings(QSettings* settings) {
scanMenuBar(&Application::saveAction, settings);
getAvatar()->saveData(settings);
_swatch.saveData(settings);
}
void Application::importSettings() {


@ -39,6 +39,8 @@
#include "Webcam.h"
#include "renderer/GeometryCache.h"
#include "ui/ChatEntry.h"
#include "ToolsPalette.h"
#include "Swatch.h"
class QAction;
class QActionGroup;
@ -140,6 +142,7 @@ private slots:
void updateVoxelModeActions();
void decreaseVoxelSize();
void increaseVoxelSize();
void resetSwatchColors();
void chooseVoxelPaintColor();
void loadSettings(QSettings* set = NULL);
void saveSettings(QSettings* set = NULL);
@ -159,15 +162,19 @@ private slots:
void renderCoverageMapsV2Recursively(CoverageMapV2* map);
glm::vec2 getScaledScreenPoint(glm::vec2 projectedPoint);
void goHome();
private:
static void broadcastToNodes(unsigned char* data, size_t bytes, const char type);
static void controlledBroadcastToNodes(unsigned char* broadcastData, size_t dataBytes,
const char* nodeTypes, int numNodeTypes);
static void sendVoxelServerAddScene();
static bool sendVoxelsOperation(VoxelNode* node, int level, void* extraData);
static void sendVoxelEditMessage(PACKET_HEADER header, VoxelDetail& detail);
static void sendAvatarVoxelURLMessage(const QUrl& url);
static void processAvatarVoxelURLMessage(unsigned char *packetData, size_t dataBytes);
static void sendVoxelEditMessage(PACKET_HEADER header, VoxelDetail& detail);
static void sendPingPackets();
void initMenu();
void updateFrustumRenderModeAction();
@ -191,7 +198,6 @@ private:
void maybeEditVoxelUnderCursor();
void deleteVoxelUnderCursor();
void eyedropperVoxelUnderCursor();
void goHome();
void resetSensors();
void setMenuShortcutsEnabled(bool enabled);
@ -236,6 +242,7 @@ private:
QAction* _manualFirstPerson; // Whether to force first-person mode
QAction* _manualThirdPerson; // Whether to force third-person mode
QAction* _logOn; // Whether to show on-screen log
QAction* _oscilloscopeOn; // Whether to show the oscilloscope
QAction* _bandwidthDisplayOn; // Whether to show on-screen bandwidth bars
QActionGroup* _voxelModeActions; // The group of voxel edit mode actions
QAction* _addVoxelMode; // Whether add voxel mode is enabled
@ -370,6 +377,8 @@ private:
int _bytesPerSecond;
int _bytesCount;
ToolsPalette _palette;
Swatch _swatch;
};
#endif /* defined(__interface__Application__) */


@ -148,121 +148,121 @@ void Avatar::initializeBodyBalls() {
_bodyBall[ BODY_BALL_HEAD_BASE ].radius = 0.07;
_bodyBall[ BODY_BALL_LEFT_COLLAR ].radius = 0.04;
_bodyBall[ BODY_BALL_LEFT_SHOULDER ].radius = 0.03;
_bodyBall[ BODY_BALL_LEFT_ELBOW ].radius = 0.02;
_bodyBall[ BODY_BALL_LEFT_ELBOW ].radius = 0.02;
_bodyBall[ BODY_BALL_LEFT_WRIST ].radius = 0.02;
_bodyBall[ BODY_BALL_LEFT_FINGERTIPS ].radius = 0.01;
_bodyBall[ BODY_BALL_RIGHT_COLLAR ].radius = 0.04;
_bodyBall[ BODY_BALL_RIGHT_SHOULDER ].radius = 0.03;
_bodyBall[ BODY_BALL_RIGHT_ELBOW ].radius = 0.02;
_bodyBall[ BODY_BALL_RIGHT_SHOULDER ].radius = 0.03;
_bodyBall[ BODY_BALL_RIGHT_ELBOW ].radius = 0.02;
_bodyBall[ BODY_BALL_RIGHT_WRIST ].radius = 0.02;
_bodyBall[ BODY_BALL_RIGHT_FINGERTIPS ].radius = 0.01;
_bodyBall[ BODY_BALL_LEFT_HIP ].radius = 0.04;
_bodyBall[ BODY_BALL_LEFT_HIP ].radius = 0.04;
//_bodyBall[ BODY_BALL_LEFT_MID_THIGH ].radius = 0.03;
_bodyBall[ BODY_BALL_LEFT_KNEE ].radius = 0.025;
_bodyBall[ BODY_BALL_LEFT_HEEL ].radius = 0.025;
_bodyBall[ BODY_BALL_LEFT_TOES ].radius = 0.025;
_bodyBall[ BODY_BALL_RIGHT_HIP ].radius = 0.04;
_bodyBall[ BODY_BALL_RIGHT_KNEE ].radius = 0.025;
_bodyBall[ BODY_BALL_RIGHT_HEEL ].radius = 0.025;
_bodyBall[ BODY_BALL_RIGHT_TOES ].radius = 0.025;
_bodyBall[ BODY_BALL_LEFT_KNEE ].radius = 0.025;
_bodyBall[ BODY_BALL_LEFT_HEEL ].radius = 0.025;
_bodyBall[ BODY_BALL_LEFT_TOES ].radius = 0.025;
_bodyBall[ BODY_BALL_RIGHT_HIP ].radius = 0.04;
_bodyBall[ BODY_BALL_RIGHT_KNEE ].radius = 0.025;
_bodyBall[ BODY_BALL_RIGHT_HEEL ].radius = 0.025;
_bodyBall[ BODY_BALL_RIGHT_TOES ].radius = 0.025;
// specify the parent joint for each ball
_bodyBall[ BODY_BALL_PELVIS ].parentJoint = AVATAR_JOINT_PELVIS;
_bodyBall[ BODY_BALL_PELVIS ].parentJoint = AVATAR_JOINT_PELVIS;
_bodyBall[ BODY_BALL_TORSO ].parentJoint = AVATAR_JOINT_TORSO;
_bodyBall[ BODY_BALL_CHEST ].parentJoint = AVATAR_JOINT_CHEST;
_bodyBall[ BODY_BALL_NECK_BASE ].parentJoint = AVATAR_JOINT_NECK_BASE;
_bodyBall[ BODY_BALL_CHEST ].parentJoint = AVATAR_JOINT_CHEST;
_bodyBall[ BODY_BALL_NECK_BASE ].parentJoint = AVATAR_JOINT_NECK_BASE;
_bodyBall[ BODY_BALL_HEAD_BASE ].parentJoint = AVATAR_JOINT_HEAD_BASE;
_bodyBall[ BODY_BALL_HEAD_TOP ].parentJoint = AVATAR_JOINT_HEAD_TOP;
_bodyBall[ BODY_BALL_LEFT_COLLAR ].parentJoint = AVATAR_JOINT_LEFT_COLLAR;
_bodyBall[ BODY_BALL_LEFT_SHOULDER ].parentJoint = AVATAR_JOINT_LEFT_SHOULDER;
_bodyBall[ BODY_BALL_LEFT_ELBOW ].parentJoint = AVATAR_JOINT_LEFT_ELBOW;
_bodyBall[ BODY_BALL_LEFT_WRIST ].parentJoint = AVATAR_JOINT_LEFT_WRIST;
_bodyBall[ BODY_BALL_LEFT_ELBOW ].parentJoint = AVATAR_JOINT_LEFT_ELBOW;
_bodyBall[ BODY_BALL_LEFT_WRIST ].parentJoint = AVATAR_JOINT_LEFT_WRIST;
_bodyBall[ BODY_BALL_LEFT_FINGERTIPS ].parentJoint = AVATAR_JOINT_LEFT_FINGERTIPS;
_bodyBall[ BODY_BALL_RIGHT_COLLAR ].parentJoint = AVATAR_JOINT_RIGHT_COLLAR;
_bodyBall[ BODY_BALL_RIGHT_SHOULDER ].parentJoint = AVATAR_JOINT_RIGHT_SHOULDER;
_bodyBall[ BODY_BALL_RIGHT_ELBOW ].parentJoint = AVATAR_JOINT_RIGHT_ELBOW;
_bodyBall[ BODY_BALL_RIGHT_SHOULDER ].parentJoint = AVATAR_JOINT_RIGHT_SHOULDER;
_bodyBall[ BODY_BALL_RIGHT_ELBOW ].parentJoint = AVATAR_JOINT_RIGHT_ELBOW;
_bodyBall[ BODY_BALL_RIGHT_WRIST ].parentJoint = AVATAR_JOINT_RIGHT_WRIST;
_bodyBall[ BODY_BALL_RIGHT_FINGERTIPS ].parentJoint = AVATAR_JOINT_RIGHT_FINGERTIPS;
_bodyBall[ BODY_BALL_LEFT_HIP ].parentJoint = AVATAR_JOINT_LEFT_HIP;
_bodyBall[ BODY_BALL_LEFT_KNEE ].parentJoint = AVATAR_JOINT_LEFT_KNEE;
_bodyBall[ BODY_BALL_LEFT_HEEL ].parentJoint = AVATAR_JOINT_LEFT_HEEL;
_bodyBall[ BODY_BALL_LEFT_TOES ].parentJoint = AVATAR_JOINT_LEFT_TOES;
_bodyBall[ BODY_BALL_RIGHT_HIP ].parentJoint = AVATAR_JOINT_RIGHT_HIP;
_bodyBall[ BODY_BALL_RIGHT_KNEE ].parentJoint = AVATAR_JOINT_RIGHT_KNEE;
_bodyBall[ BODY_BALL_RIGHT_HEEL ].parentJoint = AVATAR_JOINT_RIGHT_HEEL;
_bodyBall[ BODY_BALL_RIGHT_TOES ].parentJoint = AVATAR_JOINT_RIGHT_TOES;
_bodyBall[ BODY_BALL_LEFT_HIP ].parentJoint = AVATAR_JOINT_LEFT_HIP;
_bodyBall[ BODY_BALL_LEFT_KNEE ].parentJoint = AVATAR_JOINT_LEFT_KNEE;
_bodyBall[ BODY_BALL_LEFT_HEEL ].parentJoint = AVATAR_JOINT_LEFT_HEEL;
_bodyBall[ BODY_BALL_LEFT_TOES ].parentJoint = AVATAR_JOINT_LEFT_TOES;
_bodyBall[ BODY_BALL_RIGHT_HIP ].parentJoint = AVATAR_JOINT_RIGHT_HIP;
_bodyBall[ BODY_BALL_RIGHT_KNEE ].parentJoint = AVATAR_JOINT_RIGHT_KNEE;
_bodyBall[ BODY_BALL_RIGHT_HEEL ].parentJoint = AVATAR_JOINT_RIGHT_HEEL;
_bodyBall[ BODY_BALL_RIGHT_TOES ].parentJoint = AVATAR_JOINT_RIGHT_TOES;
//_bodyBall[ BODY_BALL_LEFT_MID_THIGH].parentJoint = AVATAR_JOINT_LEFT_HIP;
//_bodyBall[ BODY_BALL_LEFT_MID_THIGH ].parentJoint = AVATAR_JOINT_LEFT_HIP;
// specify the parent offset for each ball
_bodyBall[ BODY_BALL_PELVIS ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_PELVIS ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_TORSO ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_CHEST ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_NECK_BASE ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_CHEST ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_NECK_BASE ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_HEAD_BASE ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_HEAD_TOP ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_COLLAR ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_SHOULDER ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_ELBOW ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_WRIST ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_ELBOW ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_WRIST ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_FINGERTIPS ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_COLLAR ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_SHOULDER ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_ELBOW ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_SHOULDER ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_ELBOW ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_WRIST ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_FINGERTIPS ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_HIP ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_KNEE ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_HEEL ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_TOES ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_HIP ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_KNEE ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_HEEL ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_TOES ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_HIP ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_KNEE ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_HEEL ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_LEFT_TOES ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_HIP ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_KNEE ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_HEEL ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
_bodyBall[ BODY_BALL_RIGHT_TOES ].parentOffset = glm::vec3(0.0, 0.0, 0.0);
//_bodyBall[ BODY_BALL_LEFT_MID_THIGH].parentOffset = glm::vec3(-0.1, -0.1, 0.0);
//_bodyBall[ BODY_BALL_LEFT_MID_THIGH ].parentOffset = glm::vec3(-0.1, -0.1, 0.0);
// specify the parent BALL for each ball
_bodyBall[ BODY_BALL_PELVIS ].parentBall = BODY_BALL_NULL;
_bodyBall[ BODY_BALL_PELVIS ].parentBall = BODY_BALL_NULL;
_bodyBall[ BODY_BALL_TORSO ].parentBall = BODY_BALL_PELVIS;
_bodyBall[ BODY_BALL_CHEST ].parentBall = BODY_BALL_TORSO;
_bodyBall[ BODY_BALL_NECK_BASE ].parentBall = BODY_BALL_CHEST;
_bodyBall[ BODY_BALL_CHEST ].parentBall = BODY_BALL_TORSO;
_bodyBall[ BODY_BALL_NECK_BASE ].parentBall = BODY_BALL_CHEST;
_bodyBall[ BODY_BALL_HEAD_BASE ].parentBall = BODY_BALL_NECK_BASE;
_bodyBall[ BODY_BALL_HEAD_TOP ].parentBall = BODY_BALL_HEAD_BASE;
_bodyBall[ BODY_BALL_LEFT_COLLAR ].parentBall = BODY_BALL_CHEST;
_bodyBall[ BODY_BALL_LEFT_SHOULDER ].parentBall = BODY_BALL_LEFT_COLLAR;
_bodyBall[ BODY_BALL_LEFT_ELBOW ].parentBall = BODY_BALL_LEFT_SHOULDER;
_bodyBall[ BODY_BALL_LEFT_WRIST ].parentBall = BODY_BALL_LEFT_ELBOW;
_bodyBall[ BODY_BALL_LEFT_ELBOW ].parentBall = BODY_BALL_LEFT_SHOULDER;
_bodyBall[ BODY_BALL_LEFT_WRIST ].parentBall = BODY_BALL_LEFT_ELBOW;
_bodyBall[ BODY_BALL_LEFT_FINGERTIPS ].parentBall = BODY_BALL_LEFT_WRIST;
_bodyBall[ BODY_BALL_RIGHT_COLLAR ].parentBall = BODY_BALL_CHEST;
_bodyBall[ BODY_BALL_RIGHT_SHOULDER ].parentBall = BODY_BALL_RIGHT_COLLAR;
_bodyBall[ BODY_BALL_RIGHT_ELBOW ].parentBall = BODY_BALL_RIGHT_SHOULDER;
_bodyBall[ BODY_BALL_RIGHT_SHOULDER ].parentBall = BODY_BALL_RIGHT_COLLAR;
_bodyBall[ BODY_BALL_RIGHT_ELBOW ].parentBall = BODY_BALL_RIGHT_SHOULDER;
_bodyBall[ BODY_BALL_RIGHT_WRIST ].parentBall = BODY_BALL_RIGHT_ELBOW;
_bodyBall[ BODY_BALL_RIGHT_FINGERTIPS ].parentBall = BODY_BALL_RIGHT_WRIST;
_bodyBall[ BODY_BALL_LEFT_HIP ].parentBall = BODY_BALL_PELVIS;
_bodyBall[ BODY_BALL_LEFT_HIP ].parentBall = BODY_BALL_PELVIS;
//_bodyBall[ BODY_BALL_LEFT_MID_THIGH ].parentBall = BODY_BALL_LEFT_HIP;
//_bodyBall[ BODY_BALL_LEFT_MID_THIGH ].parentBall = BODY_BALL_LEFT_HIP;
// _bodyBall[ BODY_BALL_LEFT_KNEE ].parentBall = BODY_BALL_LEFT_MID_THIGH;
_bodyBall[ BODY_BALL_LEFT_KNEE ].parentBall = BODY_BALL_LEFT_HIP;
//_bodyBall[ BODY_BALL_LEFT_KNEE ].parentBall = BODY_BALL_LEFT_MID_THIGH;
_bodyBall[ BODY_BALL_LEFT_KNEE ].parentBall = BODY_BALL_LEFT_HIP;
_bodyBall[ BODY_BALL_LEFT_HEEL ].parentBall = BODY_BALL_LEFT_KNEE;
_bodyBall[ BODY_BALL_LEFT_TOES ].parentBall = BODY_BALL_LEFT_HEEL;
_bodyBall[ BODY_BALL_RIGHT_HIP ].parentBall = BODY_BALL_PELVIS;
_bodyBall[ BODY_BALL_RIGHT_KNEE ].parentBall = BODY_BALL_RIGHT_HIP;
_bodyBall[ BODY_BALL_RIGHT_HEEL ].parentBall = BODY_BALL_RIGHT_KNEE;
_bodyBall[ BODY_BALL_RIGHT_TOES ].parentBall = BODY_BALL_RIGHT_HEEL;
_bodyBall[ BODY_BALL_LEFT_HEEL ].parentBall = BODY_BALL_LEFT_KNEE;
_bodyBall[ BODY_BALL_LEFT_TOES ].parentBall = BODY_BALL_LEFT_HEEL;
_bodyBall[ BODY_BALL_RIGHT_HIP ].parentBall = BODY_BALL_PELVIS;
_bodyBall[ BODY_BALL_RIGHT_KNEE ].parentBall = BODY_BALL_RIGHT_HIP;
_bodyBall[ BODY_BALL_RIGHT_HEEL ].parentBall = BODY_BALL_RIGHT_KNEE;
_bodyBall[ BODY_BALL_RIGHT_TOES ].parentBall = BODY_BALL_RIGHT_HEEL;
/*
// to aid in hand-shaking and hand-holding, the right hand is not collidable
_bodyBall[ BODY_BALL_RIGHT_ELBOW ].isCollidable = false;
_bodyBall[ BODY_BALL_RIGHT_WRIST ].isCollidable = false;
_bodyBall[ BODY_BALL_RIGHT_FINGERTIPS].isCollidable = false;
_bodyBall[ BODY_BALL_RIGHT_ELBOW ].isCollidable = false;
_bodyBall[ BODY_BALL_RIGHT_WRIST ].isCollidable = false;
_bodyBall[ BODY_BALL_RIGHT_FINGERTIPS ].isCollidable = false;
*/
}
@ -285,24 +285,37 @@ void Avatar::reset() {
}
// Update avatar head rotation with sensor data
void Avatar::updateHeadFromGyrosAndOrWebcam(bool gyroLook, const glm::vec3& amplifyAngle) {
void Avatar::updateFromGyrosAndOrWebcam(bool gyroLook, const glm::vec3& amplifyAngle) {
SerialInterface* gyros = Application::getInstance()->getSerialHeadSensor();
Webcam* webcam = Application::getInstance()->getWebcam();
glm::vec3 estimatedPosition, estimatedRotation;
if (gyros->isActive()) {
if (webcam->isActive()) {
estimatedPosition = webcam->getEstimatedPosition();
}
estimatedRotation = gyros->getEstimatedRotation();
} else if (webcam->isActive()) {
estimatedPosition = webcam->getEstimatedPosition();
estimatedRotation = webcam->getEstimatedRotation();
} else {
return;
}
if (webcam->isActive()) {
estimatedPosition = webcam->getEstimatedPosition();
// compute and store the joint rotations
const JointVector& joints = webcam->getEstimatedJoints();
_joints.clear();
for (int i = 0; i < NUM_AVATAR_JOINTS; i++) {
if (joints.size() > i && joints[i].isValid) {
JointData data = { i, joints[i].rotation };
_joints.push_back(data);
if (i == AVATAR_JOINT_CHEST) {
// if we have a chest rotation, don't apply lean based on head
estimatedPosition = glm::vec3();
}
}
}
}
_head.setPitch(estimatedRotation.x * amplifyAngle.x);
_head.setYaw(estimatedRotation.y * amplifyAngle.y);
_head.setRoll(estimatedRotation.z * amplifyAngle.z);
@ -485,9 +498,18 @@ void Avatar::simulate(float deltaTime, Transmitter* transmitter) {
_skeleton.joint[AVATAR_JOINT_TORSO].rotation = glm::quat(glm::radians(glm::vec3(
_head.getLeanForward(), 0.0f, _head.getLeanSideways())));
// update avatar skeleton
_skeleton.update(deltaTime, getOrientation(), _position);
// apply joint data (if any) to skeleton
bool enableHandMovement = true;
for (vector<JointData>::iterator it = _joints.begin(); it != _joints.end(); it++) {
_skeleton.joint[it->jointID].rotation = it->rotation;
// disable hand movement if we have joint info for the right wrist
enableHandMovement &= (it->jointID != AVATAR_JOINT_RIGHT_WRIST);
}
// update avatar skeleton
_skeleton.update(deltaTime, getOrientation(), _position);
//determine the lengths of the body springs now that we have updated the skeleton at least once
if (!_ballSpringsInitialized) {
for (int b = 0; b < NUM_AVATAR_BODY_BALLS; b++) {
@ -518,7 +540,7 @@ void Avatar::simulate(float deltaTime, Transmitter* transmitter) {
}
//update the movement of the hand and process handshaking with other avatars...
updateHandMovementAndTouching(deltaTime);
updateHandMovementAndTouching(deltaTime, enableHandMovement);
_avatarTouch.simulate(deltaTime);
// apply gravity and collision with the ground/floor
@ -698,7 +720,7 @@ void Avatar::setOrientation(const glm::quat& orientation) {
_bodyRoll = eulerAngles.z;
}
void Avatar::updateHandMovementAndTouching(float deltaTime) {
void Avatar::updateHandMovementAndTouching(float deltaTime, bool enableHandMovement) {
glm::quat orientation = getOrientation();
@ -707,12 +729,14 @@ void Avatar::updateHandMovementAndTouching(float deltaTime) {
glm::vec3 up = orientation * IDENTITY_UP;
glm::vec3 front = orientation * IDENTITY_FRONT;
glm::vec3 transformedHandMovement
= right * _movedHandOffset.x * 2.0f
+ up * -_movedHandOffset.y * 2.0f
+ front * -_movedHandOffset.y * 2.0f;
if (enableHandMovement) {
glm::vec3 transformedHandMovement =
right * _movedHandOffset.x * 2.0f +
up * -_movedHandOffset.y * 2.0f +
front * -_movedHandOffset.y * 2.0f;
_skeleton.joint[ AVATAR_JOINT_RIGHT_FINGERTIPS ].position += transformedHandMovement;
_skeleton.joint[ AVATAR_JOINT_RIGHT_FINGERTIPS ].position += transformedHandMovement;
}
if (isMyAvatar()) {
_avatarTouch.setMyBodyPosition(_position);
@ -803,7 +827,9 @@ void Avatar::updateHandMovementAndTouching(float deltaTime) {
//constrain right arm length and re-adjust elbow position as it bends
// NOTE - the following must be called on all avatars - not just _isMine
updateArmIKAndConstraints(deltaTime);
if (enableHandMovement) {
updateArmIKAndConstraints(deltaTime);
}
//Set right hand position and state to be transmitted, and also tell AvatarTouch about it
if (isMyAvatar()) {


@ -87,7 +87,7 @@ public:
void reset();
void simulate(float deltaTime, Transmitter* transmitter);
void updateThrust(float deltaTime, Transmitter * transmitter);
void updateHeadFromGyrosAndOrWebcam(bool gyroLook, const glm::vec3& amplifyAngles);
void updateFromGyrosAndOrWebcam(bool gyroLook, const glm::vec3& amplifyAngles);
void updateFromMouse(int mouseX, int mouseY, int screenWidth, int screenHeight);
void updateFromTouch(float touchAvgDistX, float touchAvgDistY);
void addBodyYaw(float y) {_bodyYaw += y;};
@ -151,10 +151,6 @@ public:
// Get the position/rotation of a single body ball
void getBodyBallTransform(AvatarJointID jointID, glm::vec3& position, glm::quat& rotation) const;
//read/write avatar data
void writeAvatarDataToFile();
void readAvatarDataFromFile();
static void renderJointConnectingCone(glm::vec3 position1, glm::vec3 position2, float radius1, float radius2);
@ -229,7 +225,7 @@ private:
void updateBodyBalls( float deltaTime );
void calculateBoneLengths();
void readSensors();
void updateHandMovementAndTouching(float deltaTime);
void updateHandMovementAndTouching(float deltaTime, bool enableHandMovement);
void updateAvatarCollisions(float deltaTime);
void updateArmIKAndConstraints( float deltaTime );
void updateCollisionWithSphere( glm::vec3 position, float radius, float deltaTime );


@ -68,6 +68,12 @@ void BandwidthMeter::Stream::updateValue(double amount) {
timeval now;
gettimeofday(& now, NULL);
double dt = diffclock(& _prevTime, & now);
// Ignore this value when timer imprecision yields dt = 0
if (dt == 0.0) {
return;
}
memcpy(& _prevTime, & now, sizeof(timeval));
// Compute approximate average
@ -225,6 +231,13 @@ void BandwidthMeter::render(int screenWidth, int screenHeight) {
textYlowerLine, fmtBuf);
glPopMatrix();
// After rendering, indicate that no data has been sent/received since the last feed.
// This way, the meters fall when not continuously fed.
for (int i = 0; i < N_CHANNELS; ++i) {
inputStream(ChannelIndex(i)).updateValue(0);
outputStream(ChannelIndex(i)).updateValue(0);
}
}


@ -189,12 +189,13 @@ void Head::simulate(float deltaTime, bool isMine) {
const float FULLY_CLOSED = 1.0f;
if (_leftEyeBlinkVelocity == 0.0f && _rightEyeBlinkVelocity == 0.0f) {
// no blinking when brows are raised; blink less with increasing loudness
const float ROOT_LOUDNESS_TO_BLINK_INTERVAL = 0.75f;
if (forceBlink || (_browAudioLift < EPSILON && shouldDo(
sqrtf(_averageLoudness) * ROOT_LOUDNESS_TO_BLINK_INTERVAL, deltaTime))) {
const float BASE_BLINK_RATE = 15.0f / 60.0f;
const float ROOT_LOUDNESS_TO_BLINK_INTERVAL = 0.25f;
if (forceBlink || (_browAudioLift < EPSILON && shouldDo(glm::max(1.0f, sqrt(_averageLoudness) *
ROOT_LOUDNESS_TO_BLINK_INTERVAL) / BASE_BLINK_RATE, deltaTime))) {
_leftEyeBlinkVelocity = BLINK_SPEED;
_rightEyeBlinkVelocity = BLINK_SPEED;
}
}
} else {
_leftEyeBlink = glm::clamp(_leftEyeBlink + _leftEyeBlinkVelocity * deltaTime, FULLY_OPEN, FULLY_CLOSED);
_rightEyeBlink = glm::clamp(_rightEyeBlink + _rightEyeBlinkVelocity * deltaTime, FULLY_OPEN, FULLY_CLOSED);
@ -222,11 +223,11 @@ void Head::simulate(float deltaTime, bool isMine) {
if (isMine && _cameraFollowsHead) {
// If we are using gyros and using gyroLook, have the camera follow head but with a null region
// to create stable rendering view with small head movements.
const float CAMERA_FOLLOW_HEAD_RATE_START = 0.05f;
const float CAMERA_FOLLOW_HEAD_RATE_MAX = 0.25f;
const float CAMERA_FOLLOW_HEAD_RATE_RAMP_RATE = 1.5f;
const float CAMERA_STOP_TOLERANCE_DEGREES = 0.25f;
const float CAMERA_START_TOLERANCE_DEGREES = 15.0f;
const float CAMERA_FOLLOW_HEAD_RATE_START = 0.01f;
const float CAMERA_FOLLOW_HEAD_RATE_MAX = 0.5f;
const float CAMERA_FOLLOW_HEAD_RATE_RAMP_RATE = 1.05f;
const float CAMERA_STOP_TOLERANCE_DEGREES = 0.1f;
const float CAMERA_START_TOLERANCE_DEGREES = 2.0f;
float cameraHeadAngleDifference = glm::length(glm::vec2(_pitch - _cameraPitch, _yaw - _cameraYaw));
if (_isCameraMoving) {
_cameraFollowHeadRate = glm::clamp(_cameraFollowHeadRate * CAMERA_FOLLOW_HEAD_RATE_RAMP_RATE,


@ -6,6 +6,7 @@
#include "Skeleton.h"
#include "Util.h"
#include "world.h"
const float BODY_SPRING_DEFAULT_TIGHTNESS = 1000.0f;
const float FLOATING_HEIGHT = 0.13f;
@ -18,12 +19,21 @@ void Skeleton::initialize() {
for (int b = 0; b < NUM_AVATAR_JOINTS; b++) {
joint[b].parent = AVATAR_JOINT_NULL;
joint[b].position = glm::vec3(0.0, 0.0, 0.0);
joint[b].defaultPosePosition = glm::vec3(0.0, 0.0, 0.0);
joint[b].rotation = glm::quat(1.0f, 0.0f, 0.0f, 0.0f);
joint[b].length = 0.0;
joint[b].bindRadius = 1.0f / 8;
}
// put the arms at the side
joint[AVATAR_JOINT_LEFT_ELBOW].rotation = glm::quat(glm::vec3(0.0f, 0.0f, PIf * 0.5f));
joint[AVATAR_JOINT_RIGHT_ELBOW].rotation = glm::quat(glm::vec3(0.0f, 0.0f, -PIf * 0.5f));
// bend the knees
joint[AVATAR_JOINT_LEFT_KNEE].rotation = joint[AVATAR_JOINT_RIGHT_KNEE].rotation =
glm::quat(glm::vec3(PIf / 8.0f, 0.0f, 0.0f));
joint[AVATAR_JOINT_LEFT_HEEL].rotation = joint[AVATAR_JOINT_RIGHT_HEEL].rotation =
glm::quat(glm::vec3(-PIf / 4.0f, 0.0f, 0.0f));
// specify the parental hierarchy
joint[ AVATAR_JOINT_PELVIS ].parent = AVATAR_JOINT_NULL;
joint[ AVATAR_JOINT_TORSO ].parent = AVATAR_JOINT_PELVIS;
@ -80,39 +90,9 @@ void Skeleton::initialize() {
joint[ AVATAR_JOINT_RIGHT_HEEL ].bindPosePosition = glm::vec3( 0.00, -0.23, 0.00 );
joint[ AVATAR_JOINT_RIGHT_TOES ].bindPosePosition = glm::vec3( 0.00, 0.00, -0.06 );
// specify the default pose position
joint[ AVATAR_JOINT_PELVIS ].defaultPosePosition = glm::vec3( 0.0, 0.0, 0.0 );
joint[ AVATAR_JOINT_TORSO ].defaultPosePosition = glm::vec3( 0.0, 0.09, -0.01 );
joint[ AVATAR_JOINT_CHEST ].defaultPosePosition = glm::vec3( 0.0, 0.09, -0.01 );
joint[ AVATAR_JOINT_NECK_BASE ].defaultPosePosition = glm::vec3( 0.0, 0.14, 0.01 );
joint[ AVATAR_JOINT_HEAD_BASE ].defaultPosePosition = glm::vec3( 0.0, 0.04, 0.00 );
joint[ AVATAR_JOINT_HEAD_TOP ].defaultPosePosition = glm::vec3( 0.0, 0.04, 0.00 );
joint[ AVATAR_JOINT_LEFT_COLLAR ].defaultPosePosition = glm::vec3( -0.06, 0.04, 0.01 );
joint[ AVATAR_JOINT_LEFT_SHOULDER ].defaultPosePosition = glm::vec3( -0.05, 0.0, 0.01 );
joint[ AVATAR_JOINT_LEFT_ELBOW ].defaultPosePosition = glm::vec3( 0.0, -0.16, 0.0 );
joint[ AVATAR_JOINT_LEFT_WRIST ].defaultPosePosition = glm::vec3( 0.0, -0.117, 0.0 );
joint[ AVATAR_JOINT_LEFT_FINGERTIPS ].defaultPosePosition = glm::vec3( 0.0, -0.1, 0.0 );
joint[ AVATAR_JOINT_RIGHT_COLLAR ].defaultPosePosition = glm::vec3( 0.06, 0.04, 0.01 );
joint[ AVATAR_JOINT_RIGHT_SHOULDER ].defaultPosePosition = glm::vec3( 0.05, 0.0, 0.01 );
joint[ AVATAR_JOINT_RIGHT_ELBOW ].defaultPosePosition = glm::vec3( 0.0, -0.16, 0.0 );
joint[ AVATAR_JOINT_RIGHT_WRIST ].defaultPosePosition = glm::vec3( 0.0, -0.117, 0.0 );
joint[ AVATAR_JOINT_RIGHT_FINGERTIPS ].defaultPosePosition = glm::vec3( 0.0, -0.1, 0.0 );
joint[ AVATAR_JOINT_LEFT_HIP ].defaultPosePosition = glm::vec3( -0.05, 0.0, 0.02 );
joint[ AVATAR_JOINT_LEFT_KNEE ].defaultPosePosition = glm::vec3( 0.01, -0.25, -0.03 );
joint[ AVATAR_JOINT_LEFT_HEEL ].defaultPosePosition = glm::vec3( 0.01, -0.22, 0.08 );
joint[ AVATAR_JOINT_LEFT_TOES ].defaultPosePosition = glm::vec3( 0.00, -0.03, -0.05 );
joint[ AVATAR_JOINT_RIGHT_HIP ].defaultPosePosition = glm::vec3( 0.05, 0.0, 0.02 );
joint[ AVATAR_JOINT_RIGHT_KNEE ].defaultPosePosition = glm::vec3( -0.01, -0.25, -0.03 );
joint[ AVATAR_JOINT_RIGHT_HEEL ].defaultPosePosition = glm::vec3( -0.01, -0.22, 0.08 );
joint[ AVATAR_JOINT_RIGHT_TOES ].defaultPosePosition = glm::vec3( 0.00, -0.03, -0.05 );
// calculate bone length, absolute bind positions/rotations
for (int b = 0; b < NUM_AVATAR_JOINTS; b++) {
joint[b].length = glm::length(joint[b].defaultPosePosition);
joint[b].length = glm::length(joint[b].bindPosePosition);
if (joint[b].parent == AVATAR_JOINT_NULL) {
joint[b].absoluteBindPosePosition = joint[b].bindPosePosition;
@ -122,7 +102,7 @@ void Skeleton::initialize() {
joint[b].bindPosePosition;
glm::vec3 parentDirection = joint[ joint[b].parent ].absoluteBindPoseRotation * JOINT_DIRECTION;
joint[b].absoluteBindPoseRotation = rotationBetween(parentDirection, joint[b].bindPosePosition) *
joint[ joint[b].parent ].absoluteBindPoseRotation;
joint[ joint[b].parent ].absoluteBindPoseRotation;
}
}
}
@ -140,7 +120,7 @@ void Skeleton::update(float deltaTime, const glm::quat& orientation, glm::vec3 p
joint[b].position = joint[ joint[b].parent ].position;
}
glm::vec3 rotatedJointVector = joint[b].absoluteRotation * joint[b].defaultPosePosition;
glm::vec3 rotatedJointVector = joint[b].absoluteRotation * joint[b].bindPosePosition;
joint[b].position += rotatedJointVector;
}
}


@ -63,7 +63,6 @@ public:
{
AvatarJointID parent; // which joint is this joint connected to?
glm::vec3 position; // the position at the "end" of the joint - in global space
glm::vec3 defaultPosePosition; // the parent relative position when the avatar is in the default pose
glm::vec3 bindPosePosition; // the parent relative position when the avatar is in the "T-pose"
glm::vec3 absoluteBindPosePosition; // the absolute position when the avatar is in the "T-pose"
glm::quat absoluteBindPoseRotation; // the absolute rotation when the avatar is in the "T-pose"

interface/src/Swatch.cpp (new file, 169 lines)

@ -0,0 +1,169 @@
#include "Swatch.h"
#include <iostream>
Swatch::Swatch(QAction* action) :
Tool(action, 0, -1, -1),
_textRenderer(MONO_FONT_FAMILY, 10, 100),
_selected(1) {
}
void Swatch::reset() {
for (int i = 0; i < 8; ++i) {
_colors[i].setRgb(colorBase[i][0],
colorBase[i][1],
colorBase[i][2]);
}
}
QColor Swatch::getColor() {
return _colors[_selected - 1];
}
void Swatch::checkColor() {
if (_action->data().value<QColor>() == _colors[_selected - 1]) {
return;
}
QPixmap map(16, 16);
map.fill(_colors[_selected - 1]);
_action->setData(_colors[_selected - 1]) ;
_action->setIcon(map);
}
void Swatch::saveData(QSettings* settings) {
settings->beginGroup("Swatch");
for (int i(0); i < SWATCH_SIZE; ++i) {
QString rx("R1"), gx("G1"), bx("B1");
rx[1] = '1' + i;
gx[1] = rx[1];
bx[1] = rx[1];
settings->setValue(rx, _colors[i].red());
settings->setValue(gx, _colors[i].green());
settings->setValue(bx, _colors[i].blue());
}
settings->endGroup();
}
void Swatch::loadData(QSettings* settings) {
settings->beginGroup("Swatch");
for (int i = 0; i < SWATCH_SIZE; ++i) {
QString rx("R1"), gx("G1"), bx("B1");
rx[1] = '1' + i;
gx[1] = rx[1];
bx[1] = rx[1];
_colors[i].setRgb(settings->value(rx, colorBase[i][0]).toInt(),
settings->value(gx, colorBase[i][1]).toInt(),
settings->value(bx, colorBase[i][2]).toInt());
}
settings->endGroup();
checkColor();
}
void Swatch::handleEvent(int key, bool getColor) {
int next(0);
switch (key) {
case Qt::Key_1:
next = 1;
break;
case Qt::Key_2:
next = 2;
break;
case Qt::Key_3:
next = 3;
break;
case Qt::Key_4:
next = 4;
break;
case Qt::Key_5:
next = 5;
break;
case Qt::Key_6:
next = 6;
break;
case Qt::Key_7:
next = 7;
break;
case Qt::Key_8:
next = 8;
break;
default:
break;
}
if (getColor) {
if (_action->data().value<QColor>() != _colors[_selected - 1]) {
_selected = next;
_colors[_selected - 1] = _action->data().value<QColor>();
}
} else {
_selected = next;
QPixmap map(16, 16);
map.fill(_colors[_selected - 1]);
_action->setData(_colors[_selected - 1]) ;
_action->setIcon(map);
}
}
void Swatch::render(int width, int height) {
char str[2];
int margin = 0.10f * height;
height = 0.75f * height;
glBegin(GL_QUADS);
glColor3f(0.0f, 0.0f, 0.0f);
glVertex2f(0, 8 * (height - margin) + margin);
glVertex2f(width, 8 * (height - margin) + margin);
glVertex2f(width, 0);
glVertex2f(0, 0);
glEnd();
for (unsigned int i = 0; i < SWATCH_SIZE; ++i) {
glBegin(GL_QUADS);
glColor3f(_colors[i].redF(),
_colors[i].greenF(),
_colors[i].blueF());
glVertex2f(margin , (i + 1) * (height - margin));
glVertex2f(width - margin, (i + 1) * (height - margin));
glVertex2f(width - margin, i * (height - margin) + margin);
glVertex2f(margin , i * (height - margin) + margin);
glEnd();
if (_colors[i].lightness() < 50) {
glBegin(GL_LINES);
glColor3f(1.0f, 1.0f, 1.0f);
glVertex2f(margin , (i + 1) * (height - margin));
glVertex2f(width - margin, (i + 1) * (height - margin));
glVertex2f(width - margin, (i + 1) * (height - margin));
glVertex2f(width - margin, i * (height - margin) + margin);
glVertex2f(width - margin, i * (height - margin) + margin);
glVertex2f(margin , i * (height - margin) + margin);
glVertex2f(margin , i * (height - margin) + margin);
glVertex2f(margin , (i + 1) * (height - margin));
glEnd();
} else {
glColor3f(0.0f, 0.0f, 0.0f);
}
if (_selected == i + 1) {
glBegin(GL_TRIANGLES);
glVertex2f(margin , (i + 1) * (height - margin) - margin);
glVertex2f(width/4 - margin, i * (height - margin) + height / 2.0f);
glVertex2f(margin , i * (height - margin) + margin + margin);
glEnd();
}
sprintf(str, "%d", i + 1);
_textRenderer.draw(3 * width/4, (i + 1) * (height - margin) - 0.2f * height, str);
}
glTranslated(0, 8 * (height - margin) + margin + 0.075f * height, 0);
}

interface/src/Swatch.h Normal file
View file

@ -0,0 +1,43 @@
//
// Swatch.h
// interface
//
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#ifndef __interface__Swatch__
#define __interface__Swatch__
#include "Tool.h"
#include "ui/TextRenderer.h"
static const int SWATCH_SIZE = 8;
static const int colorBase[8][3] = {{237, 175, 0},
{61, 211, 72},
{51, 204, 204},
{63, 169, 245},
{193, 99, 122},
{255, 54, 69},
{124, 36, 36},
{63, 35, 19}};
class Swatch : public Tool {
public:
Swatch(QAction* action);
QColor getColor();
void checkColor();
void saveData(QSettings* settings);
void loadData(QSettings* settings);
void reset();
void render(int width, int height);
void handleEvent(int key, bool getColor);
private:
TextRenderer _textRenderer;
QColor _colors[SWATCH_SIZE];
int _selected;
};
#endif /* defined(__interface__Swatch__) */
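
As a rough usage sketch of the class above (the QSettings organization/application names and the helper are hypothetical, not code from this commit): the swatch can be loaded from settings, driven by the number keys, and saved back.

#include <QAction>
#include <QSettings>
#include "Swatch.h"

// Hypothetical helper showing the intended call order; the QAction would normally
// come from the application's tool menu.
void demoSwatchPersistence(QAction* paintColorAction) {
    Swatch swatch(paintColorAction);
    QSettings settings("HighFidelity", "Interface");  // hypothetical organization/application names

    swatch.loadData(&settings);           // falls back to the colorBase defaults when no keys exist
    swatch.handleEvent(Qt::Key_3, false); // select slot 3 and push its color into the action
    swatch.saveData(&settings);           // writes R1..B8 under the "Swatch" settings group
}
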

interface/src/Tool.cpp Normal file
View file

@ -0,0 +1,51 @@
#include "Tool.h"
#include <QSvgRenderer>
#include <QPainter>
#include <QGLWidget>
Tool::Tool(QAction *action, GLuint texture, int x, int y) :
_action(action),
_texture(texture),
_x(x),
_y(y) {
}
void Tool::setAction(QAction* action) {
_action = action;
}
bool Tool::isActive() {
return _action->isChecked();
}
void Tool::render(int width, int height) {
glEnable(GL_TEXTURE_2D);
glBindTexture(GL_TEXTURE_2D, _texture);
if (_action == 0 || _action->isChecked()) {
glColor3f(1.0f, 1.0f, 1.0f); // reset gl color
} else {
glColor3f(0.3f, 0.3f, 0.3f);
}
glBegin(GL_QUADS);
glTexCoord2f( _x / NUM_TOOLS_COLS, 1.0f - (_y + 1) / NUM_TOOLS_ROWS);
glVertex2f(0 , height);
glTexCoord2f((_x + 1) / NUM_TOOLS_COLS, 1.0f - (_y + 1) / NUM_TOOLS_ROWS);
glVertex2f(width, height);
glTexCoord2f((_x + 1) / NUM_TOOLS_COLS, 1.0f - _y / NUM_TOOLS_ROWS);
glVertex2f(width, 0);
glTexCoord2f( _x / NUM_TOOLS_COLS, 1.0f - _y / NUM_TOOLS_ROWS);
glVertex2f(0 , 0);
glEnd();
glDisable(GL_TEXTURE_2D);
glTranslated(0, 1.10f * height, 0);
}
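
The quad above samples one cell of the NUM_TOOLS_COLS x NUM_TOOLS_ROWS sprite sheet, flipping the t coordinate because row 0 of the palette image is at the top while GL texture coordinates grow upward. A small stand-alone sketch of that cell-to-texture-coordinate mapping (the helper names are hypothetical):

#include <cstdio>

// Normalized texture coordinates of one (x, y) cell in a cols x rows atlas,
// with a vertical flip so that row 0 samples the top of the image.
struct TexRect { float s0, t0, s1, t1; };

static TexRect cellToTexCoords(double x, double y, int cols, int rows) {
    TexRect r;
    r.s0 = (float)(x / cols);              // left edge
    r.s1 = (float)((x + 1) / cols);        // right edge
    r.t0 = (float)(1.0 - (y + 1) / rows);  // bottom edge of the cell
    r.t1 = (float)(1.0 - y / rows);        // top edge of the cell
    return r;
}

int main() {
    TexRect r = cellToTexCoords(1, 0, 2, 10); // second column, first row of the palette sheet
    printf("s: [%.2f, %.2f]  t: [%.2f, %.2f]\n", r.s0, r.s1, r.t0, r.t1);
    return 0;
}
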

interface/src/Tool.h Normal file
View file

@ -0,0 +1,55 @@
//
// Tool.h
// interface
//
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#ifndef __interface__Tool__
#define __interface__Tool__
#include <QAction>
#include "InterfaceConfig.h"
#include "Util.h"
class QAction;
// Number of rows and columns in the SVG file for the tool palette
static const int NUM_TOOLS_ROWS = 10;
static const int NUM_TOOLS_COLS = 2;
static const int SWATCHS_TOOLS_COUNT = 13; // 8 swatches + 5 tools
static const int WIDTH_MIN = 47; // Minimum tools width
static const float TOOLS_RATIO = 40.0f / 60.0f; // ratio height/width of tools icons
static const float PAL_SCREEN_RATIO = 3.0f / 100.0f; // Percentage of the screen width the palette is going to occupy
// Swatch popup consts
static const float POPUP_STEP = 0.05f;
static const float POPUP_MARGIN = 10.0f;
static const int POPUP_WIDTH = 280;
static const int POPUP_HEIGHT = 30;
static const int SWATCH_WIDTH = 64;
static const int SWATCH_HEIGHT = 30;
static const int FIRST_LINE_OFFSET = 12;
static const int SECOND_LINE_OFFSET = 28;
class Tool {
public:
Tool(QAction* action, GLuint texture, int x, int y);
void setAction(QAction* action);
bool isActive();
virtual void render(int width, int height);
protected:
QAction* _action;
GLuint _texture;
// position in the SVG grid
double _x;
double _y;
};
#endif /* defined(__interface__Tool__) */

View file

@ -0,0 +1,79 @@
#include "ToolsPalette.h"
#include <QSvgRenderer>
#include <QPainter>
#include <QGLWidget>
#include <SharedUtil.h>
void ToolsPalette::init(int screenWidth, int screenHeight) {
_width = (PAL_SCREEN_RATIO * screenWidth < WIDTH_MIN) ? WIDTH_MIN : PAL_SCREEN_RATIO * screenWidth;
_height = TOOLS_RATIO * _width;
_left = screenWidth / 150;
_top = (screenHeight - SWATCHS_TOOLS_COUNT * _height) / 2;
// Load SVG
switchToResourcesParentIfRequired();
QSvgRenderer renderer(QString("./resources/images/hifi-interface-tools.svg"));
// Prepare a QImage with the desired characteristics
QImage image(NUM_TOOLS_COLS * _width, NUM_TOOLS_ROWS * _height, QImage::Format_ARGB32);
// Get QPainter that paints to the image
QPainter painter(&image);
renderer.render(&painter);
// get the OpenGL-friendly image
_textureImage = QGLWidget::convertToGLFormat(image);
glGenTextures(1, &_textureID);
glBindTexture(GL_TEXTURE_2D, _textureID);
// generate the texture
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA,
_textureImage.width(),
_textureImage.height(),
0, GL_RGBA, GL_UNSIGNED_BYTE,
_textureImage.bits());
// texture parameters
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
}
void ToolsPalette::addAction(QAction* action, int x, int y) {
Tool* tmp = new Tool(action, _textureID, x, y);
_tools.push_back(tmp);
}
void ToolsPalette::addTool(Tool* tool) {
_tools.push_back(tool);
}
void ToolsPalette::render(int screenWidth, int screenHeight) {
_width = (PAL_SCREEN_RATIO * screenWidth < WIDTH_MIN) ? WIDTH_MIN : PAL_SCREEN_RATIO * screenWidth;
_height = TOOLS_RATIO * _width;
_left = screenWidth / 150;
_top = (screenHeight - SWATCHS_TOOLS_COUNT * _height) / 2;
glPushMatrix();
glTranslated(_left, _top, 0);
bool show = false;
for (unsigned int i = 0; i < _tools.size(); ++i) {
if (_tools[i]->isActive()) {
show = true;
break;
}
}
if (show) {
for (unsigned int i = 0; i < _tools.size(); ++i) {
_tools[i]->render(_width, _height);
}
}
glPopMatrix();
}

View file

@ -0,0 +1,35 @@
//
// ToolsPalette.h
// interface
//
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#ifndef __interface__ToolsPalette__
#define __interface__ToolsPalette__
#include "Tool.h"
#include "Swatch.h"
#include <vector>
class ToolsPalette {
public:
void init(int screenWidth, int screenHeight);
void addAction(QAction* action, int x, int y);
void addTool(Tool* tool);
void render(int screenWidth, int screenHeight);
private:
QImage _textureImage;
GLuint _textureID;
std::vector<Tool*> _tools;
int _top;
int _left;
int _width;
int _height;
};
#endif /* defined(__interface__ToolsPalette__) */

View file

@ -25,7 +25,6 @@
// the standard mono font family
#define MONO_FONT_FAMILY "Courier"
void eulerToOrthonormals(glm::vec3 * angles, glm::vec3 * fwd, glm::vec3 * left, glm::vec3 * up);
float azimuth_to(glm::vec3 head_pos, glm::vec3 source_pos);

View file

@ -21,11 +21,16 @@
using namespace cv;
using namespace std;
// register OpenCV matrix type with Qt metatype system
#ifdef HAVE_OPENNI
using namespace xn;
#endif
// register types with Qt metatype system
int jointVectorMetaType = qRegisterMetaType<JointVector>("JointVector");
int matMetaType = qRegisterMetaType<Mat>("cv::Mat");
int rotatedRectMetaType = qRegisterMetaType<RotatedRect>("cv::RotatedRect");
Webcam::Webcam() : _enabled(false), _active(false), _frameTextureID(0) {
Webcam::Webcam() : _enabled(false), _active(false), _frameTextureID(0), _depthTextureID(0) {
// the grabber simply runs as fast as possible
_grabber = new FrameGrabber();
_grabber->moveToThread(&_grabberThread);
@ -79,9 +84,45 @@ void Webcam::renderPreview(int screenWidth, int screenHeight) {
glTexCoord2f(0, 1);
glVertex2f(left, top + PREVIEW_HEIGHT);
glEnd();
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
if (_depthTextureID != 0) {
glBindTexture(GL_TEXTURE_2D, _depthTextureID);
glBegin(GL_QUADS);
int depthPreviewWidth = _depthWidth * PREVIEW_HEIGHT / _depthHeight;
int depthLeft = screenWidth - depthPreviewWidth - 10;
glTexCoord2f(0, 0);
glVertex2f(depthLeft, top - PREVIEW_HEIGHT);
glTexCoord2f(1, 0);
glVertex2f(depthLeft + depthPreviewWidth, top - PREVIEW_HEIGHT);
glTexCoord2f(1, 1);
glVertex2f(depthLeft + depthPreviewWidth, top);
glTexCoord2f(0, 1);
glVertex2f(depthLeft, top);
glEnd();
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
if (!_joints.isEmpty()) {
glColor3f(1.0f, 0.0f, 0.0f);
glPointSize(4.0f);
glBegin(GL_POINTS);
float projectedScale = PREVIEW_HEIGHT / (float)_depthHeight;
foreach (const Joint& joint, _joints) {
if (joint.isValid) {
glVertex2f(depthLeft + joint.projected.x * projectedScale,
top - PREVIEW_HEIGHT + joint.projected.y * projectedScale);
}
}
glEnd();
glPointSize(1.0f);
}
} else {
glBindTexture(GL_TEXTURE_2D, 0);
glDisable(GL_TEXTURE_2D);
}
glColor3f(1.0f, 1.0f, 1.0f);
glBegin(GL_LINE_LOOP);
Point2f facePoints[4];
_faceRect.points(facePoints);
@ -107,26 +148,45 @@ Webcam::~Webcam() {
delete _grabber;
}
void Webcam::setFrame(const Mat& frame, const RotatedRect& faceRect) {
void Webcam::setFrame(const Mat& frame, int format, const Mat& depth, const RotatedRect& faceRect, const JointVector& joints) {
IplImage image = frame;
glPixelStorei(GL_UNPACK_ROW_LENGTH, image.widthStep / 3);
if (_frameTextureID == 0) {
glGenTextures(1, &_frameTextureID);
glBindTexture(GL_TEXTURE_2D, _frameTextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, _frameWidth = image.width, _frameHeight = image.height, 0, GL_BGR,
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, _frameWidth = image.width, _frameHeight = image.height, 0, format,
GL_UNSIGNED_BYTE, image.imageData);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
printLog("Capturing webcam at %dx%d.\n", _frameWidth, _frameHeight);
printLog("Capturing video at %dx%d.\n", _frameWidth, _frameHeight);
} else {
glBindTexture(GL_TEXTURE_2D, _frameTextureID);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, _frameWidth, _frameHeight, GL_BGR, GL_UNSIGNED_BYTE, image.imageData);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, _frameWidth, _frameHeight, format, GL_UNSIGNED_BYTE, image.imageData);
}
if (!depth.empty()) {
IplImage depthImage = depth;
glPixelStorei(GL_UNPACK_ROW_LENGTH, depthImage.widthStep);
if (_depthTextureID == 0) {
glGenTextures(1, &_depthTextureID);
glBindTexture(GL_TEXTURE_2D, _depthTextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _depthWidth = depthImage.width, _depthHeight = depthImage.height, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, depthImage.imageData);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
printLog("Capturing depth at %dx%d.\n", _depthWidth, _depthHeight);
} else {
glBindTexture(GL_TEXTURE_2D, _depthTextureID);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, _depthWidth, _depthHeight, GL_LUMINANCE,
GL_UNSIGNED_BYTE, depthImage.imageData);
}
}
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
glBindTexture(GL_TEXTURE_2D, 0);
// store our face rect, update our frame count for fps computation
// store our face rect and joints, update our frame count for fps computation
_faceRect = faceRect;
_joints = joints;
_frameCount++;
const int MAX_FPS = 60;
@ -140,33 +200,60 @@ void Webcam::setFrame(const Mat& frame, const RotatedRect& faceRect) {
}
_lastFrameTimestamp = now;
// roll is just the angle of the face rect (correcting for 180 degree rotations)
float roll = faceRect.angle;
if (roll < -90.0f) {
roll += 180.0f;
// see if we have joint data
if (!_joints.isEmpty()) {
_estimatedJoints.resize(NUM_AVATAR_JOINTS);
glm::vec3 origin;
if (_joints[AVATAR_JOINT_LEFT_HIP].isValid && _joints[AVATAR_JOINT_RIGHT_HIP].isValid) {
origin = glm::mix(_joints[AVATAR_JOINT_LEFT_HIP].position, _joints[AVATAR_JOINT_RIGHT_HIP].position, 0.5f);
} else if (_joints[AVATAR_JOINT_TORSO].isValid) {
const glm::vec3 TORSO_TO_PELVIS = glm::vec3(0.0f, -0.09f, -0.01f);
origin = _joints[AVATAR_JOINT_TORSO].position + TORSO_TO_PELVIS;
}
for (int i = 0; i < NUM_AVATAR_JOINTS; i++) {
if (!_joints[i].isValid) {
continue;
}
const float JOINT_SMOOTHING = 0.9f;
_estimatedJoints[i].isValid = true;
_estimatedJoints[i].position = glm::mix(_joints[i].position - origin,
_estimatedJoints[i].position, JOINT_SMOOTHING);
_estimatedJoints[i].rotation = safeMix(_joints[i].rotation,
_estimatedJoints[i].rotation, JOINT_SMOOTHING);
}
_estimatedRotation = safeEulerAngles(_estimatedJoints[AVATAR_JOINT_HEAD_BASE].rotation);
_estimatedPosition = _estimatedJoints[AVATAR_JOINT_HEAD_BASE].position;
} else if (roll > 90.0f) {
roll -= 180.0f;
}
const float ROTATION_SMOOTHING = 0.95f;
_estimatedRotation.z = glm::mix(roll, _estimatedRotation.z, ROTATION_SMOOTHING);
// determine position based on translation and scaling of the face rect
if (_initialFaceRect.size.area() == 0) {
_initialFaceRect = faceRect;
_estimatedPosition = glm::vec3();
} else {
float proportion = sqrtf(_initialFaceRect.size.area() / (float)faceRect.size.area());
const float DISTANCE_TO_CAMERA = 0.333f;
const float POSITION_SCALE = 0.5f;
float z = DISTANCE_TO_CAMERA * proportion - DISTANCE_TO_CAMERA;
glm::vec3 position = glm::vec3(
(faceRect.center.x - _initialFaceRect.center.x) * proportion * POSITION_SCALE / _frameWidth,
(faceRect.center.y - _initialFaceRect.center.y) * proportion * POSITION_SCALE / _frameWidth,
z);
const float POSITION_SMOOTHING = 0.95f;
_estimatedPosition = glm::mix(position, _estimatedPosition, POSITION_SMOOTHING);
// roll is just the angle of the face rect (correcting for 180 degree rotations)
float roll = faceRect.angle;
if (roll < -90.0f) {
roll += 180.0f;
} else if (roll > 90.0f) {
roll -= 180.0f;
}
const float ROTATION_SMOOTHING = 0.95f;
_estimatedRotation.z = glm::mix(roll, _estimatedRotation.z, ROTATION_SMOOTHING);
// determine position based on translation and scaling of the face rect
if (_initialFaceRect.size.area() == 0) {
_initialFaceRect = faceRect;
_estimatedPosition = glm::vec3();
} else {
float proportion = sqrtf(_initialFaceRect.size.area() / (float)faceRect.size.area());
const float DISTANCE_TO_CAMERA = 0.333f;
const float POSITION_SCALE = 0.5f;
float z = DISTANCE_TO_CAMERA * proportion - DISTANCE_TO_CAMERA;
glm::vec3 position = glm::vec3(
(faceRect.center.x - _initialFaceRect.center.x) * proportion * POSITION_SCALE / _frameWidth,
(faceRect.center.y - _initialFaceRect.center.y) * proportion * POSITION_SCALE / _frameWidth,
z);
const float POSITION_SMOOTHING = 0.95f;
_estimatedPosition = glm::mix(position, _estimatedPosition, POSITION_SMOOTHING);
}
}
// note that we have data
@ -176,7 +263,7 @@ void Webcam::setFrame(const Mat& frame, const RotatedRect& faceRect) {
QTimer::singleShot(qMax((int)remaining / 1000, 0), _grabber, SLOT(grabFrame()));
}
FrameGrabber::FrameGrabber() : _capture(0), _searchWindow(0, 0, 0, 0) {
FrameGrabber::FrameGrabber() : _initialized(false), _capture(0), _searchWindow(0, 0, 0, 0) {
}
FrameGrabber::~FrameGrabber() {
@ -185,52 +272,170 @@ FrameGrabber::~FrameGrabber() {
}
}
#ifdef HAVE_OPENNI
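// Note that this mapping swaps left and right (e.g. XN_SKEL_LEFT_SHOULDER drives a right-side
// avatar joint), presumably so that the mirrored view from the sensor moves the avatar naturally.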
static AvatarJointID xnToAvatarJoint(XnSkeletonJoint joint) {
switch (joint) {
case XN_SKEL_HEAD: return AVATAR_JOINT_HEAD_TOP;
case XN_SKEL_NECK: return AVATAR_JOINT_HEAD_BASE;
case XN_SKEL_TORSO: return AVATAR_JOINT_CHEST;
case XN_SKEL_LEFT_SHOULDER: return AVATAR_JOINT_RIGHT_ELBOW;
case XN_SKEL_LEFT_ELBOW: return AVATAR_JOINT_RIGHT_WRIST;
case XN_SKEL_RIGHT_SHOULDER: return AVATAR_JOINT_LEFT_ELBOW;
case XN_SKEL_RIGHT_ELBOW: return AVATAR_JOINT_LEFT_WRIST;
case XN_SKEL_LEFT_HIP: return AVATAR_JOINT_RIGHT_KNEE;
case XN_SKEL_LEFT_KNEE: return AVATAR_JOINT_RIGHT_HEEL;
case XN_SKEL_LEFT_FOOT: return AVATAR_JOINT_RIGHT_TOES;
case XN_SKEL_RIGHT_HIP: return AVATAR_JOINT_LEFT_KNEE;
case XN_SKEL_RIGHT_KNEE: return AVATAR_JOINT_LEFT_HEEL;
case XN_SKEL_RIGHT_FOOT: return AVATAR_JOINT_LEFT_TOES;
default: return AVATAR_JOINT_NULL;
}
}
static int getParentJoint(XnSkeletonJoint joint) {
switch (joint) {
case XN_SKEL_HEAD: return XN_SKEL_NECK;
case XN_SKEL_TORSO: return -1;
case XN_SKEL_LEFT_ELBOW: return XN_SKEL_LEFT_SHOULDER;
case XN_SKEL_LEFT_HAND: return XN_SKEL_LEFT_ELBOW;
case XN_SKEL_RIGHT_ELBOW: return XN_SKEL_RIGHT_SHOULDER;
case XN_SKEL_RIGHT_HAND: return XN_SKEL_RIGHT_ELBOW;
case XN_SKEL_LEFT_KNEE: return XN_SKEL_LEFT_HIP;
case XN_SKEL_LEFT_FOOT: return XN_SKEL_LEFT_KNEE;
case XN_SKEL_RIGHT_KNEE: return XN_SKEL_RIGHT_HIP;
case XN_SKEL_RIGHT_FOOT: return XN_SKEL_RIGHT_KNEE;
default: return XN_SKEL_TORSO;
}
}
static glm::vec3 xnToGLM(const XnVector3D& vector, bool flip = false) {
return glm::vec3(vector.X * (flip ? -1 : 1), vector.Y, vector.Z);
}
static glm::quat xnToGLM(const XnMatrix3X3& matrix) {
glm::quat rotation = glm::quat_cast(glm::mat3(
matrix.elements[0], matrix.elements[1], matrix.elements[2],
matrix.elements[3], matrix.elements[4], matrix.elements[5],
matrix.elements[6], matrix.elements[7], matrix.elements[8]));
return glm::quat(rotation.w, -rotation.x, rotation.y, rotation.z);
}
static void XN_CALLBACK_TYPE newUser(UserGenerator& generator, XnUserID id, void* cookie) {
printLog("Found user %d.\n", id);
generator.GetSkeletonCap().RequestCalibration(id, false);
}
static void XN_CALLBACK_TYPE lostUser(UserGenerator& generator, XnUserID id, void* cookie) {
printLog("Lost user %d.\n", id);
}
static void XN_CALLBACK_TYPE calibrationStarted(SkeletonCapability& capability, XnUserID id, void* cookie) {
printLog("Calibration started for user %d.\n", id);
}
static void XN_CALLBACK_TYPE calibrationCompleted(SkeletonCapability& capability,
XnUserID id, XnCalibrationStatus status, void* cookie) {
if (status == XN_CALIBRATION_STATUS_OK) {
printLog("Calibration completed for user %d.\n", id);
capability.StartTracking(id);
} else {
printLog("Calibration failed to user %d.\n", id);
capability.RequestCalibration(id, true);
}
}
#endif
void FrameGrabber::reset() {
_searchWindow = cv::Rect(0, 0, 0, 0);
#ifdef HAVE_OPENNI
if (_userGenerator.IsValid() && _userGenerator.GetSkeletonCap().IsTracking(_userID)) {
_userGenerator.GetSkeletonCap().RequestCalibration(_userID, true);
}
#endif
}
void FrameGrabber::grabFrame() {
if (_capture == 0) {
if ((_capture = cvCaptureFromCAM(-1)) == 0) {
printLog("Failed to open webcam.\n");
return;
}
const int IDEAL_FRAME_WIDTH = 320;
const int IDEAL_FRAME_HEIGHT = 240;
cvSetCaptureProperty(_capture, CV_CAP_PROP_FRAME_WIDTH, IDEAL_FRAME_WIDTH);
cvSetCaptureProperty(_capture, CV_CAP_PROP_FRAME_HEIGHT, IDEAL_FRAME_HEIGHT);
if (!(_initialized || init())) {
return;
}
int format = GL_BGR;
Mat frame;
JointVector joints;
#ifdef HAVE_OPENNI
if (_depthGenerator.IsValid()) {
_xnContext.WaitAnyUpdateAll();
frame = Mat(_imageMetaData.YRes(), _imageMetaData.XRes(), CV_8UC3, (void*)_imageGenerator.GetImageMap());
format = GL_RGB;
#ifdef __APPLE__
configureCamera(0x5ac, 0x8510, false, 0.975, 0.5, 1.0, 0.5, true, 0.5);
#else
cvSetCaptureProperty(_capture, CV_CAP_PROP_EXPOSURE, 0.5);
cvSetCaptureProperty(_capture, CV_CAP_PROP_CONTRAST, 0.5);
cvSetCaptureProperty(_capture, CV_CAP_PROP_SATURATION, 0.5);
cvSetCaptureProperty(_capture, CV_CAP_PROP_BRIGHTNESS, 0.5);
cvSetCaptureProperty(_capture, CV_CAP_PROP_HUE, 0.5);
cvSetCaptureProperty(_capture, CV_CAP_PROP_GAIN, 0.5);
#endif
switchToResourcesParentIfRequired();
if (!_faceCascade.load("resources/haarcascades/haarcascade_frontalface_alt.xml")) {
printLog("Failed to load Haar cascade for face tracking.\n");
Mat depth = Mat(_depthMetaData.YRes(), _depthMetaData.XRes(), CV_16UC1, (void*)_depthGenerator.GetDepthMap());
const double EIGHT_BIT_MAX = 255;
const double ELEVEN_BIT_MAX = 2047;
depth.convertTo(_grayDepthFrame, CV_8UC1, EIGHT_BIT_MAX / ELEVEN_BIT_MAX);
_userID = 0;
XnUInt16 userCount = 1;
_userGenerator.GetUsers(&_userID, userCount);
if (userCount > 0 && _userGenerator.GetSkeletonCap().IsTracking(_userID)) {
joints.resize(NUM_AVATAR_JOINTS);
const int MAX_ACTIVE_JOINTS = 16;
XnSkeletonJoint activeJoints[MAX_ACTIVE_JOINTS];
XnUInt16 activeJointCount = MAX_ACTIVE_JOINTS;
_userGenerator.GetSkeletonCap().EnumerateActiveJoints(activeJoints, activeJointCount);
XnSkeletonJointTransformation transform;
for (int i = 0; i < activeJointCount; i++) {
AvatarJointID avatarJoint = xnToAvatarJoint(activeJoints[i]);
if (avatarJoint == AVATAR_JOINT_NULL) {
continue;
}
_userGenerator.GetSkeletonCap().GetSkeletonJoint(_userID, activeJoints[i], transform);
XnVector3D projected;
_depthGenerator.ConvertRealWorldToProjective(1, &transform.position.position, &projected);
glm::quat rotation = xnToGLM(transform.orientation.orientation);
int parentJoint = getParentJoint(activeJoints[i]);
if (parentJoint != -1) {
XnSkeletonJointOrientation parentOrientation;
_userGenerator.GetSkeletonCap().GetSkeletonJointOrientation(
_userID, (XnSkeletonJoint)parentJoint, parentOrientation);
rotation = glm::inverse(xnToGLM(parentOrientation.orientation)) * rotation;
}
const float METERS_PER_MM = 1.0f / 1000.0f;
joints[avatarJoint] = Joint(xnToGLM(transform.position.position, true) * METERS_PER_MM,
rotation, xnToGLM(projected));
}
}
}
IplImage* image = cvQueryFrame(_capture);
if (image == 0) {
// try again later
QMetaObject::invokeMethod(this, "grabFrame", Qt::QueuedConnection);
return;
}
// make sure it's in the format we expect
if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U || image->dataOrder != IPL_DATA_ORDER_PIXEL ||
image->origin != 0) {
printLog("Invalid webcam image format.\n");
return;
#endif
if (frame.empty()) {
IplImage* image = cvQueryFrame(_capture);
if (image == 0) {
// try again later
QMetaObject::invokeMethod(this, "grabFrame", Qt::QueuedConnection);
return;
}
// make sure it's in the format we expect
if (image->nChannels != 3 || image->depth != IPL_DEPTH_8U || image->dataOrder != IPL_DATA_ORDER_PIXEL ||
image->origin != 0) {
printLog("Invalid webcam image format.\n");
return;
}
frame = image;
}
// if we don't have a search window (yet), try using the face cascade
Mat frame = image;
int channels = 0;
float ranges[] = { 0, 180 };
const float* range = ranges;
@ -239,7 +444,7 @@ void FrameGrabber::grabFrame() {
_faceCascade.detectMultiScale(frame, faces, 1.1, 6);
if (!faces.empty()) {
_searchWindow = faces.front();
updateHSVFrame(frame);
updateHSVFrame(frame, format);
Mat faceHsv(_hsvFrame, _searchWindow);
Mat faceMask(_mask, _searchWindow);
@ -252,7 +457,7 @@ void FrameGrabber::grabFrame() {
}
RotatedRect faceRect;
if (_searchWindow.area() > 0) {
updateHSVFrame(frame);
updateHSVFrame(frame, format);
calcBackProject(&_hsvFrame, 1, &channels, _histogram, _backProject, &range);
bitwise_and(_backProject, _mask, _backProject);
@ -261,10 +466,74 @@ void FrameGrabber::grabFrame() {
_searchWindow = faceRect.boundingRect();
}
QMetaObject::invokeMethod(Application::getInstance()->getWebcam(), "setFrame",
Q_ARG(cv::Mat, frame), Q_ARG(cv::RotatedRect, faceRect));
Q_ARG(cv::Mat, frame), Q_ARG(int, format), Q_ARG(cv::Mat, _grayDepthFrame),
Q_ARG(cv::RotatedRect, faceRect), Q_ARG(JointVector, joints));
}
void FrameGrabber::updateHSVFrame(const Mat& frame) {
cvtColor(frame, _hsvFrame, CV_BGR2HSV);
bool FrameGrabber::init() {
_initialized = true;
// load our face cascade
switchToResourcesParentIfRequired();
if (!_faceCascade.load("resources/haarcascades/haarcascade_frontalface_alt.xml")) {
printLog("Failed to load Haar cascade for face tracking.\n");
return false;
}
// first try for a Kinect
#ifdef HAVE_OPENNI
_xnContext.Init();
if (_depthGenerator.Create(_xnContext) == XN_STATUS_OK && _imageGenerator.Create(_xnContext) == XN_STATUS_OK &&
_userGenerator.Create(_xnContext) == XN_STATUS_OK &&
_userGenerator.IsCapabilitySupported(XN_CAPABILITY_SKELETON)) {
_depthGenerator.GetMetaData(_depthMetaData);
_imageGenerator.SetPixelFormat(XN_PIXEL_FORMAT_RGB24);
_imageGenerator.GetMetaData(_imageMetaData);
XnCallbackHandle userCallbacks, calibrationStartCallback, calibrationCompleteCallback;
_userGenerator.RegisterUserCallbacks(newUser, lostUser, 0, userCallbacks);
_userGenerator.GetSkeletonCap().RegisterToCalibrationStart(calibrationStarted, 0, calibrationStartCallback);
_userGenerator.GetSkeletonCap().RegisterToCalibrationComplete(calibrationCompleted, 0, calibrationCompleteCallback);
_userGenerator.GetSkeletonCap().SetSkeletonProfile(XN_SKEL_PROFILE_UPPER);
_xnContext.StartGeneratingAll();
return true;
}
#endif
// next, an ordinary webcam
if ((_capture = cvCaptureFromCAM(-1)) == 0) {
printLog("Failed to open webcam.\n");
return false;
}
const int IDEAL_FRAME_WIDTH = 320;
const int IDEAL_FRAME_HEIGHT = 240;
cvSetCaptureProperty(_capture, CV_CAP_PROP_FRAME_WIDTH, IDEAL_FRAME_WIDTH);
cvSetCaptureProperty(_capture, CV_CAP_PROP_FRAME_HEIGHT, IDEAL_FRAME_HEIGHT);
#ifdef __APPLE__
configureCamera(0x5ac, 0x8510, false, 0.975, 0.5, 1.0, 0.5, true, 0.5);
#else
cvSetCaptureProperty(_capture, CV_CAP_PROP_EXPOSURE, 0.5);
cvSetCaptureProperty(_capture, CV_CAP_PROP_CONTRAST, 0.5);
cvSetCaptureProperty(_capture, CV_CAP_PROP_SATURATION, 0.5);
cvSetCaptureProperty(_capture, CV_CAP_PROP_BRIGHTNESS, 0.5);
cvSetCaptureProperty(_capture, CV_CAP_PROP_HUE, 0.5);
cvSetCaptureProperty(_capture, CV_CAP_PROP_GAIN, 0.5);
#endif
return true;
}
void FrameGrabber::updateHSVFrame(const Mat& frame, int format) {
cvtColor(frame, _hsvFrame, format == GL_RGB ? CV_RGB2HSV : CV_BGR2HSV);
inRange(_hsvFrame, Scalar(0, 55, 65), Scalar(180, 256, 256), _mask);
}
Joint::Joint(const glm::vec3& position, const glm::quat& rotation, const glm::vec3& projected) :
isValid(true), position(position), rotation(rotation), projected(projected) {
}
Joint::Joint() : isValid(false) {
}
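
The joint, rotation, and position estimates in setFrame above are all filtered with the same pattern: glm::mix(newSample, previousEstimate, smoothing), a simple exponential moving average where a smoothing factor near 1 keeps most of the previous estimate. A tiny stand-alone illustration of how the 0.9 joint smoothing behaves over a few frames:

#include <glm/glm.hpp>
#include <cstdio>

int main() {
    const float JOINT_SMOOTHING = 0.9f;    // same constant as in Webcam::setFrame
    glm::vec3 estimate(0.0f);              // previous filtered position
    glm::vec3 sample(1.0f, 0.0f, 0.0f);    // the sensor suddenly reports x = 1

    // each frame keeps 90% of the old estimate and takes 10% of the new sample
    for (int frame = 0; frame < 5; frame++) {
        estimate = glm::mix(sample, estimate, JOINT_SMOOTHING);
        printf("frame %d: x = %.3f\n", frame, estimate.x);
    }
    return 0;
}
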

View file

@ -12,11 +12,17 @@
#include <QMetaType>
#include <QObject>
#include <QThread>
#include <QVector>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <opencv2/opencv.hpp>
#ifdef HAVE_OPENNI
#include <XnCppWrapper.h>
#endif
#include "InterfaceConfig.h"
class QImage;
@ -24,6 +30,9 @@ class QImage;
struct CvCapture;
class FrameGrabber;
class Joint;
typedef QVector<Joint> JointVector;
class Webcam : public QObject {
Q_OBJECT
@ -36,6 +45,7 @@ public:
const bool isActive() const { return _active; }
const glm::vec3& getEstimatedPosition() const { return _estimatedPosition; }
const glm::vec3& getEstimatedRotation() const { return _estimatedRotation; }
const JointVector& getEstimatedJoints() const { return _estimatedJoints; }
void reset();
void renderPreview(int screenWidth, int screenHeight);
@ -43,7 +53,8 @@ public:
public slots:
void setEnabled(bool enabled);
void setFrame(const cv::Mat& image, const cv::RotatedRect& faceRect);
void setFrame(const cv::Mat& video, int format, const cv::Mat& depth,
const cv::RotatedRect& faceRect, const JointVector& joints);
private:
@ -54,9 +65,13 @@ private:
bool _active;
int _frameWidth;
int _frameHeight;
int _depthWidth;
int _depthHeight;
GLuint _frameTextureID;
GLuint _depthTextureID;
cv::RotatedRect _faceRect;
cv::RotatedRect _initialFaceRect;
JointVector _joints;
long long _startTimestamp;
int _frameCount;
@ -65,6 +80,7 @@ private:
glm::vec3 _estimatedPosition;
glm::vec3 _estimatedRotation;
JointVector _estimatedJoints;
};
class FrameGrabber : public QObject {
@ -82,8 +98,10 @@ public slots:
private:
void updateHSVFrame(const cv::Mat& frame);
bool init();
void updateHSVFrame(const cv::Mat& frame, int format);
bool _initialized;
CvCapture* _capture;
cv::CascadeClassifier _faceCascade;
cv::Mat _hsvFrame;
@ -91,8 +109,32 @@ private:
cv::SparseMat _histogram;
cv::Mat _backProject;
cv::Rect _searchWindow;
cv::Mat _grayDepthFrame;
#ifdef HAVE_OPENNI
xn::Context _xnContext;
xn::DepthGenerator _depthGenerator;
xn::ImageGenerator _imageGenerator;
xn::UserGenerator _userGenerator;
xn::DepthMetaData _depthMetaData;
xn::ImageMetaData _imageMetaData;
XnUserID _userID;
#endif
};
class Joint {
public:
Joint(const glm::vec3& position, const glm::quat& rotation, const glm::vec3& projected);
Joint();
bool isValid;
glm::vec3 position;
glm::quat rotation;
glm::vec3 projected;
};
Q_DECLARE_METATYPE(JointVector)
Q_DECLARE_METATYPE(cv::Mat)
Q_DECLARE_METATYPE(cv::RotatedRect)
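
The Q_DECLARE_METATYPE declarations above, together with the qRegisterMetaType calls in Webcam.cpp, are what allow cv::Mat, cv::RotatedRect, and JointVector to be passed between threads as arguments of a queued QMetaObject::invokeMethod call. A minimal sketch of that pattern with a hypothetical payload type (the receiver is an ordinary Q_OBJECT class and so needs moc, like any other):

#include <QMetaType>
#include <QMetaObject>
#include <QObject>

// Hypothetical value type standing in for cv::Mat / JointVector.
struct FramePayload {
    int width;
    int height;
};
Q_DECLARE_METATYPE(FramePayload)

class FrameReceiver : public QObject {
    Q_OBJECT
public slots:
    void setPayload(const FramePayload& payload) {
        // runs in the receiver's thread because the connection below is queued
    }
};

// Call once at startup, as Webcam.cpp does with qRegisterMetaType<cv::Mat>("cv::Mat").
static int framePayloadType = qRegisterMetaType<FramePayload>("FramePayload");

// Called from the worker thread, mirroring FrameGrabber's invokeMethod on the Webcam.
void publishFrame(FrameReceiver* receiver, const FramePayload& payload) {
    QMetaObject::invokeMethod(receiver, "setPayload", Qt::QueuedConnection,
                              Q_ARG(FramePayload, payload));
}
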

View file

@ -152,6 +152,13 @@ int AvatarData::getBroadcastData(unsigned char* destinationBuffer) {
}
}
// skeleton joints
*destinationBuffer++ = (unsigned char)_joints.size();
for (vector<JointData>::iterator it = _joints.begin(); it != _joints.end(); it++) {
*destinationBuffer++ = (unsigned char)it->jointID;
destinationBuffer += packOrientationQuatToBytes(destinationBuffer, it->rotation);
}
return destinationBuffer - bufferStart;
}
@ -263,6 +270,16 @@ int AvatarData::parseData(unsigned char* sourceBuffer, int numBytes) {
_handData->setFingerRoots(fingerRoots);
}
// skeleton joints
if (sourceBuffer - startPosition < numBytes) { // safety check
_joints.resize(*sourceBuffer++);
for (vector<JointData>::iterator it = _joints.begin(); it != _joints.end(); it++) {
it->jointID = *sourceBuffer++;
sourceBuffer += unpackOrientationQuatFromBytes(sourceBuffer, it->rotation);
}
}
return sourceBuffer - startPosition;
}
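
The joint block added to the avatar packet above is framed as one byte holding the joint count, then for each joint one byte of joint ID followed by the rotation packed with packOrientationQuatToBytes (whose exact byte layout is defined elsewhere in the shared library and not shown here). A rough stand-alone sketch of that framing, with a hypothetical 16-bit-per-component packer standing in for the real one:

#include <cstdint>
#include <cstring>
#include <vector>
#include <glm/gtc/quaternion.hpp>

// Hypothetical stand-in for packOrientationQuatToBytes: 16-bit fixed point per component.
// The real encoding in the shared library may differ.
static int packQuat(unsigned char* out, const glm::quat& rotation) {
    const float components[4] = { rotation.x, rotation.y, rotation.z, rotation.w };
    for (int i = 0; i < 4; i++) {
        uint16_t fixedPoint = (uint16_t)((components[i] + 1.0f) * 0.5f * 65535.0f);
        memcpy(out + i * sizeof(uint16_t), &fixedPoint, sizeof(uint16_t));
    }
    return 4 * sizeof(uint16_t);
}

struct SketchJointData {
    int jointID;
    glm::quat rotation;
};

// Mirrors the framing used above: a count byte, then an ID byte plus a packed rotation per joint.
static int packJoints(unsigned char* destinationBuffer, const std::vector<SketchJointData>& joints) {
    unsigned char* bufferStart = destinationBuffer;
    *destinationBuffer++ = (unsigned char)joints.size();
    for (size_t i = 0; i < joints.size(); i++) {
        *destinationBuffer++ = (unsigned char)joints[i].jointID;
        destinationBuffer += packQuat(destinationBuffer, joints[i].rotation);
    }
    return destinationBuffer - bufferStart;
}
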

View file

@ -11,6 +11,7 @@
#include <string>
#include <inttypes.h>
#include <vector>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
@ -36,6 +37,8 @@ enum KeyState
DELETE_KEY_DOWN
};
class JointData;
class AvatarData : public NodeData {
public:
AvatarData(Node* owningNode = NULL);
@ -132,14 +135,23 @@ protected:
bool _wantDelta;
bool _wantOcclusionCulling;
std::vector<JointData> _joints;
HeadData* _headData;
HandData* _handData;
private:
// privatize the copy constructor and assignment operator so they cannot be called
AvatarData(const AvatarData&);
AvatarData& operator= (const AvatarData&);
};
class JointData {
public:
int jointID;
glm::quat rotation;
};
// These pack/unpack functions are designed to store specific known types in as efficient a manner
// as possible, taking advantage of the known characteristics of the semantic types.

View file

@ -22,7 +22,7 @@ int SimpleMovingAverage::updateAverage(float sample) {
if (_numSamples > 0) {
_average = (ONE_MINUS_WEIGHTING * _average) + (WEIGHTING * sample);
float eventDelta = (usecTimestampNow() - _lastEventTimestamp) / 1000000;
float eventDelta = (usecTimestampNow() - _lastEventTimestamp) / 1000000.0f;
if (_numSamples > 1) {
_eventDeltaAverage = (ONE_MINUS_WEIGHTING * _eventDeltaAverage) +
@ -46,7 +46,7 @@ void SimpleMovingAverage::reset() {
float SimpleMovingAverage::getEventDeltaAverage() {
return (ONE_MINUS_WEIGHTING * _eventDeltaAverage) +
(WEIGHTING * ((usecTimestampNow() - _lastEventTimestamp) / 1000000));
(WEIGHTING * ((usecTimestampNow() - _lastEventTimestamp) / 1000000.0f));
}
float SimpleMovingAverage::getAverageSampleValuePerSecond() {

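
The two changes above matter because usecTimestampNow() deltas are integers: dividing by the integer literal 1000000 performs integer division first, so any delta shorter than a second collapses to 0 before the conversion to float. Dividing by 1000000.0f promotes the delta to float and keeps the fractional seconds. A tiny illustration:

#include <cstdio>

int main() {
    long long deltaUsecs = 250000;              // a 250 ms delta, as usecTimestampNow() differences are

    float truncated = deltaUsecs / 1000000;     // integer division first: 0, then converted to 0.0f
    float correct = deltaUsecs / 1000000.0f;    // float division: 0.25f

    printf("truncated = %f, correct = %f\n", truncated, correct);
    return 0;
}
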
View file

@ -15,4 +15,9 @@ include(${MACRO_DIR}/IncludeGLM.cmake)
include_glm(${TARGET_NAME} ${ROOT_DIR})
include(${MACRO_DIR}/LinkHifiLibrary.cmake)
link_hifi_library(shared ${TARGET_NAME} ${ROOT_DIR})
link_hifi_library(shared ${TARGET_NAME} ${ROOT_DIR})
# link ZLIB
find_package(ZLIB)
include_directories(${ZLIB_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${ZLIB_LIBRARIES})

View file

@ -0,0 +1,245 @@
//
// Tags.cpp
// hifi
//
// Created by Clement Brisset on 7/3/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#include "Tags.h"
#include <Log.h>
#include <zlib.h>
#include <zconf.h>
#include <iostream>
Tag::Tag(int tagId, std::stringstream &ss) : _tagId(tagId) {
int size = ss.get() << 8 | ss.get();
_name.clear();
for (int i = 0; i < size; ++i) { _name += ss.get();
}
}
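
One portability note on the readers in this file: expressions such as ss.get() << 8 | ss.get() rely on the two get() calls being evaluated left to right, which the C++ standard does not guarantee for the operands of |. A small sketch of explicitly sequenced big-endian readers that avoid the issue (the helper names are hypothetical, not part of this commit):

#include <cstdint>
#include <sstream>

// Each get() happens in its own statement, so the byte order cannot depend on the compiler.
static int16_t readBigEndian16(std::stringstream& ss) {
    int high = ss.get();
    int low = ss.get();
    return (int16_t)((high << 8) | low);
}

static int32_t readBigEndian32(std::stringstream& ss) {
    int32_t value = 0;
    for (int i = 0; i < 4; i++) {
        value = (value << 8) | ss.get();
    }
    return value;
}
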
Tag* Tag::readTag(int tagId, std::stringstream &ss) {
switch (tagId) {
case TAG_Byte:
return new TagByte(ss);
case TAG_Short:
return new TagShort(ss);
case TAG_Int:
return new TagInt(ss);
case TAG_Long:
return new TagLong(ss);
case TAG_Float:
return new TagFloat(ss);
case TAG_Double:
return new TagDouble(ss);
case TAG_Byte_Array:
return new TagByteArray(ss);
case TAG_String:
return new TagString(ss);
case TAG_List:
return new TagList(ss);
case TAG_Compound:
return new TagCompound(ss);
case TAG_Int_Array:
return new TagIntArray(ss);
default:
return NULL;
}
}
TagByte::TagByte(std::stringstream &ss) : Tag(TAG_Byte, ss) {
_data = ss.get();
}
TagShort::TagShort(std::stringstream &ss) : Tag(TAG_Short, ss) {
_data = ss.get() << 8 | ss.get();
}
TagInt::TagInt(std::stringstream &ss) : Tag(TAG_Int, ss) {
_data = ss.get() << 24 | ss.get() << 16 | ss.get() << 8 | ss.get();
}
TagLong::TagLong(std::stringstream &ss) : Tag(TAG_Long, ss) {
_data = (((int64_t) ss.get()) << 56 | ((int64_t) ss.get()) << 48
|((int64_t) ss.get()) << 40 | ((int64_t) ss.get()) << 32
| ss.get() << 24 | ss.get() << 16
| ss.get() << 8 | ss.get());
}
// We don't need Float and Double values, so we just skip over their bytes
TagFloat::TagFloat(std::stringstream &ss) : Tag(TAG_Float, ss) {
ss.seekg(4, ss.cur);
}
TagDouble::TagDouble(std::stringstream &ss) : Tag(TAG_Double, ss) {
ss.seekg(8, ss.cur);
}
TagByteArray::TagByteArray(std::stringstream &ss) : Tag(TAG_Byte_Array, ss) {
_size = ss.get() << 24 | ss.get() << 16 | ss.get() << 8 | ss.get();
_data = new char[_size];
for (int i = 0; i < _size; ++i) {
_data[i] = ss.get();
}
}
TagString::TagString(std::stringstream &ss) : Tag(TAG_String, ss) {
_size = ss.get() << 8 | ss.get();
for (int i = 0; i < _size; ++i) {
_data += ss.get();
}
}
TagList::TagList(std::stringstream &ss) :
Tag(TAG_List, ss) {
_tagId = ss.get();
_size = ss.get() << 24 | ss.get() << 16 | ss.get() << 8 | ss.get();
for (int i = 0; i < _size; ++i) {
// list elements are unnamed, so push back two zero bytes to satisfy the
// two-byte name-length read in the Tag constructor
ss.putback(0);
ss.putback(0);
_data.push_back(readTag(_tagId, ss));
}
}
TagCompound::TagCompound(std::stringstream &ss) :
Tag(TAG_Compound, ss),
_size(0),
_width(0),
_length(0),
_height(0),
_blocksId(NULL),
_blocksData(NULL)
{
int tagId;
while (TAG_End != (tagId = ss.get())) {
_data.push_back(readTag(tagId, ss));
++_size;
if (NULL == _data.back()) {
_blocksId = NULL;
_blocksData = NULL;
return;
} else if (TAG_Short == tagId) {
if ("Width" == _data.back()->getName()) {
_width = ((TagShort*) _data.back())->getData();
} else if ("Height" == _data.back()->getName()) {
_height = ((TagShort*) _data.back())->getData();
} else if ("Length" == _data.back()->getName()) {
_length = ((TagShort*) _data.back())->getData();
}
} else if (TAG_Byte_Array == tagId) {
if ("Blocks" == _data.back()->getName()) {
_blocksId = ((TagByteArray*) _data.back())->getData();
} else if ("Data" == _data.back()->getName()) {
_blocksData = ((TagByteArray*) _data.back())->getData();
}
}
}
}
TagIntArray::TagIntArray(std::stringstream &ss) : Tag(TAG_Int_Array, ss) {
_size = ss.get() << 24 | ss.get() << 16 | ss.get() << 8 | ss.get();
_data = new int[_size];
for (int i = 0; i < _size; ++i) {
_data[i] = ss.get();
}
}
int retrieveData(std::string filename, std::stringstream &ss) {
std::ifstream file(filename.c_str(), std::ios::binary);
int type = file.peek();
if (type == 0x0A) { // uncompressed NBT: the root TAG_Compound id
ss.flush();
ss << file.rdbuf(); // copy the whole file into the stream
return 0;
}
if (type == 0x1F) { // first byte of the gzip magic number (0x1f 0x8b)
return ungzip(file, ss);
}
return 1;
}
int ungzip(std::ifstream &file, std::stringstream &ss) {
std::string gzipedBytes;
gzipedBytes.clear();
ss.flush();
while (!file.eof()) {
gzipedBytes += (char) file.get();
}
file.close();
if (gzipedBytes.size() == 0) {
ss << gzipedBytes;
return 0;
}
unsigned int full_length = gzipedBytes.size();
unsigned int half_length = gzipedBytes.size()/2;
unsigned int uncompLength = full_length;
char* uncomp = (char*) calloc(sizeof(char), uncompLength);
z_stream strm;
strm.next_in = (Bytef *) gzipedBytes.c_str();
strm.avail_in = full_length;
strm.total_out = 0;
strm.zalloc = Z_NULL;
strm.zfree = Z_NULL;
bool done = false;
if (inflateInit2(&strm, (16 + MAX_WBITS)) != Z_OK) {
free(uncomp);
return 1;
}
while (!done) {
// If our output buffer is too small
if (strm.total_out >= uncompLength) {
// Increase size of output buffer
char* uncomp2 = (char*) calloc(sizeof(char), uncompLength + half_length);
memcpy(uncomp2, uncomp, uncompLength);
uncompLength += half_length;
free(uncomp);
uncomp = uncomp2;
}
strm.next_out = (Bytef *) (uncomp + strm.total_out);
strm.avail_out = uncompLength - strm.total_out;
// Inflate another chunk.
int err = inflate (&strm, Z_SYNC_FLUSH);
if (err == Z_STREAM_END) {
done = true;
} else if (err != Z_OK) {
break;
}
}
if (inflateEnd (&strm) != Z_OK) {
free(uncomp);
return 1;
}
for (size_t i = 0; i < strm.total_out; ++i) {
ss << uncomp[i];
}
free(uncomp);
return 0;
}

libraries/voxels/src/Tags.h Normal file
View file

@ -0,0 +1,175 @@
//
// Tags.h
// hifi
//
// Created by Clement Brisset on 7/3/13.
// Copyright (c) 2013 High Fidelity, Inc. All rights reserved.
//
#ifndef __hifi__Tags__
#define __hifi__Tags__
#include <cstdlib>
#include <cstring>
#include <fstream>
#include <sstream>
#include <list>
#define TAG_End 0
#define TAG_Byte 1
#define TAG_Short 2
#define TAG_Int 3
#define TAG_Long 4
#define TAG_Float 5
#define TAG_Double 6
#define TAG_Byte_Array 7
#define TAG_String 8
#define TAG_List 9
#define TAG_Compound 10
#define TAG_Int_Array 11
int retrieveData(std::string filename, std::stringstream &ss);
int ungzip(std::ifstream &file, std::stringstream &ss);
class Tag {
public:
Tag(int tagId, std::stringstream &ss);
int getTagId() const {return _tagId;}
std::string getName () const {return _name; }
static Tag* readTag(int tagId, std::stringstream &ss);
protected:
int _tagId;
std::string _name;
};
class TagByte : public Tag {
public:
TagByte(std::stringstream &ss);
int8_t getData() const {return _data;}
private:
int8_t _data;
};
class TagShort : public Tag {
public:
TagShort(std::stringstream &ss);
int16_t getData() const {return _data;}
private:
int16_t _data;
};
class TagInt : public Tag {
public:
TagInt(std::stringstream &ss);
int32_t getData() const {return _data;}
private:
int32_t _data;
};
class TagLong : public Tag {
public:
TagLong(std::stringstream &ss);
int64_t getData() const {return _data;}
private:
int64_t _data;
};
class TagFloat : public Tag {
public:
TagFloat(std::stringstream &ss);
};
class TagDouble : public Tag {
public:
TagDouble(std::stringstream &ss);
};
class TagByteArray : public Tag {
public:
TagByteArray(std::stringstream &ss);
int getSize() const {return _size;}
char* getData() const {return _data;}
private:
int _size;
char* _data;
};
class TagString : public Tag {
public:
TagString(std::stringstream &ss);
int getSize() const {return _size;}
std::string getData() const {return _data;}
private:
int _size;
std::string _data;
};
class TagList : public Tag {
public:
TagList(std::stringstream &ss);
int getTagId() const {return _tagId;}
int getSize () const {return _size; }
std::list<Tag*> getData () const {return _data; }
private:
int _tagId;
int _size;
std::list<Tag*> _data;
};
class TagCompound : public Tag {
public:
TagCompound(std::stringstream &ss);
int getSize () const {return _size; }
std::list<Tag*> getData () const {return _data; }
int getWidth () const {return _width; }
int getLength () const {return _length; }
int getHeight () const {return _height; }
char* getBlocksId () const {return _blocksId; }
char* getBlocksData() const {return _blocksData;}
private:
int _size;
std::list<Tag*> _data;
// Specific to schematics file
int _width;
int _length;
int _height;
char* _blocksData;
char* _blocksId;
};
class TagIntArray : public Tag {
public:
TagIntArray(std::stringstream &ss);
~TagIntArray() {delete[] _data;}
int getSize() const {return _size;}
int* getData() const {return _data;}
private:
int _size;
int* _data;
};
#endif /* defined(__hifi__Tags__) */
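
A hypothetical end-to-end usage sketch of the API above: load a schematic file with retrieveData, check that the root tag is a compound, and read its dimensions (the file-name handling and messages here are illustrative, not code from this commit):

#include <cstdio>
#include <sstream>
#include "Tags.h"

int main(int argc, char** argv) {
    if (argc < 2) {
        printf("usage: %s <file.schematic>\n", argv[0]);
        return 1;
    }
    std::stringstream ss;
    if (retrieveData(argv[1], ss) != 0) {  // handles both raw and gzipped files
        printf("Could not read %s\n", argv[1]);
        return 1;
    }
    if (ss.get() != TAG_Compound) {        // the root tag id byte
        printf("Root tag is not a compound.\n");
        return 1;
    }
    TagCompound root(ss);                  // reads the root name, then every child tag
    printf("Schematic is %d x %d x %d, blocks id array at %p\n",
           root.getWidth(), root.getHeight(), root.getLength(), (void*)root.getBlocksId());
    return 0;
}
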

File diff suppressed because it is too large

View file

@ -137,6 +137,8 @@ public:
bool readFromSVOFile(const char* filename);
// reads voxels from square image with alpha as a Y-axis
bool readFromSquareARGB32Pixels(const uint32_t* pixels, int dimension);
bool readFromSchematicFile(const char* filename);
void computeBlockColor(int id, int data, int& r, int& g, int& b, int& create);
unsigned long getVoxelCount();