Merge branch 'master' of https://github.com/highfidelity/hifi into metavoxels
commit afb5711879
13 changed files with 299 additions and 106 deletions

@@ -23,10 +23,10 @@ function printVector(string, vector) {
}

var CHANCE_OF_MOVING = 0.005;
var CHANCE_OF_SOUND = 0.005;
var CHANCE_OF_SOUND = 0.000;
var CHANCE_OF_HEAD_TURNING = 0.05;
var CHANCE_OF_BIG_MOVE = 0.1;
var CHANCE_OF_WAVING = 0.005; // Currently this isn't working
var CHANCE_OF_WAVING = 0.009;

var shouldReceiveVoxels = true;
var VOXEL_FPS = 60.0;

@@ -39,12 +39,16 @@ var isWaving = false;
var waveFrequency = 0.0;
var waveAmplitude = 0.0;

var X_MIN = 0.0;
var X_MAX = 5.0;
var Z_MIN = 0.0;
var Z_MAX = 5.0;
var X_MIN = 20.0;
var X_MAX = 25.0;
var Z_MIN = 20.0;
var Z_MAX = 25.0;
var Y_PELVIS = 2.5;
var SHOULDER_JOINT_NUMBER = 15;
var SPINE_JOINT_NUMBER = 13;
var SHOULDER_JOINT_NUMBER = 17;
var ELBOW_JOINT_NUMBER = 18;
var JOINT_R_HIP = 1;
var JOINT_R_KNEE = 2;

var MOVE_RANGE_SMALL = 0.5;
var MOVE_RANGE_BIG = Math.max(X_MAX - X_MIN, Z_MAX - Z_MIN) / 2.0;

@@ -61,6 +65,9 @@ var targetDirection = { x: 0, y: 0, z: 0, w: 0 };
var currentDirection = { x: 0, y: 0, z: 0, w: 0 };
var targetHeadPitch = 0.0;

var walkFrequency = 5.0;
var walkAmplitude = 45.0;

var cumulativeTime = 0.0;

var sounds = [];

@@ -115,12 +122,30 @@ printVector("New bot, position = ", Avatar.position);
function stopWaving() {
    isWaving = false;
    Avatar.clearJointData(SHOULDER_JOINT_NUMBER);
    Avatar.clearJointData(ELBOW_JOINT_NUMBER);
    Avatar.clearJointData(SPINE_JOINT_NUMBER);
}

function keepWalking() {
    Avatar.setJointData(JOINT_R_HIP, Quat.fromPitchYawRollDegrees(walkAmplitude * Math.sin(cumulativeTime * walkFrequency), 0.0, 0.0));
    Avatar.setJointData(JOINT_R_KNEE, Quat.fromPitchYawRollDegrees(walkAmplitude * Math.sin(cumulativeTime * walkFrequency), 0.0, 0.0));
}

function stopWalking() {
    Avatar.clearJointData(JOINT_R_HIP);
    Avatar.clearJointData(JOINT_R_KNEE);
}

function updateBehavior(deltaTime) {

    cumulativeTime += deltaTime;

    // Hack - right now you need to set the avatar position a bit after the avatar is made to make sure it's there.

    if (CHANCE_OF_MOVING == 0.000) {
        Avatar.position = firstPosition;
    }

    if (shouldReceiveVoxels && ((cumulativeTime - lastVoxelQueryTime) > (1.0 / VOXEL_FPS))) {
        VoxelViewer.setPosition(Avatar.position);
        VoxelViewer.setOrientation(Avatar.orientation);

@@ -134,13 +159,18 @@ function updateBehavior(deltaTime) {

    if (!isWaving && (Math.random() < CHANCE_OF_WAVING)) {
        isWaving = true;
        waveFrequency = 1.0 + Math.random() * 5.0;
        waveFrequency = 3.0 + Math.random() * 5.0;
        waveAmplitude = 5.0 + Math.random() * 60.0;
        Script.setTimeout(stopWaving, 1000 + Math.random() * 2000);
        Avatar.setJointData(ELBOW_JOINT_NUMBER, Quat.fromPitchYawRollDegrees(0.0, 45, 0.0)); // Initially turn the palm outward
    } else if (isWaving) {
        Avatar.setJointData(SHOULDER_JOINT_NUMBER, Quat.fromPitchYawRollDegrees(0.0, 0.0, waveAmplitude * Math.sin(cumulativeTime * waveFrequency)));
        Avatar.setJointData(SHOULDER_JOINT_NUMBER, Quat.fromPitchYawRollDegrees(0.0, 0.0, 60 + waveAmplitude * Math.sin((cumulativeTime - 0.25) * waveFrequency)));
        Avatar.setJointData(ELBOW_JOINT_NUMBER, Quat.fromPitchYawRollDegrees(0.0, 0.0, 25 + waveAmplitude/2.0 * Math.sin(cumulativeTime * 1.2 * waveFrequency)));
        Avatar.setJointData(SPINE_JOINT_NUMBER, Quat.fromPitchYawRollDegrees(0.0, 0.0, 60 + waveAmplitude/4.0 * Math.sin(cumulativeTime * waveFrequency)));

    }

    if (Math.random() < CHANCE_OF_SOUND) {
        playRandomSound();
    }

@@ -168,11 +198,13 @@ function updateBehavior(deltaTime) {
        targetPosition.y = Y_PELVIS;

        isMoving = true;
    } else {
    } else if (isMoving) {
        keepWalking();
        Avatar.position = Vec3.sum(Avatar.position, Vec3.multiply(Vec3.subtract(targetPosition, Avatar.position), MOVE_RATE));
        Avatar.orientation = Quat.mix(Avatar.orientation, targetDirection, TURN_RATE);
        if (Vec3.length(Vec3.subtract(Avatar.position, targetPosition)) < STOP_TOLERANCE) {
            isMoving = false;
            isMoving = false;
            stopWalking();
        }
    }
}

@@ -1326,7 +1326,7 @@ function wheelEvent(event) {
    }
}

Controller.wheelEvent.connect(wheelEvent);
// Controller.wheelEvent.connect(wheelEvent);
Controller.mousePressEvent.connect(mousePressEvent);
Controller.mouseReleaseEvent.connect(mouseReleaseEvent);
Controller.mouseMoveEvent.connect(mouseMoveEvent);

@@ -27,6 +27,9 @@ var BULLET_VELOCITY = 5.0;
var MIN_THROWER_DELAY = 1000;
var MAX_THROWER_DELAY = 1000;
var LEFT_BUTTON_3 = 3;
var RELOAD_INTERVAL = 9;

var showScore = false;

// Load some sound to use for loading and firing
var fireSound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Guns/GUN-SHOT2.raw");

@@ -38,6 +41,8 @@ var targetLaunchSound = new Sound("http://highfidelity-public.s3-us-west-1.amazo
var audioOptions = new AudioInjectionOptions();
audioOptions.volume = 0.9;

var shotsFired = 0;

var shotTime = new Date();

// initialize our triggers

@@ -63,7 +68,8 @@ var reticle = Overlays.addOverlay("image", {
    alpha: 1
});

var text = Overlays.addOverlay("text", {
if (showScore) {
    var text = Overlays.addOverlay("text", {
        x: screenSize.x / 2 - 100,
        y: screenSize.y / 2 - 50,
        width: 150,

@@ -74,6 +80,8 @@ var text = Overlays.addOverlay("text", {
        leftMargin: 4,
        text: "Score: " + score
    });
}



function printVector(string, vector) {

@@ -94,6 +102,10 @@ function shootBullet(position, velocity) {
    // Play firing sounds
    audioOptions.position = position;
    Audio.playSound(fireSound, audioOptions);
    shotsFired++;
    if ((shotsFired % RELOAD_INTERVAL) == 0) {
        Audio.playSound(loadSound, audioOptions);
    }
}

function shootTarget() {

@@ -147,12 +159,15 @@ function particleCollisionWithVoxel(particle, voxel, penetration) {
    Voxels.eraseVoxel(position.x, position.y, position.z, HOLE_SIZE);
    //audioOptions.position = position;
    audioOptions.position = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
    Audio.playSound(targetHitSound, audioOptions);
    Audio.playSound(impactSound, audioOptions);
}

function particleCollisionWithParticle(particle1, particle2) {
    score++;
    Overlays.editOverlay(text, { text: "Score: " + score } );
    if (showScore) {
        Overlays.editOverlay(text, { text: "Score: " + score } );
    }

    // Sort out which particle is which

    // Record shot time

@@ -171,12 +186,12 @@ function keyPressEvent(event) {
    if (event.text == "t") {
        var time = MIN_THROWER_DELAY + Math.random() * MAX_THROWER_DELAY;
        Script.setTimeout(shootTarget, time);
    } if (event.text == ".") {
        shootFromMouse();
    }
}

function update(deltaTime) {


    // Check for mouseLook movement, update rotation
    // rotate body yaw for yaw received from mouse
    var newOrientation = Quat.multiply(MyAvatar.orientation, Quat.fromVec3Radians( { x: 0, y: yawFromMouse, z: 0 } ));

@@ -257,18 +272,21 @@ function mousePressEvent(event) {
    isMouseDown = true;
    lastX = event.x;
    lastY = event.y;
    audioOptions.position = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
    Audio.playSound(loadSound, audioOptions);
    //audioOptions.position = Vec3.sum(Camera.getPosition(), Quat.getFront(Camera.getOrientation()));
    //Audio.playSound(loadSound, audioOptions);
}

function mouseReleaseEvent(event) {
    // position
function shootFromMouse() {
    var DISTANCE_FROM_CAMERA = 2.0;
    var camera = Camera.getPosition();
    var forwardVector = Quat.getFront(Camera.getOrientation());
    var newPosition = Vec3.sum(camera, Vec3.multiply(forwardVector, DISTANCE_FROM_CAMERA));
    var velocity = Vec3.multiply(forwardVector, BULLET_VELOCITY);
    shootBullet(newPosition, velocity);
}

function mouseReleaseEvent(event) {
    // position
    isMouseDown = false;
}

@@ -4,22 +4,22 @@
<context>
    <name>Application</name>
    <message>
        <location filename="src/Application.cpp" line="1381"/>
        <location filename="src/Application.cpp" line="1382"/>
        <source>Export Voxels</source>
        <translation type="unfinished"></translation>
    </message>
    <message>
        <location filename="src/Application.cpp" line="1382"/>
        <location filename="src/Application.cpp" line="1383"/>
        <source>Sparse Voxel Octree Files (*.svo)</source>
        <translation type="unfinished"></translation>
    </message>
    <message>
        <location filename="src/Application.cpp" line="3623"/>
        <location filename="src/Application.cpp" line="3703"/>
        <source>Open Script</source>
        <translation type="unfinished"></translation>
    </message>
    <message>
        <location filename="src/Application.cpp" line="3624"/>
        <location filename="src/Application.cpp" line="3704"/>
        <source>JavaScript Files (*.js)</source>
        <translation type="unfinished"></translation>
    </message>

@@ -113,18 +113,18 @@
<context>
    <name>Menu</name>
    <message>
        <location filename="src/Menu.cpp" line="456"/>
        <location filename="src/Menu.cpp" line="462"/>
        <source>Open .ini config file</source>
        <translation type="unfinished"></translation>
    </message>
    <message>
        <location filename="src/Menu.cpp" line="458"/>
        <location filename="src/Menu.cpp" line="470"/>
        <location filename="src/Menu.cpp" line="464"/>
        <location filename="src/Menu.cpp" line="476"/>
        <source>Text files (*.ini)</source>
        <translation type="unfinished"></translation>
    </message>
    <message>
        <location filename="src/Menu.cpp" line="468"/>
        <location filename="src/Menu.cpp" line="474"/>
        <source>Save .ini config file</source>
        <translation type="unfinished"></translation>
    </message>

@@ -151,6 +151,7 @@ Application::Application(int& argc, char** argv, timeval &startup_time) :
    _lastQueriedViewFrustum(),
    _lastQueriedTime(usecTimestampNow()),
    _audioScope(256, 200, true),
    _trailingAudioLoudness(0.f),
    _mirrorViewRect(QRect(MIRROR_VIEW_LEFT_PADDING, MIRROR_VIEW_TOP_PADDING, MIRROR_VIEW_WIDTH, MIRROR_VIEW_HEIGHT)),
    _mouseX(0),
    _mouseY(0),

@@ -1845,15 +1846,6 @@ void Application::updateDialogs(float deltaTime) {
    }
}

void Application::updateAudio(float deltaTime) {
    bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
    PerformanceWarning warn(showWarnings, "Application::updateAudio()");

    // Update audio stats for procedural sounds
    _audio.setLastAcceleration(_myAvatar->getThrust());
    _audio.setLastVelocity(_myAvatar->getVelocity());
}

void Application::updateCursor(float deltaTime) {
    bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
    PerformanceWarning warn(showWarnings, "Application::updateCursor()");

@@ -1902,7 +1894,6 @@ void Application::update(float deltaTime) {
    updateMetavoxels(deltaTime); // update metavoxels
    updateCamera(deltaTime); // handle various camera tweaks like off axis projection
    updateDialogs(deltaTime); // update various stats dialogs if present
    updateAudio(deltaTime); // Update audio stats for procedural sounds
    updateCursor(deltaTime); // Handle cursor updates

    _particles.update(); // update the particles...

@@ -2504,14 +2495,103 @@ void Application::displayOverlay() {
        renderCollisionOverlay(_glWidget->width(), _glWidget->height(), _audio.getCollisionSoundMagnitude());
    }
}


    // Audio Scope
    const int AUDIO_SCOPE_Y_OFFSET = 135;
    if (Menu::getInstance()->isOptionChecked(MenuOption::Stats)) {
        _audio.renderMuteIcon(1, _glWidget->height() - 50);
        if (Menu::getInstance()->isOptionChecked(MenuOption::Oscilloscope)) {
            int oscilloscopeTop = _glWidget->height() - 135;
            _audioScope.render(25, oscilloscopeTop);
            int oscilloscopeTop = _glWidget->height() - AUDIO_SCOPE_Y_OFFSET;
            _audioScope.render(MIRROR_VIEW_LEFT_PADDING, oscilloscopeTop);
        }
    }

    // Audio VU Meter and Mute Icon
    const int MUTE_ICON_SIZE = 24;
    const int AUDIO_METER_INSET = 2;
    const int AUDIO_METER_WIDTH = MIRROR_VIEW_WIDTH - MUTE_ICON_SIZE - AUDIO_METER_INSET;
    const int AUDIO_METER_SCALE_WIDTH = AUDIO_METER_WIDTH - 2 * AUDIO_METER_INSET;
    const int AUDIO_METER_HEIGHT = 8;
    const int AUDIO_METER_Y_GAP = 8;
    const int AUDIO_METER_X = MIRROR_VIEW_LEFT_PADDING + MUTE_ICON_SIZE + AUDIO_METER_INSET;

    int audioMeterY;
    if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
        audioMeterY = MIRROR_VIEW_HEIGHT + AUDIO_METER_Y_GAP;
    } else {
        audioMeterY = AUDIO_METER_Y_GAP;
    }
    _audio.renderMuteIcon(MIRROR_VIEW_LEFT_PADDING, audioMeterY);


    const float AUDIO_METER_BLUE[] = {0.0, 0.0, 1.0};
    const float AUDIO_METER_GREEN[] = {0.0, 1.0, 0.0};
    const float AUDIO_METER_RED[] = {1.0, 0.0, 0.0};
    const float AUDIO_GREEN_START = 0.25 * AUDIO_METER_SCALE_WIDTH;
    const float AUDIO_RED_START = 0.80 * AUDIO_METER_SCALE_WIDTH;
    const float CLIPPING_INDICATOR_TIME = 1.0f;
    const float AUDIO_METER_AVERAGING = 0.5;
    const float LOG2 = log(2.f);
    const float MAX_LOG2_SAMPLE = 15.f;
    float audioLevel = 0.f;
    float loudness = _audio.getLastInputLoudness() + 1.f;
    _trailingAudioLoudness = AUDIO_METER_AVERAGING * _trailingAudioLoudness + (1.f - AUDIO_METER_AVERAGING) * loudness;

    float log2loudness = log(_trailingAudioLoudness) / LOG2;

    audioLevel = log2loudness / MAX_LOG2_SAMPLE * AUDIO_METER_SCALE_WIDTH;

    bool isClipping = ((_audio.getTimeSinceLastClip() > 0.f) && (_audio.getTimeSinceLastClip() < CLIPPING_INDICATOR_TIME));

    glBegin(GL_QUADS);
    if (isClipping) {
        glColor3f(1, 0, 0);
    } else {
        glColor3f(0, 0, 0);
    }
    // Draw audio meter background Quad
    glVertex2i(AUDIO_METER_X, audioMeterY);
    glVertex2i(AUDIO_METER_X + AUDIO_METER_WIDTH, audioMeterY);
    glVertex2i(AUDIO_METER_X + AUDIO_METER_WIDTH, audioMeterY + AUDIO_METER_HEIGHT);
    glVertex2i(AUDIO_METER_X, audioMeterY + AUDIO_METER_HEIGHT);

    if (audioLevel > AUDIO_RED_START) {
        if (!isClipping) {
            glColor3fv(AUDIO_METER_RED);
        } else {
            glColor3f(1, 1, 1);
        }
        // Draw Red Quad
        glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET + AUDIO_RED_START, audioMeterY + AUDIO_METER_INSET);
        glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET + audioLevel, audioMeterY + AUDIO_METER_INSET);
        glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET + audioLevel, audioMeterY + AUDIO_METER_HEIGHT - AUDIO_METER_INSET);
        glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET + AUDIO_RED_START, audioMeterY + AUDIO_METER_HEIGHT - AUDIO_METER_INSET);
        audioLevel = AUDIO_RED_START;
    }
    if (audioLevel > AUDIO_GREEN_START) {
        if (!isClipping) {
            glColor3fv(AUDIO_METER_GREEN);
        } else {
            glColor3f(1, 1, 1);
        }
        // Draw Green Quad
        glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET + AUDIO_GREEN_START, audioMeterY + AUDIO_METER_INSET);
        glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET + audioLevel, audioMeterY + AUDIO_METER_INSET);
        glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET + audioLevel, audioMeterY + AUDIO_METER_HEIGHT - AUDIO_METER_INSET);
        glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET + AUDIO_GREEN_START, audioMeterY + AUDIO_METER_HEIGHT - AUDIO_METER_INSET);
        audioLevel = AUDIO_GREEN_START;
    }
    // Draw Blue Quad
    if (!isClipping) {
        glColor3fv(AUDIO_METER_BLUE);
    } else {
        glColor3f(1, 1, 1);
    }
    // Draw Blue (low level) quad
    glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET, audioMeterY + AUDIO_METER_INSET);
    glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET + audioLevel, audioMeterY + AUDIO_METER_INSET);
    glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET + audioLevel, audioMeterY + AUDIO_METER_HEIGHT - AUDIO_METER_INSET);
    glVertex2i(AUDIO_METER_X + AUDIO_METER_INSET, audioMeterY + AUDIO_METER_HEIGHT - AUDIO_METER_INSET);
    glEnd();

    if (Menu::getInstance()->isOptionChecked(MenuOption::HeadMouse)) {
        _myAvatar->renderHeadMouse();

@@ -3533,13 +3613,21 @@ void Application::reloadAllScripts() {
    }
}

void Application::uploadFST() {
    FstReader reader;
void Application::uploadFST(bool isHead) {
    FstReader reader(isHead);
    if (reader.zip()) {
        reader.send();
    }
}

void Application::uploadHead() {
    uploadFST(true);
}

void Application::uploadSkeleton() {
    uploadFST(false);
}

void Application::removeScriptName(const QString& fileNameString) {
    _activeScripts.removeOne(fileNameString);
}

@@ -261,7 +261,9 @@ public slots:
    void stopAllScripts();
    void reloadAllScripts();

    void uploadFST();
    void uploadFST(bool isHead);
    void uploadHead();
    void uploadSkeleton();

private slots:
    void timer();

@@ -312,7 +314,6 @@ private:
    void updateMetavoxels(float deltaTime);
    void updateCamera(float deltaTime);
    void updateDialogs(float deltaTime);
    void updateAudio(float deltaTime);
    void updateCursor(float deltaTime);

    Avatar* findLookatTargetAvatar(glm::vec3& eyePosition, QUuid &nodeUUID);

@@ -397,6 +398,7 @@ private:
    quint64 _lastQueriedTime;

    Oscilloscope _audioScope;
    float _trailingAudioLoudness;

    OctreeQuery _octreeQuery; // NodeData derived class for querying voxels from voxel server

@@ -60,15 +60,16 @@ Audio::Audio(Oscilloscope* scope, int16_t initialJitterBufferSamples, QObject* p
    _measuredJitter(0),
    _jitterBufferSamples(initialJitterBufferSamples),
    _lastInputLoudness(0),
    _timeSinceLastClip(-1.0),
    _dcOffset(0),
    _noiseGateMeasuredFloor(0),
    _noiseGateSampleCounter(0),
    _noiseGateOpen(false),
    _noiseGateEnabled(true),
    _toneInjectionEnabled(false),
    _noiseGateFramesToClose(0),
    _lastVelocity(0),
    _lastAcceleration(0),
    _totalPacketsReceived(0),
    _totalInputAudioSamples(0),
    _collisionSoundMagnitude(0.0f),
    _collisionSoundFrequency(0.0f),
    _collisionSoundNoise(0.0f),

@@ -391,7 +392,7 @@ void Audio::handleAudioInput() {
        inputSamplesRequired,
        NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL,
        _inputFormat, _desiredInputFormat);


    //
    // Impose Noise Gate
    //

@@ -420,13 +421,24 @@ void Audio::handleAudioInput() {
    const int NOISE_GATE_CLOSE_FRAME_DELAY = 5;
    const int NOISE_GATE_FRAMES_TO_AVERAGE = 5;
    const float DC_OFFSET_AVERAGING = 0.99f;
    const float CLIPPING_THRESHOLD = 0.90f;

    //
    // Check clipping, adjust DC offset, and check if should open noise gate
    //
    float measuredDcOffset = 0.f;

    // Increment the time since the last clip
    if (_timeSinceLastClip >= 0.0f) {
        _timeSinceLastClip += (float) NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL / (float) SAMPLE_RATE;
    }

    for (int i = 0; i < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; i++) {
        measuredDcOffset += monoAudioSamples[i];
        monoAudioSamples[i] -= (int16_t) _dcOffset;
        thisSample = fabsf(monoAudioSamples[i]);
        if (thisSample > (32767.f * CLIPPING_THRESHOLD)) {
            _timeSinceLastClip = 0.0f;
        }
        loudness += thisSample;
        // Noise Reduction: Count peaks above the average loudness
        if (thisSample > (_noiseGateMeasuredFloor * NOISE_GATE_HEIGHT)) {

@@ -481,6 +493,16 @@ void Audio::handleAudioInput() {
            _lastInputLoudness = 0;
        }
    }
    //
    // Add tone injection if enabled
    //
    const float TONE_FREQ = 220.f / SAMPLE_RATE * TWO_PI;
    const float QUARTER_VOLUME = 8192.f;
    if (_toneInjectionEnabled) {
        for (int i = 0; i < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; i++) {
            monoAudioSamples[i] = QUARTER_VOLUME * sinf(TONE_FREQ * (float)(i + _proceduralEffectSample));
        }
    }

    // add input data just written to the scope
    QMetaObject::invokeMethod(_scope, "addSamples", Qt::QueuedConnection,

@@ -675,7 +697,9 @@ void Audio::toggleAudioNoiseReduction() {
    _noiseGateEnabled = !_noiseGateEnabled;
}


void Audio::toggleToneInjection() {
    _toneInjectionEnabled = !_toneInjectionEnabled;
}

// Take a pointer to the acquired microphone input samples and add procedural sounds
void Audio::addProceduralSounds(int16_t* monoInput, int numSamples) {

@@ -47,13 +47,11 @@
    Audio(Oscilloscope* scope, int16_t initialJitterBufferSamples, QObject* parent = 0);

    float getLastInputLoudness() const { return glm::max(_lastInputLoudness - _noiseGateMeasuredFloor, 0.f); }
    float getTimeSinceLastClip() const { return _timeSinceLastClip; }
    float getAudioAverageInputLoudness() const { return _lastInputLoudness; }

    void setNoiseGateEnabled(bool noiseGateEnabled) { _noiseGateEnabled = noiseGateEnabled; }

    void setLastAcceleration(const glm::vec3 lastAcceleration) { _lastAcceleration = lastAcceleration; }
    void setLastVelocity(const glm::vec3 lastVelocity) { _lastVelocity = lastVelocity; }


    void setJitterBufferSamples(int samples) { _jitterBufferSamples = samples; }
    int getJitterBufferSamples() { return _jitterBufferSamples; }

@@ -83,6 +81,7 @@ public slots:
    void reset();
    void toggleMute();
    void toggleAudioNoiseReduction();
    void toggleToneInjection();

    virtual void handleAudioByteArray(const QByteArray& audioByteArray);

@@ -130,16 +129,17 @@ private:
    float _measuredJitter;
    int16_t _jitterBufferSamples;
    float _lastInputLoudness;
    float _timeSinceLastClip;
    float _dcOffset;
    float _noiseGateMeasuredFloor;
    float* _noiseSampleFrames;
    int _noiseGateSampleCounter;
    bool _noiseGateOpen;
    bool _noiseGateEnabled;
    bool _toneInjectionEnabled;
    int _noiseGateFramesToClose;
    glm::vec3 _lastVelocity;
    glm::vec3 _lastAcceleration;
    int _totalPacketsReceived;
    int _totalInputAudioSamples;

    float _collisionSoundMagnitude;
    float _collisionSoundFrequency;

@@ -146,7 +146,8 @@ Menu::Menu() :
        SLOT(goTo()));

    addDisabledActionAndSeparator(fileMenu, "Upload Avatar Model");
    addActionToQMenuAndActionHash(fileMenu, MenuOption::UploadFST, 0, Application::getInstance(), SLOT(uploadFST()));
    addActionToQMenuAndActionHash(fileMenu, MenuOption::UploadHead, 0, Application::getInstance(), SLOT(uploadHead()));
    addActionToQMenuAndActionHash(fileMenu, MenuOption::UploadSkeleton, 0, Application::getInstance(), SLOT(uploadSkeleton()));

    addDisabledActionAndSeparator(fileMenu, "Settings");
    addActionToQMenuAndActionHash(fileMenu, MenuOption::SettingsImport, 0, this, SLOT(importSettings()));

@@ -241,7 +242,7 @@ Menu::Menu() :
    addDisabledActionAndSeparator(viewMenu, "Stats");
    addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Stats, Qt::Key_Slash);
    addActionToQMenuAndActionHash(viewMenu, MenuOption::Log, Qt::CTRL | Qt::Key_L, appInstance, SLOT(toggleLogDialog()));
    addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Oscilloscope, 0, true);
    addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Oscilloscope, 0, false);
    addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Bandwidth, 0, true);
    addActionToQMenuAndActionHash(viewMenu, MenuOption::BandwidthDetails, 0, this, SLOT(bandwidthDetails()));
    addActionToQMenuAndActionHash(viewMenu, MenuOption::OctreeStats, 0, this, SLOT(octreeStatsDetails()));

@@ -360,6 +361,12 @@ Menu::Menu() :
        false,
        appInstance->getAudio(),
        SLOT(toggleMute()));
    addCheckableActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioToneInjection,
        0,
        false,
        appInstance->getAudio(),
        SLOT(toggleToneInjection()));


    addActionToQMenuAndActionHash(developerMenu, MenuOption::PasteToVoxel,
        Qt::CTRL | Qt::SHIFT | Qt::Key_V,

@@ -242,6 +242,7 @@ namespace MenuOption {
    const QString FilterSixense = "Smooth Sixense Movement";
    const QString Enable3DTVMode = "Enable 3DTV Mode";
    const QString AudioNoiseReduction = "Audio Noise Reduction";
    const QString AudioToneInjection = "Inject Test Tone";
    const QString EchoServerAudio = "Echo Server Audio";
    const QString EchoLocalAudio = "Echo Local Audio";
    const QString MuteAudio = "Mute Microphone";

@@ -301,7 +302,8 @@ namespace MenuOption {
    const QString StopAllScripts = "Stop All Scripts";
    const QString TestPing = "Test Ping";
    const QString TransmitterDrive = "Transmitter Drive";
    const QString UploadFST = "Upload FST file";
    const QString UploadHead = "Upload Head Model";
    const QString UploadSkeleton = "Upload Skeleton Model";
    const QString Visage = "Visage";
    const QString Quit = "Quit";
    const QString Voxels = "Voxels";

@@ -144,14 +144,15 @@ void AccountManager::invokedRequest(const QString& path, QNetworkAccessManager::
            break;
        case QNetworkAccessManager::PostOperation:
        case QNetworkAccessManager::PutOperation:
            authenticatedRequest.setHeader(QNetworkRequest::ContentTypeHeader, "application/json");
            if (dataMultiPart) {
                if (operation == QNetworkAccessManager::PostOperation) {
                    networkReply = _networkAccessManager->post(authenticatedRequest, dataMultiPart);
                } else {
                    networkReply = _networkAccessManager->put(authenticatedRequest, dataMultiPart);
                }
                dataMultiPart->setParent(networkReply);
            } else {
                authenticatedRequest.setHeader(QNetworkRequest::ContentTypeHeader, "application/json");
                if (operation == QNetworkAccessManager::PostOperation) {
                    networkReply = _networkAccessManager->post(authenticatedRequest, dataByteArray);
                } else {

@@ -199,6 +200,7 @@ void AccountManager::passSuccessToCallback() {
            qDebug() << jsonResponse;
        }
    }
    delete requestReply;
}

void AccountManager::passErrorToCallback(QNetworkReply::NetworkError errorCode) {

@@ -219,6 +221,7 @@ void AccountManager::passErrorToCallback(QNetworkReply::NetworkError errorCode)
            qDebug() << "Error" << errorCode << "-" << requestReply->errorString();
        }
    }
    delete requestReply;
}

bool AccountManager::hasValidAccessToken() {

@@ -13,11 +13,11 @@
#include <QFileDialog>
#include <QStandardPaths>
#include <QHttpMultiPart>
#include <QTemporaryDir>
#include <QVariant>
#include <QMessageBox>

#include "AccountManager.h"

#include "FstReader.h"



@@ -25,20 +25,30 @@ static const QString NAME_FIELD = "name";
static const QString FILENAME_FIELD = "filename";
static const QString TEXDIR_FIELD = "texdir";
static const QString LOD_FIELD = "lod";
static const QString HEAD_SPECIFIC_FIELD = "bs";

static const QString MODEL_URL = "/api/v1/models";

static const int MAX_SIZE = 10 * 1024 * 1024; // 10 MB

FstReader::FstReader() :
// Class providing the QObject parent system to QTemporaryDir
class TemporaryDir : public QTemporaryDir, public QObject {
public:
    virtual ~TemporaryDir() {
        // ensuring the entire object gets deleted by the QObject parent.
    }
};

FstReader::FstReader(bool isHead) :
    _zipDir(new TemporaryDir()),
    _lodCount(-1),
    _texturesCount(-1),
    _totalSize(0),
    _isHead(false),
    _isHead(isHead),
    _readyToSend(false),
    _dataMultiPart(new QHttpMultiPart(QHttpMultiPart::FormDataType))
{
    _zipDir->setParent(_dataMultiPart);

}

FstReader::~FstReader() {

@@ -63,20 +73,20 @@ bool FstReader::zip() {
            QString("ModelUploader::zip()"),
            QString("Could not open FST file."),
            QMessageBox::Ok);
        return false;
    }

    // Compress and copy the fst
    if (!compressFile(QFileInfo(fst).filePath(), _zipDir.path() + "/" + QFileInfo(fst).fileName())) {
        return false;
    }
    _totalSize += QFileInfo(fst).size();
    if (!addPart(_zipDir.path() + "/" + QFileInfo(fst).fileName(),
                 QString("fst"))) {
        qDebug() << "[Warning] " << QString("Could not open FST file.");
        return false;
    }
    qDebug() << "Reading FST file : " << QFileInfo(fst).filePath();

    // Compress and copy the fst
    if (!compressFile(QFileInfo(fst).filePath(), _zipDir->path() + "/" + QFileInfo(fst).fileName())) {
        return false;
    }
    if (!addPart(_zipDir->path() + "/" + QFileInfo(fst).fileName(),
                 QString("fst"))) {
        return false;
    }

    // Let's read through the FST file
    QTextStream stream(&fst);
    QList<QString> line;

@@ -86,73 +96,63 @@ bool FstReader::zip() {
            continue;
        }

        if (_totalSize > MAX_SIZE) {
            QMessageBox::warning(NULL,
                QString("ModelUploader::zip()"),
                QString("Model too big, over %1 Bytes.").arg(MAX_SIZE),
                QMessageBox::Ok);
            return false;
        }

        // according to what is read, we modify the command
        if (line[1] == HEAD_SPECIFIC_FIELD) {
            _isHead = true;
        } else if (line[1] == NAME_FIELD) {
        if (line[0] == NAME_FIELD) {
            QHttpPart textPart;
            textPart.setHeader(QNetworkRequest::ContentDispositionHeader, "form-data;"
                " name=\"model_name\"");
            textPart.setBody(line[1].toUtf8());
            _dataMultiPart->append(textPart);
        } else if (line[1] == FILENAME_FIELD) {
        } else if (line[0] == FILENAME_FIELD) {
            QFileInfo fbx(QFileInfo(fst).path() + "/" + line[1]);
            if (!fbx.exists() || !fbx.isFile()) { // Check existence
                QMessageBox::warning(NULL,
                    QString("ModelUploader::zip()"),
                    QString("FBX file %1 could not be found.").arg(fbx.fileName()),
                    QMessageBox::Ok);
                qDebug() << "[Warning] " << QString("FBX file %1 could not be found.").arg(fbx.fileName());
                return false;
            }
            // Compress and copy
            if (!compressFile(fbx.filePath(), _zipDir.path() + "/" + line[1])) {
            if (!compressFile(fbx.filePath(), _zipDir->path() + "/" + line[1])) {
                return false;
            }
            _totalSize += fbx.size();
            if (!addPart(_zipDir.path() + "/" + line[1], "fbx")) {
            if (!addPart(_zipDir->path() + "/" + line[1], "fbx")) {
                return false;
            }
        } else if (line[1] == TEXDIR_FIELD) { // Check existence
        } else if (line[0] == TEXDIR_FIELD) { // Check existence
            QFileInfo texdir(QFileInfo(fst).path() + "/" + line[1]);
            if (!texdir.exists() || !texdir.isDir()) {
                QMessageBox::warning(NULL,
                    QString("ModelUploader::zip()"),
                    QString("Texture directory could not be found."),
                    QMessageBox::Ok);
                qDebug() << "[Warning] " << QString("Texture directory could not be found.");
                return false;
            }
            if (!addTextures(texdir)) { // Recursive compress and copy
                return false;
            }
        } else if (line[1] == LOD_FIELD) {
        } else if (line[0] == LOD_FIELD) {
            QFileInfo lod(QFileInfo(fst).path() + "/" + line[1]);
            if (!lod.exists() || !lod.isFile()) { // Check existence
                QMessageBox::warning(NULL,
                    QString("ModelUploader::zip()"),
                    QString("FBX file %1 could not be found.").arg(lod.fileName()),
                    QMessageBox::Ok);
                qDebug() << "[Warning] " << QString("FBX file %1 could not be found.").arg(lod.fileName());
                return false;
            }
            // Compress and copy
            if (!compressFile(lod.filePath(), _zipDir.path() + "/" + line[1])) {
            if (!compressFile(lod.filePath(), _zipDir->path() + "/" + line[1])) {
                return false;
            }
            _totalSize += lod.size();
            if (!addPart(_zipDir.path() + "/" + line[1], QString("lod%1").arg(++_lodCount))) {
            if (!addPart(_zipDir->path() + "/" + line[1], QString("lod%1").arg(++_lodCount))) {
                return false;
            }
        }
    }


    QHttpPart textPart;
    textPart.setHeader(QNetworkRequest::ContentDispositionHeader, "form-data;"
        " name=\"model_category\"");

@@ -173,6 +173,9 @@ bool FstReader::send() {
    }

    AccountManager::getInstance().authenticatedRequest(MODEL_URL, QNetworkAccessManager::PostOperation, JSONCallbackParameters(), QByteArray(), _dataMultiPart);
    _zipDir = NULL;
    _dataMultiPart = NULL;
    qDebug() << "Model sent.";

    return true;
}

@@ -189,11 +192,10 @@ bool FstReader::addTextures(const QFileInfo& texdir) {
    foreach (QFileInfo info, list) {
        if (info.isFile()) {
            // Compress and copy
            if (!compressFile(info.filePath(), _zipDir.path() + "/" + info.fileName())) {
            if (!compressFile(info.filePath(), _zipDir->path() + "/" + info.fileName())) {
                return false;
            }
            _totalSize += info.size();
            if (!addPart(_zipDir.path() + "/" + info.fileName(),
            if (!addPart(_zipDir->path() + "/" + info.fileName(),
                         QString("texture%1").arg(++_texturesCount))) {
                return false;
            }

@@ -214,12 +216,13 @@ bool FstReader::compressFile(const QString &inFileName, const QString &outFileNa

    QFile outFile(outFileName);
    if (!outFile.open(QIODevice::WriteOnly)) {
        QDir(_zipDir.path()).mkpath(QFileInfo(outFileName).path());
        QDir(_zipDir->path()).mkpath(QFileInfo(outFileName).path());
        if (!outFile.open(QIODevice::WriteOnly)) {
            QMessageBox::warning(NULL,
                QString("ModelUploader::compressFile()"),
                QString("Could not compress %1").arg(inFileName),
                QMessageBox::Ok);
            qDebug() << "[Warning] " << QString("Could not compress %1").arg(inFileName);
            return false;
        }
    }

@@ -237,6 +240,8 @@ bool FstReader::addPart(const QString &path, const QString& name) {
            QString("ModelUploader::addPart()"),
            QString("Could not open %1").arg(path),
            QMessageBox::Ok);
        qDebug() << "[Warning] " << QString("Could not open %1").arg(path);
        delete file;
        return false;
    }

@@ -249,6 +254,19 @@ bool FstReader::addPart(const QString &path, const QString& name) {
    _dataMultiPart->append(part);
    file->setParent(_dataMultiPart);


    qDebug() << "File " << QFileInfo(*file).fileName() << " added to model.";
    _totalSize += file->size();
    if (_totalSize > MAX_SIZE) {
        QMessageBox::warning(NULL,
            QString("ModelUploader::zip()"),
            QString("Model too big, over %1 Bytes.").arg(MAX_SIZE),
            QMessageBox::Ok);
        qDebug() << "[Warning] " << QString("Model too big, over %1 Bytes.").arg(MAX_SIZE);
        return false;
    }
    qDebug() << "Current model size: " << _totalSize;

    return true;
}

@@ -10,20 +10,19 @@
#ifndef __hifi__FstReader__
#define __hifi__FstReader__

#include <QTemporaryDir>

class TemporaryDir;
class QHttpMultiPart;

class FstReader {
class FstReader : public QObject {
public:
    FstReader();
    FstReader(bool isHead);
    ~FstReader();

    bool zip();
    bool send();

private:
    QTemporaryDir _zipDir;
    TemporaryDir* _zipDir;
    int _lodCount;
    int _texturesCount;
    int _totalSize;