Mirror of https://github.com/overte-org/overte.git, synced 2025-04-20 03:44:02 +02:00

Merge hifi/master into hifi/out-of-body-experience

Commit ccdf383151: 140 changed files with 2503 additions and 911 deletions
@ -88,6 +88,10 @@ void Agent::playAvatarSound(SharedSoundPointer sound) {
        QMetaObject::invokeMethod(this, "playAvatarSound", Q_ARG(SharedSoundPointer, sound));
        return;
    } else {
        // TODO: seems to add occasional artifact in tests. I believe it is
        // correct to do this, but need to figure out for sure, so commenting this
        // out until I verify.
        // _numAvatarSoundSentBytes = 0;
        setAvatarSound(sound);
    }
}
@ -404,8 +408,37 @@ QUuid Agent::getSessionUUID() const {
    return DependencyManager::get<NodeList>()->getSessionUUID();
}

void Agent::setIsListeningToAudioStream(bool isListeningToAudioStream) {
    // this must happen on Agent's main thread
    if (QThread::currentThread() != thread()) {
        QMetaObject::invokeMethod(this, "setIsListeningToAudioStream", Q_ARG(bool, isListeningToAudioStream));
        return;
    }
    if (_isListeningToAudioStream) {
        // have to tell just the audio mixer to KillAvatar.

        auto nodeList = DependencyManager::get<NodeList>();
        nodeList->eachMatchingNode(
            [&](const SharedNodePointer& node)->bool {
                return (node->getType() == NodeType::AudioMixer) && node->getActiveSocket();
            },
            [&](const SharedNodePointer& node) {
                qDebug() << "sending KillAvatar message to Audio Mixers";
                auto packet = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID, true);
                packet->write(getSessionUUID().toRfc4122());
                nodeList->sendPacket(std::move(packet), *node);
            });

    }
    _isListeningToAudioStream = isListeningToAudioStream;
}
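Both setters above, like playAvatarSound() earlier in the file, use the same Qt idiom: if the caller is not on the thread that owns the Agent, the call is re-queued onto that thread via QMetaObject::invokeMethod and the current invocation returns immediately. A minimal self-contained sketch of the guard, not taken from this commit (Worker and setFlag are hypothetical names; in a real build the class would live in a header processed by moc):

#include <QMetaObject>
#include <QThread>
#include <QDebug>

class Worker : public QObject {
    Q_OBJECT   // needed so invokeMethod() can locate the slot by name through the meta-object
public slots:
    void setFlag(bool flag) {
        if (QThread::currentThread() != thread()) {
            // wrong thread: queue the same call onto the thread that owns this object, then bail out
            QMetaObject::invokeMethod(this, "setFlag", Q_ARG(bool, flag));
            return;
        }
        // from here on we are guaranteed to run on the owning thread
        _flag = flag;
        qDebug() << "flag set to" << _flag;
    }
private:
    bool _flag { false };
};

With the default Qt::AutoConnection, invokeMethod() posts a queued call when the target object lives on another thread, which is what makes the early return safe.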
void Agent::setIsAvatar(bool isAvatar) {
    // this must happen on Agent's main thread
    if (QThread::currentThread() != thread()) {
        QMetaObject::invokeMethod(this, "setIsAvatar", Q_ARG(bool, isAvatar));
        return;
    }
    _isAvatar = isAvatar;

    if (_isAvatar && !_avatarIdentityTimer) {

@ -435,14 +468,16 @@ void Agent::setIsAvatar(bool isAvatar) {
        // when we stop sending identity, but then get woken up again by the mixer itself, which sends
        // identity packets to everyone. Here we explicitly tell the mixer to kill the entry for us.
        auto nodeList = DependencyManager::get<NodeList>();
        auto packetList = NLPacketList::create(PacketType::KillAvatar, QByteArray(), true, true);
        packetList->write(getSessionUUID().toRfc4122());
        nodeList->eachMatchingNode(
            [&](const SharedNodePointer& node)->bool {
                return node->getType() == NodeType::AvatarMixer && node->getActiveSocket();
                return (node->getType() == NodeType::AvatarMixer || node->getType() == NodeType::AudioMixer)
                    && node->getActiveSocket();
            },
            [&](const SharedNodePointer& node) {
                nodeList->sendPacketList(std::move(packetList), *node);
                qDebug() << "sending KillAvatar message to Avatar and Audio Mixers";
                auto packet = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID, true);
                packet->write(getSessionUUID().toRfc4122());
                nodeList->sendPacket(std::move(packet), *node);
            });
        }
        emit stopAvatarAudioTimer();
@ -474,24 +509,18 @@ void Agent::processAgentAvatar() {
        nodeList->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
    }
}
void Agent::flushEncoder() {
void Agent::encodeFrameOfZeros(QByteArray& encodedZeros) {
    _flushEncoder = false;
    static QByteArray zeros(AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL, 0);
    static QByteArray encodedZeros;
    static const QByteArray zeros(AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL, 0);
    if (_encoder) {
        _encoder->encode(zeros, encodedZeros);
    } else {
        encodedZeros = zeros;
    }
}

void Agent::processAgentAvatarAudio() {
    if (_isAvatar && (_isListeningToAudioStream || _avatarSound)) {
        // after sound is done playing, encoder has a bit of state in it,
        // and needs some 0s to forget or you get a little click next time
        // you play something
        if (_flushEncoder) {
            flushEncoder();
        }

        // if we have an avatar audio stream then send it out to our audio-mixer
        auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
        bool silentFrame = true;
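The comment block just above captures why encodeFrameOfZeros() exists: a stateful codec keeps history from the last real frame, and pushing one frame of silence through it lets that state decay so the next sound does not start with a click. A small illustrative helper in the same spirit, not from this commit; the one-method Encoder interface shown here is an assumption:

#include <QByteArray>

// Assumed minimal codec interface: consume raw PCM, produce one encoded frame.
struct Encoder {
    virtual void encode(const QByteArray& decoded, QByteArray& encoded) = 0;
    virtual ~Encoder() = default;
};

// Push one frame of zeros through the encoder so its internal history decays,
// mirroring what Agent::encodeFrameOfZeros() does for the avatar audio stream.
QByteArray flushWithSilence(Encoder* encoder, int frameBytes) {
    const QByteArray zeros(frameBytes, 0);    // one frame of PCM silence
    QByteArray encodedZeros;
    if (encoder) {
        encoder->encode(zeros, encodedZeros); // the codec "forgets" the previous sound
    } else {
        encodedZeros = zeros;                 // pass-through when no codec is active
    }
    return encodedZeros;
}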
@ -528,7 +557,7 @@ void Agent::processAgentAvatarAudio() {
            }
        }

        auto audioPacket = NLPacket::create(silentFrame
        auto audioPacket = NLPacket::create(silentFrame && !_flushEncoder
            ? PacketType::SilentAudioFrame
            : PacketType::MicrophoneAudioNoEcho);

@ -564,13 +593,17 @@ void Agent::processAgentAvatarAudio() {
        glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
        audioPacket->writePrimitive(headOrientation);

        QByteArray decodedBuffer(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples*sizeof(int16_t));
        QByteArray encodedBuffer;
        // encode it
        if(_encoder) {
            _encoder->encode(decodedBuffer, encodedBuffer);
        if (_flushEncoder) {
            encodeFrameOfZeros(encodedBuffer);
        } else {
            encodedBuffer = decodedBuffer;
            QByteArray decodedBuffer(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples*sizeof(int16_t));
            if (_encoder) {
                // encode it
                _encoder->encode(decodedBuffer, encodedBuffer);
            } else {
                encodedBuffer = decodedBuffer;
            }
        }
        audioPacket->write(encodedBuffer.constData(), encodedBuffer.size());
    }

@ -49,7 +49,7 @@ public:
    bool isPlayingAvatarSound() const { return _avatarSound != NULL; }

    bool isListeningToAudioStream() const { return _isListeningToAudioStream; }
    void setIsListeningToAudioStream(bool isListeningToAudioStream) { _isListeningToAudioStream = isListeningToAudioStream; }
    void setIsListeningToAudioStream(bool isListeningToAudioStream);

    float getLastReceivedAudioLoudness() const { return _lastReceivedAudioLoudness; }
    QUuid getSessionUUID() const;

@ -81,7 +81,7 @@ signals:
private:
    void negotiateAudioFormat();
    void selectAudioFormat(const QString& selectedCodecName);
    void flushEncoder();
    void encodeFrameOfZeros(QByteArray& encodedZeros);

    std::unique_ptr<ScriptEngine> _scriptEngine;
    EntityEditPacketSender _entityEditSender;

@ -15,19 +15,23 @@
// this should send a signal every 10ms, with pretty good precision. Hardcoding
// to 10ms since that's what you'd want for audio.
void AvatarAudioTimer::start() {
    qDebug() << "AvatarAudioTimer::start called";
    qDebug() << __FUNCTION__;
    auto startTime = usecTimestampNow();
    quint64 frameCounter = 0;
    const int TARGET_INTERVAL_USEC = 10000; // 10ms
    while (!_quit) {
        frameCounter++;
        // simplest possible timer
        ++frameCounter;

        // tick every 10ms from startTime
        quint64 targetTime = startTime + frameCounter * TARGET_INTERVAL_USEC;
        quint64 interval = std::max((quint64)0, targetTime - usecTimestampNow());
        usleep(interval);
        quint64 now = usecTimestampNow();

        // avoid quint64 underflow
        if (now < targetTime) {
            usleep(targetTime - now);
        }

        emit avatarTick();
    }
    qDebug() << "AvatarAudioTimer is finished";
}

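The rewritten loop above schedules each tick against startTime plus frameCounter * TARGET_INTERVAL_USEC rather than against the previous wake-up, so sleep jitter does not accumulate, and it only sleeps while the target is still in the future, which avoids the unsigned underflow the old std::max() call could not prevent. A standalone sketch of the same scheme using std::chrono, not code from the repository:

#include <atomic>
#include <chrono>
#include <cstdint>
#include <functional>
#include <thread>

// Drift-free fixed-interval ticker: the n-th tick targets start + n * interval,
// so an individual oversleep does not push every later tick further out.
void runTicker(std::atomic<bool>& quit, std::chrono::microseconds interval,
               const std::function<void()>& onTick) {
    const auto start = std::chrono::steady_clock::now();
    std::int64_t frame = 0;
    while (!quit.load()) {
        ++frame;
        const auto target = start + frame * interval;
        const auto now = std::chrono::steady_clock::now();
        if (now < target) {                       // only sleep when we are early; never a negative sleep
            std::this_thread::sleep_for(target - now);
        }
        onTick();
    }
}

Here quit plays the role of _quit in AvatarAudioTimer and would be flipped from another thread to stop the ticker.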
@ -90,9 +90,10 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
        PacketType::InjectAudio, PacketType::SilentAudioFrame,
        PacketType::AudioStreamStats },
        this, "handleNodeAudioPacket");
    packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
    packetReceiver.registerListener(PacketType::NegotiateAudioFormat, this, "handleNegotiateAudioFormat");
    packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
    packetReceiver.registerListener(PacketType::NodeIgnoreRequest, this, "handleNodeIgnoreRequestPacket");
    packetReceiver.registerListener(PacketType::KillAvatar, this, "handleKillAvatarPacket");

    connect(nodeList.data(), &NodeList::nodeKilled, this, &AudioMixer::handleNodeKilled);
}
@ -481,6 +482,7 @@ void AudioMixer::sendAudioEnvironmentPacket(SharedNodePointer node) {
}

void AudioMixer::handleNodeAudioPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
    getOrCreateClientData(sendingNode.data());
    DependencyManager::get<NodeList>()->updateNodeWithDataFromPacket(message, sendingNode);
}

@ -579,18 +581,8 @@ void AudioMixer::handleNegotiateAudioFormat(QSharedPointer<ReceivedMessage> mess
        }
    }

    auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());

    // FIXME - why would we not have client data at this point??
    if (!clientData) {
        qDebug() << "UNEXPECTED -- didn't have node linked data in " << __FUNCTION__;
        sendingNode->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(sendingNode->getUUID()) });
        clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
        connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
    }

    auto clientData = getOrCreateClientData(sendingNode.data());
    clientData->setupCodec(selectedCodec, selectedCodecName);

    qDebug() << "selectedCodecName:" << selectedCodecName;
    clientData->sendSelectAudioFormat(sendingNode, selectedCodecName);
}
@ -599,14 +591,29 @@ void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
    // enumerate the connected listeners to remove HRTF objects for the disconnected node
    auto nodeList = DependencyManager::get<NodeList>();

    nodeList->eachNode([](const SharedNodePointer& node) {
    nodeList->eachNode([&killedNode](const SharedNodePointer& node) {
        auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
        if (clientData) {
            clientData->removeHRTFsForNode(node->getUUID());
            clientData->removeHRTFsForNode(killedNode->getUUID());
        }
    });
}

void AudioMixer::handleKillAvatarPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
    auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
    if (clientData) {
        clientData->removeAgentAvatarAudioStream();
        auto nodeList = DependencyManager::get<NodeList>();
        nodeList->eachNode([sendingNode](const SharedNodePointer& node){
            auto listenerClientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
            if (listenerClientData) {
                listenerClientData->removeHRTFForStream(sendingNode->getUUID());
            }
        });
    }
}

void AudioMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
    sendingNode->parseIgnoreRequestMessage(packet);
}

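The handleNodeKilled() change above also fixes a subtle bug: the old lambda removed HRTF state keyed by each listener's own UUID instead of the disconnected node's UUID. The general shape of the corrected cleanup, sketched with generic container names rather than the repository's types:

#include <string>
#include <unordered_map>

// Per-listener state: what this listener keeps about *other* peers, keyed by peer id
// (HRTF objects in the mixer; plain ints here).
struct ListenerState {
    std::unordered_map<std::string, int> perPeerData;
};

// When deadPeerId disconnects, every surviving listener must drop what it kept about it.
// The fix in the diff is exactly this: erase by the dead peer's id, not by the listener's own id.
void handlePeerKilled(std::unordered_map<std::string, ListenerState>& listeners,
                      const std::string& deadPeerId) {
    for (auto& entry : listeners) {
        entry.second.perPeerData.erase(deadPeerId);
    }
}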
@ -646,7 +653,8 @@ void AudioMixer::sendStatsPacket() {
    statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100.0f;
    statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;

    statsObject["avg_listeners_per_frame"] = (float) _sumListeners / (float) _numStatFrames;
    statsObject["avg_streams_per_frame"] = (float)_sumStreams / (float)_numStatFrames;
    statsObject["avg_listeners_per_frame"] = (float)_sumListeners / (float)_numStatFrames;

    QJsonObject mixStats;
    mixStats["%_hrtf_mixes"] = percentageForMixStats(_hrtfRenders);

@ -660,6 +668,7 @@ void AudioMixer::sendStatsPacket() {

    statsObject["mix_stats"] = mixStats;

    _sumStreams = 0;
    _sumListeners = 0;
    _hrtfRenders = 0;
    _hrtfSilentRenders = 0;
@ -707,17 +716,24 @@ void AudioMixer::run() {
    ThreadedAssignment::commonInit(AUDIO_MIXER_LOGGING_TARGET_NAME, NodeType::AudioMixer);
}

AudioMixerClientData* AudioMixer::getOrCreateClientData(Node* node) {
    auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());

    if (!clientData) {
        node->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID()) });
        clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
        connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
    }

    return clientData;
}

void AudioMixer::domainSettingsRequestComplete() {
    auto nodeList = DependencyManager::get<NodeList>();

    nodeList->addNodeTypeToInterestSet(NodeType::Agent);

    nodeList->linkedDataCreateCallback = [&](Node* node) {
        node->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID()) });
        auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());

        connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
    };
    nodeList->linkedDataCreateCallback = [&](Node* node) { getOrCreateClientData(node); };

    DomainHandler& domainHandler = nodeList->getDomainHandler();
    const QJsonObject& settingsObject = domainHandler.getSettingsObject();
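getOrCreateClientData() above consolidates a pattern that previously appeared in several places (the linked-data create callback, packet handling, and format negotiation): look up the per-node state object, lazily attach it if it is missing, and wire its signals exactly once. A generic sketch of the same idea under assumed names (Mixer, ClientState), not the repository's API:

#include <memory>
#include <string>
#include <unordered_map>

struct ClientState {
    explicit ClientState(std::string id) : nodeId(std::move(id)) {}
    std::string nodeId;
    // ... codecs, streams, and per-peer HRTFs would live here ...
};

class Mixer {
public:
    // Look up the per-node state, creating and registering it on first use.
    // Because every call site goes through this one function, the "attach once"
    // logic cannot drift apart between handlers.
    ClientState& getOrCreateClientData(const std::string& nodeId) {
        auto it = _clients.find(nodeId);
        if (it == _clients.end()) {
            it = _clients.emplace(nodeId, std::make_unique<ClientState>(nodeId)).first;
            // one-time wiring (the signal/slot connection in the real code) happens here
        }
        return *it->second;
    }
private:
    std::unordered_map<std::string, std::unique_ptr<ClientState>> _clients;
};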
@ -730,79 +746,71 @@ void AudioMixer::domainSettingsRequestComplete() {
|
|||
}
|
||||
|
||||
void AudioMixer::broadcastMixes() {
|
||||
const int TRAILING_AVERAGE_FRAMES = 100;
|
||||
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
|
||||
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
|
||||
|
||||
const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
|
||||
const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
|
||||
|
||||
const float RATIO_BACK_OFF = 0.02f;
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
auto nextFrameTimestamp = p_high_resolution_clock::now();
|
||||
auto timeToSleep = std::chrono::microseconds(0);
|
||||
|
||||
const int TRAILING_AVERAGE_FRAMES = 100;
|
||||
int currentFrame = 1;
|
||||
int numFramesPerSecond = (int) ceil(AudioConstants::NETWORK_FRAMES_PER_SEC);
|
||||
int framesSinceCutoffEvent = TRAILING_AVERAGE_FRAMES;
|
||||
|
||||
int currentFrame { 1 };
|
||||
int numFramesPerSecond { (int) ceil(AudioConstants::NETWORK_FRAMES_PER_SEC) };
|
||||
|
||||
while (!_isFinished) {
|
||||
const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
|
||||
const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
|
||||
// manage mixer load
|
||||
{
|
||||
_trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio) +
|
||||
// ratio of frame spent sleeping / total frame time
|
||||
((CURRENT_FRAME_RATIO * timeToSleep.count()) / (float) AudioConstants::NETWORK_FRAME_USECS);
|
||||
|
||||
const float RATIO_BACK_OFF = 0.02f;
|
||||
bool hasRatioChanged = false;
|
||||
|
||||
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
|
||||
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
|
||||
|
||||
if (timeToSleep.count() < 0) {
|
||||
timeToSleep = std::chrono::microseconds(0);
|
||||
}
|
||||
|
||||
_trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio)
|
||||
+ (timeToSleep.count() * CURRENT_FRAME_RATIO / (float) AudioConstants::NETWORK_FRAME_USECS);
|
||||
|
||||
float lastCutoffRatio = _performanceThrottlingRatio;
|
||||
bool hasRatioChanged = false;
|
||||
|
||||
if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
|
||||
if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
|
||||
// we're struggling - change our min required loudness to reduce some load
|
||||
_performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
|
||||
|
||||
qDebug() << "Mixer is struggling, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
|
||||
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
|
||||
hasRatioChanged = true;
|
||||
} else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
|
||||
// we've recovered and can back off the required loudness
|
||||
_performanceThrottlingRatio = _performanceThrottlingRatio - RATIO_BACK_OFF;
|
||||
|
||||
if (_performanceThrottlingRatio < 0) {
|
||||
_performanceThrottlingRatio = 0;
|
||||
if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
|
||||
if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
|
||||
qDebug() << "Mixer is struggling";
|
||||
// change our min required loudness to reduce some load
|
||||
_performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
|
||||
hasRatioChanged = true;
|
||||
} else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
|
||||
qDebug() << "Mixer is recovering";
|
||||
// back off the required loudness
|
||||
_performanceThrottlingRatio = std::max(0.0f, _performanceThrottlingRatio - RATIO_BACK_OFF);
|
||||
hasRatioChanged = true;
|
||||
}
|
||||
|
||||
qDebug() << "Mixer is recovering, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
|
||||
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
|
||||
hasRatioChanged = true;
|
||||
if (hasRatioChanged) {
|
||||
// set out min audability threshold from the new ratio
|
||||
_minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
|
||||
framesSinceCutoffEvent = 0;
|
||||
|
||||
qDebug() << "Sleeping" << _trailingSleepRatio << "of frame";
|
||||
qDebug() << "Cutoff is" << _performanceThrottlingRatio;
|
||||
qDebug() << "Minimum audibility to be mixed is" << _minAudibilityThreshold;
|
||||
}
|
||||
}
|
||||
|
||||
if (hasRatioChanged) {
|
||||
// set out min audability threshold from the new ratio
|
||||
_minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
|
||||
qDebug() << "Minimum audability required to be mixed is now" << _minAudibilityThreshold;
|
||||
|
||||
framesSinceCutoffEvent = 0;
|
||||
if (!hasRatioChanged) {
|
||||
++framesSinceCutoffEvent;
|
||||
}
|
||||
}
|
||||
|
||||
if (!hasRatioChanged) {
|
||||
++framesSinceCutoffEvent;
|
||||
}
|
||||
|
||||
// mix
|
||||
nodeList->eachNode([&](const SharedNodePointer& node) {
|
||||
|
||||
if (node->getLinkedData()) {
|
||||
AudioMixerClientData* nodeData = (AudioMixerClientData*)node->getLinkedData();
|
||||
|
||||
// this function will attempt to pop a frame from each audio stream.
|
||||
// a pointer to the popped data is stored as a member in InboundAudioStream.
|
||||
// That's how the popped audio data will be read for mixing (but only if the pop was successful)
|
||||
nodeData->checkBuffersBeforeFrameSend();
|
||||
_sumStreams += nodeData->checkBuffersBeforeFrameSend();
|
||||
|
||||
// if the stream should be muted, send mute packet
|
||||
if (nodeData->getAvatarAudioStream()
|
||||
|
@ -818,7 +826,8 @@ void AudioMixer::broadcastMixes() {
|
|||
|
||||
std::unique_ptr<NLPacket> mixPacket;
|
||||
|
||||
if (mixHasAudio) {
|
||||
if (mixHasAudio || nodeData->shouldFlushEncoder()) {
|
||||
|
||||
int mixPacketBytes = sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE
|
||||
+ AudioConstants::NETWORK_FRAME_BYTES_STEREO;
|
||||
mixPacket = NLPacket::create(PacketType::MixedAudio, mixPacketBytes);
|
||||
|
@ -831,12 +840,17 @@ void AudioMixer::broadcastMixes() {
|
|||
QString codecInPacket = nodeData->getCodecName();
|
||||
mixPacket->writeString(codecInPacket);
|
||||
|
||||
QByteArray decodedBuffer(reinterpret_cast<char*>(_clampedSamples), AudioConstants::NETWORK_FRAME_BYTES_STEREO);
|
||||
QByteArray encodedBuffer;
|
||||
nodeData->encode(decodedBuffer, encodedBuffer);
|
||||
|
||||
if (mixHasAudio) {
|
||||
QByteArray decodedBuffer(reinterpret_cast<char*>(_clampedSamples), AudioConstants::NETWORK_FRAME_BYTES_STEREO);
|
||||
nodeData->encode(decodedBuffer, encodedBuffer);
|
||||
} else {
|
||||
// time to flush, which resets the shouldFlush until next time we encode something
|
||||
nodeData->encodeFrameOfZeros(encodedBuffer);
|
||||
}
|
||||
// pack mixed audio samples
|
||||
mixPacket->write(encodedBuffer.constData(), encodedBuffer.size());
|
||||
|
||||
} else {
|
||||
int silentPacketBytes = sizeof(quint16) + sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE;
|
||||
mixPacket = NLPacket::create(PacketType::SilentAudioFrame, silentPacketBytes);
|
||||
|
@ -876,24 +890,32 @@ void AudioMixer::broadcastMixes() {

        ++_numStatFrames;

        // since we're a while loop we need to help Qt's event processing
        QCoreApplication::processEvents();
        // play nice with qt event-looping
        {
            // since we're a while loop we need to help qt's event processing
            QCoreApplication::processEvents();

            if (_isFinished) {
                // at this point the audio-mixer is done
                // check if we have a deferred delete event to process (which we should once finished)
                QCoreApplication::sendPostedEvents(this, QEvent::DeferredDelete);
                break;
            if (_isFinished) {
                // alert qt that this is finished
                QCoreApplication::sendPostedEvents(this, QEvent::DeferredDelete);
                break;
            }
        }

        // push the next frame timestamp to when we should send the next
        nextFrameTimestamp += std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
        // sleep until the next frame, if necessary
        {
            nextFrameTimestamp += std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);

            // sleep as long as we need until next frame, if we can
            auto now = p_high_resolution_clock::now();
            timeToSleep = std::chrono::duration_cast<std::chrono::microseconds>(nextFrameTimestamp - now);
            auto now = p_high_resolution_clock::now();
            timeToSleep = std::chrono::duration_cast<std::chrono::microseconds>(nextFrameTimestamp - now);

            std::this_thread::sleep_for(timeToSleep);
            if (timeToSleep.count() < 0) {
                nextFrameTimestamp = now;
                timeToSleep = std::chrono::microseconds(0);
            }

            std::this_thread::sleep_for(timeToSleep);
        }
    }
}

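Taken as a whole, broadcastMixes() runs on a fixed frame budget: after mixing it advances nextFrameTimestamp by one network-frame interval, clamps and sleeps away whatever is left of the frame, and at the top of the loop folds the previous frame's sleep time into _trailingSleepRatio, an exponential moving average that drives _performanceThrottlingRatio. A compact sketch of that control loop under assumed names (FRAME_USECS, doWork); it is an illustration, not the repository code:

#include <chrono>
#include <functional>
#include <thread>

// Fixed-budget loop: one unit of work per frame, sleep away the remainder, and keep an
// exponential moving average of how much of each frame was spent sleeping. When that
// average gets small, the loop is struggling and a throttling ratio can be raised.
void runFrameLoop(bool& finished, const std::function<void()>& doWork) {
    using namespace std::chrono;
    constexpr microseconds FRAME_USECS { 10000 };            // assumed 10 ms frame
    constexpr float CURRENT_FRAME_RATIO = 1.0f / 100.0f;     // 100-frame trailing average
    constexpr float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;

    auto nextFrame = steady_clock::now();
    microseconds timeToSleep { 0 };
    float trailingSleepRatio = 1.0f;                          // 1.0 == fully idle, 0.0 == no headroom

    while (!finished) {
        doWork();

        // blend the previous frame's sleep fraction into the trailing average
        trailingSleepRatio = PREVIOUS_FRAMES_RATIO * trailingSleepRatio
            + CURRENT_FRAME_RATIO * (float)timeToSleep.count() / (float)FRAME_USECS.count();

        nextFrame += FRAME_USECS;
        auto now = steady_clock::now();
        timeToSleep = duration_cast<microseconds>(nextFrame - now);
        if (timeToSleep.count() < 0) {                        // we overran the frame: don't sleep, resync
            nextFrame = now;
            timeToSleep = microseconds { 0 };
        }
        std::this_thread::sleep_for(timeToSleep);
    }
}

When the trailing sleep ratio falls below a struggle threshold the real mixer raises its throttling ratio, and once the ratio of idle time recovers it is backed off toward zero again.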
|
|
@ -48,10 +48,12 @@ private slots:
|
|||
void handleNegotiateAudioFormat(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
|
||||
void handleNodeKilled(SharedNodePointer killedNode);
|
||||
void handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
|
||||
void handleKillAvatarPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
|
||||
|
||||
void removeHRTFsForFinishedInjector(const QUuid& streamID);
|
||||
|
||||
private:
|
||||
AudioMixerClientData* getOrCreateClientData(Node* node);
|
||||
void domainSettingsRequestComplete();
|
||||
|
||||
/// adds one stream to the mix for a listening node
|
||||
|
@ -85,6 +87,7 @@ private:
|
|||
float _attenuationPerDoublingInDistance;
|
||||
float _noiseMutingThreshold;
|
||||
int _numStatFrames { 0 };
|
||||
int _sumStreams { 0 };
|
||||
int _sumListeners { 0 };
|
||||
int _hrtfRenders { 0 };
|
||||
int _hrtfSilentRenders { 0 };
|
||||
|
|
|
@ -73,11 +73,19 @@ void AudioMixerClientData::removeHRTFForStream(const QUuid& nodeID, const QUuid&
|
|||
}
|
||||
}
|
||||
|
||||
void AudioMixerClientData::removeAgentAvatarAudioStream() {
|
||||
QWriteLocker writeLocker { &_streamsLock };
|
||||
auto it = _audioStreams.find(QUuid());
|
||||
if (it != _audioStreams.end()) {
|
||||
_audioStreams.erase(it);
|
||||
}
|
||||
writeLocker.unlock();
|
||||
}
|
||||
|
||||
int AudioMixerClientData::parseData(ReceivedMessage& message) {
|
||||
PacketType packetType = message.getType();
|
||||
|
||||
if (packetType == PacketType::AudioStreamStats) {
|
||||
|
||||
// skip over header, appendFlag, and num stats packed
|
||||
message.seek(sizeof(quint8) + sizeof(quint16));
|
||||
|
||||
|
@ -180,7 +188,7 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
|
|||
return 0;
|
||||
}
|
||||
|
||||
void AudioMixerClientData::checkBuffersBeforeFrameSend() {
|
||||
int AudioMixerClientData::checkBuffersBeforeFrameSend() {
|
||||
QWriteLocker writeLocker { &_streamsLock };
|
||||
|
||||
auto it = _audioStreams.begin();
|
||||
|
@ -208,6 +216,8 @@ void AudioMixerClientData::checkBuffersBeforeFrameSend() {
|
|||
++it;
|
||||
}
|
||||
}
|
||||
|
||||
return (int)_audioStreams.size();
|
||||
}
|
||||
|
||||
bool AudioMixerClientData::shouldSendStats(int frameNumber) {
|
||||
|
@ -355,7 +365,10 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
|
|||
}
|
||||
|
||||
void AudioMixerClientData::handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec) {
|
||||
qDebug() << __FUNCTION__ << "sendingNode:" << *node << "currentCodec:" << currentCodec << "recievedCodec:" << recievedCodec;
|
||||
qDebug() << __FUNCTION__ <<
|
||||
"sendingNode:" << *node <<
|
||||
"currentCodec:" << currentCodec <<
|
||||
"receivedCodec:" << recievedCodec;
|
||||
sendSelectAudioFormat(node, currentCodec);
|
||||
}
|
||||
|
||||
|
@ -366,6 +379,17 @@ void AudioMixerClientData::sendSelectAudioFormat(SharedNodePointer node, const Q
|
|||
nodeList->sendPacket(std::move(replyPacket), *node);
|
||||
}
|
||||
|
||||
void AudioMixerClientData::encodeFrameOfZeros(QByteArray& encodedZeros) {
|
||||
static QByteArray zeros(AudioConstants::NETWORK_FRAME_BYTES_STEREO, 0);
|
||||
if (_shouldFlushEncoder) {
|
||||
if (_encoder) {
|
||||
_encoder->encode(zeros, encodedZeros);
|
||||
} else {
|
||||
encodedZeros = zeros;
|
||||
}
|
||||
}
|
||||
_shouldFlushEncoder = false;
|
||||
}
|
||||
|
||||
void AudioMixerClientData::setupCodec(CodecPluginPointer codec, const QString& codecName) {
|
||||
cleanupCodec(); // cleanup any previously allocated coders first
|
||||
|
|
|
@ -50,9 +50,12 @@ public:
|
|||
// removes an AudioHRTF object for a given stream
|
||||
void removeHRTFForStream(const QUuid& nodeID, const QUuid& streamID = QUuid());
|
||||
|
||||
void removeAgentAvatarAudioStream();
|
||||
|
||||
int parseData(ReceivedMessage& message) override;
|
||||
|
||||
void checkBuffersBeforeFrameSend();
|
||||
// attempt to pop a frame from each audio stream, and return the number of streams from this client
|
||||
int checkBuffersBeforeFrameSend();
|
||||
|
||||
void removeDeadInjectedStreams();
|
||||
|
||||
|
@ -76,7 +79,11 @@ public:
|
|||
} else {
|
||||
encodedBuffer = decodedBuffer;
|
||||
}
|
||||
// once you have encoded, you need to flush eventually.
|
||||
_shouldFlushEncoder = true;
|
||||
}
|
||||
void encodeFrameOfZeros(QByteArray& encodedZeros);
|
||||
bool shouldFlushEncoder() { return _shouldFlushEncoder; }
|
||||
|
||||
QString getCodecName() { return _selectedCodecName; }
|
||||
|
||||
|
@ -105,6 +112,8 @@ private:
|
|||
QString _selectedCodecName;
|
||||
Encoder* _encoder{ nullptr }; // for outbound mixed stream
|
||||
Decoder* _decoder{ nullptr }; // for mic stream
|
||||
|
||||
bool _shouldFlushEncoder { false };
|
||||
};
|
||||
|
||||
#endif // hifi_AudioMixerClientData_h
|
||||
|
|
cmake/externals/openvr/CMakeLists.txt (vendored)
|
@ -7,8 +7,8 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
|||
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://github.com/ValveSoftware/openvr/archive/v1.0.2.zip
|
||||
URL_MD5 0d1cf5f579cf092e33f34759967b7046
|
||||
URL https://github.com/ValveSoftware/openvr/archive/v1.0.3.zip
|
||||
URL_MD5 b484b12901917cc739e40389583c8b0d
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
|
|
|
@ -14,7 +14,7 @@ endif ()
|
|||
|
||||
if (HIFI_MEMORY_DEBUGGING)
|
||||
if (UNIX)
|
||||
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address -fno-omit-frame-pointer")
|
||||
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address -U_FORTIFY_SOURCE -fno-stack-protector -fno-omit-frame-pointer")
|
||||
SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libasan -static-libstdc++ -fsanitize=address")
|
||||
endif (UNIX)
|
||||
endif ()
|
||||
|
|
|
@ -17,6 +17,12 @@ macro(SETUP_HIFI_PLUGIN)
|
|||
set(PLUGIN_PATH "plugins")
|
||||
endif()
|
||||
|
||||
if (WIN32)
|
||||
# produce PDB files for plugins as well
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Zi")
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /DEBUG")
|
||||
endif()
|
||||
|
||||
if (CMAKE_SYSTEM_NAME MATCHES "Linux" OR CMAKE_GENERATOR STREQUAL "Unix Makefiles")
|
||||
set(PLUGIN_FULL_PATH "${CMAKE_BINARY_DIR}/interface/${PLUGIN_PATH}/")
|
||||
else()
|
||||
|
|
|
@ -571,7 +571,9 @@ Function HandlePostInstallOptions
|
|||
; both launches use the explorer trick in case the user has elevated permissions for the installer
|
||||
; it won't be possible to use this approach if either application should be launched with a command line param
|
||||
${If} ${SectionIsSelected} ${@SERVER_COMPONENT_NAME@}
|
||||
Exec '"$WINDIR\explorer.exe" "$INSTDIR\@CONSOLE_INSTALL_SUBDIR@\@CONSOLE_WIN_EXEC_NAME@"'
|
||||
; create shortcut with ARGUMENTS
|
||||
CreateShortCut "$TEMP\SandboxShortcut.lnk" "$INSTDIR\@CONSOLE_INSTALL_SUBDIR@\@CONSOLE_WIN_EXEC_NAME@" "-- --launchInterface"
|
||||
Exec '"$WINDIR\explorer.exe" "$TEMP\SandboxShortcut.lnk"'
|
||||
${Else}
|
||||
Exec '"$WINDIR\explorer.exe" "$INSTDIR\@INTERFACE_WIN_EXEC_NAME@"'
|
||||
${EndIf}
|
||||
|
|
cmake/templates/VersionInfo.rc.in (new file)
|
@ -0,0 +1,22 @@
|
|||
// Language and character set information as described at
|
||||
// https://msdn.microsoft.com/en-us/library/windows/desktop/aa381049(v=vs.85).aspx
|
||||
#define US_ENGLISH_UNICODE "040904B0"
|
||||
|
||||
// More information about the format of this file can be found at
|
||||
// https://msdn.microsoft.com/en-us/library/windows/desktop/aa381058(v=vs.85).aspx
|
||||
1 VERSIONINFO
|
||||
BEGIN
|
||||
BLOCK "StringFileInfo"
|
||||
BEGIN
|
||||
BLOCK US_ENGLISH_UNICODE
|
||||
BEGIN
|
||||
VALUE "CompanyName", "@BUILD_ORGANIZATION@"
|
||||
VALUE "FileDescription", "@APP_FULL_NAME@"
|
||||
VALUE "FileVersion", "@BUILD_VERSION@"
|
||||
VALUE "InternalName", "@TARGET_NAME@"
|
||||
VALUE "OriginalFilename", "@TARGET_NAME@.exe"
|
||||
VALUE "ProductName", "@APP_FULL_NAME@"
|
||||
VALUE "ProductVersion", "@BUILD_VERSION@"
|
||||
END
|
||||
END
|
||||
END
|
|
@ -237,6 +237,7 @@ void DomainGatekeeper::updateNodePermissions() {
|
|||
userPerms.permissions |= NodePermissions::Permission::canAdjustLocks;
|
||||
userPerms.permissions |= NodePermissions::Permission::canRezPermanentEntities;
|
||||
userPerms.permissions |= NodePermissions::Permission::canRezTemporaryEntities;
|
||||
userPerms.permissions |= NodePermissions::Permission::canWriteToAssetServer;
|
||||
} else {
|
||||
// this node is an agent
|
||||
const QHostAddress& addr = node->getLocalSocket().getAddress();
|
||||
|
@ -312,6 +313,7 @@ SharedNodePointer DomainGatekeeper::processAssignmentConnectRequest(const NodeCo
|
|||
userPerms.permissions |= NodePermissions::Permission::canAdjustLocks;
|
||||
userPerms.permissions |= NodePermissions::Permission::canRezPermanentEntities;
|
||||
userPerms.permissions |= NodePermissions::Permission::canRezTemporaryEntities;
|
||||
userPerms.permissions |= NodePermissions::Permission::canWriteToAssetServer;
|
||||
newNode->setPermissions(userPerms);
|
||||
return newNode;
|
||||
}
|
||||
|
|
|
@ -133,8 +133,12 @@ elseif (WIN32)
|
|||
set(CONFIGURE_ICON_RC_OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/Icon.rc")
|
||||
configure_file("${HF_CMAKE_DIR}/templates/Icon.rc.in" ${CONFIGURE_ICON_RC_OUTPUT})
|
||||
|
||||
set(APP_FULL_NAME "High Fidelity Interface")
|
||||
set(CONFIGURE_VERSION_INFO_RC_OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/VersionInfo.rc")
|
||||
configure_file("${HF_CMAKE_DIR}/templates/VersionInfo.rc.in" ${CONFIGURE_VERSION_INFO_RC_OUTPUT})
|
||||
|
||||
# add an executable that also has the icon itself and the configured rc file as resources
|
||||
add_executable(${TARGET_NAME} WIN32 ${INTERFACE_SRCS} ${QM} ${CONFIGURE_ICON_RC_OUTPUT})
|
||||
add_executable(${TARGET_NAME} WIN32 ${INTERFACE_SRCS} ${QM} ${CONFIGURE_ICON_RC_OUTPUT} ${CONFIGURE_VERSION_INFO_RC_OUTPUT})
|
||||
|
||||
if (NOT DEV_BUILD)
|
||||
add_custom_command(
|
||||
|
|
|
@ -23,7 +23,7 @@
|
|||
{ "from": "OculusTouch.LT", "to": "Standard.LT" },
|
||||
{ "from": "OculusTouch.LS", "to": "Standard.LS" },
|
||||
{ "from": "OculusTouch.LeftGrip", "filters": { "type": "deadZone", "min": 0.5 }, "to": "Standard.LeftGrip" },
|
||||
{ "from": "OculusTouch.LeftHand", "to": "Standard.LeftHand" },
|
||||
{ "from": "OculusTouch.LeftHand", "to": "Standard.LeftHand", "when": [ "Application.InHMD" ] },
|
||||
|
||||
{ "from": "OculusTouch.RY", "to": "Standard.RY",
|
||||
"filters": [
|
||||
|
@ -39,7 +39,7 @@
|
|||
{ "from": "OculusTouch.RT", "to": "Standard.RT" },
|
||||
{ "from": "OculusTouch.RS", "to": "Standard.RS" },
|
||||
{ "from": "OculusTouch.RightGrip", "filters": { "type": "deadZone", "min": 0.5 }, "to": "Standard.RightGrip" },
|
||||
{ "from": "OculusTouch.RightHand", "to": "Standard.RightHand" },
|
||||
{ "from": "OculusTouch.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] },
|
||||
|
||||
{ "from": "OculusTouch.LeftApplicationMenu", "to": "Standard.Back" },
|
||||
{ "from": "OculusTouch.RightApplicationMenu", "to": "Standard.Start" },
|
||||
|
@ -58,4 +58,3 @@
|
|||
{ "from": "OculusTouch.RightIndexPoint", "to": "Standard.RightIndexPoint" }
|
||||
]
|
||||
}
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@
|
|||
{ "from": "Vive.RSCenter", "to": "Standard.RightPrimaryThumb" },
|
||||
{ "from": "Vive.RightApplicationMenu", "to": "Standard.RightSecondaryThumb" },
|
||||
|
||||
{ "from": "Vive.LeftHand", "to": "Standard.LeftHand" },
|
||||
{ "from": "Vive.RightHand", "to": "Standard.RightHand" }
|
||||
{ "from": "Vive.LeftHand", "to": "Standard.LeftHand", "when": [ "Application.InHMD" ] },
|
||||
{ "from": "Vive.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] }
|
||||
]
|
||||
}
|
||||
}
|
||||
|
|
|
@ -50,21 +50,33 @@
|
|||
function showKbm() {
|
||||
document.getElementById("main_image").setAttribute("src", "img/controls-help-keyboard.png");
|
||||
}
|
||||
function showHandControllers() {
|
||||
function showViveControllers() {
|
||||
document.getElementById("main_image").setAttribute("src", "img/controls-help-vive.png");
|
||||
}
|
||||
function showGameController() {
|
||||
function showXboxController() {
|
||||
document.getElementById("main_image").setAttribute("src", "img/controls-help-gamepad.png");
|
||||
}
|
||||
function load() {
|
||||
console.log("In help.html: ", window.location.href);
|
||||
parts = window.location.href.split("?");
|
||||
if (parts.length > 0) {
|
||||
var defaultTab = parts[1];
|
||||
if (defaultTab == "xbox") {
|
||||
showXboxController();
|
||||
} else if (defaultTab == "vive") {
|
||||
showViveControllers();
|
||||
}
|
||||
}
|
||||
}
|
||||
</script>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<body onload="load()">
|
||||
<div id="image_area">
|
||||
<img id="main_image" src="img/controls-help-keyboard.png" width="1024px" height="720px"></img>
|
||||
<a href="#" id="kbm_button" onmousedown="showKbm()"></a>
|
||||
<a href="#" id="hand_controllers_button" onmousedown="showHandControllers()"></a>
|
||||
<a href="#" id="game_controller_button" onmousedown="showGameController()"></a>
|
||||
<a href="#" id="hand_controllers_button" onmousedown="showViveControllers()"></a>
|
||||
<a href="#" id="game_controller_button" onmousedown="showXboxController()"></a>
|
||||
</div>
|
||||
</body>
|
||||
|
||||
|
|
Binary image changes (contents not shown): one image replaced (246 KiB before, 106 KiB after); interface/resources/images/steam-min-spec-failed.png added as a new file (82 KiB); one further binary file changed.
|
@ -95,46 +95,10 @@ Hifi.AvatarInputs {
|
|||
anchors.fill: parent
|
||||
color: root.mirrorVisible ? (root.audioClipping ? "red" : "#696969") : "#00000000"
|
||||
|
||||
Image {
|
||||
id: faceMute
|
||||
width: root.iconSize
|
||||
height: root.iconSize
|
||||
visible: root.cameraEnabled
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: root.iconPadding
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
source: root.cameraMuted ? "../images/face-mute.svg" : "../images/face.svg"
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
onClicked: {
|
||||
root.toggleCameraMute()
|
||||
}
|
||||
onDoubleClicked: {
|
||||
root.resetSensors();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Image {
|
||||
id: micMute
|
||||
width: root.iconSize
|
||||
height: root.iconSize
|
||||
anchors.left: root.cameraEnabled ? faceMute.right : parent.left
|
||||
anchors.leftMargin: root.iconPadding
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
source: root.audioMuted ? "../images/mic-mute.svg" : "../images/mic.svg"
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
onClicked: {
|
||||
root.toggleAudioMute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Item {
|
||||
id: audioMeter
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
anchors.left: micMute.right
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: root.iconPadding
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: root.iconPadding
|
||||
|
|
|
@ -20,10 +20,10 @@ ScrollingWindow {
|
|||
anchors.centerIn: parent
|
||||
UpdateDialog {
|
||||
id: updateDialog
|
||||
|
||||
|
||||
implicitWidth: backgroundRectangle.width
|
||||
implicitHeight: backgroundRectangle.height
|
||||
|
||||
|
||||
readonly property int contentWidth: 500
|
||||
readonly property int logoSize: 60
|
||||
readonly property int borderWidth: 30
|
||||
|
@ -36,7 +36,7 @@ ScrollingWindow {
|
|||
|
||||
signal triggerBuildDownload
|
||||
signal closeUpdateDialog
|
||||
|
||||
|
||||
Rectangle {
|
||||
id: backgroundRectangle
|
||||
color: "#ffffff"
|
||||
|
@ -47,7 +47,7 @@ ScrollingWindow {
|
|||
|
||||
Image {
|
||||
id: logo
|
||||
source: "../images/interface-logo.svg"
|
||||
source: "../images/hifi-logo.svg"
|
||||
width: updateDialog.logoSize
|
||||
height: updateDialog.logoSize
|
||||
anchors {
|
||||
|
@ -65,7 +65,7 @@ ScrollingWindow {
|
|||
topMargin: updateDialog.borderWidth
|
||||
top: parent.top
|
||||
}
|
||||
|
||||
|
||||
Rectangle {
|
||||
id: header
|
||||
width: parent.width - updateDialog.logoSize - updateDialog.inputSpacing
|
||||
|
|
|
@ -27,6 +27,7 @@ Item {
|
|||
|
||||
WebEngineView {
|
||||
id: root
|
||||
objectName: "webEngineView"
|
||||
x: 0
|
||||
y: 0
|
||||
width: parent.width
|
||||
|
|
|
@ -312,6 +312,7 @@ FocusScope {
|
|||
|
||||
onPinnedChanged: {
|
||||
if (pinned) {
|
||||
d.raiseWindow(desktop);
|
||||
desktop.focus = true;
|
||||
desktop.forceActiveFocus();
|
||||
|
||||
|
|
|
@ -113,9 +113,8 @@ Rectangle {
|
|||
}
|
||||
FiraSansRegular {
|
||||
id: users;
|
||||
visible: action === 'concurrency';
|
||||
text: onlineUsers;
|
||||
size: textSize;
|
||||
text: (action === 'concurrency') ? onlineUsers : 'snapshot';
|
||||
size: (action === 'concurrency') ? textSize : textSizeSmall;
|
||||
color: hifi.colors.white;
|
||||
anchors {
|
||||
verticalCenter: usersImage.verticalCenter;
|
||||
|
|
|
@ -51,27 +51,41 @@ OriginalDesktop.Desktop {
|
|||
Toolbar {
|
||||
id: sysToolbar;
|
||||
objectName: "com.highfidelity.interface.toolbar.system";
|
||||
// These values will be overridden by sysToolbar.x/y if there is a saved position in Settings
|
||||
// On exit, the sysToolbar position is saved to settings
|
||||
x: 30
|
||||
anchors.horizontalCenter: settings.constrainToolbarToCenterX ? desktop.horizontalCenter : undefined;
|
||||
// Literal 50 is overwritten by settings from previous session, and sysToolbar.x comes from settings when not constrained.
|
||||
x: sysToolbar.x
|
||||
y: 50
|
||||
}
|
||||
Settings {
|
||||
id: settings;
|
||||
category: "toolbar";
|
||||
property bool constrainToolbarToCenterX: true;
|
||||
}
|
||||
function setConstrainToolbarToCenterX(constrain) { // Learn about c++ preference change.
|
||||
settings.constrainToolbarToCenterX = constrain;
|
||||
}
|
||||
property var toolbars: (function (map) { // answer dictionary preloaded with sysToolbar
|
||||
map[sysToolbar.objectName] = sysToolbar;
|
||||
return map; })({});
|
||||
|
||||
|
||||
Component.onCompleted: {
|
||||
WebEngine.settings.javascriptCanOpenWindows = true;
|
||||
WebEngine.settings.javascriptCanAccessClipboard = false;
|
||||
WebEngine.settings.spatialNavigationEnabled = false;
|
||||
WebEngine.settings.localContentCanAccessRemoteUrls = true;
|
||||
|
||||
var toggleHudButton = sysToolbar.addButton({
|
||||
objectName: "hudToggle",
|
||||
imageURL: "../../../icons/hud.svg",
|
||||
visible: true,
|
||||
pinned: true,
|
||||
[ // Allocate the standard buttons in the correct order. They will get images, etc., via scripts.
|
||||
"hmdToggle", "mute", "mod", "help",
|
||||
"hudToggle",
|
||||
"com.highfidelity.interface.system.editButton", "marketplace", "snapshot", "goto"
|
||||
].forEach(function (name) {
|
||||
sysToolbar.addButton({objectName: name});
|
||||
});
|
||||
var toggleHudButton = sysToolbar.findButton("hudToggle");
|
||||
toggleHudButton.imageURL = "../../../icons/hud.svg";
|
||||
toggleHudButton.pinned = true;
|
||||
sysToolbar.updatePinned(); // automatic when adding buttons only IFF button is pinned at creation.
|
||||
|
||||
toggleHudButton.buttonState = Qt.binding(function(){
|
||||
return desktop.pinned ? 1 : 0
|
||||
|
|
|
@ -17,7 +17,7 @@ PreferencesDialog {
|
|||
id: root
|
||||
objectName: "GeneralPreferencesDialog"
|
||||
title: "General Settings"
|
||||
showCategories: ["Snapshots", "Scripts", "Privacy", "Octree", "HMD", "Sixense Controllers"]
|
||||
showCategories: ["UI", "Snapshots", "Scripts", "Privacy", "Octree", "HMD", "Sixense Controllers"]
|
||||
property var settings: Settings {
|
||||
category: root.objectName
|
||||
property alias x: root.x
|
||||
|
|
|
@ -114,6 +114,9 @@ Window {
|
|||
// and allow scripts to be idempotent so they don't duplicate buttons if they're reloaded
|
||||
var result = findButton(properties.objectName);
|
||||
if (result) {
|
||||
for (var property in properties) {
|
||||
result[property] = properties[property];
|
||||
}
|
||||
return result;
|
||||
}
|
||||
properties.toolbar = this;
|
||||
|
|
|
@ -523,6 +523,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
_mirrorViewRect(QRect(MIRROR_VIEW_LEFT_PADDING, MIRROR_VIEW_TOP_PADDING, MIRROR_VIEW_WIDTH, MIRROR_VIEW_HEIGHT)),
|
||||
_previousScriptLocation("LastScriptLocation", DESKTOP_LOCATION),
|
||||
_fieldOfView("fieldOfView", DEFAULT_FIELD_OF_VIEW_DEGREES),
|
||||
_constrainToolbarPosition("toolbar/constrainToolbarToCenterX", true),
|
||||
_scaleMirror(1.0f),
|
||||
_rotateMirror(0.0f),
|
||||
_raiseMirror(0.0f),
|
||||
|
@ -534,6 +535,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
_maxOctreePPS(maxOctreePacketsPerSecond.get()),
|
||||
_lastFaceTrackerUpdate(0)
|
||||
{
|
||||
setProperty("com.highfidelity.launchedFromSteam", SteamClient::isRunning());
|
||||
|
||||
_runningMarker.startRunningMarker();
|
||||
|
||||
PluginContainer* pluginContainer = dynamic_cast<PluginContainer*>(this); // set the container for any plugins that care
|
||||
|
@ -569,6 +572,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
_deadlockWatchdogThread = new DeadlockWatchdogThread();
|
||||
_deadlockWatchdogThread->start();
|
||||
|
||||
qCDebug(interfaceapp) << "[VERSION] SteamVR buildID:" << SteamClient::getSteamVRBuildID();
|
||||
qCDebug(interfaceapp) << "[VERSION] Build sequence:" << qPrintable(applicationVersion());
|
||||
qCDebug(interfaceapp) << "[VERSION] MODIFIED_ORGANIZATION:" << BuildInfo::MODIFIED_ORGANIZATION;
|
||||
qCDebug(interfaceapp) << "[VERSION] VERSION:" << BuildInfo::VERSION;
|
||||
|
@ -1142,7 +1146,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
});
|
||||
|
||||
// If the user clicks somewhere where there is NO entity at all, we will release focus
|
||||
connect(getEntities(), &EntityTreeRenderer::mousePressOffEntity, [=]() {
|
||||
connect(getEntities().data(), &EntityTreeRenderer::mousePressOffEntity, [=]() {
|
||||
setKeyboardFocusEntity(UNKNOWN_ENTITY_ID);
|
||||
});
|
||||
|
||||
|
@ -1191,6 +1195,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
properties["dropped_frame_rate"] = displayPlugin->droppedFrameRate();
|
||||
properties["sim_rate"] = getAverageSimsPerSecond();
|
||||
properties["avatar_sim_rate"] = getAvatarSimrate();
|
||||
properties["has_async_reprojection"] = displayPlugin->hasAsyncReprojection();
|
||||
|
||||
auto bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
|
||||
properties["packet_rate_in"] = bandwidthRecorder->getCachedTotalAverageInputPacketsPerSecond();
|
||||
|
@ -1234,6 +1239,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
auto glInfo = getGLContextData();
|
||||
properties["gl_info"] = glInfo;
|
||||
properties["gpu_free_memory"] = (int)BYTES_TO_MB(gpu::Context::getFreeGPUMemory());
|
||||
properties["ideal_thread_count"] = QThread::idealThreadCount();
|
||||
|
||||
auto hmdHeadPose = getHMDSensorPose();
|
||||
properties["hmd_head_pose_changed"] = isHMDMode() && (hmdHeadPose != lastHMDHeadPose);
|
||||
|
@ -2145,12 +2151,27 @@ void Application::setFieldOfView(float fov) {
|
|||
}
|
||||
}
|
||||
|
||||
void Application::setSettingConstrainToolbarPosition(bool setting) {
|
||||
_constrainToolbarPosition.set(setting);
|
||||
DependencyManager::get<OffscreenUi>()->setConstrainToolbarToCenterX(setting);
|
||||
}
|
||||
|
||||
void Application::aboutApp() {
|
||||
InfoView::show(INFO_WELCOME_PATH);
|
||||
}
|
||||
|
||||
void Application::showHelp() {
|
||||
InfoView::show(INFO_HELP_PATH);
|
||||
static const QString QUERY_STRING_XBOX = "xbox";
|
||||
static const QString QUERY_STRING_VIVE = "vive";
|
||||
|
||||
QString queryString = "";
|
||||
if (PluginUtils::isViveControllerAvailable()) {
|
||||
queryString = QUERY_STRING_VIVE;
|
||||
} else if (PluginUtils::isXboxControllerAvailable()) {
|
||||
queryString = QUERY_STRING_XBOX;
|
||||
}
|
||||
|
||||
InfoView::show(INFO_HELP_PATH, false, queryString);
|
||||
}
|
||||
|
||||
void Application::resizeEvent(QResizeEvent* event) {
|
||||
|
@ -3467,7 +3488,7 @@ void Application::init() {
|
|||
|
||||
// connect the _entityCollisionSystem to our EntityTreeRenderer since that's what handles running entity scripts
|
||||
connect(_entitySimulation.get(), &EntitySimulation::entityCollisionWithEntity,
|
||||
getEntities(), &EntityTreeRenderer::entityCollisionWithEntity);
|
||||
getEntities().data(), &EntityTreeRenderer::entityCollisionWithEntity);
|
||||
|
||||
// connect the _entities (EntityTreeRenderer) to our script engine's EntityScriptingInterface for firing
|
||||
// of events related clicking, hovering over, and entering entities
|
||||
|
|
|
@ -180,7 +180,7 @@ public:
|
|||
void copyDisplayViewFrustum(ViewFrustum& viewOut) const;
|
||||
void copyShadowViewFrustum(ViewFrustum& viewOut) const override;
|
||||
const OctreePacketProcessor& getOctreePacketProcessor() const { return _octreeProcessor; }
|
||||
EntityTreeRenderer* getEntities() const { return DependencyManager::get<EntityTreeRenderer>().data(); }
|
||||
QSharedPointer<EntityTreeRenderer> getEntities() const { return DependencyManager::get<EntityTreeRenderer>(); }
|
||||
QUndoStack* getUndoStack() { return &_undoStack; }
|
||||
MainWindow* getWindow() const { return _window; }
|
||||
EntityTreePointer getEntityClipboard() const { return _entityClipboard; }
|
||||
|
@ -206,6 +206,9 @@ public:
|
|||
float getFieldOfView() { return _fieldOfView.get(); }
|
||||
void setFieldOfView(float fov);
|
||||
|
||||
float getSettingConstrainToolbarPosition() { return _constrainToolbarPosition.get(); }
|
||||
void setSettingConstrainToolbarPosition(bool setting);
|
||||
|
||||
NodeToOctreeSceneStats* getOcteeSceneStats() { return &_octreeServerSceneStats; }
|
||||
|
||||
virtual controller::ScriptingInterface* getControllerScriptingInterface() { return _controllerScriptingInterface; }
|
||||
|
@ -229,7 +232,7 @@ public:
|
|||
|
||||
qint64 getCurrentSessionRuntime() const { return _sessionRunTimer.elapsed(); }
|
||||
|
||||
bool isAboutToQuit() const { return _aboutToQuit; }
|
||||
bool isAboutToQuit() const override { return _aboutToQuit; }
|
||||
bool isPhysicsEnabled() const { return _physicsEnabled; }
|
||||
|
||||
// the isHMDMode is true whenever we use the interface from an HMD and not a standard flat display
|
||||
|
@ -506,6 +509,7 @@ private:
|
|||
|
||||
Setting::Handle<QString> _previousScriptLocation;
|
||||
Setting::Handle<float> _fieldOfView;
|
||||
Setting::Handle<bool> _constrainToolbarPosition;
|
||||
|
||||
float _scaleMirror;
|
||||
float _rotateMirror;
|
||||
|
|
|
@ -29,7 +29,7 @@ SpatiallyNestableWeakPointer InterfaceParentFinder::find(QUuid parentID, bool& s
|
|||
if (entityTree) {
|
||||
parent = entityTree->findByID(parentID);
|
||||
} else {
|
||||
EntityTreeRenderer* treeRenderer = qApp->getEntities();
|
||||
auto treeRenderer = qApp->getEntities();
|
||||
EntityTreePointer tree = treeRenderer ? treeRenderer->getTree() : nullptr;
|
||||
parent = tree ? tree->findEntityByEntityItemID(parentID) : nullptr;
|
||||
}
|
||||
|
|
|
@ -338,6 +338,9 @@ Menu::Menu() {
|
|||
// Developer > Render > Throttle FPS If Not Focus
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::ThrottleFPSIfNotFocus, 0, true);
|
||||
|
||||
// Developer > Render > OpenVR threaded submit
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::OpenVrThreadedSubmit, 0, true);
|
||||
|
||||
// Developer > Render > Resolution
|
||||
MenuWrapper* resolutionMenu = renderOptionsMenu->addMenu(MenuOption::RenderResolution);
|
||||
QActionGroup* resolutionGroup = new QActionGroup(resolutionMenu);
|
||||
|
@ -631,6 +634,14 @@ Menu::Menu() {
|
|||
// Developer > Audio >>>
|
||||
MenuWrapper* audioDebugMenu = developerMenu->addMenu("Audio");
|
||||
|
||||
action = addActionToQMenuAndActionHash(audioDebugMenu, "Stats...");
|
||||
connect(action, &QAction::triggered, [] {
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>();
|
||||
QUrl defaultScriptsLoc = defaultScriptsLocation();
|
||||
defaultScriptsLoc.setPath(defaultScriptsLoc.path() + "developer/utilities/audio/stats.js");
|
||||
scriptEngines->loadScript(defaultScriptsLoc.toString());
|
||||
});
|
||||
|
||||
action = addActionToQMenuAndActionHash(audioDebugMenu, "Buffers...");
|
||||
connect(action, &QAction::triggered, [] {
|
||||
DependencyManager::get<OffscreenUi>()->toggle(QString("hifi/dialogs/AudioPreferencesDialog.qml"), "AudioPreferencesDialog");
|
||||
|
|
|
@ -139,6 +139,7 @@ namespace MenuOption {
|
|||
const QString OctreeStats = "Entity Statistics";
|
||||
const QString OnePointCalibration = "1 Point Calibration";
|
||||
const QString OnlyDisplayTopTen = "Only Display Top Ten";
|
||||
const QString OpenVrThreadedSubmit = "OpenVR Threaded Submit";
|
||||
const QString OutputMenu = "Display";
|
||||
const QString Overlays = "Overlays";
|
||||
const QString PackageModel = "Package Model...";
|
||||
|
|
|
@ -109,7 +109,7 @@ Avatar::Avatar(RigPointer rig) :
|
|||
Avatar::~Avatar() {
|
||||
assert(isDead()); // mark dead before calling the dtor
|
||||
|
||||
EntityTreeRenderer* treeRenderer = qApp->getEntities();
|
||||
auto treeRenderer = qApp->getEntities();
|
||||
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
|
||||
if (entityTree) {
|
||||
entityTree->withWriteLock([&] {
|
||||
|
@ -199,7 +199,7 @@ void Avatar::updateAvatarEntities() {
|
|||
return; // wait until MyAvatar gets an ID before doing this.
|
||||
}
|
||||
|
||||
EntityTreeRenderer* treeRenderer = qApp->getEntities();
|
||||
auto treeRenderer = qApp->getEntities();
|
||||
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
|
||||
if (!entityTree) {
|
||||
return;
|
||||
|
|
|
@ -505,7 +505,7 @@ void MyAvatar::simulate(float deltaTime) {
|
|||
|
||||
locationChanged();
|
||||
// if a entity-child of this avatar has moved outside of its queryAACube, update the cube and tell the entity server.
|
||||
EntityTreeRenderer* entityTreeRenderer = qApp->getEntities();
|
||||
auto entityTreeRenderer = qApp->getEntities();
|
||||
EntityTreePointer entityTree = entityTreeRenderer ? entityTreeRenderer->getTree() : nullptr;
|
||||
if (entityTree) {
|
||||
bool flyingAllowed = true;
|
||||
|
@ -2133,7 +2133,7 @@ void MyAvatar::setAvatarCollisionsEnabled(bool enabled) {
|
|||
}
|
||||
|
||||
bool ghostingAllowed = true;
|
||||
EntityTreeRenderer* entityTreeRenderer = qApp->getEntities();
|
||||
auto entityTreeRenderer = qApp->getEntities();
|
||||
if (entityTreeRenderer) {
|
||||
std::shared_ptr<ZoneEntityItem> zone = entityTreeRenderer->myAvatarZone();
|
||||
if (zone) {
|
||||
|
@ -2467,7 +2467,7 @@ void MyAvatar::removeHoldAction(AvatarActionHold* holdAction) {
|
|||
}
|
||||
|
||||
void MyAvatar::updateHoldActions(const AnimPose& prePhysicsPose, const AnimPose& postUpdatePose) {
|
||||
EntityTreeRenderer* entityTreeRenderer = qApp->getEntities();
|
||||
auto entityTreeRenderer = qApp->getEntities();
|
||||
EntityTreePointer entityTree = entityTreeRenderer ? entityTreeRenderer->getTree() : nullptr;
|
||||
if (entityTree) {
|
||||
// lateAvatarUpdate will modify entity position & orientation, so we need an entity write lock
|
||||
|
|
|
@ -37,7 +37,11 @@
|
|||
#include <CrashReporter.h>
|
||||
#endif
|
||||
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
extern "C" {
|
||||
typedef int(__stdcall * CHECKMINSPECPROC) ();
|
||||
}
|
||||
#endif
|
||||
|
||||
int main(int argc, const char* argv[]) {
|
||||
#if HAS_BUGSPLAT
|
||||
|
@ -155,15 +159,33 @@ int main(int argc, const char* argv[]) {
|
|||
|
||||
SteamClient::init();
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
// If we're running in steam mode, we need to do an explicit check to ensure we're up to the required min spec
|
||||
if (SteamClient::isRunning()) {
|
||||
QString appPath;
|
||||
{
|
||||
char filename[MAX_PATH];
|
||||
GetModuleFileName(NULL, filename, MAX_PATH);
|
||||
QFileInfo appInfo(filename);
|
||||
appPath = appInfo.absolutePath();
|
||||
}
|
||||
QString openvrDllPath = appPath + "/plugins/openvr.dll";
|
||||
HMODULE openvrDll;
|
||||
CHECKMINSPECPROC checkMinSpecPtr;
|
||||
if ((openvrDll = LoadLibrary(openvrDllPath.toLocal8Bit().data())) &&
|
||||
(checkMinSpecPtr = (CHECKMINSPECPROC)GetProcAddress(openvrDll, "CheckMinSpec"))) {
|
||||
if (!checkMinSpecPtr()) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
int exitCode;
|
||||
{
|
||||
QSettings::setDefaultFormat(QSettings::IniFormat);
|
||||
Application app(argc, const_cast<char**>(argv), startupTime, runServer, serverContentPathOptionValue);
|
||||
|
||||
bool launchedFromSteam = SteamClient::isRunning();
|
||||
app.setProperty("com.highfidelity.launchedFromSteam", launchedFromSteam);
|
||||
|
||||
// If we failed the OpenGLVersion check, log it.
|
||||
if (override) {
|
||||
auto accountManager = DependencyManager::get<AccountManager>();
|
||||
|
|
|
@ -92,13 +92,19 @@ void OctreePacketProcessor::processPacket(QSharedPointer<ReceivedMessage> messag
|
|||
switch(packetType) {
|
||||
case PacketType::EntityErase: {
|
||||
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
|
||||
qApp->getEntities()->processEraseMessage(*message, sendingNode);
|
||||
auto renderer = qApp->getEntities();
|
||||
if (renderer) {
|
||||
renderer->processEraseMessage(*message, sendingNode);
|
||||
}
|
||||
}
|
||||
} break;
|
||||
|
||||
case PacketType::EntityData: {
|
||||
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
|
||||
qApp->getEntities()->processDatagram(*message, sendingNode);
|
||||
auto renderer = qApp->getEntities();
|
||||
if (renderer) {
|
||||
renderer->processDatagram(*message, sendingNode);
|
||||
}
|
||||
}
|
||||
} break;
|
||||
|
||||
|
|
|
@ -68,6 +68,13 @@ void setupPreferences() {
|
|||
preferences->addPreference(new CheckPreference(AVATAR_BASICS, "Clear overlays when moving", getter, setter));
|
||||
}
|
||||
|
||||
// UI
|
||||
{
|
||||
auto getter = []()->bool { return qApp->getSettingConstrainToolbarPosition(); };
|
||||
auto setter = [](bool value) { qApp->setSettingConstrainToolbarPosition(value); };
|
||||
preferences->addPreference(new CheckPreference("UI", "Constrain Toolbar Position to Horizontal Center", getter, setter));
|
||||
}
|
||||
|
||||
// Snapshots
|
||||
static const QString SNAPSHOTS { "Snapshots" };
|
||||
{
|
||||
|
|
|
@ -89,7 +89,7 @@ QTemporaryFile* Snapshot::saveTempSnapshot(QImage image) {
|
|||
QFile* Snapshot::savedFileForSnapshot(QImage & shot, bool isTemporary) {
|
||||
|
||||
// adding URL to snapshot
|
||||
QUrl currentURL = DependencyManager::get<AddressManager>()->currentAddress();
|
||||
QUrl currentURL = DependencyManager::get<AddressManager>()->currentShareableAddress();
|
||||
shot.setText(URL, currentURL.toString());
|
||||
|
||||
QString username = DependencyManager::get<AccountManager>()->getAccountInfo().getUsername();
|
||||
|
@ -146,7 +146,10 @@ QFile* Snapshot::savedFileForSnapshot(QImage & shot, bool isTemporary) {
|
|||
void Snapshot::uploadSnapshot(const QString& filename) {
|
||||
|
||||
const QString SNAPSHOT_UPLOAD_URL = "/api/v1/snapshots";
|
||||
static SnapshotUploader uploader;
|
||||
// As an alternative to parseSnapshotData, we could pass the inWorldLocation through the call chain. This way is less disruptive to existing code.
|
||||
SnapshotMetaData* snapshotData = Snapshot::parseSnapshotData(filename);
|
||||
SnapshotUploader* uploader = new SnapshotUploader(snapshotData->getURL(), filename);
|
||||
delete snapshotData;
|
||||
|
||||
QFile* file = new QFile(filename);
|
||||
Q_ASSERT(file->exists());
|
||||
|
@ -163,7 +166,7 @@ void Snapshot::uploadSnapshot(const QString& filename) {
|
|||
multiPart->append(imagePart);
|
||||
|
||||
auto accountManager = DependencyManager::get<AccountManager>();
|
||||
JSONCallbackParameters callbackParams(&uploader, "uploadSuccess", &uploader, "uploadFailure");
|
||||
JSONCallbackParameters callbackParams(uploader, "uploadSuccess", uploader, "uploadFailure");
|
||||
|
||||
accountManager->sendRequest(SNAPSHOT_UPLOAD_URL,
|
||||
AccountManagerAuth::Required,
|
||||
|
|
|
@ -15,9 +15,13 @@
|
|||
#include "scripting/WindowScriptingInterface.h"
|
||||
#include "SnapshotUploader.h"
|
||||
|
||||
SnapshotUploader::SnapshotUploader(QUrl inWorldLocation, QString pathname) :
|
||||
_inWorldLocation(inWorldLocation),
|
||||
_pathname(pathname) {
|
||||
}
|
||||
|
||||
void SnapshotUploader::uploadSuccess(QNetworkReply& reply) {
|
||||
const QString STORY_UPLOAD_URL = "/api/v1/user_stories";
|
||||
static SnapshotUploader uploader;
|
||||
|
||||
// parse the reply for the thumbnail_url
|
||||
QByteArray contents = reply.readAll();
|
||||
|
@ -28,11 +32,8 @@ void SnapshotUploader::uploadSuccess(QNetworkReply& reply) {
|
|||
QString thumbnailUrl = dataObject.value("thumbnail_url").toString();
|
||||
QString imageUrl = dataObject.value("image_url").toString();
|
||||
auto addressManager = DependencyManager::get<AddressManager>();
|
||||
QString placeName = addressManager->getPlaceName();
|
||||
if (placeName.isEmpty()) {
|
||||
placeName = addressManager->getHost();
|
||||
}
|
||||
QString currentPath = addressManager->currentPath(true);
|
||||
QString placeName = _inWorldLocation.authority(); // We currently only upload shareable places, in which case this is just host.
|
||||
QString currentPath = _inWorldLocation.path();
|
||||
|
||||
// create json post data
|
||||
QJsonObject rootObject;
|
||||
|
@ -48,7 +49,7 @@ void SnapshotUploader::uploadSuccess(QNetworkReply& reply) {
|
|||
rootObject.insert("user_story", userStoryObject);
|
||||
|
||||
auto accountManager = DependencyManager::get<AccountManager>();
|
||||
JSONCallbackParameters callbackParams(&uploader, "createStorySuccess", &uploader, "createStoryFailure");
|
||||
JSONCallbackParameters callbackParams(this, "createStorySuccess", this, "createStoryFailure");
|
||||
|
||||
accountManager->sendRequest(STORY_UPLOAD_URL,
|
||||
AccountManagerAuth::Required,
|
||||
|
@ -56,20 +57,23 @@ void SnapshotUploader::uploadSuccess(QNetworkReply& reply) {
|
|||
callbackParams,
|
||||
QJsonDocument(rootObject).toJson());
|
||||
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(contents);
|
||||
delete this;
|
||||
}
|
||||
}
|
||||
|
||||
void SnapshotUploader::uploadFailure(QNetworkReply& reply) {
|
||||
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(reply.readAll());
|
||||
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(reply.readAll()); // maybe someday include _inWorldLocation, _filename?
|
||||
delete this;
|
||||
}
|
||||
|
||||
void SnapshotUploader::createStorySuccess(QNetworkReply& reply) {
|
||||
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(QString());
|
||||
delete this;
|
||||
}
|
||||
|
||||
void SnapshotUploader::createStoryFailure(QNetworkReply& reply) {
|
||||
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(reply.readAll());
|
||||
emit DependencyManager::get<WindowScriptingInterface>()->snapshotShared(reply.readAll());
|
||||
delete this;
|
||||
}
|
|
@ -14,13 +14,19 @@
|
|||
|
||||
#include <QObject>
|
||||
#include <QtNetwork/QNetworkReply>
|
||||
#include <QtCore/QUrl>
|
||||
|
||||
class SnapshotUploader : public QObject {
|
||||
Q_OBJECT
|
||||
public slots:
|
||||
public:
|
||||
SnapshotUploader(QUrl inWorldLocation, QString pathname);
|
||||
public slots:
|
||||
void uploadSuccess(QNetworkReply& reply);
|
||||
void uploadFailure(QNetworkReply& reply);
|
||||
void createStorySuccess(QNetworkReply& reply);
|
||||
void createStoryFailure(QNetworkReply& reply);
|
||||
private:
|
||||
QUrl _inWorldLocation;
|
||||
QString _pathname;
|
||||
};
|
||||
#endif // hifi_SnapshotUploader_h
|
|
@ -302,7 +302,7 @@ void Stats::updateStats(bool force) {
|
|||
STAT_UPDATE(gpuTextureVirtualMemory, (int)BYTES_TO_MB(gpu::Texture::getTextureGPUVirtualMemoryUsage()));
|
||||
STAT_UPDATE(gpuTextureFramebufferMemory, (int)BYTES_TO_MB(gpu::Texture::getTextureGPUFramebufferMemoryUsage()));
|
||||
STAT_UPDATE(gpuTextureSparseMemory, (int)BYTES_TO_MB(gpu::Texture::getTextureGPUSparseMemoryUsage()));
|
||||
STAT_UPDATE(gpuSparseTextureEnabled, gpu::Texture::getEnableSparseTextures() ? 1 : 0);
|
||||
STAT_UPDATE(gpuSparseTextureEnabled, qApp->getGPUContext()->getBackend()->isTextureManagementSparseEnabled() ? 1 : 0);
|
||||
STAT_UPDATE(gpuFreeMemory, (int)BYTES_TO_MB(gpu::Context::getFreeGPUMemory()));
|
||||
STAT_UPDATE(rectifiedTextureCount, (int)RECTIFIED_TEXTURE_COUNT.load());
|
||||
STAT_UPDATE(decimatedTextureCount, (int)DECIMATED_TEXTURE_COUNT.load());
|
||||
|
|
|
@ -118,11 +118,26 @@ void AnimSkeleton::convertAbsoluteRotationsToRelative(std::vector<glm::quat>& ro
|
|||
}
|
||||
}
|
||||
|
||||
void AnimSkeleton::saveNonMirroredPoses(const AnimPoseVec& poses) const {
|
||||
_nonMirroredPoses.clear();
|
||||
for (int i = 0; i < (int)_nonMirroredIndices.size(); ++i) {
|
||||
_nonMirroredPoses.push_back(poses[_nonMirroredIndices[i]]);
|
||||
}
|
||||
}
|
||||
|
||||
void AnimSkeleton::restoreNonMirroredPoses(AnimPoseVec& poses) const {
|
||||
for (int i = 0; i < (int)_nonMirroredIndices.size(); ++i) {
|
||||
int index = _nonMirroredIndices[i];
|
||||
poses[index] = _nonMirroredPoses[i];
|
||||
}
|
||||
}
|
||||
|
||||
void AnimSkeleton::mirrorRelativePoses(AnimPoseVec& poses) const {
|
||||
saveNonMirroredPoses(poses);
|
||||
convertRelativePosesToAbsolute(poses);
|
||||
mirrorAbsolutePoses(poses);
|
||||
convertAbsolutePosesToRelative(poses);
|
||||
restoreNonMirroredPoses(poses);
|
||||
}
|
||||
|
||||
void AnimSkeleton::mirrorAbsolutePoses(AnimPoseVec& poses) const {
|
||||
|
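mirrorRelativePoses() above works by saving the poses of a few excluded joints, mirroring the whole pose vector, then writing the saved poses back. A standalone sketch of that save/transform/restore pattern; the Pose type and the transform callback are illustrative stand-ins for AnimPose and the mirror step:

#include <vector>

struct Pose { float x, y, z; };

// Save poses at the listed indices, run a whole-vector transform, then restore them.
template <typename TransformFn>
void transformExcept(std::vector<Pose>& poses, const std::vector<int>& keepIndices, TransformFn transform) {
    std::vector<Pose> saved;
    saved.reserve(keepIndices.size());
    for (int index : keepIndices) {
        saved.push_back(poses[index]);
    }
    transform(poses);                      // e.g. mirror every pose
    for (size_t i = 0; i < keepIndices.size(); ++i) {
        poses[keepIndices[i]] = saved[i];  // undo the transform for the excluded joints
    }
}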
@ -189,8 +204,14 @@ void AnimSkeleton::buildSkeletonFromJoints(const std::vector<FBXJoint>& joints)
|
|||
}
|
||||
|
||||
// build mirror map.
|
||||
_nonMirroredIndices.clear();
|
||||
_mirrorMap.reserve(_joints.size());
|
||||
for (int i = 0; i < (int)joints.size(); i++) {
|
||||
if (_joints[i].name.endsWith("tEye")) {
|
||||
// HACK: we don't want to mirror some joints so we remember their indices
|
||||
// so we can restore them after a future mirror operation
|
||||
_nonMirroredIndices.push_back(i);
|
||||
}
|
||||
int mirrorJointIndex = -1;
|
||||
if (_joints[i].name.startsWith("Left")) {
|
||||
QString mirrorJointName = QString(_joints[i].name).replace(0, 4, "Right");
|
||||
|
|
|
@ -57,6 +57,9 @@ public:
|
|||
|
||||
void convertAbsoluteRotationsToRelative(std::vector<glm::quat>& rotations) const;
|
||||
|
||||
void saveNonMirroredPoses(const AnimPoseVec& poses) const;
|
||||
void restoreNonMirroredPoses(AnimPoseVec& poses) const;
|
||||
|
||||
void mirrorRelativePoses(AnimPoseVec& poses) const;
|
||||
void mirrorAbsolutePoses(AnimPoseVec& poses) const;
|
||||
|
||||
|
@ -75,6 +78,8 @@ protected:
|
|||
AnimPoseVec _absoluteDefaultPoses;
|
||||
AnimPoseVec _relativePreRotationPoses;
|
||||
AnimPoseVec _relativePostRotationPoses;
|
||||
mutable AnimPoseVec _nonMirroredPoses;
|
||||
std::vector<int> _nonMirroredIndices;
|
||||
std::vector<int> _mirrorMap;
|
||||
|
||||
// no copies
|
||||
|
|
|
@ -85,18 +85,26 @@ public:
|
|||
}
|
||||
|
||||
void beforeAboutToQuit() {
|
||||
Lock lock(_checkDevicesMutex);
|
||||
_quit = true;
|
||||
}
|
||||
|
||||
void run() override {
|
||||
while (!_quit) {
|
||||
while (true) {
|
||||
{
|
||||
Lock lock(_checkDevicesMutex);
|
||||
if (_quit) {
|
||||
break;
|
||||
}
|
||||
_audioClient->checkDevices();
|
||||
}
|
||||
QThread::msleep(DEVICE_CHECK_INTERVAL_MSECS);
|
||||
_audioClient->checkDevices();
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
AudioClient* _audioClient { nullptr };
|
||||
Mutex _checkDevicesMutex;
|
||||
bool _quit { false };
|
||||
};
|
||||
|
||||
|
|
|
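The reworked checkDevices loop above takes the mutex, re-tests the quit flag, runs the device check under the same lock, and only then sleeps outside the lock. A self-contained sketch of that loop shape using the standard library; the class name and the two-second interval are illustrative:

#include <chrono>
#include <mutex>
#include <thread>

class DeviceChecker {
public:
    void requestQuit() {
        std::lock_guard<std::mutex> lock(_mutex);
        _quit = true;
    }
    void run() {
        while (true) {
            {
                std::lock_guard<std::mutex> lock(_mutex);
                if (_quit) {
                    break;                 // checked under the same lock that guards the flag
                }
                checkDevices();            // device poll also runs under the lock
            }
            std::this_thread::sleep_for(std::chrono::seconds(2));  // sleep outside the lock
        }
    }
private:
    void checkDevices() { /* poll audio devices here */ }
    std::mutex _mutex;
    bool _quit { false };
};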
@ -520,7 +520,7 @@ const FBXGeometry* EntityTreeRenderer::getGeometryForEntity(EntityItemPointer en
|
|||
std::shared_ptr<RenderableModelEntityItem> modelEntityItem =
|
||||
std::dynamic_pointer_cast<RenderableModelEntityItem>(entityItem);
|
||||
assert(modelEntityItem); // we need this!!!
|
||||
ModelPointer model = modelEntityItem->getModel(this);
|
||||
ModelPointer model = modelEntityItem->getModel(getSharedFromThis());
|
||||
if (model && model->isLoaded()) {
|
||||
result = &model->getFBXGeometry();
|
||||
}
|
||||
|
@ -533,7 +533,7 @@ ModelPointer EntityTreeRenderer::getModelForEntityItem(EntityItemPointer entityI
|
|||
if (entityItem->getType() == EntityTypes::Model) {
|
||||
std::shared_ptr<RenderableModelEntityItem> modelEntityItem =
|
||||
std::dynamic_pointer_cast<RenderableModelEntityItem>(entityItem);
|
||||
result = modelEntityItem->getModel(this);
|
||||
result = modelEntityItem->getModel(getSharedFromThis());
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
|
|
@ -45,6 +45,10 @@ public:
|
|||
AbstractScriptingServicesInterface* scriptingServices);
|
||||
virtual ~EntityTreeRenderer();
|
||||
|
||||
QSharedPointer<EntityTreeRenderer> getSharedFromThis() {
|
||||
return qSharedPointerCast<EntityTreeRenderer>(sharedFromThis());
|
||||
}
|
||||
|
||||
virtual char getMyNodeType() const override { return NodeType::EntityServer; }
|
||||
virtual PacketType getMyQueryMessageType() const override { return PacketType::EntityQuery; }
|
||||
virtual PacketType getExpectedPacketType() const override { return PacketType::EntityData; }
|
||||
|
|
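getSharedFromThis() above relies on the base renderer deriving from QEnableSharedFromThis and on the instance being owned by a QSharedPointer; otherwise sharedFromThis() returns a null pointer. A minimal Qt sketch of the same downcast, with illustrative class names:

#include <QtCore/QSharedPointer>

class Renderer : public QEnableSharedFromThis<Renderer> {
public:
    virtual ~Renderer() = default;
};

class EntityRenderer : public Renderer {
public:
    QSharedPointer<EntityRenderer> getSharedFromThis() {
        // only valid while the object is managed by a QSharedPointer; null otherwise
        return qSharedPointerCast<EntityRenderer>(sharedFromThis());
    }
};

// usage sketch: auto renderer = QSharedPointer<EntityRenderer>::create();
//               auto self = renderer->getSharedFromThis();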
|
@ -55,7 +55,10 @@ void RenderableModelEntityItem::setModelURL(const QString& url) {
|
|||
auto& currentURL = getParsedModelURL();
|
||||
ModelEntityItem::setModelURL(url);
|
||||
|
||||
if (currentURL != getParsedModelURL() || !_model) {
|
||||
if (currentURL != getParsedModelURL()) {
|
||||
_needsModelReload = true;
|
||||
}
|
||||
if (_needsModelReload || !_model) {
|
||||
EntityTreePointer tree = getTree();
|
||||
if (tree) {
|
||||
QMetaObject::invokeMethod(tree.get(), "callLoader", Qt::QueuedConnection, Q_ARG(EntityItemID, getID()));
|
||||
|
@ -65,7 +68,7 @@ void RenderableModelEntityItem::setModelURL(const QString& url) {
|
|||
|
||||
void RenderableModelEntityItem::loader() {
|
||||
_needsModelReload = true;
|
||||
EntityTreeRenderer* renderer = DependencyManager::get<EntityTreeRenderer>().data();
|
||||
auto renderer = DependencyManager::get<EntityTreeRenderer>();
|
||||
assert(renderer);
|
||||
{
|
||||
PerformanceTimer perfTimer("getModel");
|
||||
|
@ -368,7 +371,7 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
|
|||
if (!_model || _needsModelReload) {
|
||||
// TODO: this getModel() appears to be about 3% of model render time. We should optimize
|
||||
PerformanceTimer perfTimer("getModel");
|
||||
EntityTreeRenderer* renderer = static_cast<EntityTreeRenderer*>(args->_renderer);
|
||||
auto renderer = qSharedPointerCast<EntityTreeRenderer>(args->_renderer);
|
||||
getModel(renderer);
|
||||
|
||||
// Remap textures immediately after loading to avoid flicker
|
||||
|
@ -470,7 +473,7 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
|
|||
}
|
||||
}
|
||||
|
||||
ModelPointer RenderableModelEntityItem::getModel(EntityTreeRenderer* renderer) {
|
||||
ModelPointer RenderableModelEntityItem::getModel(QSharedPointer<EntityTreeRenderer> renderer) {
|
||||
if (!renderer) {
|
||||
return nullptr;
|
||||
}
|
||||
|
@ -495,7 +498,7 @@ ModelPointer RenderableModelEntityItem::getModel(EntityTreeRenderer* renderer) {
|
|||
_needsInitialSimulation = true;
|
||||
// If we need to change URLs, update it *after rendering* (to avoid access violations)
|
||||
} else if (QUrl(getModelURL()) != _model->getURL()) {
|
||||
QMetaObject::invokeMethod(_myRenderer, "updateModel", Qt::QueuedConnection,
|
||||
QMetaObject::invokeMethod(_myRenderer.data(), "updateModel", Qt::QueuedConnection,
|
||||
Q_ARG(ModelPointer, _model),
|
||||
Q_ARG(const QString&, getModelURL()));
|
||||
_needsInitialSimulation = true;
|
||||
|
@ -523,17 +526,24 @@ bool RenderableModelEntityItem::needsToCallUpdate() const {
|
|||
}
|
||||
|
||||
void RenderableModelEntityItem::update(const quint64& now) {
|
||||
if (!_dimensionsInitialized && _model && _model->isActive()) {
|
||||
if (_model->isLoaded()) {
|
||||
EntityItemProperties properties;
|
||||
properties.setLastEdited(usecTimestampNow()); // we must set the edit time since we're editing it
|
||||
auto extents = _model->getMeshExtents();
|
||||
properties.setDimensions(extents.maximum - extents.minimum);
|
||||
qCDebug(entitiesrenderer) << "Autoresizing:" << (!getName().isEmpty() ? getName() : getModelURL());
|
||||
QMetaObject::invokeMethod(DependencyManager::get<EntityScriptingInterface>().data(), "editEntity",
|
||||
Qt::QueuedConnection,
|
||||
Q_ARG(QUuid, getEntityItemID()),
|
||||
Q_ARG(EntityItemProperties, properties));
|
||||
if (!_dimensionsInitialized) {
|
||||
if (_model) {
|
||||
if (_model->isActive() && _model->isLoaded()) {
|
||||
EntityItemProperties properties;
|
||||
properties.setLastEdited(usecTimestampNow()); // we must set the edit time since we're editing it
|
||||
auto extents = _model->getMeshExtents();
|
||||
properties.setDimensions(extents.maximum - extents.minimum);
|
||||
qCDebug(entitiesrenderer) << "Autoresizing:" << (!getName().isEmpty() ? getName() : getModelURL());
|
||||
QMetaObject::invokeMethod(DependencyManager::get<EntityScriptingInterface>().data(), "editEntity",
|
||||
Qt::QueuedConnection,
|
||||
Q_ARG(QUuid, getEntityItemID()),
|
||||
Q_ARG(EntityItemProperties, properties));
|
||||
}
|
||||
} else if (_needsModelReload) {
|
||||
EntityTreePointer tree = getTree();
|
||||
if (tree) {
|
||||
QMetaObject::invokeMethod(tree.get(), "callLoader", Qt::QueuedConnection, Q_ARG(EntityItemID, getID()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -51,7 +51,7 @@ public:
|
|||
bool& keepSearching, OctreeElementPointer& element, float& distance,
|
||||
BoxFace& face, glm::vec3& surfaceNormal,
|
||||
void** intersectedObject, bool precisionPicking) const override;
|
||||
ModelPointer getModel(EntityTreeRenderer* renderer);
|
||||
ModelPointer getModel(QSharedPointer<EntityTreeRenderer> renderer);
|
||||
|
||||
virtual bool needsToCallUpdate() const override;
|
||||
virtual void update(const quint64& now) override;
|
||||
|
@ -105,7 +105,7 @@ private:
|
|||
ModelPointer _model = nullptr;
|
||||
bool _needsInitialSimulation = true;
|
||||
bool _needsModelReload = true;
|
||||
EntityTreeRenderer* _myRenderer = nullptr;
|
||||
QSharedPointer<EntityTreeRenderer> _myRenderer;
|
||||
QString _lastTextures;
|
||||
QVariantMap _currentTextures;
|
||||
QVariantMap _originalTextures;
|
||||
|
|
|
@ -1035,50 +1035,53 @@ void RenderablePolyVoxEntityItem::copyUpperEdgesFromNeighbors() {
|
|||
return;
|
||||
}
|
||||
|
||||
EntityItemPointer currentXPNeighbor = _xPNeighbor.lock();
|
||||
EntityItemPointer currentYPNeighbor = _yPNeighbor.lock();
|
||||
EntityItemPointer currentZPNeighbor = _zPNeighbor.lock();
|
||||
auto currentXPNeighbor = getXPNeighbor();
|
||||
auto currentYPNeighbor = getYPNeighbor();
|
||||
auto currentZPNeighbor = getZPNeighbor();
|
||||
|
||||
if (currentXPNeighbor) {
|
||||
auto polyVoxXPNeighbor = std::dynamic_pointer_cast<RenderablePolyVoxEntityItem>(currentXPNeighbor);
|
||||
if (polyVoxXPNeighbor->getVoxelVolumeSize() == _voxelVolumeSize) {
|
||||
withWriteLock([&] {
|
||||
if (currentXPNeighbor && currentXPNeighbor->getVoxelVolumeSize() == _voxelVolumeSize) {
|
||||
withWriteLock([&] {
|
||||
for (int y = 0; y < _volData->getHeight(); y++) {
|
||||
for (int z = 0; z < _volData->getDepth(); z++) {
|
||||
uint8_t neighborValue = currentXPNeighbor->getVoxel(0, y, z);
|
||||
if ((y == 0 || z == 0) && _volData->getVoxelAt(_volData->getWidth() - 1, y, z) != neighborValue) {
|
||||
bonkNeighbors();
|
||||
}
|
||||
_volData->setVoxelAt(_volData->getWidth() - 1, y, z, neighborValue);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
if (currentYPNeighbor && currentYPNeighbor->getVoxelVolumeSize() == _voxelVolumeSize) {
|
||||
withWriteLock([&] {
|
||||
for (int x = 0; x < _volData->getWidth(); x++) {
|
||||
for (int z = 0; z < _volData->getDepth(); z++) {
|
||||
uint8_t neighborValue = currentYPNeighbor->getVoxel(x, 0, z);
|
||||
if ((x == 0 || z == 0) && _volData->getVoxelAt(x, _volData->getHeight() - 1, z) != neighborValue) {
|
||||
bonkNeighbors();
|
||||
}
|
||||
_volData->setVoxelAt(x, _volData->getHeight() - 1, z, neighborValue);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
if (currentZPNeighbor && currentZPNeighbor->getVoxelVolumeSize() == _voxelVolumeSize) {
|
||||
withWriteLock([&] {
|
||||
for (int x = 0; x < _volData->getWidth(); x++) {
|
||||
for (int y = 0; y < _volData->getHeight(); y++) {
|
||||
for (int z = 0; z < _volData->getDepth(); z++) {
|
||||
uint8_t neighborValue = polyVoxXPNeighbor->getVoxel(0, y, z);
|
||||
_volData->setVoxelAt(_volData->getWidth() - 1, y, z, neighborValue);
|
||||
uint8_t neighborValue = currentZPNeighbor->getVoxel(x, y, 0);
|
||||
_volData->setVoxelAt(x, y, _volData->getDepth() - 1, neighborValue);
|
||||
if ((x == 0 || y == 0) && _volData->getVoxelAt(x, y, _volData->getDepth() - 1) != neighborValue) {
|
||||
bonkNeighbors();
|
||||
}
|
||||
_volData->setVoxelAt(x, y, _volData->getDepth() - 1, neighborValue);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (currentYPNeighbor) {
|
||||
auto polyVoxYPNeighbor = std::dynamic_pointer_cast<RenderablePolyVoxEntityItem>(currentYPNeighbor);
|
||||
if (polyVoxYPNeighbor->getVoxelVolumeSize() == _voxelVolumeSize) {
|
||||
withWriteLock([&] {
|
||||
for (int x = 0; x < _volData->getWidth(); x++) {
|
||||
for (int z = 0; z < _volData->getDepth(); z++) {
|
||||
uint8_t neighborValue = polyVoxYPNeighbor->getVoxel(x, 0, z);
|
||||
_volData->setVoxelAt(x, _volData->getWidth() - 1, z, neighborValue);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (currentZPNeighbor) {
|
||||
auto polyVoxZPNeighbor = std::dynamic_pointer_cast<RenderablePolyVoxEntityItem>(currentZPNeighbor);
|
||||
if (polyVoxZPNeighbor->getVoxelVolumeSize() == _voxelVolumeSize) {
|
||||
withWriteLock([&] {
|
||||
for (int x = 0; x < _volData->getWidth(); x++) {
|
||||
for (int y = 0; y < _volData->getHeight(); y++) {
|
||||
uint8_t neighborValue = polyVoxZPNeighbor->getVoxel(x, y, 0);
|
||||
_volData->setVoxelAt(x, y, _volData->getDepth() - 1, neighborValue);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1393,25 +1396,46 @@ void RenderablePolyVoxEntityItem::setZPNeighborID(const EntityItemID& zPNeighbor
|
|||
}
|
||||
}
|
||||
|
||||
std::shared_ptr<RenderablePolyVoxEntityItem> RenderablePolyVoxEntityItem::getXNNeighbor() {
|
||||
return std::dynamic_pointer_cast<RenderablePolyVoxEntityItem>(_xNNeighbor.lock());
|
||||
}
|
||||
|
||||
std::shared_ptr<RenderablePolyVoxEntityItem> RenderablePolyVoxEntityItem::getYNNeighbor() {
|
||||
return std::dynamic_pointer_cast<RenderablePolyVoxEntityItem>(_yNNeighbor.lock());
|
||||
}
|
||||
|
||||
std::shared_ptr<RenderablePolyVoxEntityItem> RenderablePolyVoxEntityItem::getZNNeighbor() {
|
||||
return std::dynamic_pointer_cast<RenderablePolyVoxEntityItem>(_zNNeighbor.lock());
|
||||
}
|
||||
|
||||
std::shared_ptr<RenderablePolyVoxEntityItem> RenderablePolyVoxEntityItem::getXPNeighbor() {
|
||||
return std::dynamic_pointer_cast<RenderablePolyVoxEntityItem>(_xPNeighbor.lock());
|
||||
}
|
||||
|
||||
std::shared_ptr<RenderablePolyVoxEntityItem> RenderablePolyVoxEntityItem::getYPNeighbor() {
|
||||
return std::dynamic_pointer_cast<RenderablePolyVoxEntityItem>(_yPNeighbor.lock());
|
||||
}
|
||||
|
||||
std::shared_ptr<RenderablePolyVoxEntityItem> RenderablePolyVoxEntityItem::getZPNeighbor() {
|
||||
return std::dynamic_pointer_cast<RenderablePolyVoxEntityItem>(_zPNeighbor.lock());
|
||||
}
|
||||
|
||||
|
||||
void RenderablePolyVoxEntityItem::bonkNeighbors() {
|
||||
// flag neighbors to the negative of this entity as needing to rebake their meshes.
|
||||
cacheNeighbors();
|
||||
|
||||
EntityItemPointer currentXNNeighbor = _xNNeighbor.lock();
|
||||
EntityItemPointer currentYNNeighbor = _yNNeighbor.lock();
|
||||
EntityItemPointer currentZNNeighbor = _zNNeighbor.lock();
|
||||
auto currentXNNeighbor = getXNNeighbor();
|
||||
auto currentYNNeighbor = getYNNeighbor();
|
||||
auto currentZNNeighbor = getZNNeighbor();
|
||||
|
||||
if (currentXNNeighbor && currentXNNeighbor->getType() == EntityTypes::PolyVox) {
|
||||
auto polyVoxXNNeighbor = std::dynamic_pointer_cast<RenderablePolyVoxEntityItem>(currentXNNeighbor);
|
||||
polyVoxXNNeighbor->setVolDataDirty();
|
||||
if (currentXNNeighbor) {
|
||||
currentXNNeighbor->setVolDataDirty();
|
||||
}
|
||||
if (currentYNNeighbor && currentYNNeighbor->getType() == EntityTypes::PolyVox) {
|
||||
auto polyVoxYNNeighbor = std::dynamic_pointer_cast<RenderablePolyVoxEntityItem>(currentYNNeighbor);
|
||||
polyVoxYNNeighbor->setVolDataDirty();
|
||||
if (currentYNNeighbor) {
|
||||
currentYNNeighbor->setVolDataDirty();
|
||||
}
|
||||
if (currentZNNeighbor && currentZNNeighbor->getType() == EntityTypes::PolyVox) {
|
||||
auto polyVoxZNNeighbor = std::dynamic_pointer_cast<RenderablePolyVoxEntityItem>(currentZNNeighbor);
|
||||
polyVoxZNNeighbor->setVolDataDirty();
|
||||
if (currentZNNeighbor) {
|
||||
currentZNNeighbor->setVolDataDirty();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -116,6 +116,13 @@ public:
|
|||
virtual void setYPNeighborID(const EntityItemID& yPNeighborID) override;
|
||||
virtual void setZPNeighborID(const EntityItemID& zPNeighborID) override;
|
||||
|
||||
std::shared_ptr<RenderablePolyVoxEntityItem> getXNNeighbor();
|
||||
std::shared_ptr<RenderablePolyVoxEntityItem> getYNNeighbor();
|
||||
std::shared_ptr<RenderablePolyVoxEntityItem> getZNNeighbor();
|
||||
std::shared_ptr<RenderablePolyVoxEntityItem> getXPNeighbor();
|
||||
std::shared_ptr<RenderablePolyVoxEntityItem> getYPNeighbor();
|
||||
std::shared_ptr<RenderablePolyVoxEntityItem> getZPNeighbor();
|
||||
|
||||
virtual void updateRegistrationPoint(const glm::vec3& value) override;
|
||||
|
||||
void setVoxelsFromData(QByteArray uncompressedData, quint16 voxelXSize, quint16 voxelYSize, quint16 voxelZSize);
|
||||
|
|
|
@ -64,7 +64,7 @@ RenderableWebEntityItem::~RenderableWebEntityItem() {
|
|||
}
|
||||
}
|
||||
|
||||
bool RenderableWebEntityItem::buildWebSurface(EntityTreeRenderer* renderer) {
|
||||
bool RenderableWebEntityItem::buildWebSurface(QSharedPointer<EntityTreeRenderer> renderer) {
|
||||
if (_currentWebCount >= MAX_CONCURRENT_WEB_VIEWS) {
|
||||
qWarning() << "Too many concurrent web views to create new view";
|
||||
return false;
|
||||
|
@ -95,7 +95,13 @@ bool RenderableWebEntityItem::buildWebSurface(EntityTreeRenderer* renderer) {
|
|||
|
||||
auto deleter = [](OffscreenQmlSurface* webSurface) {
|
||||
AbstractViewStateInterface::instance()->postLambdaEvent([webSurface] {
|
||||
webSurface->deleteLater();
|
||||
if (AbstractViewStateInterface::instance()->isAboutToQuit()) {
|
||||
// WebEngineView may run other threads (wasapi), so they must be deleted for a clean shutdown
|
||||
// if the application has already stopped its event loop, delete must be explicit
|
||||
delete webSurface;
|
||||
} else {
|
||||
webSurface->deleteLater();
|
||||
}
|
||||
});
|
||||
};
|
||||
_webSurface = QSharedPointer<OffscreenQmlSurface>(new OffscreenQmlSurface(), deleter);
|
||||
|
@ -133,10 +139,11 @@ bool RenderableWebEntityItem::buildWebSurface(EntityTreeRenderer* renderer) {
|
|||
handlePointerEvent(event);
|
||||
}
|
||||
};
|
||||
_mousePressConnection = QObject::connect(renderer, &EntityTreeRenderer::mousePressOnEntity, forwardPointerEvent);
|
||||
_mouseReleaseConnection = QObject::connect(renderer, &EntityTreeRenderer::mouseReleaseOnEntity, forwardPointerEvent);
|
||||
_mouseMoveConnection = QObject::connect(renderer, &EntityTreeRenderer::mouseMoveOnEntity, forwardPointerEvent);
|
||||
_hoverLeaveConnection = QObject::connect(renderer, &EntityTreeRenderer::hoverLeaveEntity, [=](const EntityItemID& entityItemID, const PointerEvent& event) {
|
||||
_mousePressConnection = QObject::connect(renderer.data(), &EntityTreeRenderer::mousePressOnEntity, forwardPointerEvent);
|
||||
_mouseReleaseConnection = QObject::connect(renderer.data(), &EntityTreeRenderer::mouseReleaseOnEntity, forwardPointerEvent);
|
||||
_mouseMoveConnection = QObject::connect(renderer.data(), &EntityTreeRenderer::mouseMoveOnEntity, forwardPointerEvent);
|
||||
_hoverLeaveConnection = QObject::connect(renderer.data(), &EntityTreeRenderer::hoverLeaveEntity,
|
||||
[=](const EntityItemID& entityItemID, const PointerEvent& event) {
|
||||
if (this->_pressed && this->getID() == entityItemID) {
|
||||
// If the user mouses off the entity while the button is down, simulate a touch end.
|
||||
QTouchEvent::TouchPoint point;
|
||||
|
@ -184,7 +191,8 @@ void RenderableWebEntityItem::render(RenderArgs* args) {
|
|||
#endif
|
||||
|
||||
if (!_webSurface) {
|
||||
if (!buildWebSurface(static_cast<EntityTreeRenderer*>(args->_renderer))) {
|
||||
auto renderer = qSharedPointerCast<EntityTreeRenderer>(args->_renderer);
|
||||
if (!buildWebSurface(renderer)) {
|
||||
return;
|
||||
}
|
||||
_fadeStartTime = usecTimestampNow();
|
||||
|
@ -326,7 +334,18 @@ void RenderableWebEntityItem::handlePointerEvent(const PointerEvent& event) {
|
|||
void RenderableWebEntityItem::destroyWebSurface() {
|
||||
if (_webSurface) {
|
||||
--_currentWebCount;
|
||||
|
||||
QQuickItem* rootItem = _webSurface->getRootItem();
|
||||
if (rootItem) {
|
||||
QObject* obj = rootItem->findChild<QObject*>("webEngineView");
|
||||
if (obj) {
|
||||
// stop loading
|
||||
QMetaObject::invokeMethod(obj, "stop");
|
||||
}
|
||||
}
|
||||
|
||||
_webSurface->pause();
|
||||
|
||||
_webSurface->disconnect(_connection);
|
||||
QObject::disconnect(_mousePressConnection);
|
||||
_mousePressConnection = QMetaObject::Connection();
|
||||
|
|
|
@ -52,7 +52,7 @@ public:
|
|||
virtual bool isTransparent() override;
|
||||
|
||||
private:
|
||||
bool buildWebSurface(EntityTreeRenderer* renderer);
|
||||
bool buildWebSurface(QSharedPointer<EntityTreeRenderer> renderer);
|
||||
void destroyWebSurface();
|
||||
glm::vec2 getWindowSize() const;
|
||||
|
||||
|
|
|
@ -123,7 +123,9 @@ glm::vec3 OBJTokenizer::getVec3() {
|
|||
return v;
|
||||
}
|
||||
glm::vec2 OBJTokenizer::getVec2() {
|
||||
auto v = glm::vec2(getFloat(), 1.0f - getFloat()); // OBJ has an odd sense of u, v. Also N.B.: getFloat() has side-effect
|
||||
float uCoord = getFloat();
|
||||
float vCoord = 1.0f - getFloat();
|
||||
auto v = glm::vec2(uCoord, vCoord);
|
||||
while (isNextTokenFloat()) {
|
||||
// there can be a w, but we don't handle that
|
||||
nextToken();
|
||||
|
|
|
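The getVec2() change above exists because C++ does not specify the order in which the two getFloat() calls inside a constructor argument list are evaluated, and getFloat() advances the tokenizer. A small self-contained illustration of why the named locals are needed; the Reader type stands in for the tokenizer:

#include <cstdio>

struct Reader {
    int next = 0;
    float getFloat() { return static_cast<float>(next++); }  // has a side effect
};

struct Vec2 {
    Vec2(float u_, float v_) : u(u_), v(v_) {}
    float u, v;
};

int main() {
    Reader r;
    // Unspecified order: either getFloat() may run first, so u and v can swap between compilers.
    // Vec2 bad(r.getFloat(), 1.0f - r.getFloat());

    // Sequenced: the first token always becomes u, the second always feeds v.
    float u = r.getFloat();
    float v = 1.0f - r.getFloat();
    Vec2 good(u, v);
    std::printf("u=%f v=%f\n", good.u, good.v);
    return 0;
}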
@ -344,7 +344,6 @@ bool OffscreenQmlSurface::allowNewFrame(uint8_t fps) {
|
|||
OffscreenQmlSurface::OffscreenQmlSurface() {
|
||||
}
|
||||
|
||||
static const uint64_t MAX_SHUTDOWN_WAIT_SECS = 2;
|
||||
OffscreenQmlSurface::~OffscreenQmlSurface() {
|
||||
QObject::disconnect(&_updateTimer);
|
||||
QObject::disconnect(qApp);
|
||||
|
|
|
@ -56,6 +56,7 @@ BackendPointer GLBackend::createBackend() {
|
|||
}
|
||||
result->initInput();
|
||||
result->initTransform();
|
||||
result->initTextureManagementStage();
|
||||
|
||||
INSTANCE = result.get();
|
||||
void* voidInstance = &(*result);
|
||||
|
|
|
@ -176,6 +176,9 @@ public:
|
|||
virtual void releaseQuery(GLuint id) const;
|
||||
virtual void queueLambda(const std::function<void()> lambda) const;
|
||||
|
||||
bool isTextureManagementSparseEnabled() const override { return (_textureManagement._sparseCapable && Texture::getEnableSparseTextures()); }
|
||||
bool isTextureManagementIncrementalTransferEnabled() const override { return (_textureManagement._incrementalTransferCapable && Texture::getEnableIncrementalTextureTransfers()); }
|
||||
|
||||
protected:
|
||||
|
||||
void recycle() const override;
|
||||
|
@ -364,6 +367,12 @@ protected:
|
|||
|
||||
void resetStages();
|
||||
|
||||
struct TextureManagementStageState {
|
||||
bool _sparseCapable { false };
|
||||
bool _incrementalTransferCapable { false };
|
||||
} _textureManagement;
|
||||
virtual void initTextureManagementStage() {}
|
||||
|
||||
typedef void (GLBackend::*CommandCall)(const Batch&, size_t);
|
||||
static CommandCall _commandCalls[Batch::NUM_COMMANDS];
|
||||
friend class GLState;
|
||||
|
|
|
@ -111,7 +111,7 @@ float GLTexture::getMemoryPressure() {
|
|||
}
|
||||
#else
|
||||
// Hardcode texture limit for sparse textures at 1 GB for now
|
||||
availableTextureMemory = GPU_MEMORY_RESERVE_BYTES;
|
||||
availableTextureMemory = TEXTURE_MEMORY_MIN_BYTES;
|
||||
#endif
|
||||
}
|
||||
|
||||
|
|
|
@ -32,6 +32,7 @@ public:
|
|||
static GLuint allocate(const Texture& texture);
|
||||
static const uint32_t DEFAULT_PAGE_DIMENSION = 128;
|
||||
static const uint32_t DEFAULT_MAX_SPARSE_LEVEL = 0xFFFF;
|
||||
|
||||
public:
|
||||
GL45Texture(const std::weak_ptr<GLBackend>& backend, const Texture& texture, GLuint externalId);
|
||||
GL45Texture(const std::weak_ptr<GLBackend>& backend, const Texture& texture, bool transferrable);
|
||||
|
@ -132,6 +133,9 @@ protected:
|
|||
|
||||
// Output stage
|
||||
void do_blit(const Batch& batch, size_t paramOffset) override;
|
||||
|
||||
// Texture Management Stage
|
||||
void initTextureManagementStage() override;
|
||||
};
|
||||
|
||||
} }
|
||||
|
|
|
@ -148,6 +148,24 @@ uint32_t SparseInfo::getPageCount(const uvec3& dimensions) const {
|
|||
return pageCounts.x * pageCounts.y * pageCounts.z;
|
||||
}
|
||||
|
||||
|
||||
|
||||
void GL45Backend::initTextureManagementStage() {
|
||||
|
||||
// enable the Sparse Texture on gl45
|
||||
_textureManagement._sparseCapable = true;
|
||||
_textureManagement._incrementalTransferCapable = true;
|
||||
|
||||
// But now let's refine the behavior based on vendor
|
||||
std::string vendor { (const char*)glGetString(GL_VENDOR) };
|
||||
if ((vendor.find("AMD") != std::string::npos) || (vendor.find("ATI") != std::string::npos) || (vendor.find("INTEL") != std::string::npos)) {
|
||||
qCDebug(gpugllogging) << "GPU is sparse capable but force it off, vendor = " << vendor.c_str();
|
||||
_textureManagement._sparseCapable = false;
|
||||
} else {
|
||||
qCDebug(gpugllogging) << "GPU is sparse capable, vendor = " << vendor.c_str();
|
||||
}
|
||||
}
|
||||
|
||||
using TransferState = GL45Backend::GL45Texture::TransferState;
|
||||
|
||||
TransferState::TransferState(GL45Texture& texture) : texture(texture) {
|
||||
|
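initTextureManagementStage() above turns sparse textures off when the GL_VENDOR string looks like AMD, ATI or Intel. A small sketch of that substring test; the vendor list mirrors the hunk, and the string argument is a stand-in for the glGetString(GL_VENDOR) result:

#include <string>

// Decide whether to allow sparse textures based on the reported GPU vendor string.
bool allowSparseTextures(const std::string& vendor) {
    const char* blockedVendors[] = { "AMD", "ATI", "INTEL" };
    for (const char* blocked : blockedVendors) {
        if (vendor.find(blocked) != std::string::npos) {
            return false;   // known-problematic vendors: force sparse textures off
        }
    }
    return true;
}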
@ -250,7 +268,8 @@ GL45Texture::GL45Texture(const std::weak_ptr<GLBackend>& backend, const Texture&
|
|||
GL45Texture::GL45Texture(const std::weak_ptr<GLBackend>& backend, const Texture& texture, bool transferrable)
|
||||
: GLTexture(backend, texture, allocate(texture), transferrable), _sparseInfo(*this), _transferState(*this) {
|
||||
|
||||
if (_transferrable && Texture::getEnableSparseTextures()) {
|
||||
auto theBackend = _backend.lock();
|
||||
if (_transferrable && theBackend && theBackend->isTextureManagementSparseEnabled()) {
|
||||
_sparseInfo.maybeMakeSparse();
|
||||
if (_sparseInfo.sparse) {
|
||||
Backend::incrementTextureGPUSparseCount();
|
||||
|
@ -322,7 +341,9 @@ void GL45Texture::withPreservedTexture(std::function<void()> f) const {
|
|||
}
|
||||
|
||||
void GL45Texture::generateMips() const {
|
||||
qCDebug(gpugl45logging) << "Generating mipmaps for " << _source.c_str();
|
||||
if (_transferrable) {
|
||||
qCDebug(gpugl45logging) << "Generating mipmaps for " << _source.c_str();
|
||||
}
|
||||
glGenerateTextureMipmap(_id);
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
|
@ -360,7 +381,8 @@ void GL45Texture::startTransfer() {
|
|||
}
|
||||
|
||||
bool GL45Texture::continueTransfer() {
|
||||
if (!Texture::getEnableIncrementalTextureTransfers()) {
|
||||
auto backend = _backend.lock();
|
||||
if (!backend || !backend->isTextureManagementIncrementalTransferEnabled()) {
|
||||
size_t maxFace = GL_TEXTURE_CUBE_MAP == _target ? CUBE_NUM_FACES : 1;
|
||||
for (uint8_t face = 0; face < maxFace; ++face) {
|
||||
for (uint16_t mipLevel = _minMip; mipLevel <= _maxMip; ++mipLevel) {
|
||||
|
|
|
@ -85,7 +85,8 @@ public:
|
|||
|
||||
void getStats(ContextStats& stats) const { stats = _stats; }
|
||||
|
||||
|
||||
virtual bool isTextureManagementSparseEnabled() const = 0;
|
||||
virtual bool isTextureManagementIncrementalTransferEnabled() const = 0;
|
||||
|
||||
// These should only be accessed by Backend implementation to report the buffer and texture allocations,
|
||||
// they are NOT public calls
|
||||
|
@ -125,6 +126,7 @@ protected:
|
|||
friend class Context;
|
||||
ContextStats _stats;
|
||||
StereoState _stereo;
|
||||
|
||||
};
|
||||
|
||||
class Context {
|
||||
|
@ -270,7 +272,6 @@ protected:
|
|||
static std::atomic<Size> _textureGPUFramebufferMemoryUsage;
|
||||
static std::atomic<uint32_t> _textureGPUTransferCount;
|
||||
|
||||
|
||||
friend class Backend;
|
||||
};
|
||||
typedef std::shared_ptr<Context> ContextPointer;
|
||||
|
|
|
@ -147,6 +147,7 @@ class Texture : public Resource {
|
|||
|
||||
static std::atomic<bool> _enableSparseTextures;
|
||||
static std::atomic<bool> _enableIncrementalTextureTransfers;
|
||||
|
||||
public:
|
||||
static uint32_t getTextureCPUCount();
|
||||
static Size getTextureCPUMemoryUsage();
|
||||
|
|
|
@ -24,8 +24,11 @@ void UserActivityLoggerScriptingInterface::toggledAway(bool isAway) {
|
|||
logAction("toggled_away", { { "is_away", isAway } });
|
||||
}
|
||||
|
||||
void UserActivityLoggerScriptingInterface::tutorialProgress(QString stepName, int stepNumber, float secondsToComplete, float tutorialElapsedTime) {
|
||||
void UserActivityLoggerScriptingInterface::tutorialProgress(QString stepName, int stepNumber, float secondsToComplete,
|
||||
float tutorialElapsedTime, QString tutorialRunID, int tutorialVersion) {
|
||||
logAction("tutorial_progress", {
|
||||
{ "tutorial_run_id", tutorialRunID },
|
||||
{ "tutorial_version", tutorialVersion },
|
||||
{ "step", stepName },
|
||||
{ "step_number", stepNumber },
|
||||
{ "seconds_to_complete", secondsToComplete },
|
||||
|
|
|
@ -23,7 +23,8 @@ public:
|
|||
Q_INVOKABLE void enabledEdit();
|
||||
Q_INVOKABLE void openedMarketplace();
|
||||
Q_INVOKABLE void toggledAway(bool isAway);
|
||||
Q_INVOKABLE void tutorialProgress(QString stepName, int stepNumber, float secondsToComplete, float tutorialElapsedTime);
|
||||
Q_INVOKABLE void tutorialProgress(QString stepName, int stepNumber, float secondsToComplete,
|
||||
float tutorialElapsedTime, QString tutorialRunID = "", int tutorialVersion = 0);
|
||||
|
||||
private:
|
||||
void logAction(QString action, QJsonObject details = {});
|
||||
|
|
|
@ -49,7 +49,7 @@ Socket::Socket(QObject* parent, bool shouldChangeSocketOptions) :
|
|||
connect(&_udpSocket, &QAbstractSocket::stateChanged, this, &Socket::handleStateChanged);
|
||||
|
||||
// in order to help track down the zombie server bug, add a timer to check if we missed a readyRead
|
||||
const int READY_READ_BACKUP_CHECK_MSECS = 10 * 1000;
|
||||
const int READY_READ_BACKUP_CHECK_MSECS = 2 * 1000;
|
||||
connect(_readyReadBackupTimer, &QTimer::timeout, this, &Socket::checkForReadyReadBackup);
|
||||
_readyReadBackupTimer->start(READY_READ_BACKUP_CHECK_MSECS);
|
||||
}
|
||||
|
@ -306,6 +306,13 @@ void Socket::checkForReadyReadBackup() {
|
|||
if (_udpSocket.hasPendingDatagrams()) {
|
||||
qCDebug(networking) << "Socket::checkForReadyReadBackup() detected blocked readyRead signal. Flushing pending datagrams.";
|
||||
|
||||
// so that birarda can possibly figure out how the heck we get into this state in the first place
|
||||
// output the sequence number and socket address of the last processed packet
|
||||
qCDebug(networking) << "Socket::checkForReadyReadyBackup() last sequence number"
|
||||
<< (uint32_t) _lastReceivedSequenceNumber << "from" << _lastPacketSockAddr << "-"
|
||||
<< _lastPacketSizeRead << "bytes";
|
||||
|
||||
|
||||
// drop all of the pending datagrams on the floor
|
||||
while (_udpSocket.hasPendingDatagrams()) {
|
||||
_udpSocket.readDatagram(nullptr, 0);
|
||||
|
@ -334,6 +341,10 @@ void Socket::readPendingDatagrams() {
|
|||
auto sizeRead = _udpSocket.readDatagram(buffer.get(), packetSizeWithHeader,
|
||||
senderSockAddr.getAddressPointer(), senderSockAddr.getPortPointer());
|
||||
|
||||
// save information for this packet, in case it is the one that sticks readyRead
|
||||
_lastPacketSizeRead = sizeRead;
|
||||
_lastPacketSockAddr = senderSockAddr;
|
||||
|
||||
if (sizeRead <= 0) {
|
||||
// we either didn't pull anything for this packet or there was an error reading (this seems to trigger
|
||||
// on windows even if there's not a packet available)
|
||||
|
@ -373,6 +384,9 @@ void Socket::readPendingDatagrams() {
|
|||
auto packet = Packet::fromReceivedPacket(std::move(buffer), packetSizeWithHeader, senderSockAddr);
|
||||
packet->setReceiveTime(receiveTime);
|
||||
|
||||
// save the sequence number in case this is the packet that sticks readyRead
|
||||
_lastReceivedSequenceNumber = packet->getSequenceNumber();
|
||||
|
||||
// call our verification operator to see if this packet is verified
|
||||
if (!_packetFilterOperator || _packetFilterOperator(*packet)) {
|
||||
if (packet->isReliable()) {
|
||||
|
@ -494,12 +508,16 @@ std::vector<HifiSockAddr> Socket::getConnectionSockAddrs() {
|
|||
}
|
||||
|
||||
void Socket::handleSocketError(QAbstractSocket::SocketError socketError) {
|
||||
qCWarning(networking) << "udt::Socket error -" << socketError;
|
||||
static const QString SOCKET_REGEX = "udt::Socket error - ";
|
||||
static QString repeatedMessage
|
||||
= LogHandler::getInstance().addRepeatedMessageRegex(SOCKET_REGEX);
|
||||
|
||||
qCDebug(networking) << "udt::Socket error - " << socketError;
|
||||
}
|
||||
|
||||
void Socket::handleStateChanged(QAbstractSocket::SocketState socketState) {
|
||||
if (socketState != QAbstractSocket::BoundState) {
|
||||
qCWarning(networking) << "udt::Socket state changed - state is now" << socketState;
|
||||
qCDebug(networking) << "udt::Socket state changed - state is now" << socketState;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -144,6 +144,10 @@ private:
|
|||
std::unique_ptr<CongestionControlVirtualFactory> _ccFactory { new CongestionControlFactory<TCPVegasCC>() };
|
||||
|
||||
bool _shouldChangeSocketOptions { true };
|
||||
|
||||
int _lastPacketSizeRead { 0 };
|
||||
SequenceNumber _lastReceivedSequenceNumber;
|
||||
HifiSockAddr _lastPacketSockAddr;
|
||||
|
||||
friend UDTTest;
|
||||
};
|
||||
|
|
|
@ -451,6 +451,9 @@ bool OctreePacketData::appendValue(const QVector<bool>& value) {
|
|||
bit = 0;
|
||||
}
|
||||
}
|
||||
if (bit != 0) {
|
||||
destinationBuffer++;
|
||||
}
|
||||
int boolsSize = destinationBuffer - start;
|
||||
success = append(start, boolsSize);
|
||||
if (success) {
|
||||
|
@ -683,6 +686,10 @@ int OctreePacketData::unpackDataFromBytes(const unsigned char *dataBytes, QVecto
|
|||
uint16_t length;
|
||||
memcpy(&length, dataBytes, sizeof(uint16_t));
|
||||
dataBytes += sizeof(length);
|
||||
if (length * sizeof(glm::vec3) > MAX_OCTREE_UNCOMRESSED_PACKET_SIZE) {
|
||||
result.resize(0);
|
||||
return sizeof(uint16_t);
|
||||
}
|
||||
result.resize(length);
|
||||
memcpy(result.data(), dataBytes, length * sizeof(glm::vec3));
|
||||
return sizeof(uint16_t) + length * sizeof(glm::vec3);
|
||||
|
@ -692,6 +699,10 @@ int OctreePacketData::unpackDataFromBytes(const unsigned char *dataBytes, QVecto
|
|||
uint16_t length;
|
||||
memcpy(&length, dataBytes, sizeof(uint16_t));
|
||||
dataBytes += sizeof(length);
|
||||
if (length * sizeof(glm::quat) > MAX_OCTREE_UNCOMRESSED_PACKET_SIZE) {
|
||||
result.resize(0);
|
||||
return sizeof(uint16_t);
|
||||
}
|
||||
result.resize(length);
|
||||
|
||||
const unsigned char *start = dataBytes;
|
||||
|
@ -706,6 +717,10 @@ int OctreePacketData::unpackDataFromBytes(const unsigned char* dataBytes, QVecto
|
|||
uint16_t length;
|
||||
memcpy(&length, dataBytes, sizeof(uint16_t));
|
||||
dataBytes += sizeof(length);
|
||||
if (length * sizeof(float) > MAX_OCTREE_UNCOMRESSED_PACKET_SIZE) {
|
||||
result.resize(0);
|
||||
return sizeof(uint16_t);
|
||||
}
|
||||
result.resize(length);
|
||||
memcpy(result.data(), dataBytes, length * sizeof(float));
|
||||
return sizeof(uint16_t) + length * sizeof(float);
|
||||
|
@ -715,6 +730,10 @@ int OctreePacketData::unpackDataFromBytes(const unsigned char* dataBytes, QVecto
|
|||
uint16_t length;
|
||||
memcpy(&length, dataBytes, sizeof(uint16_t));
|
||||
dataBytes += sizeof(length);
|
||||
if (length / 8 > MAX_OCTREE_UNCOMRESSED_PACKET_SIZE) {
|
||||
result.resize(0);
|
||||
return sizeof(uint16_t);
|
||||
}
|
||||
result.resize(length);
|
||||
|
||||
int bit = 0;
|
||||
|
|
|
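Each of the unpack hunks above now rejects a length field that would overrun the uncompressed packet limit before calling resize(). A sketch of the same guard for a length-prefixed float array; the size cap and function name are illustrative:

#include <cstdint>
#include <cstring>
#include <vector>

static const size_t MAX_UNCOMPRESSED_SIZE = 4096;  // stand-in for the packet limit

// Returns the number of bytes consumed; leaves 'result' empty if the length is implausible.
size_t unpackFloats(const unsigned char* data, std::vector<float>& result) {
    uint16_t length;
    std::memcpy(&length, data, sizeof(uint16_t));
    data += sizeof(uint16_t);
    if (length * sizeof(float) > MAX_UNCOMPRESSED_SIZE) {
        result.clear();
        return sizeof(uint16_t);     // consume only the header, drop the bogus payload
    }
    result.resize(length);
    std::memcpy(result.data(), data, length * sizeof(float));
    return sizeof(uint16_t) + length * sizeof(float);
}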
@ -216,7 +216,7 @@ bool OctreeRenderer::renderOperation(OctreeElementPointer element, void* extraDa
|
|||
|
||||
void OctreeRenderer::render(RenderArgs* renderArgs) {
|
||||
if (_tree) {
|
||||
renderArgs->_renderer = this;
|
||||
renderArgs->_renderer = sharedFromThis();
|
||||
_tree->withReadLock([&] {
|
||||
_tree->recurseTreeWithOperation(renderOperation, renderArgs);
|
||||
});
|
||||
|
|
|
@ -29,7 +29,7 @@ class OctreeRenderer;
|
|||
|
||||
|
||||
// Generic client side Octree renderer class.
|
||||
class OctreeRenderer : public QObject {
|
||||
class OctreeRenderer : public QObject, public QEnableSharedFromThis<OctreeRenderer> {
|
||||
Q_OBJECT
|
||||
public:
|
||||
OctreeRenderer();
|
||||
|
|
|
@ -256,8 +256,18 @@ const btCollisionShape* ShapeFactory::createShapeFromInfo(const ShapeInfo& info)
|
|||
}
|
||||
break;
|
||||
case SHAPE_TYPE_SPHERE: {
|
||||
float radius = info.getHalfExtents().x;
|
||||
shape = new btSphereShape(radius);
|
||||
glm::vec3 halfExtents = info.getHalfExtents();
|
||||
float radius = halfExtents.x;
|
||||
if (radius == halfExtents.y && radius == halfExtents.z) {
|
||||
shape = new btSphereShape(radius);
|
||||
} else {
|
||||
ShapeInfo::PointList points;
|
||||
points.reserve(NUM_UNIT_SPHERE_DIRECTIONS);
|
||||
for (uint32_t i = 0; i < NUM_UNIT_SPHERE_DIRECTIONS; ++i) {
|
||||
points.push_back(bulletToGLM(_unitSphereDirections[i]) * halfExtents);
|
||||
}
|
||||
shape = createConvexHull(points);
|
||||
}
|
||||
}
|
||||
break;
|
||||
case SHAPE_TYPE_CAPSULE_Y: {
|
||||
|
|
|
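The SHAPE_TYPE_SPHERE branch above now only builds a true sphere when all three half extents match; otherwise it scales a fixed set of unit-sphere directions by the half extents and hands the resulting point cloud to the convex-hull builder. A sketch of that point generation; the lat/long sampling is an illustrative stand-in for the engine's precomputed direction table:

#include <cmath>
#include <vector>

struct Vec3 { float x, y, z; };

// Scale a coarse unit-sphere sampling by the half extents to approximate an ellipsoid.
std::vector<Vec3> ellipsoidHullPoints(const Vec3& halfExtents, int rings = 8, int segments = 16) {
    std::vector<Vec3> points;
    const float PI = 3.14159265f;
    for (int r = 0; r <= rings; ++r) {
        float phi = PI * r / rings;                 // 0..pi from pole to pole
        for (int s = 0; s < segments; ++s) {
            float theta = 2.0f * PI * s / segments; // around the equator
            Vec3 dir { std::sin(phi) * std::cos(theta), std::cos(phi), std::sin(phi) * std::sin(theta) };
            points.push_back({ dir.x * halfExtents.x, dir.y * halfExtents.y, dir.z * halfExtents.z });
        }
    }
    return points;   // duplicate pole points are harmless for a convex hull
}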
@ -139,6 +139,7 @@ public:
|
|||
virtual bool isStereo() const { return isHmd(); }
|
||||
virtual bool isThrottled() const { return false; }
|
||||
virtual float getTargetFrameRate() const { return 0.0f; }
|
||||
virtual bool hasAsyncReprojection() const { return false; }
|
||||
|
||||
/// Returns a boolean value indicating whether the display is currently visible
|
||||
/// to the user. For monitor displays, false might indicate that a screensaver,
|
||||
|
|
|
@ -21,6 +21,9 @@ public:
|
|||
virtual void pluginFocusOutEvent() = 0;
|
||||
virtual void pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) = 0;
|
||||
|
||||
// Some input plugins are comprised of multiple subdevices (SDL2, for instance).
|
||||
// If an input plugin is only a single device, it will only return its primary name.
|
||||
virtual QStringList getSubdeviceNames() { return { getName() }; };
|
||||
virtual bool isHandController() const = 0;
|
||||
};
|
||||
|
||||
|
|
|
@ -86,6 +86,7 @@ const LoaderList& getLoadedPlugins() {
|
|||
QString pluginPath = QCoreApplication::applicationDirPath() + "/plugins/";
|
||||
#endif
|
||||
QDir pluginDir(pluginPath);
|
||||
pluginDir.setSorting(QDir::Name);
|
||||
pluginDir.setFilter(QDir::Files);
|
||||
if (pluginDir.exists()) {
|
||||
qInfo() << "Loading runtime plugins from " << pluginPath;
|
||||
|
|
|
@ -32,3 +32,26 @@ bool PluginUtils::isHandControllerAvailable() {
|
|||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
bool isSubdeviceContainingNameAvailable(QString name) {
|
||||
for (auto& inputPlugin : PluginManager::getInstance()->getInputPlugins()) {
|
||||
if (inputPlugin->isActive()) {
|
||||
auto subdeviceNames = inputPlugin->getSubdeviceNames();
|
||||
for (auto& subdeviceName : subdeviceNames) {
|
||||
if (subdeviceName.contains(name)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
bool PluginUtils::isViveControllerAvailable() {
|
||||
return isSubdeviceContainingNameAvailable("OpenVR");
|
||||
};
|
||||
|
||||
bool PluginUtils::isXboxControllerAvailable() {
|
||||
return isSubdeviceContainingNameAvailable("X360 Controller");
|
||||
};
|
||||
|
||||
|
|
|
@ -16,4 +16,6 @@ class PluginUtils {
|
|||
public:
|
||||
static bool isHMDAvailable(const QString& pluginName = "");
|
||||
static bool isHandControllerAvailable();
|
||||
static bool isViveControllerAvailable();
|
||||
static bool isXboxControllerAvailable();
|
||||
};
|
||||
|
|
|
@ -40,7 +40,9 @@ public:
|
|||
|
||||
virtual glm::vec3 getAvatarPosition() const = 0;
|
||||
|
||||
virtual bool isAboutToQuit() const = 0;
|
||||
virtual void postLambdaEvent(std::function<void()> f) = 0;
|
||||
|
||||
virtual qreal getDevicePixelRatio() = 0;
|
||||
|
||||
virtual render::ScenePointer getMain3DScene() = 0;
|
||||
|
|
|
@ -44,33 +44,42 @@ void BatchLoader::start() {
|
|||
return;
|
||||
}
|
||||
|
||||
|
||||
for (const auto& rawURL : _urls) {
|
||||
QUrl url = expandScriptUrl(normalizeScriptURL(rawURL));
|
||||
|
||||
qCDebug(scriptengine) << "Loading script at " << url;
|
||||
|
||||
QPointer<BatchLoader> self = this;
|
||||
DependencyManager::get<ScriptCache>()->getScriptContents(url.toString(), [this, self](const QString& url, const QString& contents, bool isURL, bool success) {
|
||||
if (!self) {
|
||||
return;
|
||||
}
|
||||
auto scriptCache = DependencyManager::get<ScriptCache>();
|
||||
|
||||
// Because the ScriptCache may call this callback from different threads,
|
||||
// we need to make sure this is thread-safe.
|
||||
std::lock_guard<std::mutex> lock(_dataLock);
|
||||
// Use a proxy callback to handle the call and emit the signal in a thread-safe way.
|
||||
// If BatchLoader is deleted before the callback is called, the subsequent "emit" call will not do
|
||||
// anything.
|
||||
ScriptCacheSignalProxy* proxy = new ScriptCacheSignalProxy();
|
||||
connect(scriptCache.data(), &ScriptCache::destroyed, proxy, &ScriptCacheSignalProxy::deleteLater);
|
||||
|
||||
connect(proxy, &ScriptCacheSignalProxy::contentAvailable, this, [this](const QString& url, const QString& contents, bool isURL, bool success) {
|
||||
if (isURL && success) {
|
||||
_data.insert(url, contents);
|
||||
qCDebug(scriptengine) << "Loaded: " << url;
|
||||
} else {
|
||||
_data.insert(url, QString());
|
||||
qCDebug(scriptengine) << "Could not load" << url;
|
||||
qCDebug(scriptengine) << "Could not load: " << url;
|
||||
}
|
||||
|
||||
if (!_finished && _urls.size() == _data.size()) {
|
||||
_finished = true;
|
||||
emit finished(_data);
|
||||
}
|
||||
});
|
||||
|
||||
scriptCache->getScriptContents(url.toString(), [proxy](const QString& url, const QString& contents, bool isURL, bool success) {
|
||||
proxy->receivedContent(url, contents, isURL, success);
|
||||
proxy->deleteLater();
|
||||
}, false);
|
||||
}
|
||||
}
|
||||
|
||||
void ScriptCacheSignalProxy::receivedContent(const QString& url, const QString& contents, bool isURL, bool success) {
|
||||
emit contentAvailable(url, contents, isURL, success);
|
||||
}
|
||||
|
|
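The removed BatchLoader code guarded its lambda with a QPointer so that a late ScriptCache callback landing after the loader's destruction became a no-op; the new code routes results through a signal proxy instead. Outside Qt, the same lifetime guard is commonly written with a weak pointer. A sketch under that assumption; the names are illustrative and the object must be owned by a shared_ptr:

#include <functional>
#include <iostream>
#include <memory>
#include <string>

struct Loader : std::enable_shared_from_this<Loader> {
    void onLoaded(const std::string& url) { std::cout << "loaded " << url << "\n"; }

    // The returned callback silently drops results that arrive after the Loader is destroyed.
    std::function<void(const std::string&)> makeCallback() {
        std::weak_ptr<Loader> weakSelf = shared_from_this();   // requires shared_ptr ownership
        return [weakSelf](const std::string& url) {
            if (auto self = weakSelf.lock()) {
                self->onLoaded(url);
            }
        };
    }
};

// usage sketch: auto loader = std::make_shared<Loader>();
//               auto cb = loader->makeCallback();   // safe to invoke even after loader.reset()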
|
@ -21,10 +21,19 @@
|
|||
|
||||
#include <mutex>
|
||||
|
||||
class ScriptCacheSignalProxy : public QObject {
|
||||
Q_OBJECT
|
||||
public:
|
||||
void receivedContent(const QString& url, const QString& contents, bool isURL, bool success);
|
||||
|
||||
signals:
|
||||
void contentAvailable(const QString& url, const QString& contents, bool isURL, bool success);
|
||||
};
|
||||
|
||||
class BatchLoader : public QObject {
|
||||
Q_OBJECT
|
||||
public:
|
||||
BatchLoader(const QList<QUrl>& urls) ;
|
||||
BatchLoader(const QList<QUrl>& urls);
|
||||
|
||||
void start();
|
||||
bool isFinished() const { return _finished; };
|
||||
|
@ -39,7 +48,6 @@ private:
|
|||
bool _finished;
|
||||
QSet<QUrl> _urls;
|
||||
QMap<QUrl, QString> _data;
|
||||
std::mutex _dataLock;
|
||||
};
|
||||
|
||||
#endif // hifi_BatchLoader_h
|
||||
|
|
|
@ -37,29 +37,30 @@ glm::quat Quat::lookAt(const glm::vec3& eye, const glm::vec3& center, const glm:
|
|||
glm::quat Quat::lookAtSimple(const glm::vec3& eye, const glm::vec3& center) {
|
||||
auto dir = glm::normalize(center - eye);
|
||||
// if the direction is nearly aligned with the Y axis, then use the X axis for 'up'
|
||||
if (dir.x < 0.001f && dir.z < 0.001f) {
|
||||
const float MAX_ABS_Y_COMPONENT = 0.9999991f;
|
||||
if (fabsf(dir.y) > MAX_ABS_Y_COMPONENT) {
|
||||
return lookAt(eye, center, Vectors::UNIT_X);
|
||||
}
|
||||
return lookAt(eye, center, Vectors::UNIT_Y);
|
||||
}
|
||||
|
||||
glm::quat Quat::multiply(const glm::quat& q1, const glm::quat& q2) {
|
||||
return q1 * q2;
|
||||
glm::quat Quat::multiply(const glm::quat& q1, const glm::quat& q2) {
|
||||
return q1 * q2;
|
||||
}
|
||||
|
||||
glm::quat Quat::fromVec3Degrees(const glm::vec3& eulerAngles) {
|
||||
return glm::quat(glm::radians(eulerAngles));
|
||||
glm::quat Quat::fromVec3Degrees(const glm::vec3& eulerAngles) {
|
||||
return glm::quat(glm::radians(eulerAngles));
|
||||
}
|
||||
|
||||
glm::quat Quat::fromVec3Radians(const glm::vec3& eulerAngles) {
|
||||
return glm::quat(eulerAngles);
|
||||
glm::quat Quat::fromVec3Radians(const glm::vec3& eulerAngles) {
|
||||
return glm::quat(eulerAngles);
|
||||
}
|
||||
|
||||
glm::quat Quat::fromPitchYawRollDegrees(float pitch, float yaw, float roll) {
|
||||
glm::quat Quat::fromPitchYawRollDegrees(float pitch, float yaw, float roll) {
|
||||
return glm::quat(glm::radians(glm::vec3(pitch, yaw, roll)));
|
||||
}
|
||||
|
||||
glm::quat Quat::fromPitchYawRollRadians(float pitch, float yaw, float roll) {
|
||||
glm::quat Quat::fromPitchYawRollRadians(float pitch, float yaw, float roll) {
|
||||
return glm::quat(glm::vec3(pitch, yaw, roll));
|
||||
}
|
||||
|
||||
|
|
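lookAtSimple() above switches to the X axis as 'up' once the look direction is almost parallel to Y, because a lookAt basis degenerates when forward and up are collinear. A minimal sketch of that selection, with the threshold taken from the hunk and a plain vector type standing in for glm:

#include <cmath>

struct Vec3 { float x, y, z; };

// Pick an up vector that cannot be (nearly) parallel to the normalized look direction.
Vec3 chooseUpVector(const Vec3& normalizedDir) {
    const float MAX_ABS_Y_COMPONENT = 0.9999991f;
    if (std::fabs(normalizedDir.y) > MAX_ABS_Y_COMPONENT) {
        return Vec3 { 1.0f, 0.0f, 0.0f };   // looking almost straight up or down: use X as up
    }
    return Vec3 { 0.0f, 1.0f, 0.0f };       // normal case: world Y is a safe up
}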
|
@ -222,6 +222,9 @@ void ScriptCache::scriptContentAvailable() {
|
|||
});
|
||||
} else {
|
||||
// Dubious, but retained here because it matches the behavior before fixing the threading
|
||||
|
||||
allCallbacks = scriptRequest.scriptUsers;
|
||||
|
||||
scriptContent = _scriptCache[url];
|
||||
finished = true;
|
||||
qCWarning(scriptengine) << "Error loading script from URL " << url;
|
||||
|
|
|
@ -146,6 +146,8 @@ public:
|
|||
|
||||
Q_INVOKABLE void requestGarbageCollection() { collectGarbage(); }
|
||||
|
||||
Q_INVOKABLE QUuid generateUUID() { return QUuid::createUuid(); }
|
||||
|
||||
bool isFinished() const { return _isFinished; } // used by Application and ScriptWidget
|
||||
bool isRunning() const { return _isRunning; } // used by ScriptWidget
|
||||
|
||||
|
|
|
@ -10,9 +10,15 @@
|
|||
|
||||
#include <QtCore/QtGlobal>
|
||||
|
||||
|
||||
#ifdef Q_OS_WIN
|
||||
#include <atlbase.h>
|
||||
#include <Wbemidl.h>
|
||||
#include <string>
|
||||
|
||||
//#include <atlbase.h>
|
||||
//#include <Wbemidl.h>
|
||||
|
||||
#include <dxgi1_3.h>
|
||||
#pragma comment(lib, "dxgi.lib")
|
||||
|
||||
#elif defined(Q_OS_MAC)
|
||||
#include <OpenGL/OpenGL.h>
|
||||
|
@ -53,9 +59,101 @@ GPUIdent* GPUIdent::ensureQuery(const QString& vendor, const QString& renderer)
|
|||
CGLDestroyRendererInfo(rendererInfo);
|
||||
|
||||
#elif defined(Q_OS_WIN)
|
||||
|
||||
struct ConvertLargeIntegerToQString {
|
||||
QString convert(const LARGE_INTEGER& version) {
|
||||
QString value;
|
||||
value.append(QString::number(uint32_t(((version.HighPart & 0xFFFF0000) >> 16) & 0x0000FFFF)));
|
||||
value.append(".");
|
||||
value.append(QString::number(uint32_t((version.HighPart) & 0x0000FFFF)));
|
||||
value.append(".");
|
||||
value.append(QString::number(uint32_t(((version.LowPart & 0xFFFF0000) >> 16) & 0x0000FFFF)));
|
||||
value.append(".");
|
||||
value.append(QString::number(uint32_t((version.LowPart) & 0x0000FFFF)));
|
||||
return value;
|
||||
}
|
||||
} convertDriverVersionToString;
|
||||
|
||||
// Create the DXGI factory
|
||||
// Let's get into DXGI land:
|
||||
HRESULT hr = S_OK;
|
||||
|
||||
IDXGIFactory1* pFactory = nullptr;
|
||||
hr = CreateDXGIFactory1(__uuidof(IDXGIFactory1), (void**)(&pFactory) );
|
||||
if (hr != S_OK || pFactory == nullptr) {
|
||||
qCDebug(shared) << "Unable to create DXGI";
|
||||
return this;
|
||||
}
|
||||
|
||||
std::vector<int> validAdapterList;
|
||||
using AdapterEntry = std::pair<std::pair<DXGI_ADAPTER_DESC1, LARGE_INTEGER>, std::vector<DXGI_OUTPUT_DESC>>;
|
||||
std::vector<AdapterEntry> adapterToOutputs;
|
||||
// Enumerate adapters and outputs
|
||||
{
|
||||
UINT adapterNum = 0;
|
||||
IDXGIAdapter1* pAdapter = nullptr;
|
||||
while (pFactory->EnumAdapters1(adapterNum, &pAdapter) != DXGI_ERROR_NOT_FOUND) {
|
||||
|
||||
// Found an adapter, get descriptor
|
||||
DXGI_ADAPTER_DESC1 adapterDesc;
|
||||
pAdapter->GetDesc1(&adapterDesc);
|
||||
|
||||
LARGE_INTEGER version;
|
||||
hr = pAdapter->CheckInterfaceSupport(__uuidof(IDXGIDevice), &version);
|
||||
|
||||
std::wstring wDescription(adapterDesc.Description);
|
||||
std::string description(wDescription.begin(), wDescription.end());
|
||||
qCDebug(shared) << "Found adapter: " << description.c_str()
|
||||
<< " Driver version: " << convertDriverVersionToString.convert(version);
|
||||
|
||||
AdapterEntry adapterEntry;
|
||||
adapterEntry.first.first = adapterDesc;
|
||||
adapterEntry.first.second = version;
|
||||
|
||||
|
||||
|
||||
UINT outputNum = 0;
|
||||
IDXGIOutput * pOutput;
|
||||
bool hasOutputConnectedToDesktop = false;
|
||||
while (pAdapter->EnumOutputs(outputNum, &pOutput) != DXGI_ERROR_NOT_FOUND) {
|
||||
|
||||
// Found an output attached to the adapter, get descriptor
|
||||
DXGI_OUTPUT_DESC outputDesc;
|
||||
pOutput->GetDesc(&outputDesc);
|
||||
|
||||
adapterEntry.second.push_back(outputDesc);
|
||||
|
||||
std::wstring wDeviceName(outputDesc.DeviceName);
|
||||
std::string deviceName(wDeviceName.begin(), wDeviceName.end());
|
||||
qCDebug(shared) << " Found output: " << deviceName.c_str() << " desktop: " << (outputDesc.AttachedToDesktop ? "true" : "false")
|
||||
<< " Rect [ l=" << outputDesc.DesktopCoordinates.left << " r=" << outputDesc.DesktopCoordinates.right
|
||||
<< " b=" << outputDesc.DesktopCoordinates.bottom << " t=" << outputDesc.DesktopCoordinates.top << " ]";
|
||||
|
||||
hasOutputConnectedToDesktop |= (bool) outputDesc.AttachedToDesktop;
|
||||
|
||||
pOutput->Release();
|
||||
outputNum++;
|
||||
}
|
||||
|
||||
adapterToOutputs.push_back(adapterEntry);
|
||||
|
||||
// add this adapter to the valid list if it has an output connected to the desktop
|
||||
if (hasOutputConnectedToDesktop && !adapterEntry.second.empty()) {
|
||||
validAdapterList.push_back(adapterNum);
|
||||
}
|
||||
|
||||
pAdapter->Release();
|
||||
adapterNum++;
|
||||
}
|
||||
}
|
||||
pFactory->Release();
|
||||
|
||||
|
||||
// This was the previous technique used to detect the platform we are running on under Windows.
|
||||
/*
|
||||
// COM must be initialized already using CoInitialize. E.g., by the audio subsystem.
|
||||
CComPtr<IWbemLocator> spLoc = NULL;
|
||||
HRESULT hr = CoCreateInstance(CLSID_WbemLocator, 0, CLSCTX_SERVER, IID_IWbemLocator, (LPVOID *)&spLoc);
|
||||
hr = CoCreateInstance(CLSID_WbemLocator, 0, CLSCTX_SERVER, IID_IWbemLocator, (LPVOID *)&spLoc);
|
||||
if (hr != S_OK || spLoc == NULL) {
|
||||
qCDebug(shared) << "Unable to connect to WMI";
|
||||
return this;
|
||||
|
@ -139,7 +237,7 @@ GPUIdent* GPUIdent::ensureQuery(const QString& vendor, const QString& renderer)
|
|||
var.ChangeType(CIM_UINT64); // We're going to receive some integral type, but it might not be uint.
|
||||
// We might be hosed here. The parameter is documented to be UINT32, but that's only 4 GB!
|
||||
const ULONGLONG BYTES_PER_MEGABYTE = 1024 * 1024;
|
||||
_dedicatedMemoryMB = (uint) (var.ullVal / BYTES_PER_MEGABYTE);
|
||||
_dedicatedMemoryMB = (uint64_t) (var.ullVal / BYTES_PER_MEGABYTE);
|
||||
}
|
||||
else {
|
||||
qCDebug(shared) << "Unable to get video AdapterRAM";
|
||||
|
@ -149,6 +247,22 @@ GPUIdent* GPUIdent::ensureQuery(const QString& vendor, const QString& renderer)
|
|||
}
|
||||
hr = spEnumInst->Next(WBEM_INFINITE, 1, &spInstance.p, &uNumOfInstances);
|
||||
}
|
||||
*/
|
||||
|
||||
if (!validAdapterList.empty()) {
|
||||
auto& adapterEntry = adapterToOutputs[validAdapterList.front()];
|
||||
|
||||
std::wstring wDescription(adapterEntry.first.first.Description);
|
||||
std::string description(wDescription.begin(), wDescription.end());
|
||||
_name = QString(description.c_str());
|
||||
|
||||
_driver = convertDriverVersionToString.convert(adapterEntry.first.second);
|
||||
|
||||
const ULONGLONG BYTES_PER_MEGABYTE = 1024 * 1024;
|
||||
_dedicatedMemoryMB = (uint64_t)(adapterEntry.first.first.DedicatedVideoMemory / BYTES_PER_MEGABYTE);
|
||||
_isValid = true;
|
||||
}
|
||||
|
||||
#endif
|
||||
return this;
|
||||
}
|
||||
|
|
|
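The driver version returned by CheckInterfaceSupport is a 64-bit LARGE_INTEGER with four 16-bit fields packed into its high and low dwords, which is what the convert() helper above turns into an "a.b.c.d" string. A minimal, Qt-free sketch of the same unpacking (not part of this diff; the sample value is made up):

// Minimal sketch: unpack a WDDM-style driver version packed as four 16-bit
// fields inside a 64-bit value, mirroring the convert() helper above.
#include <cstdint>
#include <iostream>
#include <string>

static std::string driverVersionToString(uint64_t packed) {
    uint32_t highPart = static_cast<uint32_t>(packed >> 32);
    uint32_t lowPart  = static_cast<uint32_t>(packed & 0xFFFFFFFFu);
    return std::to_string((highPart >> 16) & 0xFFFF) + "." +
           std::to_string(highPart & 0xFFFF) + "." +
           std::to_string((lowPart >> 16) & 0xFFFF) + "." +
           std::to_string(lowPart & 0xFFFF);
}

int main() {
    // hypothetical version 21.21.13.4201 encoded into the packed layout
    uint64_t packed = (uint64_t(21) << 48) | (uint64_t(21) << 32) | (13u << 16) | 4201u;
    std::cout << driverVersionToString(packed) << "\n";  // prints 21.21.13.4201
    return 0;
}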
@ -14,17 +14,19 @@
|
|||
#ifndef hifi_GPUIdent_h
|
||||
#define hifi_GPUIdent_h
|
||||
|
||||
#include <cstdint>
|
||||
|
||||
class GPUIdent
|
||||
{
|
||||
public:
|
||||
unsigned int getMemory() { return _dedicatedMemoryMB; }
|
||||
uint64_t getMemory() { return _dedicatedMemoryMB; }
|
||||
QString getName() { return _name; }
|
||||
QString getDriver() { return _driver; }
|
||||
bool isValid() { return _isValid; }
|
||||
// E.g., GPUIdent::getInstance()->getMemory();
|
||||
static GPUIdent* getInstance(const QString& vendor = "", const QString& renderer = "") { return _instance.ensureQuery(vendor, renderer); }
|
||||
private:
|
||||
uint _dedicatedMemoryMB { 0 };
|
||||
uint64_t _dedicatedMemoryMB { 0 };
|
||||
QString _name { "" };
|
||||
QString _driver { "" };
|
||||
bool _isQueried { false };
|
||||
|
|
|
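With the accessor and backing field widened to uint64_t, dedicated video memory above 4 GB no longer wraps. A hedged caller-side sketch, assuming the GPUIdent.h include path:

// Usage sketch for the widened accessor: dedicated GPU memory is reported as a
// 64-bit count of megabytes. Include path is assumed.
#include <cstdint>
#include <QDebug>
#include "GPUIdent.h"

void logGpuMemory() {
    uint64_t dedicatedMB = GPUIdent::getInstance()->getMemory();
    qDebug() << "Dedicated GPU memory:" << dedicatedMB << "MB";
}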
@ -263,6 +263,14 @@ void quatFromScriptValue(const QScriptValue& object, glm::quat &quat) {
|
|||
quat.y = object.property("y").toVariant().toFloat();
|
||||
quat.z = object.property("z").toVariant().toFloat();
|
||||
quat.w = object.property("w").toVariant().toFloat();
|
||||
|
||||
// enforce normalized quaternion
|
||||
float length = glm::length(quat);
|
||||
if (length > FLT_EPSILON) {
|
||||
quat /= length;
|
||||
} else {
|
||||
quat = glm::quat();
|
||||
}
|
||||
}
|
||||
|
||||
glm::quat quatFromVariant(const QVariant &object, bool& isValid) {
|
||||
|
@ -273,6 +281,14 @@ glm::quat quatFromVariant(const QVariant &object, bool& isValid) {
|
|||
q.y = qvec3.y();
|
||||
q.z = qvec3.z();
|
||||
q.w = qvec3.scalar();
|
||||
|
||||
// enforce normalized quaternion
|
||||
float length = glm::length(q);
|
||||
if (length > FLT_EPSILON) {
|
||||
q /= length;
|
||||
} else {
|
||||
q = glm::quat();
|
||||
}
|
||||
isValid = true;
|
||||
} else {
|
||||
auto map = object.toMap();
|
||||
|
|
|
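Both quatFromScriptValue and quatFromVariant now normalize the incoming quaternion and fall back to identity when its length is effectively zero. A small GLM-only sketch of that guard, matching the glm::quat() fallback used above:

// Sketch of the normalization guard: divide by the length when it is
// meaningful, otherwise fall back to the identity quaternion. Requires GLM.
#include <cfloat>
#include <glm/gtc/quaternion.hpp>

glm::quat safeNormalize(glm::quat q) {
    float length = glm::length(q);
    if (length > FLT_EPSILON) {
        q /= length;
    } else {
        q = glm::quat();   // identity in this GLM configuration (w=1, x=y=z=0)
    }
    return q;
}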
@ -79,7 +79,7 @@ public:
|
|||
};
|
||||
|
||||
RenderArgs(std::shared_ptr<gpu::Context> context = nullptr,
|
||||
OctreeRenderer* renderer = nullptr,
|
||||
QSharedPointer<OctreeRenderer> renderer = QSharedPointer<OctreeRenderer>(nullptr),
|
||||
float sizeScale = 1.0f,
|
||||
int boundaryLevelAdjust = 0,
|
||||
RenderMode renderMode = DEFAULT_RENDER_MODE,
|
||||
|
@ -110,7 +110,7 @@ public:
|
|||
std::shared_ptr<gpu::Context> _context = nullptr;
|
||||
std::shared_ptr<gpu::Framebuffer> _blitFramebuffer = nullptr;
|
||||
std::shared_ptr<render::ShapePipeline> _pipeline = nullptr;
|
||||
OctreeRenderer* _renderer = nullptr;
|
||||
QSharedPointer<OctreeRenderer> _renderer;
|
||||
std::stack<ViewFrustum> _viewFrustums;
|
||||
glm::ivec4 _viewport{ 0.0f, 0.0f, 1.0f, 1.0f };
|
||||
glm::vec3 _boomOffset{ 0.0f, 0.0f, 1.0f };
|
||||
|
|
|
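Switching the renderer member from a raw OctreeRenderer* to a QSharedPointer means RenderArgs now shares ownership instead of holding a pointer that can dangle. A standalone sketch of that difference, using a stand-in Renderer type rather than the engine's OctreeRenderer:

// Sketch of the ownership change: a QSharedPointer member keeps the renderer
// alive as long as any holder still references it. Requires QtCore.
#include <QDebug>
#include <QSharedPointer>

struct Renderer {
    ~Renderer() { qDebug() << "Renderer destroyed"; }
};

struct Args {
    QSharedPointer<Renderer> renderer;   // shared ownership, null by default
};

int main() {
    Args args;
    {
        auto renderer = QSharedPointer<Renderer>::create();
        args.renderer = renderer;        // both handles share ownership
    }                                    // local handle gone, renderer still alive
    qDebug() << "still alive:" << !args.renderer.isNull();
    return 0;                            // renderer destroyed when args goes out of scope
}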
@ -33,13 +33,8 @@ void ShapeInfo::setParams(ShapeType type, const glm::vec3& halfExtents, QString
|
|||
_halfExtents = glm::vec3(0.0f);
|
||||
break;
|
||||
case SHAPE_TYPE_BOX:
|
||||
case SHAPE_TYPE_SPHERE:
|
||||
break;
|
||||
case SHAPE_TYPE_SPHERE: {
|
||||
// sphere radius is max of halfExtents
|
||||
float radius = glm::max(glm::max(halfExtents.x, halfExtents.y), halfExtents.z);
|
||||
_halfExtents = glm::vec3(radius);
|
||||
break;
|
||||
}
|
||||
case SHAPE_TYPE_COMPOUND:
|
||||
case SHAPE_TYPE_STATIC_MESH:
|
||||
_url = QUrl(url);
|
||||
|
@ -119,8 +114,7 @@ float ShapeInfo::computeVolume() const {
|
|||
break;
|
||||
}
|
||||
case SHAPE_TYPE_SPHERE: {
|
||||
float radius = _halfExtents.x;
|
||||
volume = 4.0f * PI * radius * radius * radius / 3.0f;
|
||||
volume = 4.0f * PI * _halfExtents.x * _halfExtents.y * _halfExtents.z / 3.0f;
|
||||
break;
|
||||
}
|
||||
case SHAPE_TYPE_CYLINDER_Y: {
|
||||
|
|
|
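setParams now stores the sphere radius as the maximum half extent in all three components, so the ellipsoid-style volume in computeVolume, 4/3 * PI * x * y * z, reduces to the familiar 4/3 * PI * r^3. A tiny standalone check of that equivalence (PI defined locally, extents made up):

// Check that the two volume forms agree once all three half extents equal the
// max radius, as SHAPE_TYPE_SPHERE in setParams now enforces.
#include <algorithm>
#include <cassert>
#include <cmath>

int main() {
    const float PI = 3.14159265358979f;
    float halfExtents[3] = { 0.3f, 0.5f, 0.2f };
    // mirror of setParams: sphere radius is the max half extent
    float radius = std::max({ halfExtents[0], halfExtents[1], halfExtents[2] });
    float x = radius, y = radius, z = radius;

    float byRadius  = 4.0f * PI * radius * radius * radius / 3.0f;
    float byExtents = 4.0f * PI * x * y * z / 3.0f;
    assert(std::fabs(byRadius - byExtents) < 1e-6f);
    return 0;
}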
@ -245,6 +245,32 @@ void SteamClient::shutdown() {
|
|||
steamCallbackManager.getTicketRequests().stopAll();
|
||||
}
|
||||
|
||||
int SteamClient::getSteamVRBuildID() {
|
||||
if (initialized) {
|
||||
static const int MAX_PATH_SIZE = 512;
|
||||
static const int STEAMVR_APPID = 250820;
|
||||
char rawPath[MAX_PATH_SIZE];
|
||||
SteamApps()->GetAppInstallDir(STEAMVR_APPID, rawPath, MAX_PATH_SIZE);
|
||||
|
||||
QString path(rawPath);
|
||||
path += "\\bin\\version.txt";
|
||||
qDebug() << "SteamVR version file path:" << path;
|
||||
|
||||
QFile file(path);
|
||||
if (file.open(QIODevice::ReadOnly)) {
|
||||
QString buildIDString = file.readLine();
|
||||
|
||||
bool ok = false;
|
||||
int buildID = buildIDString.toInt(&ok);
|
||||
if (ok) {
|
||||
return buildID;
|
||||
}
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
void SteamClient::runCallbacks() {
|
||||
if (!initialized) {
|
||||
return;
|
||||
|
|
|
@ -37,6 +37,7 @@ public:
|
|||
static void openInviteOverlay();
|
||||
static void joinLobby(QString lobbyId);
|
||||
|
||||
static int getSteamVRBuildID();
|
||||
};
|
||||
|
||||
class SteamScriptingInterface : public QObject {
|
||||
|
|
|
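getSteamVRBuildID() reads the first line of version.txt under the SteamVR install directory and returns 0 when Steam is not initialized or the file cannot be parsed. A hedged caller-side sketch; the include path is assumed:

// Usage sketch: treat a return value of 0 as "build ID unknown".
#include <QDebug>
#include "SteamClient.h"   // include path assumed

void logSteamVRBuild() {
    int buildID = SteamClient::getSteamVRBuildID();
    if (buildID != 0) {
        qDebug() << "Running against SteamVR build" << buildID;
    } else {
        qDebug() << "SteamVR build ID unavailable";
    }
}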
@ -37,7 +37,7 @@ QString fetchVersion(const QUrl& url) {
|
|||
return r.trimmed();
|
||||
}
|
||||
|
||||
void InfoView::show(const QString& path, bool firstOrChangedOnly) {
|
||||
void InfoView::show(const QString& path, bool firstOrChangedOnly, QString urlQuery) {
|
||||
static bool registered{ false };
|
||||
if (!registered) {
|
||||
registerType();
|
||||
|
@ -49,6 +49,8 @@ void InfoView::show(const QString& path, bool firstOrChangedOnly) {
|
|||
} else {
|
||||
url = QUrl::fromLocalFile(path);
|
||||
}
|
||||
url.setQuery(urlQuery);
|
||||
|
||||
if (firstOrChangedOnly) {
|
||||
const QString lastVersion = infoVersion.get();
|
||||
const QString version = fetchVersion(url);
|
||||
|
|
|
@ -22,7 +22,7 @@ class InfoView : public QQuickItem {
|
|||
static const QString NAME;
|
||||
public:
|
||||
static void registerType();
|
||||
static void show(const QString& path, bool firstOrChangedOnly = false);
|
||||
static void show(const QString& path, bool firstOrChangedOnly = false, QString urlQuery = "");
|
||||
|
||||
InfoView(QQuickItem* parent = nullptr);
|
||||
QUrl url();
|
||||
|
|
|
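InfoView::show() now takes an optional urlQuery that is set verbatim on the resolved URL before the view is displayed. A hedged usage sketch; the page path and query key here are illustrative and not taken from this diff:

// Usage sketch for the new urlQuery parameter of InfoView::show().
#include <QString>
#include "InfoView.h"   // include path assumed

void showReleaseNotes(int steamVRBuildID) {
    QString query = QString("steamVRBuildID=%1").arg(steamVRBuildID);
    // firstOrChangedOnly = true: only show when the fetched version changed
    InfoView::show("html/interface-welcome.html", true, query);
}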
@ -371,6 +371,13 @@ void OffscreenUi::setPinned(bool pinned) {
|
|||
}
|
||||
}
|
||||
|
||||
void OffscreenUi::setConstrainToolbarToCenterX(bool constrained) {
|
||||
bool invokeResult = QMetaObject::invokeMethod(_desktop, "setConstrainToolbarToCenterX", Q_ARG(QVariant, constrained));
|
||||
if (!invokeResult) {
|
||||
qWarning() << "Failed to set toolbar constraint";
|
||||
}
|
||||
}
|
||||
|
||||
void OffscreenUi::addMenuInitializer(std::function<void(VrMenu*)> f) {
|
||||
if (!_vrMenu) {
|
||||
_queuedMenuInitializers.push_back(f);
|
||||
|
|
|
@ -52,6 +52,7 @@ public:
|
|||
void setPinned(bool pinned = true);
|
||||
|
||||
void togglePinned();
|
||||
void setConstrainToolbarToCenterX(bool constrained);
|
||||
|
||||
bool eventFilter(QObject* originalDestination, QEvent* event) override;
|
||||
void addMenuInitializer(std::function<void(VrMenu*)> f);
|
||||
|
|
|
@ -31,6 +31,8 @@ public:
|
|||
|
||||
const QString& getName() const { return _name; }
|
||||
|
||||
SDL_GameController* getGameController() { return _sdlGameController; }
|
||||
|
||||
// Device functions
|
||||
virtual controller::Input::NamedVector getAvailableInputs() const override;
|
||||
virtual QString getDefaultMappingConfig() const override;
|
||||
|
|
|
@ -65,8 +65,10 @@ void SDL2Manager::init() {
|
|||
_openJoysticks[id] = joystick;
|
||||
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
|
||||
userInputMapper->registerDevice(joystick);
|
||||
auto name = SDL_GameControllerName(controller);
|
||||
_subdeviceNames << name;
|
||||
emit joystickAdded(joystick.get());
|
||||
emit subdeviceConnected(getName(), SDL_GameControllerName(controller));
|
||||
emit subdeviceConnected(getName(), name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -78,6 +80,10 @@ void SDL2Manager::init() {
|
|||
}
|
||||
}
|
||||
|
||||
QStringList SDL2Manager::getSubdeviceNames() {
|
||||
return _subdeviceNames;
|
||||
}
|
||||
|
||||
void SDL2Manager::deinit() {
|
||||
_openJoysticks.clear();
|
||||
|
||||
|
@ -157,15 +163,19 @@ void SDL2Manager::pluginUpdate(float deltaTime, const controller::InputCalibrati
|
|||
Joystick::Pointer joystick = std::make_shared<Joystick>(id, controller);
|
||||
_openJoysticks[id] = joystick;
|
||||
userInputMapper->registerDevice(joystick);
|
||||
QString name = SDL_GameControllerName(controller);
|
||||
emit joystickAdded(joystick.get());
|
||||
emit subdeviceConnected(getName(), SDL_GameControllerName(controller));
|
||||
emit subdeviceConnected(getName(), name);
|
||||
_subdeviceNames << name;
|
||||
}
|
||||
} else if (event.type == SDL_CONTROLLERDEVICEREMOVED) {
|
||||
if (_openJoysticks.contains(event.cdevice.which)) {
|
||||
Joystick::Pointer joystick = _openJoysticks[event.cdevice.which];
|
||||
_openJoysticks.remove(event.cdevice.which);
|
||||
userInputMapper->removeDevice(joystick->getDeviceID());
|
||||
QString name = SDL_GameControllerName(joystick->getGameController());
|
||||
emit joystickRemoved(joystick.get());
|
||||
_subdeviceNames.removeOne(name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -26,6 +26,7 @@ public:
|
|||
bool isSupported() const override;
|
||||
const QString& getName() const override { return NAME; }
|
||||
|
||||
QStringList getSubdeviceNames() override;
|
||||
bool isHandController() const override { return false; }
|
||||
|
||||
void init() override;
|
||||
|
@ -79,6 +80,7 @@ private:
|
|||
QMap<SDL_JoystickID, Joystick::Pointer> _openJoysticks;
|
||||
bool _isInitialized { false } ;
|
||||
static const QString NAME;
|
||||
QStringList _subdeviceNames;
|
||||
};
|
||||
|
||||
#endif // hifi__SDL2Manager_h
|
||||
|
|
|
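The SDL2 plugin now keeps a QStringList of connected controller names, appending on device-added events, removing one entry on device-removed events, and returning the list from getSubdeviceNames(). A condensed, SDL-free sketch of that bookkeeping:

// Sketch of the subdevice-name bookkeeping; SDL event handling is left out
// and the names below are made up.
#include <QDebug>
#include <QStringList>

class SubdeviceTracker {
public:
    void onControllerAdded(const QString& name) { _subdeviceNames << name; }
    void onControllerRemoved(const QString& name) { _subdeviceNames.removeOne(name); }
    QStringList getSubdeviceNames() const { return _subdeviceNames; }
private:
    QStringList _subdeviceNames;
};

int main() {
    SubdeviceTracker tracker;
    tracker.onControllerAdded("X360 Controller");
    tracker.onControllerAdded("DualShock 4");
    tracker.onControllerRemoved("X360 Controller");
    qDebug() << tracker.getSubdeviceNames();   // ("DualShock 4")
    return 0;
}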
@ -19,6 +19,9 @@ public:
|
|||
~OculusBaseDisplayPlugin();
|
||||
bool isSupported() const override;
|
||||
|
||||
bool hasAsyncReprojection() const override { return true; }
|
||||
|
||||
|
||||
// Stereo specific methods
|
||||
void resetSensors() override final;
|
||||
bool beginFrameRender(uint32_t frameIndex) override;
|
||||
|
|
|
@ -117,6 +117,17 @@ void OculusControllerManager::stopHapticPulse(bool leftHand) {
|
|||
}
|
||||
}
|
||||
|
||||
QStringList OculusControllerManager::getSubdeviceNames() {
|
||||
QStringList devices;
|
||||
if (_touch) {
|
||||
devices << _touch->getName();
|
||||
}
|
||||
if (_remote) {
|
||||
devices << _remote->getName();
|
||||
}
|
||||
return devices;
|
||||
}
|
||||
|
||||
using namespace controller;
|
||||
|
||||
static const std::vector<std::pair<ovrButton, StandardButtonChannel>> BUTTON_MAP { {
|
||||
|
|
|
@ -27,6 +27,7 @@ public:
|
|||
const QString& getName() const override { return NAME; }
|
||||
|
||||
bool isHandController() const override { return _touch != nullptr; }
|
||||
QStringList getSubdeviceNames() override;
|
||||
|
||||
bool activate() override;
|
||||
void deactivate() override;
|
||||
|
|
|
@ -35,6 +35,7 @@ Q_DECLARE_LOGGING_CATEGORY(displayplugins)
|
|||
|
||||
const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
|
||||
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here
|
||||
const QString OpenVrThreadedSubmit = "OpenVR Threaded Submit"; // this probably shouldn't be hardcoded here
|
||||
|
||||
PoseData _nextRenderPoseData;
|
||||
PoseData _nextSimPoseData;
|
||||
|
@ -42,15 +43,12 @@ PoseData _nextSimPoseData;
|
|||
#define MIN_CORES_FOR_NORMAL_RENDER 5
|
||||
bool forceInterleavedReprojection = (QThread::idealThreadCount() < MIN_CORES_FOR_NORMAL_RENDER);
|
||||
|
||||
|
||||
static std::array<vr::Hmd_Eye, 2> VR_EYES { { vr::Eye_Left, vr::Eye_Right } };
|
||||
bool _openVrDisplayActive { false };
|
||||
// Flip y-axis since GL UV coords are backwards.
|
||||
static vr::VRTextureBounds_t OPENVR_TEXTURE_BOUNDS_LEFT{ 0, 0, 0.5f, 1 };
|
||||
static vr::VRTextureBounds_t OPENVR_TEXTURE_BOUNDS_RIGHT{ 0.5f, 0, 1, 1 };
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
|
||||
#define REPROJECTION_BINDING 1
|
||||
|
||||
static const char* HMD_REPROJECTION_VERT = R"SHADER(
|
||||
|
@ -351,12 +349,17 @@ public:
|
|||
OpenVrDisplayPlugin& _plugin;
|
||||
};
|
||||
|
||||
#endif
|
||||
|
||||
bool OpenVrDisplayPlugin::isSupported() const {
|
||||
return openVrSupported();
|
||||
}
|
||||
|
||||
float OpenVrDisplayPlugin::getTargetFrameRate() const {
|
||||
if (forceInterleavedReprojection && !_asyncReprojectionActive) {
|
||||
return TARGET_RATE_OpenVr / 2.0f;
|
||||
}
|
||||
return TARGET_RATE_OpenVr;
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::init() {
|
||||
Plugin::init();
|
||||
|
||||
|
@ -394,6 +397,16 @@ bool OpenVrDisplayPlugin::internalActivate() {
|
|||
return false;
|
||||
}
|
||||
|
||||
vr::Compositor_FrameTiming timing;
|
||||
memset(&timing, 0, sizeof(timing));
|
||||
timing.m_nSize = sizeof(vr::Compositor_FrameTiming);
|
||||
vr::VRCompositor()->GetFrameTiming(&timing);
|
||||
_asyncReprojectionActive = timing.m_nReprojectionFlags & VRCompositor_ReprojectionAsync;
|
||||
|
||||
_threadedSubmit = !_asyncReprojectionActive;
|
||||
qDebug() << "OpenVR Async Reprojection active: " << _asyncReprojectionActive;
|
||||
qDebug() << "OpenVR Threaded submit enabled: " << _threadedSubmit;
|
||||
|
||||
_openVrDisplayActive = true;
|
||||
_container->setIsOptionChecked(StandingHMDSensorMode, true);
|
||||
|
||||
|
@ -434,16 +447,16 @@ bool OpenVrDisplayPlugin::internalActivate() {
|
|||
#endif
|
||||
}
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
_submitThread = std::make_shared<OpenVrSubmitThread>(*this);
|
||||
if (!_submitCanvas) {
|
||||
withMainThreadContext([&] {
|
||||
_submitCanvas = std::make_shared<gl::OffscreenContext>();
|
||||
_submitCanvas->create();
|
||||
_submitCanvas->doneCurrent();
|
||||
});
|
||||
if (_threadedSubmit) {
|
||||
_submitThread = std::make_shared<OpenVrSubmitThread>(*this);
|
||||
if (!_submitCanvas) {
|
||||
withMainThreadContext([&] {
|
||||
_submitCanvas = std::make_shared<gl::OffscreenContext>();
|
||||
_submitCanvas->create();
|
||||
_submitCanvas->doneCurrent();
|
||||
});
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
return Parent::internalActivate();
|
||||
}
|
||||
|
@ -473,27 +486,27 @@ void OpenVrDisplayPlugin::customizeContext() {
|
|||
|
||||
Parent::customizeContext();
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
_compositeInfos[0].texture = _compositeFramebuffer->getRenderBuffer(0);
|
||||
for (size_t i = 0; i < COMPOSITING_BUFFER_SIZE; ++i) {
|
||||
if (0 != i) {
|
||||
_compositeInfos[i].texture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, _renderTargetSize.x, _renderTargetSize.y, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT)));
|
||||
if (_threadedSubmit) {
|
||||
_compositeInfos[0].texture = _compositeFramebuffer->getRenderBuffer(0);
|
||||
for (size_t i = 0; i < COMPOSITING_BUFFER_SIZE; ++i) {
|
||||
if (0 != i) {
|
||||
_compositeInfos[i].texture = gpu::TexturePointer(gpu::Texture::create2D(gpu::Element::COLOR_RGBA_32, _renderTargetSize.x, _renderTargetSize.y, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT)));
|
||||
}
|
||||
_compositeInfos[i].textureID = getGLBackend()->getTextureID(_compositeInfos[i].texture, false);
|
||||
}
|
||||
_compositeInfos[i].textureID = getGLBackend()->getTextureID(_compositeInfos[i].texture, false);
|
||||
_submitThread->_canvas = _submitCanvas;
|
||||
_submitThread->start(QThread::HighPriority);
|
||||
}
|
||||
_submitThread->_canvas = _submitCanvas;
|
||||
_submitThread->start(QThread::HighPriority);
|
||||
#endif
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::uncustomizeContext() {
|
||||
Parent::uncustomizeContext();
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
_submitThread->_quit = true;
|
||||
_submitThread->wait();
|
||||
_submitThread.reset();
|
||||
#endif
|
||||
if (_threadedSubmit) {
|
||||
_submitThread->_quit = true;
|
||||
_submitThread->wait();
|
||||
_submitThread.reset();
|
||||
}
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::resetSensors() {
|
||||
|
@ -582,75 +595,76 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
|
|||
}
|
||||
|
||||
void OpenVrDisplayPlugin::compositeLayers() {
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
++_renderingIndex;
|
||||
_renderingIndex %= COMPOSITING_BUFFER_SIZE;
|
||||
if (_threadedSubmit) {
|
||||
++_renderingIndex;
|
||||
_renderingIndex %= COMPOSITING_BUFFER_SIZE;
|
||||
|
||||
auto& newComposite = _compositeInfos[_renderingIndex];
|
||||
newComposite.pose = _currentPresentFrameInfo.presentPose;
|
||||
_compositeFramebuffer->setRenderBuffer(0, newComposite.texture);
|
||||
#endif
|
||||
auto& newComposite = _compositeInfos[_renderingIndex];
|
||||
newComposite.pose = _currentPresentFrameInfo.presentPose;
|
||||
_compositeFramebuffer->setRenderBuffer(0, newComposite.texture);
|
||||
}
|
||||
|
||||
Parent::compositeLayers();
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
newComposite.fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
|
||||
// https://www.opengl.org/registry/specs/ARB/sync.txt:
|
||||
// > The simple flushing behavior defined by
|
||||
// > SYNC_FLUSH_COMMANDS_BIT will not help when waiting for a fence
|
||||
// > command issued in another context's command stream to complete.
|
||||
// > Applications which block on a fence sync object must take
|
||||
// > additional steps to assure that the context from which the
|
||||
// > corresponding fence command was issued has flushed that command
|
||||
// > to the graphics pipeline.
|
||||
glFlush();
|
||||
if (_threadedSubmit) {
|
||||
auto& newComposite = _compositeInfos[_renderingIndex];
|
||||
newComposite.fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
|
||||
// https://www.opengl.org/registry/specs/ARB/sync.txt:
|
||||
// > The simple flushing behavior defined by
|
||||
// > SYNC_FLUSH_COMMANDS_BIT will not help when waiting for a fence
|
||||
// > command issued in another context's command stream to complete.
|
||||
// > Applications which block on a fence sync object must take
|
||||
// > additional steps to assure that the context from which the
|
||||
// > corresponding fence command was issued has flushed that command
|
||||
// > to the graphics pipeline.
|
||||
glFlush();
|
||||
|
||||
if (!newComposite.textureID) {
|
||||
newComposite.textureID = getGLBackend()->getTextureID(newComposite.texture, false);
|
||||
if (!newComposite.textureID) {
|
||||
newComposite.textureID = getGLBackend()->getTextureID(newComposite.texture, false);
|
||||
}
|
||||
withPresentThreadLock([&] {
|
||||
_submitThread->update(newComposite);
|
||||
});
|
||||
}
|
||||
withPresentThreadLock([&] {
|
||||
_submitThread->update(newComposite);
|
||||
});
|
||||
#endif
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::hmdPresent() {
|
||||
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentFrame->frameIndex)
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
_submitThread->waitForPresent();
|
||||
#else
|
||||
GLuint glTexId = getGLBackend()->getTextureID(_compositeFramebuffer->getRenderBuffer(0), false);
|
||||
vr::Texture_t vrTexture{ (void*)glTexId, vr::API_OpenGL, vr::ColorSpace_Auto };
|
||||
vr::VRCompositor()->Submit(vr::Eye_Left, &vrTexture, &OPENVR_TEXTURE_BOUNDS_LEFT);
|
||||
vr::VRCompositor()->Submit(vr::Eye_Right, &vrTexture, &OPENVR_TEXTURE_BOUNDS_RIGHT);
|
||||
vr::VRCompositor()->PostPresentHandoff();
|
||||
#endif
|
||||
if (_threadedSubmit) {
|
||||
_submitThread->waitForPresent();
|
||||
} else {
|
||||
GLuint glTexId = getGLBackend()->getTextureID(_compositeFramebuffer->getRenderBuffer(0), false);
|
||||
vr::Texture_t vrTexture { (void*)glTexId, vr::API_OpenGL, vr::ColorSpace_Auto };
|
||||
vr::VRCompositor()->Submit(vr::Eye_Left, &vrTexture, &OPENVR_TEXTURE_BOUNDS_LEFT);
|
||||
vr::VRCompositor()->Submit(vr::Eye_Right, &vrTexture, &OPENVR_TEXTURE_BOUNDS_RIGHT);
|
||||
vr::VRCompositor()->PostPresentHandoff();
|
||||
_presentRate.increment();
|
||||
}
|
||||
}
|
||||
|
||||
void OpenVrDisplayPlugin::postPreview() {
|
||||
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentFrame->frameIndex)
|
||||
PoseData nextRender, nextSim;
|
||||
nextRender.frameIndex = presentCount();
|
||||
#if !OPENVR_THREADED_SUBMIT
|
||||
vr::VRCompositor()->WaitGetPoses(nextRender.vrPoses, vr::k_unMaxTrackedDeviceCount, nextSim.vrPoses, vr::k_unMaxTrackedDeviceCount);
|
||||
|
||||
glm::mat4 resetMat;
|
||||
withPresentThreadLock([&] {
|
||||
resetMat = _sensorResetMat;
|
||||
});
|
||||
nextRender.update(resetMat);
|
||||
nextSim.update(resetMat);
|
||||
withPresentThreadLock([&] {
|
||||
_nextSimPoseData = nextSim;
|
||||
});
|
||||
_nextRenderPoseData = nextRender;
|
||||
|
||||
// FIXME - this looks wrong!
|
||||
_hmdActivityLevel = vr::k_EDeviceActivityLevel_UserInteraction; // _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
|
||||
#else
|
||||
_hmdActivityLevel = _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
|
||||
#endif
|
||||
|
||||
if (!_threadedSubmit) {
|
||||
vr::VRCompositor()->WaitGetPoses(nextRender.vrPoses, vr::k_unMaxTrackedDeviceCount, nextSim.vrPoses, vr::k_unMaxTrackedDeviceCount);
|
||||
|
||||
glm::mat4 resetMat;
|
||||
withPresentThreadLock([&] {
|
||||
resetMat = _sensorResetMat;
|
||||
});
|
||||
nextRender.update(resetMat);
|
||||
nextSim.update(resetMat);
|
||||
withPresentThreadLock([&] {
|
||||
_nextSimPoseData = nextSim;
|
||||
});
|
||||
_nextRenderPoseData = nextRender;
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
bool OpenVrDisplayPlugin::isHmdMounted() const {
|
||||
|
@ -684,3 +698,7 @@ void OpenVrDisplayPlugin::unsuppressKeyboard() {
|
|||
bool OpenVrDisplayPlugin::isKeyboardVisible() {
|
||||
return isOpenVrKeyboardShown();
|
||||
}
|
||||
|
||||
int OpenVrDisplayPlugin::getRequiredThreadCount() const {
|
||||
return Parent::getRequiredThreadCount() + (_threadedSubmit ? 1 : 0);
|
||||
}
|
||||
|
|
|
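With threaded submit now a runtime decision, the target frame rate depends on two facts discovered at activation time: whether the machine has few cores, which forces interleaved reprojection, and whether the compositor reports async reprojection. A condensed sketch of that decision; the async-reprojection flag is passed in as a parameter here rather than queried from vr::Compositor_FrameTiming as the diff does:

// Condensed sketch of getTargetFrameRate(): halve the rate only when
// interleaved reprojection is forced and the compositor is not already doing
// async reprojection. Requires QtCore for QThread::idealThreadCount().
#include <QThread>

static const float TARGET_RATE_OpenVr = 90.0f;
static const int MIN_CORES_FOR_NORMAL_RENDER = 5;

float targetFrameRate(bool asyncReprojectionActive) {
    bool forceInterleavedReprojection =
        QThread::idealThreadCount() < MIN_CORES_FOR_NORMAL_RENDER;
    if (forceInterleavedReprojection && !asyncReprojectionActive) {
        return TARGET_RATE_OpenVr / 2.0f;
    }
    return TARGET_RATE_OpenVr;
}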
@ -15,9 +15,6 @@
|
|||
|
||||
const float TARGET_RATE_OpenVr = 90.0f; // FIXME: get from sdk tracked device property? This number is vive-only.
|
||||
|
||||
#define OPENVR_THREADED_SUBMIT 1
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
namespace gl {
|
||||
class OffscreenContext;
|
||||
}
|
||||
|
@ -34,7 +31,6 @@ struct CompositeInfo {
|
|||
glm::mat4 pose;
|
||||
GLsync fence{ 0 };
|
||||
};
|
||||
#endif
|
||||
|
||||
class OpenVrDisplayPlugin : public HmdDisplayPlugin {
|
||||
using Parent = HmdDisplayPlugin;
|
||||
|
@ -44,7 +40,8 @@ public:
|
|||
|
||||
void init() override;
|
||||
|
||||
float getTargetFrameRate() const override { return TARGET_RATE_OpenVr; }
|
||||
float getTargetFrameRate() const override;
|
||||
bool hasAsyncReprojection() const override { return _asyncReprojectionActive; }
|
||||
|
||||
void customizeContext() override;
|
||||
void uncustomizeContext() override;
|
||||
|
@ -58,8 +55,8 @@ public:
|
|||
void unsuppressKeyboard() override;
|
||||
bool isKeyboardVisible() override;
|
||||
|
||||
// Needs an additional thread for VR submission
|
||||
int getRequiredThreadCount() const override { return Parent::getRequiredThreadCount() + 1; }
|
||||
// Possibly needs an additional thread for VR submission
|
||||
int getRequiredThreadCount() const override;
|
||||
|
||||
protected:
|
||||
bool internalActivate() override;
|
||||
|
@ -71,7 +68,6 @@ protected:
|
|||
bool isHmdMounted() const override;
|
||||
void postPreview() override;
|
||||
|
||||
|
||||
private:
|
||||
vr::IVRSystem* _system { nullptr };
|
||||
std::atomic<vr::EDeviceActivityLevel> _hmdActivityLevel { vr::k_EDeviceActivityLevel_Unknown };
|
||||
|
@ -80,12 +76,13 @@ private:
|
|||
|
||||
vr::HmdMatrix34_t _lastGoodHMDPose;
|
||||
mat4 _sensorResetMat;
|
||||
bool _threadedSubmit { true };
|
||||
|
||||
#if OPENVR_THREADED_SUBMIT
|
||||
CompositeInfo::Array _compositeInfos;
|
||||
size_t _renderingIndex { 0 };
|
||||
std::shared_ptr<OpenVrSubmitThread> _submitThread;
|
||||
std::shared_ptr<gl::OffscreenContext> _submitCanvas;
|
||||
friend class OpenVrSubmitThread;
|
||||
#endif
|
||||
|
||||
bool _asyncReprojectionActive { false };
|
||||
};
|
||||
|
|