Mirror of https://github.com/overte-org/overte.git, synced 2025-04-21 08:04:01 +02:00

Merge branch 'master' of github.com:highfidelity/hifi into fix-ray-intersection

Commit 9935b2ba22: 38 changed files with 1342 additions and 726 deletions
@@ -49,12 +49,12 @@ AssignmentClient::AssignmentClient(Assignment::Type requestAssignmentType, QStri
    LogUtils::init();

    QSettings::setDefaultFormat(QSettings::IniFormat);

    // create a NodeList as an unassigned client
    DependencyManager::registerInheritance<LimitedNodeList, NodeList>();
    auto addressManager = DependencyManager::set<AddressManager>();
    auto nodeList = DependencyManager::set<NodeList>(NodeType::Unassigned); // Order is important

    auto animationCache = DependencyManager::set<AnimationCache>();
    auto avatarHashMap = DependencyManager::set<AvatarHashMap>();
    auto entityScriptingInterface = DependencyManager::set<EntityScriptingInterface>();
@@ -76,18 +76,18 @@ AssignmentClient::AssignmentClient(Assignment::Type requestAssignmentType, QStri
        qDebug() << "The destination wallet UUID for credits is" << uuidStringWithoutCurlyBraces(walletUUID);
        _requestAssignment.setWalletUUID(walletUUID);
    }

    // check for an overriden assignment server hostname
    if (assignmentServerHostname != "") {
        // change the hostname for our assignment server
        _assignmentServerHostname = assignmentServerHostname;
    }

    _assignmentServerSocket = HifiSockAddr(_assignmentServerHostname, assignmentServerPort, true);
    nodeList->setAssignmentServerSocket(_assignmentServerSocket);

    qDebug() << "Assignment server socket is" << _assignmentServerSocket;

    // call a timer function every ASSIGNMENT_REQUEST_INTERVAL_MSECS to ask for assignment, if required
    qDebug() << "Waiting for assignment -" << _requestAssignment;
@@ -104,35 +104,35 @@ AssignmentClient::AssignmentClient(Assignment::Type requestAssignmentType, QStri
    // connections to AccountManager for authentication
    connect(&AccountManager::getInstance(), &AccountManager::authRequired,
            this, &AssignmentClient::handleAuthenticationRequest);

    // Create Singleton objects on main thread
    NetworkAccessManager::getInstance();

    // did we get an assignment-client monitor port?
    if (assignmentMonitorPort > 0) {
        _assignmentClientMonitorSocket = HifiSockAddr(DEFAULT_ASSIGNMENT_CLIENT_MONITOR_HOSTNAME, assignmentMonitorPort);

        qDebug() << "Assignment-client monitor socket is" << _assignmentClientMonitorSocket;

        // Hook up a timer to send this child's status to the Monitor once per second
        setUpStatsToMonitor();
    }
}

void AssignmentClient::stopAssignmentClient() {
    qDebug() << "Forced stop of assignment-client.";

    _requestTimer.stop();
    _statsTimerACM.stop();

    if (_currentAssignment) {
        // grab the thread for the current assignment
        QThread* currentAssignmentThread = _currentAssignment->thread();

        // ask the current assignment to stop
        QMetaObject::invokeMethod(_currentAssignment, "stop", Qt::BlockingQueuedConnection);

        // ask the current assignment to delete itself on its thread
        _currentAssignment->deleteLater();
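For reference, QMetaObject::invokeMethod with Qt::BlockingQueuedConnection runs the named slot on the receiver's thread and blocks the caller until it has returned. A minimal sketch of that pattern, using plain Qt only (the Worker class below is illustrative, not part of this diff):

#include <QCoreApplication>
#include <QDebug>
#include <QMetaObject>
#include <QThread>

class Worker : public QObject {
    Q_OBJECT
public slots:
    // Runs on whichever thread the Worker lives on.
    void stop() { qDebug() << "stop() ran on" << QThread::currentThread(); }
};

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    QThread workerThread;
    Worker worker;
    worker.moveToThread(&workerThread);
    workerThread.start();

    // Blocks here until the worker thread's event loop has executed stop().
    QMetaObject::invokeMethod(&worker, "stop", Qt::BlockingQueuedConnection);

    workerThread.quit();
    workerThread.wait();
    return 0;
}

#include "main.moc"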
@@ -148,9 +148,9 @@ void AssignmentClient::stopAssignmentClient() {

void AssignmentClient::aboutToQuit() {
    stopAssignmentClient();

    // clear the log handler so that Qt doesn't call the destructor on LogHandler
    qInstallMessageHandler(0);
}
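For reference, qInstallMessageHandler(0) restores Qt's default message handler; the comment above is about detaching the custom handler before its backing object (LogHandler) is destroyed. A minimal install/restore sketch, with an illustrative handler that is not this repository's LogHandler:

#include <QCoreApplication>
#include <QDebug>
#include <cstdio>

// A stand-in for a custom log sink such as LogHandler.
static void verboseHandler(QtMsgType type, const QMessageLogContext& context, const QString& message) {
    Q_UNUSED(context);
    std::fprintf(stderr, "[custom:%d] %s\n", static_cast<int>(type), qPrintable(message));
}

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    qInstallMessageHandler(verboseHandler);   // route qDebug()/qWarning() through the custom sink
    qDebug() << "handled by verboseHandler";

    qInstallMessageHandler(nullptr);          // restore the default handler before the sink is torn down
    qDebug() << "handled by the default handler again";
    return 0;
}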
@@ -175,9 +175,9 @@ void AssignmentClient::sendStatsPacketToACM() {

void AssignmentClient::sendAssignmentRequest() {
    if (!_currentAssignment) {

        auto nodeList = DependencyManager::get<NodeList>();

        if (_assignmentServerHostname == "localhost") {
            // we want to check again for the local domain-server port in case the DS has restarted
            quint16 localAssignmentServerPort;
@@ -186,13 +186,13 @@ void AssignmentClient::sendAssignmentRequest() {
                if (localAssignmentServerPort != _assignmentServerSocket.getPort()) {
                    qDebug() << "Port for local assignment server read from shared memory is"
                        << localAssignmentServerPort;

                    _assignmentServerSocket.setPort(localAssignmentServerPort);
                    nodeList->setAssignmentServerSocket(_assignmentServerSocket);
                }
            }
        }

        nodeList->sendAssignment(_requestAssignment);
    }
}
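The shared-memory re-check above presumably reads a port value that the local domain-server publishes; the key and layout are not visible in this hunk. A generic sketch of reading a quint16 from QSharedMemory, with an invented key name purely for illustration:

#include <QDebug>
#include <QSharedMemory>
#include <cstring>

// Hypothetical key; the real key used by the domain-server is not shown in this diff.
static const char* LOCAL_PORT_SMEM_KEY = "example-local-server-port";

bool readLocalServerPort(quint16& outPort) {
    QSharedMemory sharedPortMem(LOCAL_PORT_SMEM_KEY);
    if (!sharedPortMem.attach(QSharedMemory::ReadOnly)) {
        return false;   // nothing published (e.g. the local server is not running)
    }

    sharedPortMem.lock();
    std::memcpy(&outPort, sharedPortMem.constData(), sizeof(outPort));
    sharedPortMem.unlock();
    sharedPortMem.detach();
    return true;
}

int main() {
    quint16 port = 0;
    if (readLocalServerPort(port)) {
        qDebug() << "local server port from shared memory is" << port;
    } else {
        qDebug() << "no local server port published";
    }
    return 0;
}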
@@ -232,13 +232,15 @@ void AssignmentClient::readPendingDatagrams() {
            connect(workerThread, &QThread::started, _currentAssignment.data(), &ThreadedAssignment::run);

-            // once the ThreadedAssignment says it is finished - we ask it to deleteLater
+            // Once the ThreadedAssignment says it is finished - we ask it to deleteLater
+            // This is a queued connection so that it is put into the event loop to be processed by the worker
+            // thread when it is ready.
            connect(_currentAssignment.data(), &ThreadedAssignment::finished, _currentAssignment.data(),
-                    &ThreadedAssignment::deleteLater);
+                    &ThreadedAssignment::deleteLater, Qt::QueuedConnection);

            // once it is deleted, we quit the worker thread
            connect(_currentAssignment.data(), &ThreadedAssignment::destroyed, workerThread, &QThread::quit);

            // have the worker thread remove itself once it is done
            connect(workerThread, &QThread::finished, workerThread, &QThread::deleteLater);
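On the Qt::QueuedConnection argument added above: the explicit connection type posts the deleteLater invocation to the worker thread's event loop instead of calling it directly from inside the finished() emission, which is what the new comment describes. A self-contained sketch of the same pattern (the Worker class is illustrative, not ThreadedAssignment):

#include <QCoreApplication>
#include <QDebug>
#include <QThread>

class Worker : public QObject {
    Q_OBJECT
signals:
    void finished();
public slots:
    void run() {
        qDebug() << "running on" << QThread::currentThread();
        emit finished();
    }
};

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    QThread workerThread;
    Worker* worker = new Worker();
    worker->moveToThread(&workerThread);

    QObject::connect(&workerThread, &QThread::started, worker, &Worker::run);
    // Queued: the deleteLater call is posted to the worker thread's event loop
    // rather than being invoked directly from inside emit finished().
    QObject::connect(worker, &Worker::finished, worker, &QObject::deleteLater, Qt::QueuedConnection);
    QObject::connect(worker, &QObject::destroyed, &workerThread, &QThread::quit);
    QObject::connect(&workerThread, &QThread::finished, &app, &QCoreApplication::quit);

    workerThread.start();
    return app.exec();
}

#include "main.moc"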
@@ -264,7 +266,7 @@ void AssignmentClient::readPendingDatagrams() {
            if (senderSockAddr.getAddress() == QHostAddress::LocalHost ||
                senderSockAddr.getAddress() == QHostAddress::LocalHostIPv6) {
                qDebug() << "AssignmentClientMonitor at" << senderSockAddr << "requested stop via PacketTypeStopNode.";

                QCoreApplication::quit();
            } else {
                qDebug() << "Got a stop packet from other than localhost.";
@@ -306,7 +308,7 @@ void AssignmentClient::assignmentCompleted() {
    // we expect that to be here the previous assignment has completely cleaned up
    assert(_currentAssignment.isNull());

    // reset our current assignment pointer to NULL now that it has been deleted
    _currentAssignment = NULL;

    // reset the logging target to the the CHILD_TARGET_NAME
@@ -317,7 +319,7 @@ void AssignmentClient::assignmentCompleted() {
    auto nodeList = DependencyManager::get<NodeList>();

    // have us handle incoming NodeList datagrams again, and make sure our ThreadedAssignment isn't handling them
    connect(&nodeList->getNodeSocket(), &QUdpSocket::readyRead, this, &AssignmentClient::readPendingDatagrams);

    // reset our NodeList by switching back to unassigned and clearing the list
    nodeList->setOwnerType(NodeType::Unassigned);
@@ -24,7 +24,7 @@ class AssignmentClient : public QObject {
public:
    AssignmentClient(Assignment::Type requestAssignmentType, QString assignmentPool,
                     QUuid walletUUID, QString assignmentServerHostname, quint16 assignmentServerPort,
                     quint16 assignmentMonitorPort);
private slots:
    void sendAssignmentRequest();
@ -109,11 +109,11 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
|
|||
// Basically, we'll repeat that last frame until it has a frame to mix. Depending on how many times
|
||||
// we've repeated that frame in a row, we'll gradually fade that repeated frame into silence.
|
||||
// This improves the perceived quality of the audio slightly.
|
||||
|
||||
|
||||
bool showDebug = false; // (randFloat() < 0.05f);
|
||||
|
||||
|
||||
float repeatedFrameFadeFactor = 1.0f;
|
||||
|
||||
|
||||
if (!streamToAdd->lastPopSucceeded()) {
|
||||
if (_streamSettings._repetitionWithFade && !streamToAdd->getLastPopOutput().isNull()) {
|
||||
// reptition with fade is enabled, and we do have a valid previous frame to repeat.
|
||||
|
@ -126,73 +126,73 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
|
|||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// at this point, we know streamToAdd's last pop output is valid
|
||||
|
||||
|
||||
// if the frame we're about to mix is silent, bail
|
||||
if (streamToAdd->getLastPopOutputLoudness() == 0.0f) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
float bearingRelativeAngleToSource = 0.0f;
|
||||
float attenuationCoefficient = 1.0f;
|
||||
int numSamplesDelay = 0;
|
||||
float weakChannelAmplitudeRatio = 1.0f;
|
||||
|
||||
|
||||
// Is the source that I am mixing my own?
|
||||
bool sourceIsSelf = (streamToAdd == listeningNodeStream);
|
||||
|
||||
|
||||
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream->getPosition();
|
||||
|
||||
|
||||
float distanceBetween = glm::length(relativePosition);
|
||||
|
||||
|
||||
if (distanceBetween < EPSILON) {
|
||||
distanceBetween = EPSILON;
|
||||
}
|
||||
|
||||
|
||||
if (streamToAdd->getLastPopOutputTrailingLoudness() / distanceBetween <= _minAudibilityThreshold) {
|
||||
// according to mixer performance we have decided this does not get to be mixed in
|
||||
// bail out
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
++_sumMixes;
|
||||
|
||||
|
||||
if (streamToAdd->getType() == PositionalAudioStream::Injector) {
|
||||
attenuationCoefficient *= reinterpret_cast<InjectedAudioStream*>(streamToAdd)->getAttenuationRatio();
|
||||
if (showDebug) {
|
||||
qDebug() << "AttenuationRatio: " << reinterpret_cast<InjectedAudioStream*>(streamToAdd)->getAttenuationRatio();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (showDebug) {
|
||||
qDebug() << "distance: " << distanceBetween;
|
||||
}
|
||||
|
||||
|
||||
glm::quat inverseOrientation = glm::inverse(listeningNodeStream->getOrientation());
|
||||
|
||||
|
||||
if (!sourceIsSelf && (streamToAdd->getType() == PositionalAudioStream::Microphone)) {
|
||||
// source is another avatar, apply fixed off-axis attenuation to make them quieter as they turn away from listener
|
||||
glm::vec3 rotatedListenerPosition = glm::inverse(streamToAdd->getOrientation()) * relativePosition;
|
||||
|
||||
|
||||
float angleOfDelivery = glm::angle(glm::vec3(0.0f, 0.0f, -1.0f),
|
||||
glm::normalize(rotatedListenerPosition));
|
||||
|
||||
|
||||
const float MAX_OFF_AXIS_ATTENUATION = 0.2f;
|
||||
const float OFF_AXIS_ATTENUATION_FORMULA_STEP = (1 - MAX_OFF_AXIS_ATTENUATION) / 2.0f;
|
||||
|
||||
|
||||
float offAxisCoefficient = MAX_OFF_AXIS_ATTENUATION +
|
||||
(OFF_AXIS_ATTENUATION_FORMULA_STEP * (angleOfDelivery / PI_OVER_TWO));
|
||||
|
||||
|
||||
if (showDebug) {
|
||||
qDebug() << "angleOfDelivery" << angleOfDelivery << "offAxisCoefficient: " << offAxisCoefficient;
|
||||
|
||||
|
||||
}
|
||||
// multiply the current attenuation coefficient by the calculated off axis coefficient
|
||||
|
||||
|
||||
attenuationCoefficient *= offAxisCoefficient;
|
||||
}
|
||||
|
||||
|
||||
float attenuationPerDoublingInDistance = _attenuationPerDoublingInDistance;
|
||||
for (int i = 0; i < _zonesSettings.length(); ++i) {
|
||||
if (_audioZones[_zonesSettings[i].source].contains(streamToAdd->getPosition()) &&
|
||||
|
@ -201,67 +201,67 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
|
|||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (distanceBetween >= ATTENUATION_BEGINS_AT_DISTANCE) {
|
||||
// calculate the distance coefficient using the distance to this node
|
||||
float distanceCoefficient = 1 - (logf(distanceBetween / ATTENUATION_BEGINS_AT_DISTANCE) / logf(2.0f)
|
||||
* attenuationPerDoublingInDistance);
|
||||
|
||||
|
||||
if (distanceCoefficient < 0) {
|
||||
distanceCoefficient = 0;
|
||||
}
|
||||
|
||||
|
||||
// multiply the current attenuation coefficient by the distance coefficient
|
||||
attenuationCoefficient *= distanceCoefficient;
|
||||
if (showDebug) {
|
||||
qDebug() << "distanceCoefficient: " << distanceCoefficient;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (!sourceIsSelf) {
|
||||
// Compute sample delay for the two ears to create phase panning
|
||||
glm::vec3 rotatedSourcePosition = inverseOrientation * relativePosition;
|
||||
|
||||
// project the rotated source position vector onto the XZ plane
|
||||
rotatedSourcePosition.y = 0.0f;
|
||||
|
||||
|
||||
// produce an oriented angle about the y-axis
|
||||
bearingRelativeAngleToSource = glm::orientedAngle(glm::vec3(0.0f, 0.0f, -1.0f),
|
||||
glm::normalize(rotatedSourcePosition),
|
||||
glm::vec3(0.0f, 1.0f, 0.0f));
|
||||
|
||||
|
||||
const float PHASE_AMPLITUDE_RATIO_AT_90 = 0.5;
|
||||
|
||||
|
||||
// figure out the number of samples of delay and the ratio of the amplitude
|
||||
// in the weak channel for audio spatialization
|
||||
float sinRatio = fabsf(sinf(bearingRelativeAngleToSource));
|
||||
numSamplesDelay = SAMPLE_PHASE_DELAY_AT_90 * sinRatio;
|
||||
weakChannelAmplitudeRatio = 1 - (PHASE_AMPLITUDE_RATIO_AT_90 * sinRatio);
|
||||
|
||||
|
||||
if (distanceBetween < RADIUS_OF_HEAD) {
|
||||
// Diminish phase panning if source would be inside head
|
||||
numSamplesDelay *= distanceBetween / RADIUS_OF_HEAD;
|
||||
weakChannelAmplitudeRatio += (PHASE_AMPLITUDE_RATIO_AT_90 * sinRatio) * distanceBetween / RADIUS_OF_HEAD;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (showDebug) {
|
||||
qDebug() << "attenuation: " << attenuationCoefficient;
|
||||
qDebug() << "bearingRelativeAngleToSource: " << bearingRelativeAngleToSource << " numSamplesDelay: " << numSamplesDelay;
|
||||
}
|
||||
|
||||
|
||||
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd->getLastPopOutput();
|
||||
|
||||
|
||||
if (!streamToAdd->isStereo()) {
|
||||
// this is a mono stream, which means it gets full attenuation and spatialization
|
||||
|
||||
|
||||
// we need to do several things in this process:
|
||||
// 1) convert from mono to stereo by copying each input sample into the left and right output samples
|
||||
// 2)
|
||||
// 2)
|
||||
// 2) apply an attenuation AND fade to all samples (left and right)
|
||||
// 3) based on the bearing relative angle to the source we will weaken and delay either the left or
|
||||
// right channel of the input into the output
|
||||
// 4) because one of these channels is delayed, we will need to use historical samples from
|
||||
// 4) because one of these channels is delayed, we will need to use historical samples from
|
||||
// the input stream for that delayed channel
|
||||
|
||||
// Mono input to stereo output (item 1 above)
|
||||
|
@ -274,12 +274,12 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
|
|||
|
||||
// determine which side is weak and delayed (item 3 above)
|
||||
bool rightSideWeakAndDelayed = (bearingRelativeAngleToSource > 0.0f);
|
||||
|
||||
|
||||
// since we're converting from mono to stereo, we'll use these two indices to step through
|
||||
// the output samples. we'll increment each index independently in the loop
|
||||
int leftDestinationIndex = 0;
|
||||
int rightDestinationIndex = 1;
|
||||
|
||||
|
||||
// One of our two channels will be delayed (determined below). We'll use this index to step
|
||||
// through filling in our output with the historical samples for the delayed channel. (item 4 above)
|
||||
int delayedChannelHistoricalAudioOutputIndex;
|
||||
|
@ -287,14 +287,14 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
|
|||
// All samples will be attenuated by at least this much
|
||||
float leftSideAttenuation = attenuationAndFade;
|
||||
float rightSideAttenuation = attenuationAndFade;
|
||||
|
||||
|
||||
// The weak/delayed channel will be attenuated by this additional amount
|
||||
float attenuationAndWeakChannelRatioAndFade = attenuationAndFade * weakChannelAmplitudeRatio;
|
||||
|
||||
|
||||
// Now, based on the determination of which side is weak and delayed, set up our true starting point
|
||||
// for our indexes, as well as the appropriate attenuation for each channel
|
||||
if (rightSideWeakAndDelayed) {
|
||||
delayedChannelHistoricalAudioOutputIndex = rightDestinationIndex;
|
||||
delayedChannelHistoricalAudioOutputIndex = rightDestinationIndex;
|
||||
rightSideAttenuation = attenuationAndWeakChannelRatioAndFade;
|
||||
rightDestinationIndex += (numSamplesDelay * OUTPUT_SAMPLES_PER_INPUT_SAMPLE);
|
||||
} else {
|
||||
|
@ -315,7 +315,7 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
|
|||
for (int i = 0; i < numSamplesDelay; i++) {
|
||||
int16_t originalHistoricalSample = *delayStreamSourceSamples;
|
||||
|
||||
_preMixSamples[delayedChannelHistoricalAudioOutputIndex] += originalHistoricalSample
|
||||
_preMixSamples[delayedChannelHistoricalAudioOutputIndex] += originalHistoricalSample
|
||||
* attenuationAndWeakChannelRatioAndFade;
|
||||
++delayStreamSourceSamples; // move our input pointer
|
||||
delayedChannelHistoricalAudioOutputIndex += OUTPUT_SAMPLES_PER_INPUT_SAMPLE; // move our output sample
|
||||
|
@ -339,7 +339,7 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
|
|||
leftDestinationIndex += OUTPUT_SAMPLES_PER_INPUT_SAMPLE;
|
||||
rightDestinationIndex += OUTPUT_SAMPLES_PER_INPUT_SAMPLE;
|
||||
}
|
||||
|
||||
|
||||
} else {
|
||||
int stereoDivider = streamToAdd->isStereo() ? 1 : 2;
|
||||
|
||||
|
@ -355,44 +355,44 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
|
|||
if (!sourceIsSelf && _enableFilter && !streamToAdd->ignorePenumbraFilter()) {
|
||||
|
||||
const float TWO_OVER_PI = 2.0f / PI;
|
||||
|
||||
|
||||
const float ZERO_DB = 1.0f;
|
||||
const float NEGATIVE_ONE_DB = 0.891f;
|
||||
const float NEGATIVE_THREE_DB = 0.708f;
|
||||
|
||||
|
||||
const float FILTER_GAIN_AT_0 = ZERO_DB; // source is in front
|
||||
const float FILTER_GAIN_AT_90 = NEGATIVE_ONE_DB; // source is incident to left or right ear
|
||||
const float FILTER_GAIN_AT_180 = NEGATIVE_THREE_DB; // source is behind
|
||||
|
||||
|
||||
const float FILTER_CUTOFF_FREQUENCY_HZ = 1000.0f;
|
||||
|
||||
|
||||
const float penumbraFilterFrequency = FILTER_CUTOFF_FREQUENCY_HZ; // constant frequency
|
||||
const float penumbraFilterSlope = NEGATIVE_THREE_DB; // constant slope
|
||||
|
||||
|
||||
float penumbraFilterGainL;
|
||||
float penumbraFilterGainR;
|
||||
|
||||
// variable gain calculation broken down by quadrant
|
||||
if (-bearingRelativeAngleToSource < -PI_OVER_TWO && -bearingRelativeAngleToSource > -PI) {
|
||||
penumbraFilterGainL = TWO_OVER_PI *
|
||||
penumbraFilterGainL = TWO_OVER_PI *
|
||||
(FILTER_GAIN_AT_0 - FILTER_GAIN_AT_180) * (-bearingRelativeAngleToSource + PI_OVER_TWO) + FILTER_GAIN_AT_0;
|
||||
penumbraFilterGainR = TWO_OVER_PI *
|
||||
penumbraFilterGainR = TWO_OVER_PI *
|
||||
(FILTER_GAIN_AT_90 - FILTER_GAIN_AT_180) * (-bearingRelativeAngleToSource + PI_OVER_TWO) + FILTER_GAIN_AT_90;
|
||||
} else if (-bearingRelativeAngleToSource <= PI && -bearingRelativeAngleToSource > PI_OVER_TWO) {
|
||||
penumbraFilterGainL = TWO_OVER_PI *
|
||||
penumbraFilterGainL = TWO_OVER_PI *
|
||||
(FILTER_GAIN_AT_180 - FILTER_GAIN_AT_90) * (-bearingRelativeAngleToSource - PI) + FILTER_GAIN_AT_180;
|
||||
penumbraFilterGainR = TWO_OVER_PI *
|
||||
penumbraFilterGainR = TWO_OVER_PI *
|
||||
(FILTER_GAIN_AT_180 - FILTER_GAIN_AT_0) * (-bearingRelativeAngleToSource - PI) + FILTER_GAIN_AT_180;
|
||||
} else if (-bearingRelativeAngleToSource <= PI_OVER_TWO && -bearingRelativeAngleToSource > 0) {
|
||||
penumbraFilterGainL = TWO_OVER_PI *
|
||||
(FILTER_GAIN_AT_90 - FILTER_GAIN_AT_0) * (-bearingRelativeAngleToSource - PI_OVER_TWO) + FILTER_GAIN_AT_90;
|
||||
penumbraFilterGainR = FILTER_GAIN_AT_0;
|
||||
penumbraFilterGainR = FILTER_GAIN_AT_0;
|
||||
} else {
|
||||
penumbraFilterGainL = FILTER_GAIN_AT_0;
|
||||
penumbraFilterGainR = TWO_OVER_PI *
|
||||
penumbraFilterGainR = TWO_OVER_PI *
|
||||
(FILTER_GAIN_AT_0 - FILTER_GAIN_AT_90) * (-bearingRelativeAngleToSource) + FILTER_GAIN_AT_0;
|
||||
}
|
||||
|
||||
|
||||
if (distanceBetween < RADIUS_OF_HEAD) {
|
||||
// Diminish effect if source would be inside head
|
||||
penumbraFilterGainL += (1.0f - penumbraFilterGainL) * (1.0f - distanceBetween / RADIUS_OF_HEAD);
|
||||
|
@ -405,16 +405,16 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
|
|||
<< "gainR=" << penumbraFilterGainR
|
||||
<< "angle=" << -bearingRelativeAngleToSource;
|
||||
}
|
||||
|
||||
|
||||
// Get our per listener/source data so we can get our filter
|
||||
AudioFilterHSF1s& penumbraFilter = listenerNodeData->getListenerSourcePairData(streamUUID)->getPenumbraFilter();
|
||||
|
||||
|
||||
// set the gain on both filter channels
|
||||
penumbraFilter.setParameters(0, 0, AudioConstants::SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainL, penumbraFilterSlope);
|
||||
penumbraFilter.setParameters(0, 1, AudioConstants::SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainR, penumbraFilterSlope);
|
||||
penumbraFilter.render(_preMixSamples, _preMixSamples, AudioConstants::NETWORK_FRAME_SAMPLES_STEREO / 2);
|
||||
}
|
||||
|
||||
|
||||
// Actually mix the _preMixSamples into the _mixSamples here.
|
||||
for (int s = 0; s < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; s++) {
|
||||
_mixSamples[s] = glm::clamp(_mixSamples[s] + _preMixSamples[s], AudioConstants::MIN_SAMPLE_VALUE,
|
||||
|
@ -427,30 +427,30 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData* l
|
|||
int AudioMixer::prepareMixForListeningNode(Node* node) {
|
||||
AvatarAudioStream* nodeAudioStream = static_cast<AudioMixerClientData*>(node->getLinkedData())->getAvatarAudioStream();
|
||||
AudioMixerClientData* listenerNodeData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
|
||||
|
||||
// zero out the client mix for this node
|
||||
memset(_preMixSamples, 0, sizeof(_preMixSamples));
|
||||
memset(_mixSamples, 0, sizeof(_mixSamples));
|
||||
|
||||
// loop through all other nodes that have sufficient audio to mix
|
||||
int streamsMixed = 0;
|
||||
|
||||
|
||||
DependencyManager::get<NodeList>()->eachNode([&](const SharedNodePointer& otherNode){
|
||||
if (otherNode->getLinkedData()) {
|
||||
AudioMixerClientData* otherNodeClientData = (AudioMixerClientData*) otherNode->getLinkedData();
|
||||
|
||||
|
||||
// enumerate the ARBs attached to the otherNode and add all that should be added to mix
|
||||
|
||||
|
||||
const QHash<QUuid, PositionalAudioStream*>& otherNodeAudioStreams = otherNodeClientData->getAudioStreams();
|
||||
QHash<QUuid, PositionalAudioStream*>::ConstIterator i;
|
||||
for (i = otherNodeAudioStreams.constBegin(); i != otherNodeAudioStreams.constEnd(); i++) {
|
||||
PositionalAudioStream* otherNodeStream = i.value();
|
||||
QUuid streamUUID = i.key();
|
||||
|
||||
|
||||
if (otherNodeStream->getType() == PositionalAudioStream::Microphone) {
|
||||
streamUUID = otherNode->getUUID();
|
||||
}
|
||||
|
||||
|
||||
if (*otherNode != *node || otherNodeStream->shouldLoopbackForNode()) {
|
||||
streamsMixed += addStreamToMixForListeningNodeWithStream(listenerNodeData, streamUUID,
|
||||
otherNodeStream, nodeAudioStream);
|
||||
|
@ -458,13 +458,13 @@ int AudioMixer::prepareMixForListeningNode(Node* node) {
|
|||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
return streamsMixed;
|
||||
}
|
||||
|
||||
void AudioMixer::sendAudioEnvironmentPacket(SharedNodePointer node) {
|
||||
static char clientEnvBuffer[MAX_PACKET_SIZE];
|
||||
|
||||
|
||||
// Send stream properties
|
||||
bool hasReverb = false;
|
||||
float reverbTime, wetLevel;
|
||||
|
@ -477,7 +477,7 @@ void AudioMixer::sendAudioEnvironmentPacket(SharedNodePointer node) {
|
|||
hasReverb = true;
|
||||
reverbTime = _zoneReverbSettings[i].reverbTime;
|
||||
wetLevel = _zoneReverbSettings[i].wetLevel;
|
||||
|
||||
|
||||
// Modulate wet level with distance to wall
|
||||
float MIN_ATTENUATION_DISTANCE = 2.0f;
|
||||
float MAX_ATTENUATION = -12; // dB
|
||||
|
@ -502,24 +502,24 @@ void AudioMixer::sendAudioEnvironmentPacket(SharedNodePointer node) {
|
|||
stream->clearReverb();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Send at change or every so often
|
||||
float CHANCE_OF_SEND = 0.01f;
|
||||
bool sendData = dataChanged || (randFloat() < CHANCE_OF_SEND);
|
||||
|
||||
|
||||
if (sendData) {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
int numBytesEnvPacketHeader = nodeList->populatePacketHeader(clientEnvBuffer, PacketTypeAudioEnvironment);
|
||||
char* envDataAt = clientEnvBuffer + numBytesEnvPacketHeader;
|
||||
|
||||
|
||||
unsigned char bitset = 0;
|
||||
if (hasReverb) {
|
||||
setAtBit(bitset, HAS_REVERB_BIT);
|
||||
}
|
||||
|
||||
|
||||
memcpy(envDataAt, &bitset, sizeof(unsigned char));
|
||||
envDataAt += sizeof(unsigned char);
|
||||
|
||||
|
||||
if (hasReverb) {
|
||||
memcpy(envDataAt, &reverbTime, sizeof(float));
|
||||
envDataAt += sizeof(float);
|
||||
|
@ -532,7 +532,7 @@ void AudioMixer::sendAudioEnvironmentPacket(SharedNodePointer node) {
|
|||
|
||||
void AudioMixer::readPendingDatagram(const QByteArray& receivedPacket, const HifiSockAddr& senderSockAddr) {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
|
||||
if (nodeList->packetVersionAndHashMatch(receivedPacket)) {
|
||||
// pull any new audio data from nodes off of the network stack
|
||||
PacketType mixerPacketType = packetTypeForPacket(receivedPacket);
|
||||
|
@ -541,14 +541,14 @@ void AudioMixer::readPendingDatagram(const QByteArray& receivedPacket, const Hif
|
|||
|| mixerPacketType == PacketTypeInjectAudio
|
||||
|| mixerPacketType == PacketTypeSilentAudioFrame
|
||||
|| mixerPacketType == PacketTypeAudioStreamStats) {
|
||||
|
||||
|
||||
nodeList->findNodeAndUpdateWithDataFromPacket(receivedPacket);
|
||||
} else if (mixerPacketType == PacketTypeMuteEnvironment) {
|
||||
SharedNodePointer sendingNode = nodeList->sendingNodeForPacket(receivedPacket);
|
||||
if (sendingNode->getCanAdjustLocks()) {
|
||||
QByteArray packet = receivedPacket;
|
||||
nodeList->populatePacketHeader(packet, PacketTypeMuteEnvironment);
|
||||
|
||||
|
||||
nodeList->eachNode([&](const SharedNodePointer& node){
|
||||
if (node->getType() == NodeType::Agent && node->getActiveSocket() &&
|
||||
node->getLinkedData() && node != sendingNode) {
|
||||
|
@ -560,18 +560,18 @@ void AudioMixer::readPendingDatagram(const QByteArray& receivedPacket, const Hif
|
|||
// let processNodeData handle it.
|
||||
nodeList->processNodeData(senderSockAddr, receivedPacket);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AudioMixer::sendStatsPacket() {
|
||||
static QJsonObject statsObject;
|
||||
|
||||
|
||||
statsObject["useDynamicJitterBuffers"] = _streamSettings._dynamicJitterBuffers;
|
||||
statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100.0f;
|
||||
statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;
|
||||
|
||||
statsObject["average_listeners_per_frame"] = (float) _sumListeners / (float) _numStatFrames;
|
||||
|
||||
|
||||
if (_sumListeners > 0) {
|
||||
statsObject["average_mixes_per_listener"] = (float) _sumMixes / (float) _sumListeners;
|
||||
} else {
|
||||
|
@ -581,65 +581,65 @@ void AudioMixer::sendStatsPacket() {
|
|||
_sumListeners = 0;
|
||||
_sumMixes = 0;
|
||||
_numStatFrames = 0;
|
||||
|
||||
|
||||
QJsonObject readPendingDatagramStats;
|
||||
|
||||
|
||||
QJsonObject rpdCallsStats;
|
||||
rpdCallsStats["calls_per_sec_avg_30s"] = _readPendingCallsPerSecondStats.getWindowAverage();
|
||||
rpdCallsStats["calls_last_sec"] = _readPendingCallsPerSecondStats.getLastCompleteIntervalStats().getSum() + 0.5;
|
||||
|
||||
|
||||
readPendingDatagramStats["calls"] = rpdCallsStats;
|
||||
|
||||
QJsonObject packetsPerCallStats;
|
||||
packetsPerCallStats["avg_30s"] = _datagramsReadPerCallStats.getWindowAverage();
|
||||
packetsPerCallStats["avg_1s"] = _datagramsReadPerCallStats.getLastCompleteIntervalStats().getAverage();
|
||||
|
||||
|
||||
readPendingDatagramStats["packets_per_call"] = packetsPerCallStats;
|
||||
|
||||
|
||||
QJsonObject packetsTimePerCallStats;
|
||||
packetsTimePerCallStats["usecs_per_call_avg_30s"] = _timeSpentPerCallStats.getWindowAverage();
|
||||
packetsTimePerCallStats["usecs_per_call_avg_1s"] = _timeSpentPerCallStats.getLastCompleteIntervalStats().getAverage();
|
||||
packetsTimePerCallStats["prct_time_in_call_30s"] =
|
||||
packetsTimePerCallStats["prct_time_in_call_30s"] =
|
||||
_timeSpentPerCallStats.getWindowSum() / (READ_DATAGRAMS_STATS_WINDOW_SECONDS * USECS_PER_SECOND) * 100.0;
|
||||
packetsTimePerCallStats["prct_time_in_call_1s"] =
|
||||
packetsTimePerCallStats["prct_time_in_call_1s"] =
|
||||
_timeSpentPerCallStats.getLastCompleteIntervalStats().getSum() / USECS_PER_SECOND * 100.0;
|
||||
|
||||
readPendingDatagramStats["packets_time_per_call"] = packetsTimePerCallStats;
|
||||
|
||||
|
||||
QJsonObject hashMatchTimePerCallStats;
|
||||
hashMatchTimePerCallStats["usecs_per_hashmatch_avg_30s"] = _timeSpentPerHashMatchCallStats.getWindowAverage();
|
||||
hashMatchTimePerCallStats["usecs_per_hashmatch_avg_1s"]
|
||||
hashMatchTimePerCallStats["usecs_per_hashmatch_avg_1s"]
|
||||
= _timeSpentPerHashMatchCallStats.getLastCompleteIntervalStats().getAverage();
|
||||
hashMatchTimePerCallStats["prct_time_in_hashmatch_30s"]
|
||||
hashMatchTimePerCallStats["prct_time_in_hashmatch_30s"]
|
||||
= _timeSpentPerHashMatchCallStats.getWindowSum() / (READ_DATAGRAMS_STATS_WINDOW_SECONDS*USECS_PER_SECOND) * 100.0;
|
||||
hashMatchTimePerCallStats["prct_time_in_hashmatch_1s"]
|
||||
= _timeSpentPerHashMatchCallStats.getLastCompleteIntervalStats().getSum() / USECS_PER_SECOND * 100.0;
|
||||
hashMatchTimePerCallStats["prct_time_in_hashmatch_1s"]
|
||||
= _timeSpentPerHashMatchCallStats.getLastCompleteIntervalStats().getSum() / USECS_PER_SECOND * 100.0;
|
||||
readPendingDatagramStats["hashmatch_time_per_call"] = hashMatchTimePerCallStats;
|
||||
|
||||
|
||||
statsObject["read_pending_datagrams"] = readPendingDatagramStats;
|
||||
|
||||
|
||||
// add stats for each listerner
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
QJsonObject listenerStats;
|
||||
|
||||
|
||||
nodeList->eachNode([&](const SharedNodePointer& node) {
|
||||
AudioMixerClientData* clientData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
if (clientData) {
|
||||
QJsonObject nodeStats;
|
||||
QString uuidString = uuidStringWithoutCurlyBraces(node->getUUID());
|
||||
|
||||
|
||||
nodeStats["outbound_kbps"] = node->getOutboundBandwidth();
|
||||
nodeStats[USERNAME_UUID_REPLACEMENT_STATS_KEY] = uuidString;
|
||||
|
||||
nodeStats["jitter"] = clientData->getAudioStreamStats();
|
||||
|
||||
|
||||
listenerStats[uuidString] = nodeStats;
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
// add the listeners object to the root object
|
||||
statsObject["listeners"] = listenerStats;
|
||||
|
||||
|
||||
// send off the stats packets
|
||||
ThreadedAssignment::addPacketStatsAndSendStatsPacket(statsObject);
|
||||
}
|
||||
|
@ -649,125 +649,125 @@ void AudioMixer::run() {
|
|||
ThreadedAssignment::commonInit(AUDIO_MIXER_LOGGING_TARGET_NAME, NodeType::AudioMixer);
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
|
||||
// we do not want this event loop to be the handler for UDP datagrams, so disconnect
|
||||
disconnect(&nodeList->getNodeSocket(), 0, this, 0);
|
||||
|
||||
|
||||
// setup a QThread with us as parent that will house the AudioMixerDatagramProcessor
|
||||
_datagramProcessingThread = new QThread(this);
|
||||
_datagramProcessingThread->setObjectName("Datagram Processor Thread");
|
||||
|
||||
|
||||
// create an AudioMixerDatagramProcessor and move it to that thread
|
||||
AudioMixerDatagramProcessor* datagramProcessor = new AudioMixerDatagramProcessor(nodeList->getNodeSocket(), thread());
|
||||
datagramProcessor->moveToThread(_datagramProcessingThread);
|
||||
|
||||
|
||||
// remove the NodeList as the parent of the node socket
|
||||
nodeList->getNodeSocket().setParent(NULL);
|
||||
nodeList->getNodeSocket().moveToThread(_datagramProcessingThread);
|
||||
|
||||
|
||||
// let the datagram processor handle readyRead from node socket
|
||||
connect(&nodeList->getNodeSocket(), &QUdpSocket::readyRead,
|
||||
datagramProcessor, &AudioMixerDatagramProcessor::readPendingDatagrams);
|
||||
|
||||
|
||||
// connect to the datagram processing thread signal that tells us we have to handle a packet
|
||||
connect(datagramProcessor, &AudioMixerDatagramProcessor::packetRequiresProcessing, this, &AudioMixer::readPendingDatagram);
|
||||
|
||||
|
||||
// delete the datagram processor and the associated thread when the QThread quits
|
||||
connect(_datagramProcessingThread, &QThread::finished, datagramProcessor, &QObject::deleteLater);
|
||||
connect(datagramProcessor, &QObject::destroyed, _datagramProcessingThread, &QThread::deleteLater);
|
||||
|
||||
|
||||
// start the datagram processing thread
|
||||
_datagramProcessingThread->start();
|
||||
|
||||
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
|
||||
nodeList->linkedDataCreateCallback = [](Node* node) {
|
||||
node->setLinkedData(new AudioMixerClientData());
|
||||
};
|
||||
|
||||
|
||||
// wait until we have the domain-server settings, otherwise we bail
|
||||
DomainHandler& domainHandler = nodeList->getDomainHandler();
|
||||
|
||||
|
||||
qDebug() << "Waiting for domain settings from domain-server.";
|
||||
|
||||
|
||||
// block until we get the settingsRequestComplete signal
|
||||
QEventLoop loop;
|
||||
connect(&domainHandler, &DomainHandler::settingsReceived, &loop, &QEventLoop::quit);
|
||||
connect(&domainHandler, &DomainHandler::settingsReceiveFail, &loop, &QEventLoop::quit);
|
||||
domainHandler.requestDomainSettings();
|
||||
loop.exec();
|
||||
|
||||
|
||||
if (domainHandler.getSettingsObject().isEmpty()) {
|
||||
qDebug() << "Failed to retreive settings object from domain-server. Bailing on assignment.";
|
||||
setFinished(true);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
const QJsonObject& settingsObject = domainHandler.getSettingsObject();
|
||||
|
||||
|
||||
// check the settings object to see if we have anything we can parse out
|
||||
parseSettingsObject(settingsObject);
|
||||
|
||||
|
||||
int nextFrame = 0;
|
||||
QElapsedTimer timer;
|
||||
timer.start();
|
||||
|
||||
char clientMixBuffer[MAX_PACKET_SIZE];
|
||||
|
||||
|
||||
int usecToSleep = AudioConstants::NETWORK_FRAME_USECS;
|
||||
|
||||
|
||||
const int TRAILING_AVERAGE_FRAMES = 100;
|
||||
int framesSinceCutoffEvent = TRAILING_AVERAGE_FRAMES;
|
||||
|
||||
while (!_isFinished) {
|
||||
const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
|
||||
const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
|
||||
|
||||
|
||||
const float RATIO_BACK_OFF = 0.02f;
|
||||
|
||||
|
||||
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
|
||||
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
|
||||
|
||||
|
||||
if (usecToSleep < 0) {
|
||||
usecToSleep = 0;
|
||||
}
|
||||
|
||||
|
||||
_trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio)
|
||||
+ (usecToSleep * CURRENT_FRAME_RATIO / (float) AudioConstants::NETWORK_FRAME_USECS);
|
||||
|
||||
|
||||
float lastCutoffRatio = _performanceThrottlingRatio;
|
||||
bool hasRatioChanged = false;
|
||||
|
||||
|
||||
if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
|
||||
if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
|
||||
// we're struggling - change our min required loudness to reduce some load
|
||||
_performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
|
||||
|
||||
|
||||
qDebug() << "Mixer is struggling, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
|
||||
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
|
||||
hasRatioChanged = true;
|
||||
} else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
|
||||
// we've recovered and can back off the required loudness
|
||||
_performanceThrottlingRatio = _performanceThrottlingRatio - RATIO_BACK_OFF;
|
||||
|
||||
|
||||
if (_performanceThrottlingRatio < 0) {
|
||||
_performanceThrottlingRatio = 0;
|
||||
}
|
||||
|
||||
|
||||
qDebug() << "Mixer is recovering, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
|
||||
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
|
||||
hasRatioChanged = true;
|
||||
}
|
||||
|
||||
|
||||
if (hasRatioChanged) {
|
||||
// set out min audability threshold from the new ratio
|
||||
_minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
|
||||
qDebug() << "Minimum audability required to be mixed is now" << _minAudibilityThreshold;
|
||||
|
||||
|
||||
framesSinceCutoffEvent = 0;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (!hasRatioChanged) {
|
||||
++framesSinceCutoffEvent;
|
||||
}
|
||||
|
@ -777,9 +777,9 @@ void AudioMixer::run() {
|
|||
perSecondActions();
|
||||
_lastPerSecondCallbackTime = now;
|
||||
}
|
||||
|
||||
|
||||
nodeList->eachNode([&](const SharedNodePointer& node) {
|
||||
|
||||
|
||||
if (node->getLinkedData()) {
|
||||
AudioMixerClientData* nodeData = (AudioMixerClientData*)node->getLinkedData();
|
||||
|
||||
|
@ -787,14 +787,14 @@ void AudioMixer::run() {
|
|||
// a pointer to the popped data is stored as a member in InboundAudioStream.
|
||||
// That's how the popped audio data will be read for mixing (but only if the pop was successful)
|
||||
nodeData->checkBuffersBeforeFrameSend();
|
||||
|
||||
|
||||
// if the stream should be muted, send mute packet
|
||||
if (nodeData->getAvatarAudioStream()
|
||||
&& shouldMute(nodeData->getAvatarAudioStream()->getQuietestFrameLoudness())) {
|
||||
QByteArray packet = nodeList->byteArrayWithPopulatedHeader(PacketTypeNoisyMute);
|
||||
nodeList->writeDatagram(packet, node);
|
||||
}
|
||||
|
||||
|
||||
if (node->getType() == NodeType::Agent && node->getActiveSocket()
|
||||
&& nodeData->getAvatarAudioStream()) {
|
||||
|
||||
|
@ -810,7 +810,7 @@ void AudioMixer::run() {
|
|||
quint16 sequence = nodeData->getOutgoingSequenceNumber();
|
||||
memcpy(mixDataAt, &sequence, sizeof(quint16));
|
||||
mixDataAt += sizeof(quint16);
|
||||
|
||||
|
||||
// pack mixed audio samples
|
||||
memcpy(mixDataAt, _mixSamples, AudioConstants::NETWORK_FRAME_BYTES_STEREO);
|
||||
mixDataAt += AudioConstants::NETWORK_FRAME_BYTES_STEREO;
|
||||
|
@ -829,7 +829,7 @@ void AudioMixer::run() {
|
|||
memcpy(mixDataAt, &numSilentSamples, sizeof(quint16));
|
||||
mixDataAt += sizeof(quint16);
|
||||
}
|
||||
|
||||
|
||||
// Send audio environment
|
||||
sendAudioEnvironmentPacket(node);
|
||||
|
||||
|
@ -847,9 +847,9 @@ void AudioMixer::run() {
|
|||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
++_numStatFrames;
|
||||
|
||||
|
||||
QCoreApplication::processEvents();
|
||||
|
||||
if (_isFinished) {
|
||||
|
@ -925,7 +925,7 @@ void AudioMixer::perSecondActions() {
|
|||
void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
|
||||
if (settingsObject.contains(AUDIO_BUFFER_GROUP_KEY)) {
|
||||
QJsonObject audioBufferGroupObject = settingsObject[AUDIO_BUFFER_GROUP_KEY].toObject();
|
||||
|
||||
|
||||
// check the payload to see if we have asked for dynamicJitterBuffer support
|
||||
const QString DYNAMIC_JITTER_BUFFER_JSON_KEY = "dynamic_jitter_buffer";
|
||||
_streamSettings._dynamicJitterBuffers = audioBufferGroupObject[DYNAMIC_JITTER_BUFFER_JSON_KEY].toBool();
|
||||
|
@ -934,7 +934,7 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
|
|||
} else {
|
||||
qDebug() << "Dynamic jitter buffers disabled.";
|
||||
}
|
||||
|
||||
|
||||
bool ok;
|
||||
const QString DESIRED_JITTER_BUFFER_FRAMES_KEY = "static_desired_jitter_buffer_frames";
|
||||
_streamSettings._staticDesiredJitterBufferFrames = audioBufferGroupObject[DESIRED_JITTER_BUFFER_FRAMES_KEY].toString().toInt(&ok);
|
||||
|
@ -942,14 +942,14 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
|
|||
_streamSettings._staticDesiredJitterBufferFrames = DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES;
|
||||
}
|
||||
qDebug() << "Static desired jitter buffer frames:" << _streamSettings._staticDesiredJitterBufferFrames;
|
||||
|
||||
|
||||
const QString MAX_FRAMES_OVER_DESIRED_JSON_KEY = "max_frames_over_desired";
|
||||
_streamSettings._maxFramesOverDesired = audioBufferGroupObject[MAX_FRAMES_OVER_DESIRED_JSON_KEY].toString().toInt(&ok);
|
||||
if (!ok) {
|
||||
_streamSettings._maxFramesOverDesired = DEFAULT_MAX_FRAMES_OVER_DESIRED;
|
||||
}
|
||||
qDebug() << "Max frames over desired:" << _streamSettings._maxFramesOverDesired;
|
||||
|
||||
|
||||
const QString USE_STDEV_FOR_DESIRED_CALC_JSON_KEY = "use_stdev_for_desired_calc";
|
||||
_streamSettings._useStDevForJitterCalc = audioBufferGroupObject[USE_STDEV_FOR_DESIRED_CALC_JSON_KEY].toBool();
|
||||
if (_streamSettings._useStDevForJitterCalc) {
|
||||
|
@ -957,28 +957,28 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
|
|||
} else {
|
||||
qDebug() << "Using Fred's max-gap method for jitter calc if dynamic jitter buffers enabled";
|
||||
}
|
||||
|
||||
|
||||
const QString WINDOW_STARVE_THRESHOLD_JSON_KEY = "window_starve_threshold";
|
||||
_streamSettings._windowStarveThreshold = audioBufferGroupObject[WINDOW_STARVE_THRESHOLD_JSON_KEY].toString().toInt(&ok);
|
||||
if (!ok) {
|
||||
_streamSettings._windowStarveThreshold = DEFAULT_WINDOW_STARVE_THRESHOLD;
|
||||
}
|
||||
qDebug() << "Window A starve threshold:" << _streamSettings._windowStarveThreshold;
|
||||
|
||||
|
||||
const QString WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES_JSON_KEY = "window_seconds_for_desired_calc_on_too_many_starves";
|
||||
_streamSettings._windowSecondsForDesiredCalcOnTooManyStarves = audioBufferGroupObject[WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES_JSON_KEY].toString().toInt(&ok);
|
||||
if (!ok) {
|
||||
_streamSettings._windowSecondsForDesiredCalcOnTooManyStarves = DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES;
|
||||
}
|
||||
qDebug() << "Window A length:" << _streamSettings._windowSecondsForDesiredCalcOnTooManyStarves << "seconds";
|
||||
|
||||
|
||||
const QString WINDOW_SECONDS_FOR_DESIRED_REDUCTION_JSON_KEY = "window_seconds_for_desired_reduction";
|
||||
_streamSettings._windowSecondsForDesiredReduction = audioBufferGroupObject[WINDOW_SECONDS_FOR_DESIRED_REDUCTION_JSON_KEY].toString().toInt(&ok);
|
||||
if (!ok) {
|
||||
_streamSettings._windowSecondsForDesiredReduction = DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION;
|
||||
}
|
||||
qDebug() << "Window B length:" << _streamSettings._windowSecondsForDesiredReduction << "seconds";
|
||||
|
||||
|
||||
const QString REPETITION_WITH_FADE_JSON_KEY = "repetition_with_fade";
|
||||
_streamSettings._repetitionWithFade = audioBufferGroupObject[REPETITION_WITH_FADE_JSON_KEY].toBool();
|
||||
if (_streamSettings._repetitionWithFade) {
|
||||
|
@ -986,17 +986,17 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
|
|||
} else {
|
||||
qDebug() << "Repetition with fade disabled";
|
||||
}
|
||||
|
||||
|
||||
const QString PRINT_STREAM_STATS_JSON_KEY = "print_stream_stats";
|
||||
_printStreamStats = audioBufferGroupObject[PRINT_STREAM_STATS_JSON_KEY].toBool();
|
||||
if (_printStreamStats) {
|
||||
qDebug() << "Stream stats will be printed to stdout";
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (settingsObject.contains(AUDIO_ENV_GROUP_KEY)) {
|
||||
QJsonObject audioEnvGroupObject = settingsObject[AUDIO_ENV_GROUP_KEY].toObject();
|
||||
|
||||
|
||||
const QString ATTENATION_PER_DOULING_IN_DISTANCE = "attenuation_per_doubling_in_distance";
|
||||
if (audioEnvGroupObject[ATTENATION_PER_DOULING_IN_DISTANCE].isString()) {
|
||||
bool ok = false;
|
||||
|
@ -1006,7 +1006,7 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
|
|||
qDebug() << "Attenuation per doubling in distance changed to" << _attenuationPerDoublingInDistance;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const QString NOISE_MUTING_THRESHOLD = "noise_muting_threshold";
|
||||
if (audioEnvGroupObject[NOISE_MUTING_THRESHOLD].isString()) {
|
||||
bool ok = false;
|
||||
|
@ -1024,22 +1024,22 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
|
|||
if (_enableFilter) {
|
||||
qDebug() << "Filter enabled";
|
||||
}
|
||||
|
||||
|
||||
const QString AUDIO_ZONES = "zones";
|
||||
if (audioEnvGroupObject[AUDIO_ZONES].isObject()) {
|
||||
const QJsonObject& zones = audioEnvGroupObject[AUDIO_ZONES].toObject();
|
||||
|
||||
|
||||
const QString X_RANGE = "x_range";
|
||||
const QString Y_RANGE = "y_range";
|
||||
const QString Z_RANGE = "z_range";
|
||||
foreach (const QString& zone, zones.keys()) {
|
||||
QJsonObject zoneObject = zones[zone].toObject();
|
||||
|
||||
|
||||
if (zoneObject.contains(X_RANGE) && zoneObject.contains(Y_RANGE) && zoneObject.contains(Z_RANGE)) {
|
||||
QStringList xRange = zoneObject.value(X_RANGE).toString().split("-", QString::SkipEmptyParts);
|
||||
QStringList yRange = zoneObject.value(Y_RANGE).toString().split("-", QString::SkipEmptyParts);
|
||||
QStringList zRange = zoneObject.value(Z_RANGE).toString().split("-", QString::SkipEmptyParts);
|
||||
|
||||
|
||||
if (xRange.size() == 2 && yRange.size() == 2 && zRange.size() == 2) {
|
||||
float xMin, xMax, yMin, yMax, zMin, zMax;
|
||||
bool ok, allOk = true;
|
||||
|
@ -1055,7 +1055,7 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
|
|||
allOk &= ok;
|
||||
zMax = zRange[1].toFloat(&ok);
|
||||
allOk &= ok;
|
||||
|
||||
|
||||
if (allOk) {
|
||||
glm::vec3 corner(xMin, yMin, zMin);
|
||||
glm::vec3 dimensions(xMax - xMin, yMax - yMin, zMax - zMin);
|
||||
|
@ -1068,63 +1068,63 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const QString ATTENUATION_COEFFICIENTS = "attenuation_coefficients";
|
||||
if (audioEnvGroupObject[ATTENUATION_COEFFICIENTS].isArray()) {
|
||||
const QJsonArray& coefficients = audioEnvGroupObject[ATTENUATION_COEFFICIENTS].toArray();
|
||||
|
||||
|
||||
const QString SOURCE = "source";
|
||||
const QString LISTENER = "listener";
|
||||
const QString COEFFICIENT = "coefficient";
|
||||
for (int i = 0; i < coefficients.count(); ++i) {
|
||||
QJsonObject coefficientObject = coefficients[i].toObject();
|
||||
|
||||
|
||||
if (coefficientObject.contains(SOURCE) &&
|
||||
coefficientObject.contains(LISTENER) &&
|
||||
coefficientObject.contains(COEFFICIENT)) {
|
||||
|
||||
|
||||
ZonesSettings settings;
|
||||
|
||||
|
||||
bool ok;
|
||||
settings.source = coefficientObject.value(SOURCE).toString();
|
||||
settings.listener = coefficientObject.value(LISTENER).toString();
|
||||
settings.coefficient = coefficientObject.value(COEFFICIENT).toString().toFloat(&ok);
|
||||
|
||||
|
||||
if (ok && settings.coefficient >= 0.0f && settings.coefficient <= 1.0f &&
|
||||
_audioZones.contains(settings.source) && _audioZones.contains(settings.listener)) {
|
||||
|
||||
|
||||
_zonesSettings.push_back(settings);
|
||||
qDebug() << "Added Coefficient:" << settings.source << settings.listener << settings.coefficient;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
const QString REVERB = "reverb";
|
||||
if (audioEnvGroupObject[REVERB].isArray()) {
|
||||
const QJsonArray& reverb = audioEnvGroupObject[REVERB].toArray();
|
||||
|
||||
|
||||
const QString ZONE = "zone";
|
||||
const QString REVERB_TIME = "reverb_time";
|
||||
const QString WET_LEVEL = "wet_level";
|
||||
for (int i = 0; i < reverb.count(); ++i) {
|
||||
QJsonObject reverbObject = reverb[i].toObject();
|
||||
|
||||
|
||||
if (reverbObject.contains(ZONE) &&
|
||||
reverbObject.contains(REVERB_TIME) &&
|
||||
reverbObject.contains(WET_LEVEL)) {
|
||||
|
||||
|
||||
bool okReverbTime, okWetLevel;
|
||||
QString zone = reverbObject.value(ZONE).toString();
|
||||
float reverbTime = reverbObject.value(REVERB_TIME).toString().toFloat(&okReverbTime);
|
||||
float wetLevel = reverbObject.value(WET_LEVEL).toString().toFloat(&okWetLevel);
|
||||
|
||||
|
||||
if (okReverbTime && okWetLevel && _audioZones.contains(zone)) {
|
||||
ReverbSettings settings;
|
||||
settings.zone = zone;
|
||||
settings.reverbTime = reverbTime;
|
||||
settings.wetLevel = wetLevel;
|
||||
|
||||
|
||||
_zoneReverbSettings.push_back(settings);
|
||||
qDebug() << "Added Reverb:" << zone << reverbTime << wetLevel;
|
||||
}
|
||||
|
|
|
@@ -29,44 +29,46 @@ class AudioMixer : public ThreadedAssignment {
    Q_OBJECT
public:
    AudioMixer(const QByteArray& packet);

    void deleteLater() { qDebug() << "DELETE LATER CALLED?"; QObject::deleteLater(); }
public slots:
    /// threaded run of assignment
    void run();

    void readPendingDatagrams() { }; // this will not be called since our datagram processing thread will handle
    void readPendingDatagram(const QByteArray& receivedPacket, const HifiSockAddr& senderSockAddr);

    void sendStatsPacket();

    static const InboundAudioStream::Settings& getStreamSettings() { return _streamSettings; }

private:
    /// adds one stream to the mix for a listening node
    int addStreamToMixForListeningNodeWithStream(AudioMixerClientData* listenerNodeData,
                                                 const QUuid& streamUUID,
                                                 PositionalAudioStream* streamToAdd,
                                                 AvatarAudioStream* listeningNodeStream);

    /// prepares and sends a mix to one Node
    int prepareMixForListeningNode(Node* node);

    /// Send Audio Environment packet for a single node
    void sendAudioEnvironmentPacket(SharedNodePointer node);

    // used on a per stream basis to run the filter on before mixing, large enough to handle the historical
    // data from a phase delay as well as an entire network buffer
    int16_t _preMixSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];

    // client samples capacity is larger than what will be sent to optimize mixing
    // we are MMX adding 4 samples at a time so we need client samples to have an extra 4
    int16_t _mixSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];

    void perSecondActions();

    bool shouldMute(float quietestFrame);

    void parseSettingsObject(const QJsonObject& settingsObject);

    float _trailingSleepRatio;
    float _minAudibilityThreshold;
    float _performanceThrottlingRatio;
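Regarding the _preMixSamples/_mixSamples pair declared above: each stream is rendered into the per-stream pre-mix buffer and then accumulated into the node's mix with clamping to the int16 range, as the mixing loop elsewhere in this diff does with glm::clamp. A tiny sketch of that accumulate-and-clamp step, with made-up buffer sizes:

#include <algorithm>
#include <cstdint>

// Illustrative size only; the real code uses AudioConstants::NETWORK_FRAME_SAMPLES_STEREO
// plus extra headroom for the phase-delay history.
constexpr int FRAME_SAMPLES_STEREO = 480;
constexpr int32_t MIN_SAMPLE_VALUE = INT16_MIN;
constexpr int32_t MAX_SAMPLE_VALUE = INT16_MAX;

// Accumulate one pre-mixed stream into the running mix, saturating at the int16 limits.
void accumulateWithClamp(int16_t* mix, const int16_t* preMix, int numSamples) {
    for (int s = 0; s < numSamples; ++s) {
        int32_t summed = static_cast<int32_t>(mix[s]) + static_cast<int32_t>(preMix[s]);
        summed = std::min(std::max(summed, MIN_SAMPLE_VALUE), MAX_SAMPLE_VALUE);
        mix[s] = static_cast<int16_t>(summed);
    }
}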
@@ -75,7 +77,7 @@ private:
    int _numStatFrames;
    int _sumListeners;
    int _sumMixes;

    QHash<QString, AABox> _audioZones;
    struct ZonesSettings {
        QString source;
@@ -89,12 +91,12 @@ private:
        float wetLevel;
    };
    QVector<ReverbSettings> _zoneReverbSettings;

    static InboundAudioStream::Settings _streamSettings;

    static bool _printStreamStats;
    static bool _enableFilter;

    quint64 _lastPerSecondCallbackTime;

    bool _sendAudioStreamStats;
@@ -26,7 +26,7 @@ var viewHelpers = {

    setting_value = _(values).valueForKeyPath(keypath);

-    if (!setting_value) {
+    if (setting_value === undefined || setting_value === null) {
      if (_.has(setting, 'default')) {
        setting_value = setting.default;
      } else {
@@ -254,6 +254,7 @@ QJsonObject DomainServerSettingsManager::responseObjectForType(const QString& ty

void DomainServerSettingsManager::updateSetting(const QString& key, const QJsonValue& newValue, QVariantMap& settingMap,
                                                const QJsonObject& settingDescription) {
    if (newValue.isString()) {
        if (newValue.toString().isEmpty()) {
            // this is an empty value, clear it in settings variant so the default is sent
@@ -288,7 +289,16 @@ void DomainServerSettingsManager::updateSetting(const QString& key, const QJsonV
            settingMap[key] = QVariantMap();
        }

-        QVariantMap& thisMap = *reinterpret_cast<QVariantMap*>(settingMap[key].data());
+        QVariant& possibleMap = settingMap[key];
+
+        if (!possibleMap.canConvert(QMetaType::QVariantMap)) {
+            // if this isn't a map then we need to make it one, otherwise we're about to crash
+            qDebug() << "Value at" << key << "was not the expected QVariantMap while updating DS settings"
+                << "- removing existing value and making it a QVariantMap";
+            possibleMap = QVariantMap();
+        }
+
+        QVariantMap& thisMap = *reinterpret_cast<QVariantMap*>(possibleMap.data());
        foreach(const QString childKey, newValue.toObject().keys()) {

            QJsonObject childDescriptionObject = settingDescription;
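The guard added above checks that the stored QVariant really is convertible to a QVariantMap before it is reinterpreted as one. The same defensive shape in isolation (a standalone sketch, not the settings manager itself; it uses toMap() and a copy-back where the diff casts QVariant::data() in place):

#include <QDebug>
#include <QVariant>

// Ensure map[key] holds a QVariantMap before writing a nested value into it.
void setNested(QVariantMap& map, const QString& key, const QString& childKey, const QVariant& value) {
    if (!map.contains(key) || !map[key].canConvert(QMetaType::QVariantMap)) {
        // anything else stored here (a string, a number, ...) is replaced by an empty map
        map[key] = QVariantMap();
    }

    QVariantMap child = map[key].toMap();   // safe: the value is convertible to a map
    child[childKey] = value;
    map[key] = child;                       // write the modified copy back
}

int main() {
    QVariantMap settings;
    settings["group"] = QString("not-a-map");     // wrong type on purpose
    setNested(settings, "group", "port", 40102);
    qDebug() << settings;                         // "group" now holds a QVariantMap
    return 0;
}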
@@ -351,7 +361,7 @@ void DomainServerSettingsManager::recurseJSONObjectAndOverwriteSettings(const QJ
        settingsVariant[rootKey] = QVariantMap();
    }

-    QVariantMap& thisMap = settingsVariant;
+    QVariantMap* thisMap = &settingsVariant;

    QJsonObject groupDescriptionObject;
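The switch from QVariantMap& to QVariantMap* above is needed because a C++ reference cannot be rebound after initialization, while the code later wants thisMap to refer to a different map once a group key is matched. A short illustration of the difference:

#include <QVariant>

void illustrate(QVariantMap& a, QVariantMap& b) {
    QVariantMap& ref = a;
    ref = b;                 // assigns b's contents into a; the reference still refers to a

    QVariantMap* ptr = &a;
    ptr = &b;                // reseats the pointer so it now refers to b
    ptr->insert("key", 1);   // modifies b
}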
@@ -362,7 +372,7 @@ void DomainServerSettingsManager::recurseJSONObjectAndOverwriteSettings(const QJ
            groupDescriptionObject = groupValue.toObject();

            // change the map we will update to be the map for this group
-            thisMap = *reinterpret_cast<QVariantMap*>(settingsVariant[rootKey].data());
+            thisMap = reinterpret_cast<QVariantMap*>(settingsVariant[rootKey].data());

            break;
        }
@@ -388,7 +398,7 @@ void DomainServerSettingsManager::recurseJSONObjectAndOverwriteSettings(const QJ
        }

        if (!matchingDescriptionObject.isEmpty()) {
-            updateSetting(rootKey, rootValue, thisMap, matchingDescriptionObject);
+            updateSetting(rootKey, rootValue, *thisMap, matchingDescriptionObject);
        } else {
            qDebug() << "Setting for root key" << rootKey << "does not exist - cannot update setting.";
        }
@@ -401,7 +411,7 @@ void DomainServerSettingsManager::recurseJSONObjectAndOverwriteSettings(const QJ
            // if we matched the setting then update the value
            if (!matchingDescriptionObject.isEmpty()) {
                QJsonValue settingValue = rootValue.toObject()[settingKey];
-                updateSetting(settingKey, settingValue, thisMap, matchingDescriptionObject);
+                updateSetting(settingKey, settingValue, *thisMap, matchingDescriptionObject);
            } else {
                qDebug() << "Could not find description for setting" << settingKey << "in group" << rootKey <<
                    "- cannot update setting.";
@@ -137,6 +137,7 @@ var toolBar = (function () {
        newSphereButton,
        newLightButton,
        newTextButton,
+        newWebButton,
        newZoneButton,
        browseMarketplaceButton;
@@ -204,6 +205,16 @@ var toolBar = (function () {
        alpha: 0.9,
        visible: false
    });

+    newWebButton = toolBar.addTool({
+        imageURL: "https://s3.amazonaws.com/Oculus/earth17.svg",
+        subImage: { x: 0, y: Tool.IMAGE_WIDTH, width: Tool.IMAGE_WIDTH, height: Tool.IMAGE_HEIGHT },
+        width: toolWidth,
+        height: toolHeight,
+        alpha: 0.9,
+        visible: false
+    });
+
    newZoneButton = toolBar.addTool({
        imageURL: toolIconUrl + "zonecube_text.svg",
        subImage: { x: 0, y: 128, width: 128, height: 128 },
@@ -253,6 +264,7 @@ var toolBar = (function () {
        toolBar.showTool(newSphereButton, doShow);
        toolBar.showTool(newLightButton, doShow);
        toolBar.showTool(newTextButton, doShow);
+        toolBar.showTool(newWebButton, doShow);
        toolBar.showTool(newZoneButton, doShow);
    };
@@ -425,6 +437,22 @@ var toolBar = (function () {
            return true;
        }

+        if (newWebButton === toolBar.clicked(clickedOverlay)) {
+            var position = getPositionToCreateEntity();
+
+            if (position.x > 0 && position.y > 0 && position.z > 0) {
+                placingEntityID = Entities.addEntity({
+                    type: "Web",
+                    position: grid.snapToSurface(grid.snapToGrid(position, false, DEFAULT_DIMENSIONS), DEFAULT_DIMENSIONS),
+                    dimensions: { x: 1.6, y: 0.9, z: 0.01 },
+                    sourceUrl: "https://highfidelity.com/",
+                });
+            } else {
+                print("Can't create Web Entity: would be out of bounds.");
+            }
+            return true;
+        }
+
        if (newZoneButton === toolBar.clicked(clickedOverlay)) {
            var position = getPositionToCreateEntity();
|
@@ -279,6 +279,10 @@
        var elModelTextures = document.getElementById("property-model-textures");
        var elModelOriginalTextures = document.getElementById("property-model-original-textures");

        var elWebSections = document.querySelectorAll(".web-section");
        allSections.push(elModelSections);
        var elWebSourceURL = document.getElementById("property-web-source-url");

        var elTextSections = document.querySelectorAll(".text-section");
        allSections.push(elTextSections);
        var elTextText = document.getElementById("property-text-text");

@@ -468,6 +472,12 @@
                elModelAnimationSettings.value = properties.animationSettings;
                elModelTextures.value = properties.textures;
                elModelOriginalTextures.value = properties.originalTextures;
            } else if (properties.type == "Web") {
                for (var i = 0; i < elTextSections.length; i++) {
                    elWebSections[i].style.display = 'block';
                }

                elWebSourceURL.value = properties.sourceUrl;
            } else if (properties.type == "Text") {
                for (var i = 0; i < elTextSections.length; i++) {
                    elTextSections[i].style.display = 'block';

@@ -654,6 +664,8 @@
        elLightExponent.addEventListener('change', createEmitNumberPropertyUpdateFunction('exponent'));
        elLightCutoff.addEventListener('change', createEmitNumberPropertyUpdateFunction('cutoff'));

        elWebSourceURL.addEventListener('change', createEmitTextPropertyUpdateFunction('sourceUrl'));

        elModelURL.addEventListener('change', createEmitTextPropertyUpdateFunction('modelURL'));
        elShapeType.addEventListener('change', createEmitTextPropertyUpdateFunction('shapeType'));
        elCompoundShapeURL.addEventListener('change', createEmitTextPropertyUpdateFunction('compoundShapeURL'));

@@ -1019,7 +1031,12 @@
        </div>
    </div>

    <div class="web-section property">
        <div class="label">Source URL</div>
        <div class="value">
            <input type="text" id="property-web-source-url" class="url"></input>
        </div>
    </div>

    <div class="model-section property">
        <div class="label">Model URL</div>
@@ -243,7 +243,6 @@ table#properties-list {

#properties-list .label {
    font-weight: bold;
    white-space: nowrap;
    overflow: hidden;
    text-overflow: ellipsis;
interface/resources/qml/WebEntity.qml (new file, 10 lines)

@@ -0,0 +1,10 @@
import QtQuick 2.3
import QtQuick.Controls 1.2
import QtWebKit 3.0

WebView {
    id: root
    objectName: "webview"
    anchors.fill: parent
    url: "about:blank"
}
@@ -341,6 +341,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
    _domainConnectionRefusals(QList<QString>()),
    _maxOctreePPS(maxOctreePacketsPerSecond.get())
{
    setInstance(this);
#ifdef Q_OS_WIN
    installNativeEventFilter(&MyNativeEventFilter::getInstance());
#endif

@@ -603,9 +604,11 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
        auto faceshiftTracker = DependencyManager::get<Faceshift>();
        faceshiftTracker->init();
        connect(faceshiftTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
#ifdef HAVE_DDE
        auto ddeTracker = DependencyManager::get<DdeFaceTracker>();
        ddeTracker->init();
        connect(ddeTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
#endif
    }

@@ -953,6 +956,7 @@ void Application::faceTrackerMuteToggled() {
    bool isMuted = getSelectedFaceTracker()->isMuted();
    muteAction->setChecked(isMuted);
    getSelectedFaceTracker()->setEnabled(!isMuted);
    Menu::getInstance()->getActionForOption(MenuOption::CalibrateCamera)->setEnabled(!isMuted);
}

void Application::aboutApp() {

@@ -4689,3 +4693,8 @@ void Application::setMaxOctreePacketsPerSecond(int maxOctreePPS) {
int Application::getMaxOctreePacketsPerSecond() {
    return _maxOctreePPS;
}

qreal Application::getDevicePixelRatio() {
    return _window ? _window->windowHandle()->devicePixelRatio() : 1.0;
}
@@ -300,7 +300,7 @@ public:
    virtual const glm::vec3& getAvatarPosition() const { return _myAvatar->getPosition(); }
    virtual void overrideEnvironmentData(const EnvironmentData& newData) { _environment.override(newData); }
    virtual void endOverrideEnvironmentData() { _environment.endOverride(); }

    virtual qreal getDevicePixelRatio();

    NodeBounds& getNodeBoundsDisplay() { return _nodeBoundsDisplay; }
@@ -368,8 +368,12 @@ Menu::Menu() {
    {
        QActionGroup* faceTrackerGroup = new QActionGroup(avatarDebugMenu);

        bool defaultNoFaceTracking = true;
#ifdef HAVE_DDE
        defaultNoFaceTracking = false;
#endif
        QAction* noFaceTracker = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::NoFaceTracking,
-           0, true,
+           0, defaultNoFaceTracking,
            qApp, SLOT(setActiveFaceTracker()));
        faceTrackerGroup->addAction(noFaceTracker);

@@ -381,7 +385,7 @@ Menu::Menu() {
#endif
#ifdef HAVE_DDE
        QAction* ddeFaceTracker = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::UseCamera,
-           0, false,
+           0, true,
            qApp, SLOT(setActiveFaceTracker()));
        faceTrackerGroup->addAction(ddeFaceTracker);
#endif

@@ -391,17 +395,17 @@ Menu::Menu() {
    QAction* binaryEyelidControl = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::BinaryEyelidControl, 0, true);
    binaryEyelidControl->setVisible(false);
    QAction* useAudioForMouth = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::UseAudioForMouth, 0, true);
-   useAudioForMouth->setVisible(false);
+   useAudioForMouth->setVisible(true); // DDE face tracking is on by default
    QAction* ddeFiltering = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::VelocityFilter, 0, true);
-   ddeFiltering->setVisible(false);
+   ddeFiltering->setVisible(true); // DDE face tracking is on by default
    QAction* ddeCalibrate = addActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::CalibrateCamera, 0,
        DependencyManager::get<DdeFaceTracker>().data(), SLOT(calibrate()));
-   ddeCalibrate->setVisible(false);
+   ddeCalibrate->setVisible(true); // DDE face tracking is on by default
#endif
#if defined(HAVE_FACESHIFT) || defined(HAVE_DDE)
    faceTrackingMenu->addSeparator();
    addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::MuteFaceTracking,
-       Qt::CTRL | Qt::SHIFT | Qt::Key_F, false,
+       Qt::CTRL | Qt::SHIFT | Qt::Key_F, true, // DDE face tracking is on by default
        qApp, SLOT(toggleFaceTrackerMute()));
#endif
@@ -188,7 +188,8 @@ DdeFaceTracker::DdeFaceTracker(const QHostAddress& host, quint16 serverPort, qui
    _calibrationValues(),
    _calibrationBillboard(NULL),
    _calibrationBillboardID(0),
-   _calibrationMessage(QString())
+   _calibrationMessage(QString()),
+   _isCalibrated(false)
{
    _coefficients.resize(NUM_FACESHIFT_BLENDSHAPES);
    _blendshapeCoefficients.resize(NUM_FACESHIFT_BLENDSHAPES);

@@ -219,6 +220,7 @@ DdeFaceTracker::~DdeFaceTracker() {
void DdeFaceTracker::init() {
    FaceTracker::init();
    setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::UseCamera) && !_isMuted);
    Menu::getInstance()->getActionForOption(MenuOption::CalibrateCamera)->setEnabled(!_isMuted);
}

void DdeFaceTracker::setEnabled(bool enabled) {

@@ -345,6 +347,10 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
    _lastReceiveTimestamp = usecTimestampNow();

    if (buffer.size() > MIN_PACKET_SIZE) {
        if (!_isCalibrated) {
            calibrate();
        }

        bool isFiltering = Menu::getInstance()->isOptionChecked(MenuOption::VelocityFilter);

        Packet packet;

@@ -564,15 +570,19 @@ void DdeFaceTracker::setEyeClosingThreshold(float eyeClosingThreshold) {
    _eyeClosingThreshold.set(eyeClosingThreshold);
}

-static const int CALIBRATION_BILLBOARD_WIDTH = 240;
-static const int CALIBRATION_BILLBOARD_HEIGHT = 180;
-static const int CALIBRATION_BILLBOARD_TOP_MARGIN = 60;
+static const int CALIBRATION_BILLBOARD_WIDTH = 300;
+static const int CALIBRATION_BILLBOARD_HEIGHT = 120;
+static const int CALIBRATION_BILLBOARD_TOP_MARGIN = 30;
+static const int CALIBRATION_BILLBOARD_LEFT_MARGIN = 30;
static const int CALIBRATION_BILLBOARD_FONT_SIZE = 16;
static const float CALIBRATION_BILLBOARD_ALPHA = 0.5f;
-static QString CALIBRATION_INSTRUCTION_MESSAGE = "Hold still to calibrate";
+static QString CALIBRATION_INSTRUCTION_MESSAGE = "Hold still to calibrate camera";

void DdeFaceTracker::calibrate() {
    if (!Menu::getInstance()->isOptionChecked(MenuOption::UseCamera) || _isMuted) {
        return;
    }

    if (!_isCalibrating) {
        qCDebug(interfaceapp) << "DDE Face Tracker: Calibration started";

@@ -629,10 +639,13 @@ void DdeFaceTracker::finishCalibration() {
    qApp->getOverlays().deleteOverlay(_calibrationBillboardID);
    _calibrationBillboard = NULL;
    _isCalibrating = false;
    _isCalibrated = true;

    for (int i = 0; i < NUM_FACESHIFT_BLENDSHAPES; i++) {
        _coefficientAverages[i] = _calibrationValues[i] / (float)CALIBRATION_SAMPLES;
    }

    reset();

    qCDebug(interfaceapp) << "DDE Face Tracker: Calibration finished";
}
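The calibration flow above accumulates one raw coefficient sample per received packet and then, in finishCalibration(), divides each running sum by the sample count to get a per-blendshape baseline. A minimal standalone sketch of that averaging step, with made-up sample and blendshape counts rather than the constants DdeFaceTracker actually uses:

    #include <vector>

    // Hypothetical sizes for illustration only.
    const int kCalibrationSamples = 30;
    const int kNumBlendshapes = 46;

    // Accumulate one raw sample per frame into the running sums...
    void addCalibrationSample(std::vector<float>& sums, const std::vector<float>& coefficients) {
        for (int i = 0; i < kNumBlendshapes; i++) {
            sums[i] += coefficients[i];
        }
    }

    // ...then turn the sums into per-blendshape averages, matching the division
    // by CALIBRATION_SAMPLES seen in finishCalibration() above.
    void finishAveraging(const std::vector<float>& sums, std::vector<float>& averages) {
        for (int i = 0; i < kNumBlendshapes; i++) {
            averages[i] = sums[i] / (float)kCalibrationSamples;
        }
    }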
@@ -136,6 +136,7 @@ private:
    TextOverlay* _calibrationBillboard;
    int _calibrationBillboardID;
    QString _calibrationMessage;
    bool _isCalibrated;
    void addCalibrationDatum();
    void cancelCalibration();
    void finishCalibration();
@@ -79,7 +79,11 @@ WebWindowClass::WebWindowClass(const QString& title, const QString& url, int wid
    }

    _webView->setPage(new DataWebPage());
-   _webView->setUrl(url);
+   if (!url.startsWith("http") && !url.startsWith("file://")) {
+       _webView->setUrl(QUrl::fromLocalFile(url));
+   } else {
+       _webView->setUrl(url);
+   }

    connect(this, &WebWindowClass::destroyed, _windowWidget, &QWidget::deleteLater);
    connect(_webView->page()->mainFrame(), &QWebFrame::javaScriptWindowObjectCleared,
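The branch added above routes anything that does not start with "http" or "file://" through QUrl::fromLocalFile(), so plain filesystem paths still become loadable file:// URLs while web addresses pass through untouched. A small sketch of the same check in isolation (the sample paths are invented):

    #include <QString>
    #include <QUrl>

    QUrl resolveWindowUrl(const QString& url) {
        // Bare filesystem paths become file:// URLs; http(s) and file URLs are used as-is.
        if (!url.startsWith("http") && !url.startsWith("file://")) {
            return QUrl::fromLocalFile(url); // e.g. "/home/user/ui/page.html" -> "file:///home/user/ui/page.html"
        }
        return QUrl(url); // e.g. "https://example.com/index.html"
    }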
@@ -162,8 +162,7 @@ void RunningScriptsWidget::showEvent(QShowEvent* event) {

    QRect parentGeometry = Application::getInstance()->getDesirableApplicationGeometry();
    int titleBarHeight = UIUtil::getWindowTitleBarHeight(this);
-   int menuBarHeight = Menu::getInstance()->geometry().height();
-   int topMargin = titleBarHeight + menuBarHeight;
+   int topMargin = titleBarHeight;

    setGeometry(parentGeometry.topLeft().x(), parentGeometry.topLeft().y() + topMargin,
        size().width(), parentWidget()->height() - topMargin);
@@ -38,8 +38,7 @@ bool ToolWindow::event(QEvent* event) {
    QRect mainGeometry = mainWindow->geometry();

    int titleBarHeight = UIUtil::getWindowTitleBarHeight(this);
-   int menuBarHeight = Menu::getInstance()->geometry().height();
-   int topMargin = titleBarHeight + menuBarHeight;
+   int topMargin = titleBarHeight;

    _lastGeometry = QRect(mainGeometry.topLeft().x(), mainGeometry.topLeft().y() + topMargin,
        DEFAULT_WIDTH, mainGeometry.height() - topMargin);
@@ -349,22 +349,6 @@ font: bold 16pt;
     </layout>
    </widget>
   </item>
-  <item>
-   <widget class="QLabel" name="tipLabel">
-    <property name="font">
-     <font>
-      <family>Helvetica,Arial,sans-serif</family>
-      <pointsize>14</pointsize>
-     </font>
-    </property>
-    <property name="styleSheet">
-     <string notr="true">color: #5f5f5f; margin: 2px;</string>
-    </property>
-    <property name="text">
-     <string>Tip</string>
-    </property>
-   </widget>
-  </item>
  </layout>
 </widget>
</widget>
@@ -35,6 +35,7 @@
#include "RenderableParticleEffectEntityItem.h"
#include "RenderableSphereEntityItem.h"
#include "RenderableTextEntityItem.h"
#include "RenderableWebEntityItem.h"
#include "RenderableZoneEntityItem.h"
#include "RenderableLineEntityItem.h"
#include "EntitiesRendererLogging.h"

@@ -58,6 +59,7 @@ EntityTreeRenderer::EntityTreeRenderer(bool wantScripts, AbstractViewStateInterf
    REGISTER_ENTITY_TYPE_WITH_FACTORY(Sphere, RenderableSphereEntityItem::factory)
    REGISTER_ENTITY_TYPE_WITH_FACTORY(Light, RenderableLightEntityItem::factory)
    REGISTER_ENTITY_TYPE_WITH_FACTORY(Text, RenderableTextEntityItem::factory)
    REGISTER_ENTITY_TYPE_WITH_FACTORY(Web, RenderableWebEntityItem::factory)
    REGISTER_ENTITY_TYPE_WITH_FACTORY(ParticleEffect, RenderableParticleEffectEntityItem::factory)
    REGISTER_ENTITY_TYPE_WITH_FACTORY(Zone, RenderableZoneEntityItem::factory)
    REGISTER_ENTITY_TYPE_WITH_FACTORY(Line, RenderableLineEntityItem::factory)
libraries/entities-renderer/src/RenderableWebEntityItem.cpp (new file, 147 lines)

@@ -0,0 +1,147 @@
//
// Created by Bradley Austin Davis on 2015/05/12
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "RenderableWebEntityItem.h"

#include <glm/gtx/quaternion.hpp>

#include <gpu/GPUConfig.h>

#include <DeferredLightingEffect.h>
#include <GeometryCache.h>
#include <PerfStat.h>
#include <TextRenderer.h>
#include <OffscreenQmlSurface.h>
#include <AbstractViewStateInterface.h>
#include <GLMHelpers.h>
#include <PathUtils.h>
#include <TextureCache.h>
#include <gpu/GLBackend.h>

const int FIXED_FONT_POINT_SIZE = 40;
const float DPI = 30.47;
const float METERS_TO_INCHES = 39.3701;

EntityItem* RenderableWebEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
    return new RenderableWebEntityItem(entityID, properties);
}

RenderableWebEntityItem::RenderableWebEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties) :
    WebEntityItem(entityItemID, properties) {
    qDebug() << "Created web entity " << getID();
}

RenderableWebEntityItem::~RenderableWebEntityItem() {
    if (_webSurface) {
        _webSurface->pause();
        _webSurface->disconnect(_connection);
        // After the disconnect, ensure that we have the latest texture by acquiring the
        // lock used when updating the _texture value
        _textureLock.lock();
        _textureLock.unlock();
        // The lifetime of the QML surface MUST be managed by the main thread
        // Additionally, we MUST use local variables copied by value, rather than
        // member variables, since they would implicitly refer to a this that
        // is no longer valid
        auto webSurface = _webSurface;
        auto texture = _texture;
        AbstractViewStateInterface::instance()->postLambdaEvent([webSurface, texture] {
            if (texture) {
                webSurface->releaseTexture(texture);
            }
            webSurface->deleteLater();
        });
    }
    qDebug() << "Destroyed web entity " << getID();
}

void RenderableWebEntityItem::render(RenderArgs* args) {
    QOpenGLContext * currentContext = QOpenGLContext::currentContext();
    QSurface * currentSurface = currentContext->surface();
    if (!_webSurface) {
        _webSurface = new OffscreenQmlSurface();
        _webSurface->create(currentContext);
        _webSurface->setBaseUrl(QUrl::fromLocalFile(PathUtils::resourcesPath() + "/qml/"));
        _webSurface->load("WebEntity.qml");
        _webSurface->resume();
        _webSurface->getRootItem()->setProperty("url", _sourceUrl);
        _connection = QObject::connect(_webSurface, &OffscreenQmlSurface::textureUpdated, [&](GLuint textureId) {
            _webSurface->lockTexture(textureId);
            assert(!glGetError());
            // TODO change to atomic<GLuint>?
            withLock(_textureLock, [&] {
                std::swap(_texture, textureId);
            });
            if (textureId) {
                _webSurface->releaseTexture(textureId);
            }
            if (_texture) {
                _webSurface->makeCurrent();
                glBindTexture(GL_TEXTURE_2D, _texture);
                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
                glBindTexture(GL_TEXTURE_2D, 0);
                _webSurface->doneCurrent();
            }
        });
    }

    glm::vec2 dims = glm::vec2(_dimensions);
    dims *= METERS_TO_INCHES * DPI;
    // The offscreen surface is idempotent for resizes (bails early
    // if it's a no-op), so it's safe to just call resize every frame
    // without worrying about excessive overhead.
    _webSurface->resize(QSize(dims.x, dims.y));
    currentContext->makeCurrent(currentSurface);

    PerformanceTimer perfTimer("RenderableWebEntityItem::render");
    assert(getType() == EntityTypes::Web);
    glm::vec3 position = getPosition();
    glm::vec3 dimensions = getDimensions();
    glm::vec3 halfDimensions = dimensions / 2.0f;
    glm::quat rotation = getRotation();

    //qCDebug(entitytree) << "RenderableWebEntityItem::render() id:" << getEntityItemID() << "text:" << getText();

    glPushMatrix();
    {
        glTranslatef(position.x, position.y, position.z);
        glm::vec3 axis = glm::axis(rotation);
        glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);

        float alpha = 1.0f;
        static const glm::vec2 texMin(0);
        static const glm::vec2 texMax(1);
        glm::vec2 topLeft(-halfDimensions.x, -halfDimensions.y);
        glm::vec2 bottomRight(halfDimensions.x, halfDimensions.y);
        if (_texture) {
            glEnable(GL_TEXTURE_2D);
            glBindTexture(GL_TEXTURE_2D, _texture);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        }
        DependencyManager::get<GeometryCache>()->renderQuad(
            topLeft, bottomRight, texMin, texMax, glm::vec4(1));
        if (_texture) {
            glBindTexture(GL_TEXTURE_2D, 0);
            glEnable(GL_TEXTURE_2D);
        }
    }
    glPopMatrix();
}

void RenderableWebEntityItem::setSourceUrl(const QString& value) {
    if (_sourceUrl != value) {
        _sourceUrl = value;
        if (_webSurface) {
            AbstractViewStateInterface::instance()->postLambdaEvent([this] {
                _webSurface->getRootItem()->setProperty("url", _sourceUrl);
            });
        }
    }
}
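For scale, the resize logic in render() above converts the entity's dimensions from meters to an offscreen surface size in pixels: meters times METERS_TO_INCHES (39.3701) times the assumed density DPI (30.47 pixels per inch). With the 1.6 m x 0.9 m dimensions that edit.js uses when creating a Web entity, that comes out to roughly a 1080p page:

    // Sketch of the meters -> pixels conversion used when sizing the QML surface.
    const float DPI = 30.47f;               // assumed pixels per inch for in-world web pages
    const float METERS_TO_INCHES = 39.3701f;

    // 1.6 m * 39.3701 in/m * 30.47 px/in ~= 1919 px
    // 0.9 m * 39.3701 in/m * 30.47 px/in ~= 1080 px
    const int widthPx  = (int)(1.6f * METERS_TO_INCHES * DPI);
    const int heightPx = (int)(0.9f * METERS_TO_INCHES * DPI);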
libraries/entities-renderer/src/RenderableWebEntityItem.h (new file, 36 lines)

@@ -0,0 +1,36 @@
//
// Created by Bradley Austin Davis on 2015/05/12
// Copyright 2013 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_RenderableWebEntityItem_h
#define hifi_RenderableWebEntityItem_h

#include <QSharedPointer>

#include <WebEntityItem.h>

class OffscreenQmlSurface;

class RenderableWebEntityItem : public WebEntityItem {
public:
    static EntityItem* factory(const EntityItemID& entityID, const EntityItemProperties& properties);

    RenderableWebEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties);
    ~RenderableWebEntityItem();

    virtual void render(RenderArgs* args);
    virtual void setSourceUrl(const QString& value);

private:
    OffscreenQmlSurface* _webSurface{ nullptr };
    QMetaObject::Connection _connection;
    uint32_t _texture{ 0 };
    QMutex _textureLock;
};

#endif // hifi_RenderableWebEntityItem_h
|
@ -86,6 +86,7 @@ EntityItemProperties::EntityItemProperties() :
|
|||
CONSTRUCT_PROPERTY(keyLightDirection, ZoneEntityItem::DEFAULT_KEYLIGHT_DIRECTION),
|
||||
CONSTRUCT_PROPERTY(name, ENTITY_ITEM_DEFAULT_NAME),
|
||||
CONSTRUCT_PROPERTY(backgroundMode, BACKGROUND_MODE_INHERIT),
|
||||
CONSTRUCT_PROPERTY(sourceUrl, ""),
|
||||
|
||||
_id(UNKNOWN_ENTITY_ID),
|
||||
_idSet(false),
|
||||
|
@ -328,6 +329,7 @@ EntityPropertyFlags EntityItemProperties::getChangedProperties() const {
|
|||
CHECK_PROPERTY_CHANGE(PROP_KEYLIGHT_AMBIENT_INTENSITY, keyLightAmbientIntensity);
|
||||
CHECK_PROPERTY_CHANGE(PROP_KEYLIGHT_DIRECTION, keyLightDirection);
|
||||
CHECK_PROPERTY_CHANGE(PROP_BACKGROUND_MODE, backgroundMode);
|
||||
CHECK_PROPERTY_CHANGE(PROP_SOURCE_URL, sourceUrl);
|
||||
|
||||
changedProperties += _stage.getChangedProperties();
|
||||
changedProperties += _atmosphere.getChangedProperties();
|
||||
|
@ -409,6 +411,7 @@ QScriptValue EntityItemProperties::copyToScriptValue(QScriptEngine* engine, bool
|
|||
COPY_PROPERTY_TO_QSCRIPTVALUE(keyLightAmbientIntensity);
|
||||
COPY_PROPERTY_TO_QSCRIPTVALUE_VEC3(keyLightDirection);
|
||||
COPY_PROPERTY_TO_QSCRIPTVALUE_GETTER(backgroundMode, getBackgroundModeAsString());
|
||||
COPY_PROPERTY_TO_QSCRIPTVALUE(sourceUrl);
|
||||
|
||||
// Sitting properties support
|
||||
if (!skipDefaults) {
|
||||
|
@ -510,6 +513,7 @@ void EntityItemProperties::copyFromScriptValue(const QScriptValue& object) {
|
|||
COPY_PROPERTY_FROM_QSCRIPTVALUE_FLOAT(keyLightAmbientIntensity, setKeyLightAmbientIntensity);
|
||||
COPY_PROPERTY_FROM_QSCRIPTVALUE_VEC3(keyLightDirection, setKeyLightDirection);
|
||||
COPY_PROPERTY_FROM_QSCRITPTVALUE_ENUM(backgroundMode, BackgroundMode);
|
||||
COPY_PROPERTY_FROM_QSCRIPTVALUE_STRING(sourceUrl, setSourceUrl);
|
||||
|
||||
_stage.copyFromScriptValue(object, _defaultSettings);
|
||||
_atmosphere.copyFromScriptValue(object, _defaultSettings);
|
||||
|
@ -666,7 +670,11 @@ bool EntityItemProperties::encodeEntityEditPacket(PacketType command, EntityItem
|
|||
APPEND_ENTITY_PROPERTY(PROP_LOCKED, appendValue, properties.getLocked());
|
||||
APPEND_ENTITY_PROPERTY(PROP_USER_DATA, appendValue, properties.getUserData());
|
||||
APPEND_ENTITY_PROPERTY(PROP_SIMULATOR_ID, appendValue, properties.getSimulatorID());
|
||||
|
||||
|
||||
if (properties.getType() == EntityTypes::Web) {
|
||||
APPEND_ENTITY_PROPERTY(PROP_SOURCE_URL, appendValue, properties.getSourceUrl());
|
||||
}
|
||||
|
||||
if (properties.getType() == EntityTypes::Text) {
|
||||
APPEND_ENTITY_PROPERTY(PROP_TEXT, appendValue, properties.getText());
|
||||
APPEND_ENTITY_PROPERTY(PROP_LINE_HEIGHT, appendValue, properties.getLineHeight());
|
||||
|
@ -922,6 +930,10 @@ bool EntityItemProperties::decodeEntityEditPacket(const unsigned char* data, int
|
|||
READ_ENTITY_PROPERTY_STRING_TO_PROPERTIES(PROP_USER_DATA, setUserData);
|
||||
READ_ENTITY_PROPERTY_UUID_TO_PROPERTIES(PROP_SIMULATOR_ID, setSimulatorID);
|
||||
|
||||
if (properties.getType() == EntityTypes::Web) {
|
||||
READ_ENTITY_PROPERTY_STRING_TO_PROPERTIES(PROP_SOURCE_URL, setSourceUrl);
|
||||
}
|
||||
|
||||
if (properties.getType() == EntityTypes::Text) {
|
||||
READ_ENTITY_PROPERTY_STRING_TO_PROPERTIES(PROP_TEXT, setText);
|
||||
READ_ENTITY_PROPERTY_TO_PROPERTIES(PROP_LINE_HEIGHT, float, setLineHeight);
|
||||
|
@ -1073,6 +1085,7 @@ void EntityItemProperties::markAllChanged() {
|
|||
_atmosphere.markAllChanged();
|
||||
_skybox.markAllChanged();
|
||||
|
||||
_sourceUrlChanged = true;
|
||||
}
|
||||
|
||||
/// The maximum bounding cube for the entity, independent of it's rotation.
|
||||
|
|
|
@@ -52,6 +52,7 @@ class EntityItemProperties {
    friend class TextEntityItem; // TODO: consider removing this friend relationship and use public methods
    friend class ParticleEffectEntityItem; // TODO: consider removing this friend relationship and use public methods
    friend class ZoneEntityItem; // TODO: consider removing this friend relationship and use public methods
    friend class WebEntityItem; // TODO: consider removing this friend relationship and use public methods
    friend class LineEntityItem; // TODO: consider removing this friend relationship and use public methods
public:
    EntityItemProperties();

@@ -139,6 +140,7 @@ public:
    DEFINE_PROPERTY_GROUP(Stage, stage, StagePropertyGroup);
    DEFINE_PROPERTY_GROUP(Atmosphere, atmosphere, AtmospherePropertyGroup);
    DEFINE_PROPERTY_GROUP(Skybox, skybox, SkyboxPropertyGroup);
    DEFINE_PROPERTY_REF(PROP_SOURCE_URL, SourceUrl, sourceUrl, QString);

    static QString getBackgroundModeString(BackgroundMode mode);
@@ -148,6 +148,10 @@ enum EntityPropertyList {
    PROP_SKYBOX_URL = PROP_ANIMATION_FPS,
    PROP_STAGE_AUTOMATIC_HOURDAY = PROP_ANIMATION_FRAME_INDEX,

    // Aliases/Piggyback properties for Web. These properties intentionally reuse the enum values for
    // other properties which will never overlap with each other.
    PROP_SOURCE_URL = PROP_MODEL_URL,

    // WARNING!!! DO NOT ADD PROPS_xxx here unless you really really meant to.... Add them UP above
};
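The aliasing comment above is the important constraint: PROP_SOURCE_URL does not claim a new wire value, it reuses PROP_MODEL_URL's slot, which only works because the encode/decode paths check properties.getType() before interpreting the flag (as the EntityItemProperties hunks earlier in this diff do for EntityTypes::Web). A reduced sketch of the idea with toy values, not the real EntityPropertyList:

    // Toy illustration of property-ID aliasing; the real values live in EntityPropertyList.
    enum ToyPropertyList {
        TOY_PROP_MODEL_URL = 7,                   // interpreted as modelURL for Model entities
        TOY_PROP_SOURCE_URL = TOY_PROP_MODEL_URL, // interpreted as sourceUrl for Web entities
    };

    // Safe only because a decoder dispatches on the entity type first, so the same
    // bit position never has to mean both things for a single entity.
    static_assert(TOY_PROP_MODEL_URL == TOY_PROP_SOURCE_URL, "aliased on purpose");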
@@ -24,6 +24,7 @@
#include "ParticleEffectEntityItem.h"
#include "SphereEntityItem.h"
#include "TextEntityItem.h"
#include "WebEntityItem.h"
#include "ZoneEntityItem.h"
#include "LineEntityItem.h"

@@ -37,6 +38,7 @@ const QString ENTITY_TYPE_NAME_UNKNOWN = "Unknown";
// Register Entity the default implementations of entity types here...
REGISTER_ENTITY_TYPE(Model)
REGISTER_ENTITY_TYPE(Box)
REGISTER_ENTITY_TYPE(Web)
REGISTER_ENTITY_TYPE(Sphere)
REGISTER_ENTITY_TYPE(Light)
REGISTER_ENTITY_TYPE(Text)

@@ -37,6 +37,7 @@ public:
    Text,
    ParticleEffect,
    Zone,
    Web,
    Line,
    LAST = Line
} EntityType;
libraries/entities/src/WebEntityItem.cpp (new file, 153 lines)
|
@ -0,0 +1,153 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/12
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "WebEntityItem.h"
|
||||
|
||||
#include <glm/gtx/transform.hpp>
|
||||
|
||||
#include <QDebug>
|
||||
|
||||
#include <ByteCountCoding.h>
|
||||
#include <PlaneShape.h>
|
||||
|
||||
#include "EntityTree.h"
|
||||
#include "EntityTreeElement.h"
|
||||
#include "EntitiesLogging.h"
|
||||
|
||||
|
||||
const QString WebEntityItem::DEFAULT_SOURCE_URL("http://www.google.com");
|
||||
|
||||
EntityItem* WebEntityItem::factory(const EntityItemID& entityID, const EntityItemProperties& properties) {
|
||||
EntityItem* result = new WebEntityItem(entityID, properties);
|
||||
return result;
|
||||
}
|
||||
|
||||
WebEntityItem::WebEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties) :
|
||||
EntityItem(entityItemID)
|
||||
{
|
||||
_type = EntityTypes::Web;
|
||||
_created = properties.getCreated();
|
||||
setProperties(properties);
|
||||
}
|
||||
|
||||
const float WEB_ENTITY_ITEM_FIXED_DEPTH = 0.01f;
|
||||
|
||||
void WebEntityItem::setDimensions(const glm::vec3& value) {
|
||||
// NOTE: Web Entities always have a "depth" of 1cm.
|
||||
_dimensions = glm::vec3(value.x, value.y, WEB_ENTITY_ITEM_FIXED_DEPTH);
|
||||
}
|
||||
|
||||
EntityItemProperties WebEntityItem::getProperties() const {
|
||||
EntityItemProperties properties = EntityItem::getProperties(); // get the properties from our base class
|
||||
COPY_ENTITY_PROPERTY_TO_PROPERTIES(sourceUrl, getSourceUrl);
|
||||
return properties;
|
||||
}
|
||||
|
||||
bool WebEntityItem::setProperties(const EntityItemProperties& properties) {
|
||||
bool somethingChanged = false;
|
||||
somethingChanged = EntityItem::setProperties(properties); // set the properties in our base class
|
||||
|
||||
SET_ENTITY_PROPERTY_FROM_PROPERTIES(sourceUrl, setSourceUrl);
|
||||
|
||||
if (somethingChanged) {
|
||||
bool wantDebug = false;
|
||||
if (wantDebug) {
|
||||
uint64_t now = usecTimestampNow();
|
||||
int elapsed = now - getLastEdited();
|
||||
qCDebug(entities) << "WebEntityItem::setProperties() AFTER update... edited AGO=" << elapsed <<
|
||||
"now=" << now << " getLastEdited()=" << getLastEdited();
|
||||
}
|
||||
setLastEdited(properties._lastEdited);
|
||||
}
|
||||
|
||||
return somethingChanged;
|
||||
}
|
||||
|
||||
int WebEntityItem::readEntitySubclassDataFromBuffer(const unsigned char* data, int bytesLeftToRead,
|
||||
ReadBitstreamToTreeParams& args,
|
||||
EntityPropertyFlags& propertyFlags, bool overwriteLocalData) {
|
||||
|
||||
int bytesRead = 0;
|
||||
const unsigned char* dataAt = data;
|
||||
|
||||
READ_ENTITY_PROPERTY_STRING(PROP_SOURCE_URL, setSourceUrl);
|
||||
|
||||
return bytesRead;
|
||||
}
|
||||
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
EntityPropertyFlags WebEntityItem::getEntityProperties(EncodeBitstreamParams& params) const {
|
||||
EntityPropertyFlags requestedProperties = EntityItem::getEntityProperties(params);
|
||||
requestedProperties += PROP_SOURCE_URL;
|
||||
return requestedProperties;
|
||||
}
|
||||
|
||||
void WebEntityItem::appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
|
||||
EntityTreeElementExtraEncodeData* modelTreeElementExtraEncodeData,
|
||||
EntityPropertyFlags& requestedProperties,
|
||||
EntityPropertyFlags& propertyFlags,
|
||||
EntityPropertyFlags& propertiesDidntFit,
|
||||
int& propertyCount,
|
||||
OctreeElement::AppendState& appendState) const {
|
||||
|
||||
bool successPropertyFits = true;
|
||||
APPEND_ENTITY_PROPERTY(PROP_SOURCE_URL, appendValue, _sourceUrl);
|
||||
}
|
||||
|
||||
|
||||
bool WebEntityItem::findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
||||
bool& keepSearching, OctreeElement*& element, float& distance, BoxFace& face,
|
||||
void** intersectedObject, bool precisionPicking) const {
|
||||
|
||||
RayIntersectionInfo rayInfo;
|
||||
rayInfo._rayStart = origin;
|
||||
rayInfo._rayDirection = direction;
|
||||
rayInfo._rayLength = std::numeric_limits<float>::max();
|
||||
|
||||
PlaneShape plane;
|
||||
|
||||
const glm::vec3 UNROTATED_NORMAL(0.0f, 0.0f, -1.0f);
|
||||
glm::vec3 normal = _rotation * UNROTATED_NORMAL;
|
||||
plane.setNormal(normal);
|
||||
plane.setPoint(getPosition()); // the position is definitely a point on our plane
|
||||
|
||||
bool intersects = plane.findRayIntersection(rayInfo);
|
||||
|
||||
if (intersects) {
|
||||
glm::vec3 hitAt = origin + (direction * rayInfo._hitDistance);
|
||||
// now we know the point the ray hit our plane
|
||||
|
||||
glm::mat4 rotation = glm::mat4_cast(getRotation());
|
||||
glm::mat4 translation = glm::translate(getPosition());
|
||||
glm::mat4 entityToWorldMatrix = translation * rotation;
|
||||
glm::mat4 worldToEntityMatrix = glm::inverse(entityToWorldMatrix);
|
||||
|
||||
glm::vec3 dimensions = getDimensions();
|
||||
glm::vec3 registrationPoint = getRegistrationPoint();
|
||||
glm::vec3 corner = -(dimensions * registrationPoint);
|
||||
AABox entityFrameBox(corner, dimensions);
|
||||
|
||||
glm::vec3 entityFrameHitAt = glm::vec3(worldToEntityMatrix * glm::vec4(hitAt, 1.0f));
|
||||
|
||||
intersects = entityFrameBox.contains(entityFrameHitAt);
|
||||
}
|
||||
|
||||
if (intersects) {
|
||||
distance = rayInfo._hitDistance;
|
||||
}
|
||||
return intersects;
|
||||
}
|
||||
|
||||
void WebEntityItem::setSourceUrl(const QString& value) {
|
||||
if (_sourceUrl != value) {
|
||||
_sourceUrl = value;
|
||||
}
|
||||
}
|
||||
|
||||
const QString& WebEntityItem::getSourceUrl() const { return _sourceUrl; }
|
libraries/entities/src/WebEntityItem.h (new file, 59 lines)
|
@ -0,0 +1,59 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015/05/12
|
||||
// Copyright 2013 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_WebEntityItem_h
|
||||
#define hifi_WebEntityItem_h
|
||||
|
||||
#include "EntityItem.h"
|
||||
|
||||
class WebEntityItem : public EntityItem {
|
||||
public:
|
||||
static const QString DEFAULT_SOURCE_URL;
|
||||
|
||||
static EntityItem* factory(const EntityItemID& entityID, const EntityItemProperties& properties);
|
||||
|
||||
WebEntityItem(const EntityItemID& entityItemID, const EntityItemProperties& properties);
|
||||
|
||||
ALLOW_INSTANTIATION // This class can be instantiated
|
||||
|
||||
/// set dimensions in domain scale units (0.0 - 1.0) this will also reset radius appropriately
|
||||
virtual void setDimensions(const glm::vec3& value);
|
||||
virtual ShapeType getShapeType() const { return SHAPE_TYPE_BOX; }
|
||||
|
||||
// methods for getting/setting all properties of an entity
|
||||
virtual EntityItemProperties getProperties() const;
|
||||
virtual bool setProperties(const EntityItemProperties& properties);
|
||||
|
||||
// TODO: eventually only include properties changed since the params.lastViewFrustumSent time
|
||||
virtual EntityPropertyFlags getEntityProperties(EncodeBitstreamParams& params) const;
|
||||
|
||||
virtual void appendSubclassData(OctreePacketData* packetData, EncodeBitstreamParams& params,
|
||||
EntityTreeElementExtraEncodeData* modelTreeElementExtraEncodeData,
|
||||
EntityPropertyFlags& requestedProperties,
|
||||
EntityPropertyFlags& propertyFlags,
|
||||
EntityPropertyFlags& propertiesDidntFit,
|
||||
int& propertyCount,
|
||||
OctreeElement::AppendState& appendState) const;
|
||||
|
||||
virtual int readEntitySubclassDataFromBuffer(const unsigned char* data, int bytesLeftToRead,
|
||||
ReadBitstreamToTreeParams& args,
|
||||
EntityPropertyFlags& propertyFlags, bool overwriteLocalData);
|
||||
|
||||
virtual bool supportsDetailedRayIntersection() const { return true; }
|
||||
virtual bool findDetailedRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
||||
bool& keepSearching, OctreeElement*& element, float& distance, BoxFace& face,
|
||||
void** intersectedObject, bool precisionPicking) const;
|
||||
|
||||
virtual void setSourceUrl(const QString& value);
|
||||
const QString& getSourceUrl() const;
|
||||
|
||||
protected:
|
||||
QString _sourceUrl;
|
||||
};
|
||||
|
||||
#endif // hifi_WebEntityItem_h
|
|
@ -23,7 +23,7 @@ ThreadedAssignment::ThreadedAssignment(const QByteArray& packet) :
|
|||
_isFinished(false),
|
||||
_datagramProcessingThread(NULL)
|
||||
{
|
||||
|
||||
|
||||
}
|
||||
|
||||
void ThreadedAssignment::setFinished(bool isFinished) {
|
||||
|
@ -41,7 +41,7 @@ void ThreadedAssignment::setFinished(bool isFinished) {
|
|||
if (_statsTimer) {
|
||||
_statsTimer->stop();
|
||||
}
|
||||
|
||||
|
||||
// stop processing datagrams from the node socket
|
||||
// this ensures we won't process a domain list while we are going down
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
@ -52,20 +52,21 @@ void ThreadedAssignment::setFinished(bool isFinished) {
|
|||
|
||||
// if we have a datagram processing thread, quit it and wait on it to make sure that
|
||||
// the node socket is back on the same thread as the NodeList
|
||||
|
||||
|
||||
|
||||
if (_datagramProcessingThread) {
|
||||
// tell the datagram processing thread to quit and wait until it is done,
|
||||
// tell the datagram processing thread to quit and wait until it is done,
|
||||
// then return the node socket to the NodeList
|
||||
_datagramProcessingThread->quit();
|
||||
_datagramProcessingThread->wait();
|
||||
|
||||
|
||||
// set node socket parent back to NodeList
|
||||
nodeList->getNodeSocket().setParent(nodeList.data());
|
||||
}
|
||||
|
||||
|
||||
// move the NodeList back to the QCoreApplication instance's thread
|
||||
nodeList->moveToThread(QCoreApplication::instance()->thread());
|
||||
|
||||
|
||||
emit finished();
|
||||
}
|
||||
}
|
||||
|
@ -74,17 +75,17 @@ void ThreadedAssignment::setFinished(bool isFinished) {
|
|||
void ThreadedAssignment::commonInit(const QString& targetName, NodeType_t nodeType, bool shouldSendStats) {
|
||||
// change the logging target name while the assignment is running
|
||||
LogHandler::getInstance().setTargetName(targetName);
|
||||
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->setOwnerType(nodeType);
|
||||
|
||||
|
||||
// this is a temp fix for Qt 5.3 - rebinding the node socket gives us readyRead for the socket on this thread
|
||||
nodeList->rebindNodeSocket();
|
||||
|
||||
|
||||
_domainServerTimer = new QTimer();
|
||||
connect(_domainServerTimer, SIGNAL(timeout()), this, SLOT(checkInWithDomainServerOrExit()));
|
||||
_domainServerTimer->start(DOMAIN_SERVER_CHECK_IN_MSECS);
|
||||
|
||||
|
||||
if (shouldSendStats) {
|
||||
// send a stats packet every 1 second
|
||||
_statsTimer = new QTimer();
|
||||
|
@ -95,15 +96,15 @@ void ThreadedAssignment::commonInit(const QString& targetName, NodeType_t nodeTy
|
|||
|
||||
void ThreadedAssignment::addPacketStatsAndSendStatsPacket(QJsonObject &statsObject) {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
|
||||
float packetsPerSecond, bytesPerSecond;
|
||||
// XXX can BandwidthRecorder be used for this?
|
||||
nodeList->getPacketStats(packetsPerSecond, bytesPerSecond);
|
||||
nodeList->resetPacketStats();
|
||||
|
||||
|
||||
statsObject["packets_per_second"] = packetsPerSecond;
|
||||
statsObject["bytes_per_second"] = bytesPerSecond;
|
||||
|
||||
|
||||
nodeList->sendStatsToDomainServer(statsObject);
|
||||
}
|
||||
|
||||
|
@ -122,7 +123,7 @@ void ThreadedAssignment::checkInWithDomainServerOrExit() {
|
|||
|
||||
bool ThreadedAssignment::readAvailableDatagram(QByteArray& destinationByteArray, HifiSockAddr& senderSockAddr) {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
|
||||
if (nodeList->getNodeSocket().hasPendingDatagrams()) {
|
||||
destinationByteArray.resize(nodeList->getNodeSocket().pendingDatagramSize());
|
||||
nodeList->getNodeSocket().readDatagram(destinationByteArray.data(), destinationByteArray.size(),
|
||||
|
|
|
@ -20,8 +20,8 @@ class ThreadedAssignment : public Assignment {
|
|||
Q_OBJECT
|
||||
public:
|
||||
ThreadedAssignment(const QByteArray& packet);
|
||||
~ThreadedAssignment() { stop(); }
|
||||
|
||||
~ThreadedAssignment() { stop(); }
|
||||
|
||||
void setFinished(bool isFinished);
|
||||
virtual void aboutToFinish() { };
|
||||
void addPacketStatsAndSendStatsPacket(QJsonObject& statsObject);
|
||||
|
@ -35,7 +35,7 @@ public slots:
|
|||
|
||||
signals:
|
||||
void finished();
|
||||
|
||||
|
||||
protected:
|
||||
bool readAvailableDatagram(QByteArray& destinationByteArray, HifiSockAddr& senderSockAddr);
|
||||
void commonInit(const QString& targetName, NodeType_t nodeType, bool shouldSendStats = true);
|
||||
|
@ -43,7 +43,7 @@ protected:
|
|||
QThread* _datagramProcessingThread;
|
||||
QTimer* _domainServerTimer = nullptr;
|
||||
QTimer* _statsTimer = nullptr;
|
||||
|
||||
|
||||
private slots:
|
||||
void checkInWithDomainServerOrExit();
|
||||
|
||||
|
|
|
@@ -15,6 +15,36 @@
#include "BulletUtil.h"

// find the average point on a convex shape
glm::vec3 findCenter(const QVector<glm::vec3>& points) {
    glm::vec3 result = glm::vec3(0);
    for (int i = 0; i < points.size(); i++) {
        result += points[i];
    }
    return result * (1.0f / points.size());
}

// bullet puts "margins" around all the collision shapes. This can cause shapes will hulls
// to float a bit above what they are sitting on, etc. One option is to call:
//
// compound->setMargin(0.01);
//
// to reduce the size of the margin, but this has some consequences for the
// performance and stability of the simulation. Instead, we clench in all the points of
// the hull by the margin. These clenched points + bullets margin will but the actual
// collision hull fairly close to the visual edge of the object.
QVector<glm::vec3> shrinkByMargin(const QVector<glm::vec3>& points, const glm::vec3 center, float margin) {
    QVector<glm::vec3> result(points.size());
    for (int i = 0; i < points.size(); i++) {
        glm::vec3 pVec = points[ i ] - center;
        glm::vec3 pVecNorm = glm::normalize(pVec);
        result[ i ] = center + pVec - (pVecNorm * margin);
    }
    return result;
}

btCollisionShape* ShapeInfoUtil::createShapeFromInfo(const ShapeInfo& info) {
    btCollisionShape* shape = NULL;
    switch(info.getType()) {

@@ -40,7 +70,9 @@ btCollisionShape* ShapeInfoUtil::createShapeFromInfo(const ShapeInfo& info) {
            if (numSubShapes == 1) {
                auto hull = new btConvexHullShape();
                const QVector<QVector<glm::vec3>>& points = info.getPoints();
-               foreach (glm::vec3 point, points[0]) {
+               glm::vec3 center = findCenter(points[0]);
+               QVector<glm::vec3> shrunken = shrinkByMargin(points[0], center, hull->getMargin());
+               foreach (glm::vec3 point, shrunken) {
                    btVector3 btPoint(point[0], point[1], point[2]);
                    hull->addPoint(btPoint, false);
                }

@@ -53,7 +85,9 @@ btCollisionShape* ShapeInfoUtil::createShapeFromInfo(const ShapeInfo& info) {
                trans.setIdentity();
                foreach (QVector<glm::vec3> hullPoints, points) {
                    auto hull = new btConvexHullShape();
-                   foreach (glm::vec3 point, hullPoints) {
+                   glm::vec3 center = findCenter(points[0]);
+                   QVector<glm::vec3> shrunken = shrinkByMargin(hullPoints, center, hull->getMargin());
+                   foreach (glm::vec3 point, shrunken) {
                        btVector3 btPoint(point[0], point[1], point[2]);
                        hull->addPoint(btPoint, false);
                    }
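Numerically, shrinkByMargin() above moves every hull vertex toward the hull's average point by exactly the collision margin, so the shrunken hull plus Bullet's own margin ends up close to the visible surface. For one vertex the operation is a step of length margin along the center-to-vertex direction (the numbers below are made up):

    #include <glm/glm.hpp>

    // One corner of a unit cube hull centered at the origin, with a 0.04 margin.
    glm::vec3 shrinkOneVertex() {
        glm::vec3 center(0.0f);
        glm::vec3 point(0.5f, 0.5f, 0.5f);
        float margin = 0.04f;

        glm::vec3 pVec = point - center; // (0.5, 0.5, 0.5)
        glm::vec3 shrunken = center + pVec - glm::normalize(pVec) * margin;
        // shrunken ~= (0.477, 0.477, 0.477): the corner moves 0.04 toward the center,
        // leaving room for Bullet to add its margin back on the outside.
        return shrunken;
    }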
libraries/render-utils/src/AbstractViewStateInterface.cpp (new file, 20 lines)
|
@ -0,0 +1,20 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis 2015/05/13
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "AbstractViewStateInterface.h"
|
||||
|
||||
static AbstractViewStateInterface* INSTANCE{nullptr};
|
||||
|
||||
AbstractViewStateInterface* AbstractViewStateInterface::instance() {
|
||||
return INSTANCE;
|
||||
}
|
||||
|
||||
void AbstractViewStateInterface::setInstance(AbstractViewStateInterface* instance) {
|
||||
INSTANCE = instance;
|
||||
}
|
||||
|
|
@ -13,6 +13,9 @@
|
|||
#define hifi_AbstractViewStateInterface_h
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <functional>
|
||||
|
||||
#include <QtGlobal>
|
||||
|
||||
class Transform;
|
||||
class QThread;
|
||||
|
@ -37,7 +40,7 @@ public:
|
|||
/// overrides environment data
|
||||
virtual void overrideEnvironmentData(const EnvironmentData& newData) = 0;
|
||||
virtual void endOverrideEnvironmentData() = 0;
|
||||
|
||||
|
||||
/// gets the shadow view frustum for rendering the view state
|
||||
virtual ViewFrustum* getShadowViewFrustum() = 0;
|
||||
|
||||
|
@ -53,6 +56,12 @@ public:
|
|||
virtual PickRay computePickRay(float x, float y) const = 0;
|
||||
|
||||
virtual const glm::vec3& getAvatarPosition() const = 0;
|
||||
|
||||
virtual void postLambdaEvent(std::function<void()> f) = 0;
|
||||
virtual qreal getDevicePixelRatio() = 0;
|
||||
|
||||
static AbstractViewStateInterface* instance();
|
||||
static void setInstance(AbstractViewStateInterface* instance);
|
||||
};
|
||||
|
||||
|
||||
|
|
libraries/render-utils/src/OffscreenQmlSurface.cpp (new file, 376 lines)
|
@ -0,0 +1,376 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2015-05-13
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include "OffscreenQmlSurface.h"
|
||||
|
||||
#include <QOpenGLFramebufferObject>
|
||||
#include <QOpenGLDebugLogger>
|
||||
#include <QGLWidget>
|
||||
#include <QtQml>
|
||||
#include "AbstractViewStateInterface.h"
|
||||
|
||||
Q_DECLARE_LOGGING_CATEGORY(offscreenFocus)
|
||||
Q_LOGGING_CATEGORY(offscreenFocus, "hifi.offscreen.focus")
|
||||
|
||||
// Time between receiving a request to render the offscreen UI actually triggering
|
||||
// the render. Could possibly be increased depending on the framerate we expect to
|
||||
// achieve.
|
||||
static const int SMALL_INTERVAL = 5;
|
||||
|
||||
OffscreenQmlSurface::OffscreenQmlSurface() {
|
||||
}
|
||||
|
||||
OffscreenQmlSurface::~OffscreenQmlSurface() {
|
||||
// Make sure the context is current while doing cleanup. Note that we use the
|
||||
// offscreen surface here because passing 'this' at this point is not safe: the
|
||||
// underlying platform window may already be destroyed. To avoid all the trouble, use
|
||||
// another surface that is valid for sure.
|
||||
makeCurrent();
|
||||
|
||||
// Delete the render control first since it will free the scenegraph resources.
|
||||
// Destroy the QQuickWindow only afterwards.
|
||||
delete _renderControl;
|
||||
|
||||
delete _qmlComponent;
|
||||
delete _quickWindow;
|
||||
delete _qmlEngine;
|
||||
|
||||
doneCurrent();
|
||||
}
|
||||
|
||||
void OffscreenQmlSurface::create(QOpenGLContext* shareContext) {
|
||||
OffscreenGlCanvas::create(shareContext);
|
||||
|
||||
makeCurrent();
|
||||
|
||||
// Create a QQuickWindow that is associated with out render control. Note that this
|
||||
// window never gets created or shown, meaning that it will never get an underlying
|
||||
// native (platform) window.
|
||||
QQuickWindow::setDefaultAlphaBuffer(true);
|
||||
_quickWindow = new QQuickWindow(_renderControl);
|
||||
_quickWindow->setColor(QColor(255, 255, 255, 0));
|
||||
_quickWindow->setFlags(_quickWindow->flags() | static_cast<Qt::WindowFlags>(Qt::WA_TranslucentBackground));
|
||||
// Create a QML engine.
|
||||
_qmlEngine = new QQmlEngine;
|
||||
if (!_qmlEngine->incubationController()) {
|
||||
_qmlEngine->setIncubationController(_quickWindow->incubationController());
|
||||
}
|
||||
|
||||
// When Quick says there is a need to render, we will not render immediately. Instead,
|
||||
// a timer with a small interval is used to get better performance.
|
||||
_updateTimer.setSingleShot(true);
|
||||
_updateTimer.setInterval(SMALL_INTERVAL);
|
||||
connect(&_updateTimer, &QTimer::timeout, this, &OffscreenQmlSurface::updateQuick);
|
||||
|
||||
// Now hook up the signals. For simplicy we don't differentiate between
|
||||
// renderRequested (only render is needed, no sync) and sceneChanged (polish and sync
|
||||
// is needed too).
|
||||
connect(_renderControl, &QQuickRenderControl::renderRequested, this, &OffscreenQmlSurface::requestRender);
|
||||
connect(_renderControl, &QQuickRenderControl::sceneChanged, this, &OffscreenQmlSurface::requestUpdate);
|
||||
|
||||
#ifdef DEBUG
|
||||
connect(_quickWindow, &QQuickWindow::focusObjectChanged, [this]{
|
||||
qCDebug(offscreenFocus) << "New focus item " << _quickWindow->focusObject();
|
||||
});
|
||||
connect(_quickWindow, &QQuickWindow::activeFocusItemChanged, [this] {
|
||||
qCDebug(offscreenFocus) << "New active focus item " << _quickWindow->activeFocusItem();
|
||||
});
|
||||
#endif
|
||||
|
||||
_qmlComponent = new QQmlComponent(_qmlEngine);
|
||||
// Initialize the render control and our OpenGL resources.
|
||||
makeCurrent();
|
||||
_renderControl->initialize(&_context);
|
||||
}
|
||||
|
||||
void OffscreenQmlSurface::resize(const QSize& newSize) {
|
||||
// Qt bug in 5.4 forces this check of pixel ratio,
|
||||
// even though we're rendering offscreen.
|
||||
qreal pixelRatio = 1.0;
|
||||
if (_renderControl && _renderControl->_renderWindow) {
|
||||
pixelRatio = _renderControl->_renderWindow->devicePixelRatio();
|
||||
} else {
|
||||
pixelRatio = AbstractViewStateInterface::instance()->getDevicePixelRatio();
|
||||
}
|
||||
QSize newOffscreenSize = newSize * pixelRatio;
|
||||
if (newOffscreenSize == _fboCache.getSize()) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Clear out any fbos with the old size
|
||||
makeCurrent();
|
||||
qDebug() << "Offscreen UI resizing to " << newSize.width() << "x" << newSize.height() << " with pixel ratio " << pixelRatio;
|
||||
_fboCache.setSize(newSize * pixelRatio);
|
||||
|
||||
if (_quickWindow) {
|
||||
_quickWindow->setGeometry(QRect(QPoint(), newSize));
|
||||
_quickWindow->contentItem()->setSize(newSize);
|
||||
}
|
||||
|
||||
|
||||
// Update our members
|
||||
if (_rootItem) {
|
||||
_rootItem->setSize(newSize);
|
||||
}
|
||||
|
||||
doneCurrent();
|
||||
}
|
||||
|
||||
QQuickItem* OffscreenQmlSurface::getRootItem() {
|
||||
return _rootItem;
|
||||
}
|
||||
|
||||
void OffscreenQmlSurface::setBaseUrl(const QUrl& baseUrl) {
|
||||
_qmlEngine->setBaseUrl(baseUrl);
|
||||
}
|
||||
|
||||
QObject* OffscreenQmlSurface::load(const QUrl& qmlSource, std::function<void(QQmlContext*, QObject*)> f) {
|
||||
_qmlComponent->loadUrl(qmlSource);
|
||||
if (_qmlComponent->isLoading()) {
|
||||
connect(_qmlComponent, &QQmlComponent::statusChanged, this,
|
||||
[this, f](QQmlComponent::Status){
|
||||
finishQmlLoad(f);
|
||||
});
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
return finishQmlLoad(f);
|
||||
}
|
||||
|
||||
void OffscreenQmlSurface::requestUpdate() {
|
||||
_polish = true;
|
||||
if (!_updateTimer.isActive()) {
|
||||
_updateTimer.start();
|
||||
}
|
||||
}
|
||||
|
||||
void OffscreenQmlSurface::requestRender() {
|
||||
if (!_updateTimer.isActive()) {
|
||||
_updateTimer.start();
|
||||
}
|
||||
}
|
||||
|
||||
QObject* OffscreenQmlSurface::finishQmlLoad(std::function<void(QQmlContext*, QObject*)> f) {
|
||||
disconnect(_qmlComponent, &QQmlComponent::statusChanged, this, 0);
|
||||
if (_qmlComponent->isError()) {
|
||||
QList<QQmlError> errorList = _qmlComponent->errors();
|
||||
foreach(const QQmlError& error, errorList) {
|
||||
qWarning() << error.url() << error.line() << error;
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
QQmlContext* newContext = new QQmlContext(_qmlEngine, qApp);
|
||||
QObject* newObject = _qmlComponent->beginCreate(newContext);
|
||||
if (_qmlComponent->isError()) {
|
||||
QList<QQmlError> errorList = _qmlComponent->errors();
|
||||
foreach(const QQmlError& error, errorList)
|
||||
qWarning() << error.url() << error.line() << error;
|
||||
if (!_rootItem) {
|
||||
qFatal("Unable to finish loading QML root");
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
f(newContext, newObject);
|
||||
_qmlComponent->completeCreate();
|
||||
|
||||
|
||||
// All quick items should be focusable
|
||||
QQuickItem* newItem = qobject_cast<QQuickItem*>(newObject);
|
||||
if (newItem) {
|
||||
// Make sure we make items focusable (critical for
|
||||
// supporting keyboard shortcuts)
|
||||
newItem->setFlag(QQuickItem::ItemIsFocusScope, true);
|
||||
}
|
||||
|
||||
// If we already have a root, just set a couple of flags and the ancestry
|
||||
if (_rootItem) {
|
||||
// Allow child windows to be destroyed from JS
|
||||
QQmlEngine::setObjectOwnership(newObject, QQmlEngine::JavaScriptOwnership);
|
||||
newObject->setParent(_rootItem);
|
||||
if (newItem) {
|
||||
newItem->setParentItem(_rootItem);
|
||||
}
|
||||
return newObject;
|
||||
}
|
||||
|
||||
if (!newItem) {
|
||||
qFatal("Could not load object as root item");
|
||||
return nullptr;
|
||||
}
|
||||
// The root item is ready. Associate it with the window.
|
||||
_rootItem = newItem;
|
||||
_rootItem->setParentItem(_quickWindow->contentItem());
|
||||
_rootItem->setSize(_quickWindow->renderTargetSize());
|
||||
return _rootItem;
|
||||
}
|
||||
|
||||
|
||||
void OffscreenQmlSurface::updateQuick() {
|
||||
if (_paused) {
|
||||
return;
|
||||
}
|
||||
if (!makeCurrent()) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Polish, synchronize and render the next frame (into our fbo). In this example
|
||||
// everything happens on the same thread and therefore all three steps are performed
|
||||
// in succession from here. In a threaded setup the render() call would happen on a
|
||||
// separate thread.
|
||||
if (_polish) {
|
||||
_renderControl->polishItems();
|
||||
_renderControl->sync();
|
||||
_polish = false;
|
||||
}
|
||||
|
||||
QOpenGLFramebufferObject* fbo = _fboCache.getReadyFbo();
|
||||
|
||||
_quickWindow->setRenderTarget(fbo);
|
||||
fbo->bind();
|
||||
|
||||
glClearColor(0, 0, 0, 1);
|
||||
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
|
||||
|
||||
_renderControl->render();
|
||||
// FIXME The web browsers seem to be leaving GL in an error state.
|
||||
// Need a debug context with sync logging to figure out why.
|
||||
// for now just clear the errors
|
||||
glGetError();
|
||||
// Q_ASSERT(!glGetError());
|
||||
|
||||
_quickWindow->resetOpenGLState();
|
||||
|
||||
QOpenGLFramebufferObject::bindDefault();
|
||||
// Force completion of all the operations before we emit the texture as being ready for use
|
||||
glFinish();
|
||||
|
||||
emit textureUpdated(fbo->texture());
|
||||
}
|
||||
|
||||
QPointF OffscreenQmlSurface::mapWindowToUi(const QPointF& sourcePosition, QObject* sourceObject) {
|
||||
vec2 sourceSize;
|
||||
if (dynamic_cast<QWidget*>(sourceObject)) {
|
||||
sourceSize = toGlm(((QWidget*)sourceObject)->size());
|
||||
} else if (dynamic_cast<QWindow*>(sourceObject)) {
|
||||
sourceSize = toGlm(((QWindow*)sourceObject)->size());
|
||||
}
|
||||
vec2 offscreenPosition = toGlm(sourcePosition);
|
||||
offscreenPosition /= sourceSize;
|
||||
offscreenPosition *= vec2(toGlm(_quickWindow->size()));
|
||||
return QPointF(offscreenPosition.x, offscreenPosition.y);
|
||||
}

///////////////////////////////////////////////////////
//
// Event handling customization
//

bool OffscreenQmlSurface::eventFilter(QObject* originalDestination, QEvent* event) {
    // Only intercept events while we're in an active state
    if (_paused) {
        return false;
    }

#ifdef DEBUG
    // Don't intercept our own events, or we enter an infinite recursion
    QObject* recurseTest = originalDestination;
    while (recurseTest) {
        Q_ASSERT(recurseTest != _rootItem && recurseTest != _quickWindow);
        recurseTest = recurseTest->parent();
    }
#endif

    switch (event->type()) {
        case QEvent::Resize: {
            QResizeEvent* resizeEvent = static_cast<QResizeEvent*>(event);
            QGLWidget* widget = dynamic_cast<QGLWidget*>(originalDestination);
            if (widget) {
                this->resize(resizeEvent->size());
            }
            break;
        }

        case QEvent::KeyPress:
        case QEvent::KeyRelease: {
            event->ignore();
            if (QCoreApplication::sendEvent(_quickWindow, event)) {
                return event->isAccepted();
            }
            break;
        }

        case QEvent::Wheel: {
            QWheelEvent* wheelEvent = static_cast<QWheelEvent*>(event);
            QWheelEvent mappedEvent(
                    mapWindowToUi(wheelEvent->pos(), originalDestination),
                    wheelEvent->delta(), wheelEvent->buttons(),
                    wheelEvent->modifiers(), wheelEvent->orientation());
            mappedEvent.ignore();
            if (QCoreApplication::sendEvent(_quickWindow, &mappedEvent)) {
                return mappedEvent.isAccepted();
            }
            break;
        }

        // Fall through
        case QEvent::MouseButtonDblClick:
        case QEvent::MouseButtonPress:
        case QEvent::MouseButtonRelease:
        case QEvent::MouseMove: {
            QMouseEvent* mouseEvent = static_cast<QMouseEvent*>(event);
            QPointF originalPos = mouseEvent->localPos();
            QPointF transformedPos = _mouseTranslator(originalPos);
            transformedPos = mapWindowToUi(transformedPos, originalDestination);
            QMouseEvent mappedEvent(mouseEvent->type(),
                    transformedPos,
                    mouseEvent->screenPos(), mouseEvent->button(),
                    mouseEvent->buttons(), mouseEvent->modifiers());
            if (event->type() == QEvent::MouseMove) {
                _qmlEngine->rootContext()->setContextProperty("lastMousePosition", transformedPos);
            }
            mappedEvent.ignore();
            if (QCoreApplication::sendEvent(_quickWindow, &mappedEvent)) {
                return mappedEvent.isAccepted();
            }
            break;
        }

        default:
            break;
    }

    return false;
}
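
Nothing in this file installs the filter; the hosting application is expected to do that on whatever widget or window receives the real input. A typical, assumed hookup (not shown in this diff; 'glWidget' and 'surface' are placeholders):

    surface->setProxyWindow(glWidget->windowHandle());  // lets resize() pick up the real devicePixelRatio
    glWidget->installEventFilter(surface);               // forwards resize/key/wheel/mouse events into eventFilter()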

void OffscreenQmlSurface::lockTexture(int texture) {
    _fboCache.lockTexture(texture);
}

void OffscreenQmlSurface::releaseTexture(int texture) {
    _fboCache.releaseTexture(texture);
}

void OffscreenQmlSurface::pause() {
    _paused = true;
}

void OffscreenQmlSurface::resume() {
    _paused = false;
    requestRender();
}

bool OffscreenQmlSurface::isPaused() const {
    return _paused;
}

void OffscreenQmlSurface::setProxyWindow(QWindow* window) {
    _renderControl->_renderWindow = window;
}

libraries/render-utils/src/OffscreenQmlSurface.h (new file, 108 lines)
@@ -0,0 +1,108 @@
//
// Created by Bradley Austin Davis on 2015-04-04
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_OffscreenQmlSurface_h
#define hifi_OffscreenQmlSurface_h

#include <QQmlEngine>
#include <QQmlComponent>
#include <QQuickItem>
#include <QQuickWindow>
#include <QQuickRenderControl>
#include <QQuickImageProvider>
#include <QTimer>
#include <QMessageBox>

#include <atomic>
#include <functional>

#include <GLMHelpers.h>
#include <ThreadHelpers.h>

#include "OffscreenGlCanvas.h"
#include "FboCache.h"

class OffscreenQmlSurface : public OffscreenGlCanvas {
    Q_OBJECT
protected:
    class QMyQuickRenderControl : public QQuickRenderControl {
    protected:
        QWindow* renderWindow(QPoint* offset) Q_DECL_OVERRIDE {
            if (nullptr == _renderWindow) {
                return QQuickRenderControl::renderWindow(offset);
            }
            if (nullptr != offset) {
                offset->rx() = offset->ry() = 0;
            }
            return _renderWindow;
        }

    private:
        QWindow* _renderWindow{ nullptr };
        friend class OffscreenQmlSurface;
    };
public:
    OffscreenQmlSurface();
    virtual ~OffscreenQmlSurface();

    using MouseTranslator = std::function<QPointF(const QPointF&)>;

    void create(QOpenGLContext* context);
    void resize(const QSize& size);
    QObject* load(const QUrl& qmlSource, std::function<void(QQmlContext*, QObject*)> f = [](QQmlContext*, QObject*) {});
    QObject* load(const QString& qmlSourceFile, std::function<void(QQmlContext*, QObject*)> f = [](QQmlContext*, QObject*) {}) {
        return load(QUrl(qmlSourceFile), f);
    }

    // Optional values for event handling
    void setProxyWindow(QWindow* window);
    void setMouseTranslator(MouseTranslator mouseTranslator) {
        _mouseTranslator = mouseTranslator;
    }

    void pause();
    void resume();
    bool isPaused() const;

    void setBaseUrl(const QUrl& baseUrl);
    QQuickItem* getRootItem();

    virtual bool eventFilter(QObject* originalDestination, QEvent* event);

signals:
    void textureUpdated(GLuint texture);

public slots:
    void requestUpdate();
    void requestRender();
    void lockTexture(int texture);
    void releaseTexture(int texture);

private:
    QObject* finishQmlLoad(std::function<void(QQmlContext*, QObject*)> f);
    QPointF mapWindowToUi(const QPointF& sourcePosition, QObject* sourceObject);

private slots:
    void updateQuick();

protected:
    QQuickWindow* _quickWindow{ nullptr };

private:
    QMyQuickRenderControl* _renderControl{ new QMyQuickRenderControl };
    QQmlEngine* _qmlEngine{ nullptr };
    QQmlComponent* _qmlComponent{ nullptr };
    QQuickItem* _rootItem{ nullptr };
    QTimer _updateTimer;
    FboCache _fboCache;
    bool _polish{ true };
    bool _paused{ true };
    MouseTranslator _mouseTranslator{ [](const QPointF& p) { return p; } };
};

#endif
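
Putting the header and the implementation above together, the expected call pattern appears to be: create the surface against a shared GL context, load a QML root, then consume the emitted textures. A hedged end-to-end sketch; 'shareContext' and the QML path are placeholders, only members declared above are used:

    auto surface = new OffscreenQmlSurface();
    surface->create(shareContext);                      // share GL resources with the main context
    surface->setBaseUrl(QUrl::fromLocalFile("qml/"));   // base for relative QML urls
    surface->resize(QSize(1280, 720));                  // sizes the QQuickWindow and the FboCache
    surface->load(QUrl("Root.qml"), [](QQmlContext* context, QObject* root) {
        // optional: seed context properties before completeCreate()
    });
    surface->resume();                                  // leave the initial paused state and request a render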

@@ -16,14 +16,6 @@
#include "MessageDialog.h"


Q_DECLARE_LOGGING_CATEGORY(offscreenFocus)
Q_LOGGING_CATEGORY(offscreenFocus, "hifi.offscreen.focus")

// Time between receiving a request to render the offscreen UI and actually triggering
// the render. Could possibly be increased depending on the framerate we expect to
// achieve.
static const int SMALL_INTERVAL = 5;

class OffscreenUiRoot : public QQuickItem {
    Q_OBJECT
public:

@@ -36,6 +28,25 @@ public:
};


// This hack allows the QML UI to work with keys that are also bound as
// shortcuts at the application level. However, it seems as though the
// bound actions are still getting triggered. At least for backspace.
// Not sure why.
//
// However, the problem may go away once we switch to the new menu system,
// so I think it's OK for the time being.
bool OffscreenUi::shouldSwallowShortcut(QEvent* event) {
    Q_ASSERT(event->type() == QEvent::ShortcutOverride);
    QObject* focusObject = _quickWindow->focusObject();
    if (focusObject != _quickWindow && focusObject != getRootItem()) {
        //qDebug() << "Swallowed shortcut " << static_cast<QKeyEvent*>(event)->key();
        event->accept();
        return true;
    }
    return false;
}

OffscreenUiRoot::OffscreenUiRoot(QQuickItem* parent) : QQuickItem(parent) {
}
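
shouldSwallowShortcut only helps if the application routes QEvent::ShortcutOverride through it; that caller is not part of this diff, so the following is an assumed sketch of the typical integration point ('Application' here stands in for the hosting QApplication subclass):

    bool Application::event(QEvent* event) {
        if (event->type() == QEvent::ShortcutOverride) {
            if (DependencyManager::get<OffscreenUi>()->shouldSwallowShortcut(event)) {
                return true;   // the QML UI has focus, keep the shortcut away from app-level actions
            }
        }
        return QApplication::event(event);
    }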

@@ -48,378 +59,15 @@ OffscreenUi::OffscreenUi() {
}

OffscreenUi::~OffscreenUi() {
    // Make sure the context is current while doing cleanup. Note that we use the
    // offscreen surface here because passing 'this' at this point is not safe: the
    // underlying platform window may already be destroyed. To avoid all the trouble, use
    // another surface that is valid for sure.
    makeCurrent();

    // Delete the render control first since it will free the scenegraph resources.
    // Destroy the QQuickWindow only afterwards.
    delete _renderControl;

    delete _qmlComponent;
    delete _quickWindow;
    delete _qmlEngine;

    doneCurrent();
}

void OffscreenUi::create(QOpenGLContext* shareContext) {
    OffscreenGlCanvas::create(shareContext);

    makeCurrent();

    // Create a QQuickWindow that is associated with our render control. Note that this
    // window never gets created or shown, meaning that it will never get an underlying
    // native (platform) window.
    QQuickWindow::setDefaultAlphaBuffer(true);
    _quickWindow = new QQuickWindow(_renderControl);
    _quickWindow->setColor(QColor(255, 255, 255, 0));
    _quickWindow->setFlags(_quickWindow->flags() | static_cast<Qt::WindowFlags>(Qt::WA_TranslucentBackground));
    // Create a QML engine.
    _qmlEngine = new QQmlEngine;
    if (!_qmlEngine->incubationController()) {
        _qmlEngine->setIncubationController(_quickWindow->incubationController());
    }

    // When Quick says there is a need to render, we will not render immediately. Instead,
    // a timer with a small interval is used to get better performance.
    _updateTimer.setSingleShot(true);
    _updateTimer.setInterval(SMALL_INTERVAL);
    connect(&_updateTimer, &QTimer::timeout, this, &OffscreenUi::updateQuick);

    // Now hook up the signals. For simplicity we don't differentiate between
    // renderRequested (only render is needed, no sync) and sceneChanged (polish and sync
    // is needed too).
    connect(_renderControl, &QQuickRenderControl::renderRequested, this, &OffscreenUi::requestRender);
    connect(_renderControl, &QQuickRenderControl::sceneChanged, this, &OffscreenUi::requestUpdate);

#ifdef DEBUG
    connect(_quickWindow, &QQuickWindow::focusObjectChanged, [this]{
        qCDebug(offscreenFocus) << "New focus item " << _quickWindow->focusObject();
    });
    connect(_quickWindow, &QQuickWindow::activeFocusItemChanged, [this] {
        qCDebug(offscreenFocus) << "New active focus item " << _quickWindow->activeFocusItem();
    });
#endif

    _qmlComponent = new QQmlComponent(_qmlEngine);
    // Initialize the render control and our OpenGL resources.
    makeCurrent();
    _renderControl->initialize(&_context);
}

void OffscreenUi::addImportPath(const QString& path) {
    _qmlEngine->addImportPath(path);
}

void OffscreenUi::resize(const QSize& newSize) {
    makeCurrent();

    qreal pixelRatio = _renderControl->_renderWindow ? _renderControl->_renderWindow->devicePixelRatio() : 1.0;
    QSize newOffscreenSize = newSize * pixelRatio;
    if (newOffscreenSize == _fboCache.getSize()) {
        return;
    }

    // Clear out any fbos with the old size
    qDebug() << "Offscreen UI resizing to " << newSize.width() << "x" << newSize.height() << " with pixel ratio " << pixelRatio;
    _fboCache.setSize(newSize * pixelRatio);

    if (_quickWindow) {
        _quickWindow->setGeometry(QRect(QPoint(), newSize));
        _quickWindow->contentItem()->setSize(newSize);
    }

    // Update our members
    if (_rootItem) {
        _rootItem->setSize(newSize);
    }

    doneCurrent();
}

QQuickItem* OffscreenUi::getRootItem() {
    return _rootItem;
}

void OffscreenUi::setBaseUrl(const QUrl& baseUrl) {
    _qmlEngine->setBaseUrl(baseUrl);
}

QObject* OffscreenUi::load(const QUrl& qmlSource, std::function<void(QQmlContext*, QObject*)> f) {
    _qmlComponent->loadUrl(qmlSource);
    if (_qmlComponent->isLoading()) {
        connect(_qmlComponent, &QQmlComponent::statusChanged, this,
            [this, f](QQmlComponent::Status){
                finishQmlLoad(f);
            });
        return nullptr;
    }

    return finishQmlLoad(f);
}

void OffscreenUi::requestUpdate() {
    _polish = true;
    if (!_updateTimer.isActive()) {
        _updateTimer.start();
    }
}

void OffscreenUi::requestRender() {
    if (!_updateTimer.isActive()) {
        _updateTimer.start();
    }
}

QObject* OffscreenUi::finishQmlLoad(std::function<void(QQmlContext*, QObject*)> f) {
    disconnect(_qmlComponent, &QQmlComponent::statusChanged, this, 0);
    if (_qmlComponent->isError()) {
        QList<QQmlError> errorList = _qmlComponent->errors();
        foreach(const QQmlError& error, errorList) {
            qWarning() << error.url() << error.line() << error;
        }
        return nullptr;
    }

    QQmlContext* newContext = new QQmlContext(_qmlEngine, qApp);
    QObject* newObject = _qmlComponent->beginCreate(newContext);
    if (_qmlComponent->isError()) {
        QList<QQmlError> errorList = _qmlComponent->errors();
        foreach(const QQmlError& error, errorList)
            qWarning() << error.url() << error.line() << error;
        if (!_rootItem) {
            qFatal("Unable to finish loading QML root");
        }
        return nullptr;
    }

    f(newContext, newObject);
    _qmlComponent->completeCreate();

    // All quick items should be focusable
    QQuickItem* newItem = qobject_cast<QQuickItem*>(newObject);
    if (newItem) {
        // Make sure we make items focusable (critical for
        // supporting keyboard shortcuts)
        newItem->setFlag(QQuickItem::ItemIsFocusScope, true);
    }

    // If we already have a root, just set a couple of flags and the ancestry
    if (_rootItem) {
        // Allow child windows to be destroyed from JS
        QQmlEngine::setObjectOwnership(newObject, QQmlEngine::JavaScriptOwnership);
        newObject->setParent(_rootItem);
        if (newItem) {
            newItem->setParentItem(_rootItem);
        }
        return newObject;
    }

    if (!newItem) {
        qFatal("Could not load object as root item");
        return nullptr;
    }
    // The root item is ready. Associate it with the window.
    _rootItem = newItem;
    _rootItem->setParentItem(_quickWindow->contentItem());
    _rootItem->setSize(_quickWindow->renderTargetSize());
    return _rootItem;
}

void OffscreenUi::updateQuick() {
    if (_paused) {
        return;
    }
    if (!makeCurrent()) {
        return;
    }

    // Polish, synchronize and render the next frame (into our fbo). In this example
    // everything happens on the same thread and therefore all three steps are performed
    // in succession from here. In a threaded setup the render() call would happen on a
    // separate thread.
    if (_polish) {
        _renderControl->polishItems();
        _renderControl->sync();
        _polish = false;
    }

    QOpenGLFramebufferObject* fbo = _fboCache.getReadyFbo();

    _quickWindow->setRenderTarget(fbo);
    fbo->bind();

    glClearColor(0, 0, 0, 1);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    _renderControl->render();
    // FIXME The web browsers seem to be leaving GL in an error state.
    // Need a debug context with sync logging to figure out why.
    // for now just clear the errors
    glGetError();
    // Q_ASSERT(!glGetError());

    _quickWindow->resetOpenGLState();

    QOpenGLFramebufferObject::bindDefault();
    // Force completion of all the operations before we emit the texture as being ready for use
    glFinish();

    emit textureUpdated(fbo->texture());
}

QPointF OffscreenUi::mapWindowToUi(const QPointF& sourcePosition, QObject* sourceObject) {
    vec2 sourceSize;
    if (dynamic_cast<QWidget*>(sourceObject)) {
        sourceSize = toGlm(((QWidget*)sourceObject)->size());
    } else if (dynamic_cast<QWindow*>(sourceObject)) {
        sourceSize = toGlm(((QWindow*)sourceObject)->size());
    }
    vec2 offscreenPosition = toGlm(sourcePosition);
    offscreenPosition /= sourceSize;
    offscreenPosition *= vec2(toGlm(_quickWindow->size()));
    return QPointF(offscreenPosition.x, offscreenPosition.y);
}

// This hack allows the QML UI to work with keys that are also bound as
// shortcuts at the application level. However, it seems as though the
// bound actions are still getting triggered. At least for backspace.
// Not sure why.
//
// However, the problem may go away once we switch to the new menu system,
// so I think it's OK for the time being.
bool OffscreenUi::shouldSwallowShortcut(QEvent* event) {
    Q_ASSERT(event->type() == QEvent::ShortcutOverride);
    QObject* focusObject = _quickWindow->focusObject();
    if (focusObject != _quickWindow && focusObject != _rootItem) {
        //qDebug() << "Swallowed shortcut " << static_cast<QKeyEvent*>(event)->key();
        event->accept();
        return true;
    }
    return false;
}

///////////////////////////////////////////////////////
//
// Event handling customization
//

bool OffscreenUi::eventFilter(QObject* originalDestination, QEvent* event) {
    // Only intercept events while we're in an active state
    if (_paused) {
        return false;
    }

#ifdef DEBUG
    // Don't intercept our own events, or we enter an infinite recursion
    QObject* recurseTest = originalDestination;
    while (recurseTest) {
        Q_ASSERT(recurseTest != _rootItem && recurseTest != _quickWindow);
        recurseTest = recurseTest->parent();
    }
#endif

    switch (event->type()) {
        case QEvent::Resize: {
            QResizeEvent* resizeEvent = static_cast<QResizeEvent*>(event);
            QGLWidget* widget = dynamic_cast<QGLWidget*>(originalDestination);
            if (widget) {
                this->resize(resizeEvent->size());
            }
            break;
        }

        case QEvent::KeyPress:
        case QEvent::KeyRelease: {
            event->ignore();
            if (QCoreApplication::sendEvent(_quickWindow, event)) {
                return event->isAccepted();
            }
            break;
        }

        case QEvent::Wheel: {
            QWheelEvent* wheelEvent = static_cast<QWheelEvent*>(event);
            QWheelEvent mappedEvent(
                    mapWindowToUi(wheelEvent->pos(), originalDestination),
                    wheelEvent->delta(), wheelEvent->buttons(),
                    wheelEvent->modifiers(), wheelEvent->orientation());
            mappedEvent.ignore();
            if (QCoreApplication::sendEvent(_quickWindow, &mappedEvent)) {
                return mappedEvent.isAccepted();
            }
            break;
        }

        // Fall through
        case QEvent::MouseButtonDblClick:
        case QEvent::MouseButtonPress:
        case QEvent::MouseButtonRelease:
        case QEvent::MouseMove: {
            QMouseEvent* mouseEvent = static_cast<QMouseEvent*>(event);
            QPointF originalPos = mouseEvent->localPos();
            QPointF transformedPos = _mouseTranslator(originalPos);
            transformedPos = mapWindowToUi(transformedPos, originalDestination);
            QMouseEvent mappedEvent(mouseEvent->type(),
                    transformedPos,
                    mouseEvent->screenPos(), mouseEvent->button(),
                    mouseEvent->buttons(), mouseEvent->modifiers());
            if (event->type() == QEvent::MouseMove) {
                _qmlEngine->rootContext()->setContextProperty("lastMousePosition", transformedPos);
            }
            mappedEvent.ignore();
            if (QCoreApplication::sendEvent(_quickWindow, &mappedEvent)) {
                return mappedEvent.isAccepted();
            }
            break;
        }

        default:
            break;
    }

    return false;
}

void OffscreenUi::lockTexture(int texture) {
    _fboCache.lockTexture(texture);
}

void OffscreenUi::releaseTexture(int texture) {
    _fboCache.releaseTexture(texture);
}

void OffscreenUi::pause() {
    _paused = true;
}

void OffscreenUi::resume() {
    _paused = false;
    requestRender();
}

bool OffscreenUi::isPaused() const {
    return _paused;
}

void OffscreenUi::setProxyWindow(QWindow* window) {
    _renderControl->_renderWindow = window;
}

void OffscreenUi::show(const QUrl& url, const QString& name, std::function<void(QQmlContext*, QObject*)> f) {
    QQuickItem* item = _rootItem->findChild<QQuickItem*>(name);
    QQuickItem* item = getRootItem()->findChild<QQuickItem*>(name);
    // First load?
    if (!item) {
        load(url, f);
        item = _rootItem->findChild<QQuickItem*>(name);
        item = getRootItem()->findChild<QQuickItem*>(name);
    }
    if (item) {
        item->setEnabled(true);

@@ -427,11 +75,11 @@ void OffscreenUi::show(const QUrl& url, const QString& name, std::function<void(
}

void OffscreenUi::toggle(const QUrl& url, const QString& name, std::function<void(QQmlContext*, QObject*)> f) {
    QQuickItem* item = _rootItem->findChild<QQuickItem*>(name);
    QQuickItem* item = getRootItem()->findChild<QQuickItem*>(name);
    // First load?
    if (!item) {
        load(url, f);
        item = _rootItem->findChild<QQuickItem*>(name);
        item = getRootItem()->findChild<QQuickItem*>(name);
    }
    if (item) {
        item->setEnabled(!item->isEnabled());

@@ -12,25 +12,10 @@
#ifndef hifi_OffscreenUi_h
#define hifi_OffscreenUi_h

#include <QQmlEngine>
#include <QQmlComponent>
#include <QQuickItem>
#include <QQuickWindow>
#include <QQuickRenderControl>
#include <QQuickImageProvider>
#include <QTimer>
#include <QMessageBox>
#include "OffscreenQmlSurface.h"

#include <atomic>
#include <functional>

#include <GLMHelpers.h>
#include <ThreadHelpers.h>
#include <DependencyManager.h>

#include "OffscreenGlCanvas.h"
#include "FboCache.h"
#include <QQuickItem>

#define HIFI_QML_DECL \
private: \

@@ -96,53 +81,15 @@ private:
        offscreenUi->load(QML, f); \
    }

class OffscreenUi : public OffscreenGlCanvas, public Dependency {
class OffscreenUi : public OffscreenQmlSurface, public Dependency {
    Q_OBJECT

    class QMyQuickRenderControl : public QQuickRenderControl {
    protected:
        QWindow* renderWindow(QPoint* offset) Q_DECL_OVERRIDE {
            if (nullptr == _renderWindow) {
                return QQuickRenderControl::renderWindow(offset);
            }
            if (nullptr != offset) {
                offset->rx() = offset->ry() = 0;
            }
            return _renderWindow;
        }

    private:
        QWindow* _renderWindow{ nullptr };
        friend class OffscreenUi;
    };

public:
    using MouseTranslator = std::function<QPointF(const QPointF&)>;
    OffscreenUi();
    virtual ~OffscreenUi();
    void create(QOpenGLContext* context);
    void resize(const QSize& size);
    QObject* load(const QUrl& qmlSource, std::function<void(QQmlContext*, QObject*)> f = [](QQmlContext*, QObject*) {});
    QObject* load(const QString& qmlSourceFile, std::function<void(QQmlContext*, QObject*)> f = [](QQmlContext*, QObject*) {}) {
        return load(QUrl(qmlSourceFile), f);
    }
    void show(const QUrl& url, const QString& name, std::function<void(QQmlContext*, QObject*)> f = [](QQmlContext*, QObject*) {});
    void toggle(const QUrl& url, const QString& name, std::function<void(QQmlContext*, QObject*)> f = [](QQmlContext*, QObject*) {});
    void setBaseUrl(const QUrl& baseUrl);
    void addImportPath(const QString& path);
    //QQmlContext* getQmlContext();
    QQuickItem* getRootItem();
    void pause();
    void resume();
    bool isPaused() const;
    void setProxyWindow(QWindow* window);
    bool shouldSwallowShortcut(QEvent* event);
    QPointF mapWindowToUi(const QPointF& sourcePosition, QObject* sourceObject);
    virtual bool eventFilter(QObject* originalDestination, QEvent* event);
    void setMouseTranslator(MouseTranslator mouseTranslator) {
        _mouseTranslator = mouseTranslator;
    }


    // Messagebox replacement functions
    using ButtonCallback = std::function<void(QMessageBox::StandardButton)>;

@@ -168,33 +115,6 @@ public:
    static void critical(const QString& title, const QString& text,
        ButtonCallback callback = NO_OP_CALLBACK,
        QMessageBox::StandardButtons buttons = QMessageBox::Ok);

private:
    QObject* finishQmlLoad(std::function<void(QQmlContext*, QObject*)> f);

private slots:
    void updateQuick();

public slots:
    void requestUpdate();
    void requestRender();
    void lockTexture(int texture);
    void releaseTexture(int texture);

signals:
    void textureUpdated(GLuint texture);

private:
    QMyQuickRenderControl* _renderControl{ new QMyQuickRenderControl };
    QQuickWindow* _quickWindow{ nullptr };
    QQmlEngine* _qmlEngine{ nullptr };
    QQmlComponent* _qmlComponent{ nullptr };
    QQuickItem* _rootItem{ nullptr };
    QTimer _updateTimer;
    FboCache _fboCache;
    bool _polish{ true };
    bool _paused{ true };
    MouseTranslator _mouseTranslator{ [](const QPointF& p) { return p; } };
};

#endif