Mirror of https://github.com/HifiExperiments/overte.git, synced 2025-08-10 12:47:44 +02:00

Merge pull request #8289 from highfidelity/RC-16
Beta Release 16 - Includes up to Developer Release 5179

Commit d11e26e665
232 changed files with 10552 additions and 3128 deletions
@@ -226,8 +226,8 @@ if (NOT ANDROID)
add_subdirectory(interface)
set_target_properties(interface PROPERTIES FOLDER "Apps")
add_subdirectory(tests)
add_subdirectory(plugins)
endif()
add_subdirectory(plugins)
add_subdirectory(tools)
endif()

@@ -6,7 +6,7 @@ setup_hifi_project(Core Gui Network Script Quick Widgets WebSockets)
link_hifi_libraries(
audio avatars octree gpu model fbx entities
networking animation recording shared script-engine embedded-webserver
controllers physics
controllers physics plugins
)

if (WIN32)

@@ -298,7 +298,8 @@ void AssetServer::handleAssetGetInfo(QSharedPointer<ReceivedMessage> message, Sh
message->readPrimitive(&messageID);
assetHash = message->readWithoutCopy(SHA256_HASH_LENGTH);

auto replyPacket = NLPacket::create(PacketType::AssetGetInfoReply);
auto size = qint64(sizeof(MessageID) + SHA256_HASH_LENGTH + sizeof(AssetServerError) + sizeof(qint64));
auto replyPacket = NLPacket::create(PacketType::AssetGetInfoReply, size, true);

QByteArray hexHash = assetHash.toHex();

@@ -347,7 +348,7 @@ void AssetServer::handleAssetUpload(QSharedPointer<ReceivedMessage> message, Sha
// for now this also means it isn't allowed to add assets
// so return a packet with error that indicates that

auto permissionErrorPacket = NLPacket::create(PacketType::AssetUploadReply, sizeof(MessageID) + sizeof(AssetServerError));
auto permissionErrorPacket = NLPacket::create(PacketType::AssetUploadReply, sizeof(MessageID) + sizeof(AssetServerError), true);

MessageID messageID;
message->readPrimitive(&messageID);

@@ -43,7 +43,7 @@ void UploadAssetTask::run() {
qDebug() << "UploadAssetTask reading a file of " << fileSize << "bytes from"
<< uuidStringWithoutCurlyBraces(_senderNode->getUUID());

auto replyPacket = NLPacket::create(PacketType::AssetUploadReply);
auto replyPacket = NLPacket::create(PacketType::AssetUploadReply, -1, true);
replyPacket->writePrimitive(messageID);

if (fileSize > MAX_UPLOAD_SIZE) {

@@ -47,6 +47,8 @@
#include <NodeList.h>
#include <Node.h>
#include <OctreeConstants.h>
#include <plugins/PluginManager.h>
#include <plugins/CodecPlugin.h>
#include <udt/PacketHeaders.h>
#include <SharedUtil.h>
#include <StDev.h>

@@ -60,7 +62,7 @@
#include "AudioMixer.h"

const float LOUDNESS_TO_DISTANCE_RATIO = 0.00001f;
const float DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE = 0.18f;
const float DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE = 0.5f; // attenuation = -6dB * log2(distance)
const float DEFAULT_NOISE_MUTING_THRESHOLD = 0.003f;
const QString AUDIO_MIXER_LOGGING_TARGET_NAME = "audio-mixer";
const QString AUDIO_ENV_GROUP_KEY = "audio_env";

@@ -90,6 +92,8 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
PacketType::AudioStreamStats },
this, "handleNodeAudioPacket");
packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
packetReceiver.registerListener(PacketType::NegotiateAudioFormat, this, "handleNegotiateAudioFormat");
packetReceiver.registerListener(PacketType::NodeIgnoreRequest, this, "handleNodeIgnoreRequestPacket");

connect(nodeList.data(), &NodeList::nodeKilled, this, &AudioMixer::handleNodeKilled);
}

@@ -137,13 +141,14 @@ float AudioMixer::gainForSource(const PositionalAudioStream& streamToAdd,
}

if (distanceBetween >= ATTENUATION_BEGINS_AT_DISTANCE) {
// calculate the distance coefficient using the distance to this node
float distanceCoefficient = 1.0f - (logf(distanceBetween / ATTENUATION_BEGINS_AT_DISTANCE) / logf(2.0f)
* attenuationPerDoublingInDistance);

if (distanceCoefficient < 0) {
distanceCoefficient = 0;
}
// translate the zone setting to gain per log2(distance)
float g = 1.0f - attenuationPerDoublingInDistance;
g = (g < EPSILON) ? EPSILON : g;
g = (g > 1.0f) ? 1.0f : g;

// calculate the distance coefficient using the distance to this node
float distanceCoefficient = exp2f(log2f(g) * log2f(distanceBetween/ATTENUATION_BEGINS_AT_DISTANCE));

// multiply the current attenuation coefficient by the distance coefficient
gain *= distanceCoefficient;
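The gainForSource hunk above replaces the old linear falloff with a per-doubling gain model: with the new default attenuation of 0.5, the distance coefficient becomes (distance/d0) raised to log2(1 - 0.5), so the gain halves (-6 dB) for every doubling of distance, which is what the comment added next to DEFAULT_ATTENUATION_PER_DOUBLING_IN_DISTANCE describes. Below is a minimal standalone sketch comparing the two curves; it is an illustration only, and the value of ATTENUATION_BEGINS_AT_DISTANCE is assumed, since this diff does not show it.

    // Illustrative comparison of the old and new distance attenuation curves.
    // Not the mixer's code: ATTENUATION_BEGINS_AT_DISTANCE and EPSILON are assumed values.
    #include <cmath>
    #include <cstdio>

    static const float ATTENUATION_BEGINS_AT_DISTANCE = 1.0f; // assumed for illustration
    static const float EPSILON = 1e-6f;

    // old curve: subtract the attenuation factor linearly per log2 of distance, clamped at zero
    float oldCoefficient(float distance, float attenuationPerDoubling) {
        float c = 1.0f - (std::log(distance / ATTENUATION_BEGINS_AT_DISTANCE) / std::log(2.0f))
            * attenuationPerDoubling;
        return c < 0.0f ? 0.0f : c;
    }

    // new curve: per-doubling gain g = 1 - a, so coefficient = (d / d0) ^ log2(g);
    // with the new default a = 0.5 the gain halves (-6 dB) for each doubling of distance
    float newCoefficient(float distance, float attenuationPerDoubling) {
        float g = 1.0f - attenuationPerDoubling;
        g = (g < EPSILON) ? EPSILON : g;
        g = (g > 1.0f) ? 1.0f : g;
        return std::exp2(std::log2(g) * std::log2(distance / ATTENUATION_BEGINS_AT_DISTANCE));
    }

    int main() {
        for (float d = 1.0f; d <= 16.0f; d *= 2.0f) {
            std::printf("d = %5.1f  old = %.3f  new = %.3f\n",
                        d, oldCoefficient(d, 0.5f), newCoefficient(d, 0.5f));
        }
        return 0;
    }

Unlike the old curve, which reaches zero after enough doublings and then clips, the new power-law curve stays positive, which is presumably why the clamp on the coefficient disappears in the new code.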
@@ -189,8 +194,12 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData&
// check if this is a server echo of a source back to itself
bool isEcho = (&streamToAdd == &listeningNodeStream);

// figure out the gain for this source at the listener
glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();

// figure out the distance between source and listener
float distance = glm::max(glm::length(relativePosition), EPSILON);

// figure out the gain for this source at the listener
float gain = gainForSource(streamToAdd, listeningNodeStream, relativePosition, isEcho);

// figure out the azimuth to this source at the listener

@@ -236,7 +245,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData&
// this is not done for stereo streams since they do not go through the HRTF
static int16_t silentMonoBlock[AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL] = {};
hrtf.renderSilent(silentMonoBlock, _mixedSamples, HRTF_DATASET_INDEX, azimuth, gain,
hrtf.renderSilent(silentMonoBlock, _mixedSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);

++_hrtfSilentRenders;;

@@ -283,7 +292,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData&
// silent frame from source

// we still need to call renderSilent via the HRTF for mono source
hrtf.renderSilent(streamBlock, _mixedSamples, HRTF_DATASET_INDEX, azimuth, gain,
hrtf.renderSilent(streamBlock, _mixedSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);

++_hrtfSilentRenders;

@@ -296,7 +305,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData&
// the mixer is struggling so we're going to drop off some streams

// we call renderSilent via the HRTF with the actual frame data and a gain of 0.0
hrtf.renderSilent(streamBlock, _mixedSamples, HRTF_DATASET_INDEX, azimuth, 0.0f,
hrtf.renderSilent(streamBlock, _mixedSamples, HRTF_DATASET_INDEX, azimuth, distance, 0.0f,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);

++_hrtfStruggleRenders;

@@ -307,7 +316,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(AudioMixerClientData&
++_hrtfRenders;

// mono stream, call the HRTF with our block and calculated azimuth and gain
hrtf.render(streamBlock, _mixedSamples, HRTF_DATASET_INDEX, azimuth, gain,
hrtf.render(streamBlock, _mixedSamples, HRTF_DATASET_INDEX, azimuth, distance, gain,
AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}

@@ -321,7 +330,8 @@ bool AudioMixer::prepareMixForListeningNode(Node* node) {
// loop through all other nodes that have sufficient audio to mix

DependencyManager::get<NodeList>()->eachNode([&](const SharedNodePointer& otherNode){
if (otherNode->getLinkedData()) {
// make sure that we have audio data for this other node and that it isn't being ignored by our listening node
if (otherNode->getLinkedData() && !node->isIgnoringNodeWithID(otherNode->getUUID())) {
AudioMixerClientData* otherNodeClientData = (AudioMixerClientData*) otherNode->getLinkedData();

// enumerate the ARBs attached to the otherNode and add all that should be added to mix

@@ -446,6 +456,91 @@ void AudioMixer::handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> mes
}
}

DisplayPluginList getDisplayPlugins() {
DisplayPluginList result;
return result;
}

InputPluginList getInputPlugins() {
InputPluginList result;
return result;
}

void saveInputPluginSettings(const InputPluginList& plugins) {
}

void AudioMixer::handleNegotiateAudioFormat(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
QStringList availableCodecs;
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
if (codecPlugins.size() > 0) {
for (auto& plugin : codecPlugins) {
auto codecName = plugin->getName();
qDebug() << "Codec available:" << codecName;
availableCodecs.append(codecName);
}
} else {
qDebug() << "No Codecs available...";
}

CodecPluginPointer selectedCodec;
QString selectedCodecName;

QStringList codecPreferenceList = _codecPreferenceOrder.split(",");

// read the codecs requested by the client
const int MAX_PREFERENCE = 99999;
int preferredCodecIndex = MAX_PREFERENCE;
QString preferredCodec;
quint8 numberOfCodecs = 0;
message->readPrimitive(&numberOfCodecs);
qDebug() << "numberOfCodecs:" << numberOfCodecs;
QStringList codecList;
for (quint16 i = 0; i < numberOfCodecs; i++) {
QString requestedCodec = message->readString();
int preferenceOfThisCodec = codecPreferenceList.indexOf(requestedCodec);
bool codecAvailable = availableCodecs.contains(requestedCodec);
qDebug() << "requestedCodec:" << requestedCodec << "preference:" << preferenceOfThisCodec << "available:" << codecAvailable;
if (codecAvailable) {
codecList.append(requestedCodec);
if (preferenceOfThisCodec >= 0 && preferenceOfThisCodec < preferredCodecIndex) {
qDebug() << "This codec is preferred...";
selectedCodecName = requestedCodec;
preferredCodecIndex = preferenceOfThisCodec;
}
}
}
qDebug() << "all requested and available codecs:" << codecList;

// choose first codec
if (!selectedCodecName.isEmpty()) {
if (codecPlugins.size() > 0) {
for (auto& plugin : codecPlugins) {
if (selectedCodecName == plugin->getName()) {
qDebug() << "Selecting codec:" << selectedCodecName;
selectedCodec = plugin;
break;
}
}
}
}

auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());

// FIXME - why would we not have client data at this point??
if (!clientData) {
qDebug() << "UNEXPECTED -- didn't have node linked data in " << __FUNCTION__;
sendingNode->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(sendingNode->getUUID()) });
clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
}

clientData->setupCodec(selectedCodec, selectedCodecName);

qDebug() << "selectedCodecName:" << selectedCodecName;
clientData->sendSelectAudioFormat(sendingNode, selectedCodecName);
}

void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
// enumerate the connected listeners to remove HRTF objects for the disconnected node
auto nodeList = DependencyManager::get<NodeList>();
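In handleNegotiateAudioFormat above, the winning codec is the client-requested codec that is available on the mixer and sits earliest in the domain's codec_preference_order list; a requested codec that is available but absent from that list is never selected, and an empty result leaves the mixer writing raw PCM via the encode() pass-through shown later in this diff. A minimal sketch of just that selection rule, using plain std::string in place of the Qt types and hypothetical codec names:

    // Sketch of the selection rule only; the real code reads the list from the packet
    // with readPrimitive()/readString() and works on QStringList.
    #include <algorithm>
    #include <iostream>
    #include <string>
    #include <vector>

    std::string selectCodec(const std::vector<std::string>& serverPreference,
                            const std::vector<std::string>& clientRequested,
                            const std::vector<std::string>& serverAvailable) {
        std::string selected;
        size_t bestIndex = serverPreference.size();  // plays the role of MAX_PREFERENCE
        for (const auto& requested : clientRequested) {
            bool available = std::find(serverAvailable.begin(), serverAvailable.end(), requested)
                != serverAvailable.end();
            auto it = std::find(serverPreference.begin(), serverPreference.end(), requested);
            size_t preference = static_cast<size_t>(it - serverPreference.begin());
            if (available && it != serverPreference.end() && preference < bestIndex) {
                selected = requested;      // earliest entry in the server's preference order wins
                bestIndex = preference;
            }
        }
        return selected;  // empty: no shared codec, the mix stays un-encoded
    }

    int main() {
        // hypothetical example: the client lists zlib first, but the server prefers hifiAC
        std::cout << selectCodec({"hifiAC", "zlib", "pcm"},
                                 {"zlib", "hifiAC"},
                                 {"hifiAC", "zlib"}) << "\n";  // prints "hifiAC"
        return 0;
    }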
@@ -458,6 +553,10 @@ void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
});
}

void AudioMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode) {
sendingNode->parseIgnoreRequestMessage(packet);
}

void AudioMixer::removeHRTFsForFinishedInjector(const QUuid& streamID) {
auto injectorClientData = qobject_cast<AudioMixerClientData*>(sender());
if (injectorClientData) {

@@ -662,24 +761,36 @@ void AudioMixer::broadcastMixes() {
std::unique_ptr<NLPacket> mixPacket;

if (mixHasAudio) {
int mixPacketBytes = sizeof(quint16) + AudioConstants::NETWORK_FRAME_BYTES_STEREO;
int mixPacketBytes = sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE
+ AudioConstants::NETWORK_FRAME_BYTES_STEREO;
mixPacket = NLPacket::create(PacketType::MixedAudio, mixPacketBytes);

// pack sequence number
quint16 sequence = nodeData->getOutgoingSequenceNumber();
mixPacket->writePrimitive(sequence);

// write the codec
QString codecInPacket = nodeData->getCodecName();
mixPacket->writeString(codecInPacket);

QByteArray decodedBuffer(reinterpret_cast<char*>(_clampedSamples), AudioConstants::NETWORK_FRAME_BYTES_STEREO);
QByteArray encodedBuffer;
nodeData->encode(decodedBuffer, encodedBuffer);

// pack mixed audio samples
mixPacket->write(reinterpret_cast<char*>(_clampedSamples),
AudioConstants::NETWORK_FRAME_BYTES_STEREO);
mixPacket->write(encodedBuffer.constData(), encodedBuffer.size());
} else {
int silentPacketBytes = sizeof(quint16) + sizeof(quint16);
int silentPacketBytes = sizeof(quint16) + sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE;
mixPacket = NLPacket::create(PacketType::SilentAudioFrame, silentPacketBytes);

// pack sequence number
quint16 sequence = nodeData->getOutgoingSequenceNumber();
mixPacket->writePrimitive(sequence);

// write the codec
QString codecInPacket = nodeData->getCodecName();
mixPacket->writeString(codecInPacket);

// pack number of silent audio samples
quint16 numSilentSamples = AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
mixPacket->writePrimitive(numSilentSamples);
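After this change, both audio packet types sent to a listener carry the negotiated codec name right after the sequence number, and the stereo frame is run through AudioMixerClientData::encode() before it is written. A summary sketch of the resulting layouts; the exact field encodings are defined by NLPacket and the QString write path elsewhere, not by this diff:

    // Sketch of the per-listener packet layouts produced by broadcastMixes() above.
    //
    // PacketType::MixedAudio
    //     quint16  sequence number
    //     string   codec name (bounded by AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE)
    //     bytes    encoded stereo frame; with no codec this is the raw
    //              AudioConstants::NETWORK_FRAME_BYTES_STEREO PCM samples
    //
    // PacketType::SilentAudioFrame
    //     quint16  sequence number
    //     string   codec name
    //     quint16  number of silent samples (AudioConstants::NETWORK_FRAME_SAMPLES_STEREO)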
@@ -797,6 +908,12 @@ void AudioMixer::parseSettingsObject(const QJsonObject &settingsObject) {
if (settingsObject.contains(AUDIO_ENV_GROUP_KEY)) {
QJsonObject audioEnvGroupObject = settingsObject[AUDIO_ENV_GROUP_KEY].toObject();

const QString CODEC_PREFERENCE_ORDER = "codec_preference_order";
if (audioEnvGroupObject[CODEC_PREFERENCE_ORDER].isString()) {
_codecPreferenceOrder = audioEnvGroupObject[CODEC_PREFERENCE_ORDER].toString();
qDebug() << "Codec preference order changed to" << _codecPreferenceOrder;
}

const QString ATTENATION_PER_DOULING_IN_DISTANCE = "attenuation_per_doubling_in_distance";
if (audioEnvGroupObject[ATTENATION_PER_DOULING_IN_DISTANCE].isString()) {
bool ok = false;

@@ -45,7 +45,9 @@ private slots:
void broadcastMixes();
void handleNodeAudioPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
void handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
void handleNegotiateAudioFormat(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
void handleNodeKilled(SharedNodePointer killedNode);
void handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);

void removeHRTFsForFinishedInjector(const QUuid& streamID);

@@ -91,6 +93,8 @@ private:
int _manualEchoMixes { 0 };
int _totalMixes { 0 };

QString _codecPreferenceOrder;

float _mixedSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
int16_t _clampedSamples[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];

@@ -39,6 +39,14 @@ AudioMixerClientData::AudioMixerClientData(const QUuid& nodeID) :
_frameToSendStats = distribution(numberGenerator);
}

AudioMixerClientData::~AudioMixerClientData() {
if (_codec) {
_codec->releaseDecoder(_decoder);
_codec->releaseEncoder(_encoder);
}
}

AvatarAudioStream* AudioMixerClientData::getAvatarAudioStream() {
QReadLocker readLocker { &_streamsLock };

@@ -101,9 +109,15 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
bool isStereo = channelFlag == 1;

auto avatarAudioStream = new AvatarAudioStream(isStereo, AudioMixer::getStreamSettings());
avatarAudioStream->setupCodec(_codec, _selectedCodecName, AudioConstants::MONO);
qDebug() << "creating new AvatarAudioStream... codec:" << _selectedCodecName;

connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec, this, &AudioMixerClientData::sendSelectAudioFormat);

auto emplaced = _audioStreams.emplace(
QUuid(),
std::unique_ptr<PositionalAudioStream> { new AvatarAudioStream(isStereo, AudioMixer::getStreamSettings()) }
std::unique_ptr<PositionalAudioStream> { avatarAudioStream }
);

micStreamIt = emplaced.first;

@@ -116,7 +130,6 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
isMicStream = true;
} else if (packetType == PacketType::InjectAudio) {
// this is injected audio

// grab the stream identifier for this injected audio
message.seek(sizeof(quint16));
QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));

@@ -130,9 +143,16 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
if (streamIt == _audioStreams.end()) {
// we don't have this injected stream yet, so add it
auto injectorStream = new InjectedAudioStream(streamIdentifier, isStereo, AudioMixer::getStreamSettings());

#if INJECTORS_SUPPORT_CODECS
injectorStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
qDebug() << "creating new injectorStream... codec:" << _selectedCodecName;
#endif

auto emplaced = _audioStreams.emplace(
streamIdentifier,
std::unique_ptr<InjectedAudioStream> { new InjectedAudioStream(streamIdentifier, isStereo, AudioMixer::getStreamSettings()) }
std::unique_ptr<InjectedAudioStream> { injectorStream }
);

streamIt = emplaced.first;

@@ -324,3 +344,52 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
return result;
}

void AudioMixerClientData::sendSelectAudioFormat(SharedNodePointer node, const QString& selectedCodecName) {
auto replyPacket = NLPacket::create(PacketType::SelectedAudioFormat);
replyPacket->writeString(selectedCodecName);
auto nodeList = DependencyManager::get<NodeList>();
nodeList->sendPacket(std::move(replyPacket), *node);
}

void AudioMixerClientData::setupCodec(CodecPluginPointer codec, const QString& codecName) {
cleanupCodec(); // cleanup any previously allocated coders first
_codec = codec;
_selectedCodecName = codecName;
if (codec) {
_encoder = codec->createEncoder(AudioConstants::SAMPLE_RATE, AudioConstants::STEREO);
_decoder = codec->createDecoder(AudioConstants::SAMPLE_RATE, AudioConstants::MONO);
}

auto avatarAudioStream = getAvatarAudioStream();
if (avatarAudioStream) {
avatarAudioStream->setupCodec(codec, codecName, AudioConstants::MONO);
}

#if INJECTORS_SUPPORT_CODECS
// fixup codecs for any active injectors...
auto it = _audioStreams.begin();
while (it != _audioStreams.end()) {
SharedStreamPointer stream = it->second;
if (stream->getType() == PositionalAudioStream::Injector) {
stream->setupCodec(codec, codecName, stream->isStereo() ? AudioConstants::STEREO : AudioConstants::MONO);
}
++it;
}
#endif
}

void AudioMixerClientData::cleanupCodec() {
// release any old codec encoder/decoder first...
if (_codec) {
if (_decoder) {
_codec->releaseDecoder(_decoder);
_decoder = nullptr;
}
if (_encoder) {
_codec->releaseEncoder(_encoder);
_encoder = nullptr;
}
}
}

@@ -19,6 +19,8 @@
#include <AudioLimiter.h>
#include <UUIDHasher.h>

#include <plugins/CodecPlugin.h>

#include "PositionalAudioStream.h"
#include "AvatarAudioStream.h"

@@ -27,6 +29,7 @@ class AudioMixerClientData : public NodeData {
Q_OBJECT
public:
AudioMixerClientData(const QUuid& nodeID);
~AudioMixerClientData();

using SharedStreamPointer = std::shared_ptr<PositionalAudioStream>;
using AudioStreamMap = std::unordered_map<QUuid, SharedStreamPointer>;

@@ -65,9 +68,24 @@ public:
AudioLimiter audioLimiter;

void setupCodec(CodecPluginPointer codec, const QString& codecName);
void cleanupCodec();
void encode(const QByteArray& decodedBuffer, QByteArray& encodedBuffer) {
if (_encoder) {
_encoder->encode(decodedBuffer, encodedBuffer);
} else {
encodedBuffer = decodedBuffer;
}
}

QString getCodecName() { return _selectedCodecName; }

signals:
void injectorStreamFinished(const QUuid& streamIdentifier);

public slots:
void sendSelectAudioFormat(SharedNodePointer node, const QString& selectedCodecName);

private:
QReadWriteLock _streamsLock;
AudioStreamMap _audioStreams; // microphone stream from avatar is stored under key of null UUID

@@ -81,6 +99,11 @@ private:
AudioStreamStats _downstreamAudioStreamStats;

int _frameToSendStats { 0 };

CodecPluginPointer _codec;
QString _selectedCodecName;
Encoder* _encoder{ nullptr }; // for outbound mixed stream
Decoder* _decoder{ nullptr }; // for mic stream
};

#endif // hifi_AudioMixerClientData_h
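The encode() helper above is what keeps the mixer working when codec negotiation selects nothing: with _encoder left null, the "encoded" buffer is simply a copy of the raw PCM frame, and the client named in the packet's codec field (an empty name in that case) plays it back unchanged. A self-contained sketch of that pattern, using a stand-in Encoder interface rather than the real plugin classes, whose declarations live in plugins/CodecPlugin.h and are not part of this diff:

    // Stand-in types for illustration only; not the project's Encoder/CodecPlugin interfaces.
    #include <QByteArray>

    class Encoder {
    public:
        virtual ~Encoder() = default;
        virtual void encode(const QByteArray& decodedBuffer, QByteArray& encodedBuffer) = 0;
    };

    class ClientDataSketch {
    public:
        void encode(const QByteArray& decodedBuffer, QByteArray& encodedBuffer) {
            if (_encoder) {
                _encoder->encode(decodedBuffer, encodedBuffer);  // negotiated codec compresses the frame
            } else {
                encodedBuffer = decodedBuffer;                   // no codec: raw PCM passes through untouched
            }
        }
        Encoder* _encoder { nullptr };
    };

    int main() {
        ClientDataSketch clientData;           // no negotiated codec
        QByteArray rawFrame(16, char(0));      // stand-in for a network audio frame
        QByteArray encoded;
        clientData.encode(rawFrame, encoded);
        return encoded == rawFrame ? 0 : 1;    // passes through unchanged
    }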
@@ -45,6 +45,7 @@ AvatarMixer::AvatarMixer(ReceivedMessage& message) :
packetReceiver.registerListener(PacketType::AvatarData, this, "handleAvatarDataPacket");
packetReceiver.registerListener(PacketType::AvatarIdentity, this, "handleAvatarIdentityPacket");
packetReceiver.registerListener(PacketType::KillAvatar, this, "handleKillAvatarPacket");
packetReceiver.registerListener(PacketType::NodeIgnoreRequest, this, "handleNodeIgnoreRequestPacket");

auto nodeList = DependencyManager::get<NodeList>();
connect(nodeList.data(), &NodeList::packetVersionMismatch, this, &AvatarMixer::handlePacketVersionMismatch);

@@ -227,14 +228,15 @@ void AvatarMixer::broadcastAvatarData() {
// send back a packet with other active node data to this node
nodeList->eachMatchingNode(
[&](const SharedNodePointer& otherNode)->bool {
if (!otherNode->getLinkedData()) {
// make sure we have data for this avatar, that it isn't the same node,
// and isn't an avatar that the viewing node has ignored
if (!otherNode->getLinkedData()
|| otherNode->getUUID() == node->getUUID()
|| node->isIgnoringNodeWithID(otherNode->getUUID())) {
return false;
} else {
return true;
}
if (otherNode->getUUID() == node->getUUID()) {
return false;
}

return true;
},
[&](const SharedNodePointer& otherNode) {
++numOtherAvatars;

@@ -431,6 +433,10 @@ void AvatarMixer::handleKillAvatarPacket(QSharedPointer<ReceivedMessage> message
DependencyManager::get<NodeList>()->processKillNode(*message);
}

void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
senderNode->parseIgnoreRequestMessage(message);
}

void AvatarMixer::sendStatsPacket() {
QJsonObject statsObject;
statsObject["average_listeners_last_second"] = (float) _sumListeners / (float) _numStatFrames;

@@ -37,9 +37,11 @@ private slots:
void handleAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleKillAvatarPacket(QSharedPointer<ReceivedMessage> message);
void handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void domainSettingsRequestComplete();
void handlePacketVersionMismatch(PacketType type, const HifiSockAddr& senderSockAddr, const QUuid& senderUUID);

private:
void broadcastAvatarData();
void parseDomainServerSettings(const QJsonObject& domainSettings);
cmake/externals/hifiAudioCodec/CMakeLists.txt (new file, 43 lines, vendored)

@@ -0,0 +1,43 @@
include(ExternalProject)
include(SelectLibraryConfigurations)

set(EXTERNAL_NAME hifiAudioCodec)

string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)

if (WIN32 OR APPLE)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://s3.amazonaws.com/hifi-public/dependencies/codecSDK-1.zip
URL_MD5 23ec3fe51eaa155ea159a4971856fc13
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
)
elseif(NOT ANDROID)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://s3.amazonaws.com/hifi-public/dependencies/codecSDK-linux.zip
URL_MD5 7d37914a18aa4de971d2f45dd3043bde
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
)
endif()

# Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")

ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)

set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include CACHE TYPE INTERNAL)

if (WIN32)
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/Release/audio.lib CACHE TYPE INTERNAL)
elseif(APPLE)
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/Release/libaudio.a CACHE TYPE INTERNAL)
elseif(NOT ANDROID)
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/Release/libaudio.a CACHE TYPE INTERNAL)
endif()

@@ -43,4 +43,4 @@ macro(ADD_DEPENDENCY_EXTERNAL_PROJECTS)

endforeach()

endmacro()
endmacro()
@@ -44,6 +44,8 @@ function(AUTOSCRIBE_SHADER SHADER_FILE)
set(SHADER_TARGET ${SHADER_TARGET}_vert.h)
elseif(${SHADER_EXT} STREQUAL .slf)
set(SHADER_TARGET ${SHADER_TARGET}_frag.h)
elseif(${SHADER_EXT} STREQUAL .slg)
set(SHADER_TARGET ${SHADER_TARGET}_geom.h)
endif()

set(SHADER_TARGET "${SHADERS_DIR}/${SHADER_TARGET}")

@@ -87,7 +89,7 @@ macro(AUTOSCRIBE_SHADER_LIB)
#message(${HIFI_LIBRARIES_SHADER_INCLUDE_FILES})

file(GLOB_RECURSE SHADER_INCLUDE_FILES src/*.slh)
file(GLOB_RECURSE SHADER_SOURCE_FILES src/*.slv src/*.slf)
file(GLOB_RECURSE SHADER_SOURCE_FILES src/*.slv src/*.slf src/*.slg)

#make the shader folder
set(SHADERS_DIR "${CMAKE_CURRENT_BINARY_DIR}/shaders/${TARGET_NAME}")

@@ -16,6 +16,7 @@ macro(install_beside_console)
install(
TARGETS ${TARGET_NAME}
RUNTIME DESTINATION ${COMPONENT_INSTALL_DIR}
LIBRARY DESTINATION ${CONSOLE_PLUGIN_INSTALL_DIR}
COMPONENT ${SERVER_COMPONENT}
)
else ()

@@ -69,6 +69,8 @@ macro(SET_PACKAGING_PARAMETERS)
set(CONSOLE_APP_CONTENTS "${CONSOLE_INSTALL_APP_PATH}/Contents")
set(COMPONENT_APP_PATH "${CONSOLE_APP_CONTENTS}/MacOS/Components.app")
set(COMPONENT_INSTALL_DIR "${COMPONENT_APP_PATH}/Contents/MacOS")
set(CONSOLE_PLUGIN_INSTALL_DIR "${COMPONENT_APP_PATH}/Contents/PlugIns")

set(INTERFACE_INSTALL_APP_PATH "${CONSOLE_INSTALL_DIR}/${INTERFACE_BUNDLE_NAME}.app")
set(INTERFACE_ICON_FILENAME "${INTERFACE_ICON_PREFIX}.icns")

cmake/macros/SetupHifiClientServerPlugin.cmake (new file, 61 lines)

@@ -0,0 +1,61 @@
#
# Created by Brad Hefta-Gaub on 2016/07/07
# Copyright 2016 High Fidelity, Inc.
#
# Distributed under the Apache License, Version 2.0.
# See the accompanying file LICENSE or http:#www.apache.org/licenses/LICENSE-2.0.html
#
macro(SETUP_HIFI_CLIENT_SERVER_PLUGIN)
set(${TARGET_NAME}_SHARED 1)
setup_hifi_library(${ARGV})
if (NOT DEFINED SERVER_ONLY)
add_dependencies(interface ${TARGET_NAME})
endif()
set_target_properties(${TARGET_NAME} PROPERTIES FOLDER "Plugins")

if (APPLE)
set(CLIENT_PLUGIN_PATH "${INTERFACE_BUNDLE_NAME}.app/Contents/PlugIns")
set(SERVER_PLUGIN_PATH "plugins")
else()
set(CLIENT_PLUGIN_PATH "plugins")
set(SERVER_PLUGIN_PATH "plugins")
endif()

if (CMAKE_SYSTEM_NAME MATCHES "Linux" OR CMAKE_GENERATOR STREQUAL "Unix Makefiles")
set(CLIENT_PLUGIN_FULL_PATH "${CMAKE_BINARY_DIR}/interface/${CLIENT_PLUGIN_PATH}/")
set(SERVER_PLUGIN_FULL_PATH "${CMAKE_BINARY_DIR}/assignment-client/${SERVER_PLUGIN_PATH}/")
elseif (APPLE)
set(CLIENT_PLUGIN_FULL_PATH "${CMAKE_BINARY_DIR}/interface/$<CONFIGURATION>/${CLIENT_PLUGIN_PATH}/")
set(SERVER_PLUGIN_FULL_PATH "${CMAKE_BINARY_DIR}/assignment-client/$<CONFIGURATION>/${SERVER_PLUGIN_PATH}/")
else()
set(CLIENT_PLUGIN_FULL_PATH "${CMAKE_BINARY_DIR}/interface/$<CONFIGURATION>/${CLIENT_PLUGIN_PATH}/")
set(SERVER_PLUGIN_FULL_PATH "${CMAKE_BINARY_DIR}/assignment-client/$<CONFIGURATION>/${SERVER_PLUGIN_PATH}/")
endif()

# create the destination for the client plugin binaries
add_custom_command(
TARGET ${TARGET_NAME} POST_BUILD
COMMAND "${CMAKE_COMMAND}" -E make_directory
${CLIENT_PLUGIN_FULL_PATH}
)
# copy the client plugin binaries
add_custom_command(TARGET ${TARGET_NAME} POST_BUILD
COMMAND "${CMAKE_COMMAND}" -E copy
"$<TARGET_FILE:${TARGET_NAME}>"
${CLIENT_PLUGIN_FULL_PATH}
)

# create the destination for the server plugin binaries
add_custom_command(
TARGET ${TARGET_NAME} POST_BUILD
COMMAND "${CMAKE_COMMAND}" -E make_directory
${SERVER_PLUGIN_FULL_PATH}
)
# copy the server plugin binaries
add_custom_command(TARGET ${TARGET_NAME} POST_BUILD
COMMAND "${CMAKE_COMMAND}" -E copy
"$<TARGET_FILE:${TARGET_NAME}>"
${SERVER_PLUGIN_FULL_PATH}
)

endmacro()
@@ -589,8 +589,8 @@
"name": "attenuation_per_doubling_in_distance",
"label": "Default Domain Attenuation",
"help": "Factor between 0 and 1.0 (0: No attenuation, 1.0: extreme attenuation)",
"placeholder": "0.18",
"default": "0.18",
"placeholder": "0.5",
"default": "0.5",
"advanced": false
},
{

@@ -686,7 +686,7 @@
"name": "coefficient",
"label": "Attenuation coefficient",
"can_set": true,
"placeholder": "0.18"
"placeholder": "0.5"
}
]
},

@@ -718,6 +718,14 @@
"placeholder": "(in percent)"
}
]
},
{
"name": "codec_preference_order",
"label": "Audio Codec Preference Order",
"help": "List of codec names in order of preferred usage",
"placeholder": "hifiAC, zlib, pcm",
"default": "hifiAC,zlib,pcm",
"advanced": true
}
]
},
@@ -389,6 +389,8 @@ void DomainServer::setupNodeListAndAssignments() {
const QVariant* idValueVariant = valueForKeyPath(settingsMap, METAVERSE_DOMAIN_ID_KEY_PATH);
if (idValueVariant) {
nodeList->setSessionUUID(idValueVariant->toString());
} else {
nodeList->setSessionUUID(QUuid::createUuid()); // Use random UUID
}

connect(nodeList.data(), &LimitedNodeList::nodeAdded, this, &DomainServer::nodeAdded);

@@ -1081,9 +1083,11 @@ void DomainServer::sendHeartbeatToMetaverse(const QString& networkAddress) {
// Setup the domain object to send to the data server
QJsonObject domainObject;

// add the version
// add the versions
static const QString VERSION_KEY = "version";
domainObject[VERSION_KEY] = BuildInfo::VERSION;
static const QString PROTOCOL_KEY = "protocol";
domainObject[PROTOCOL_KEY] = protocolVersionsSignatureBase64();

// add networking
if (!networkAddress.isEmpty()) {

@@ -1117,7 +1121,12 @@ void DomainServer::sendHeartbeatToMetaverse(const QString& networkAddress) {
QString domainUpdateJSON = QString("{\"domain\":%1}").arg(QString(QJsonDocument(domainObject).toJson(QJsonDocument::Compact)));

static const QString DOMAIN_UPDATE = "/api/v1/domains/%1";
DependencyManager::get<AccountManager>()->sendRequest(DOMAIN_UPDATE.arg(uuidStringWithoutCurlyBraces(getID())),
QString path = DOMAIN_UPDATE.arg(uuidStringWithoutCurlyBraces(getID()));
#if DEV_BUILD || PR_BUILD
qDebug() << "Domain metadata sent to" << path;
qDebug() << "Domain metadata update:" << domainUpdateJSON;
#endif
DependencyManager::get<AccountManager>()->sendRequest(path,
AccountManagerAuth::Optional,
QNetworkAccessManager::PutOperation,
JSONCallbackParameters(nullptr, QString(), this, "handleMetaverseHeartbeatError"),
@@ -58,6 +58,7 @@ set(INTERFACE_SRCS ${INTERFACE_SRCS} "${QT_UI_HEADERS}" "${QT_RESOURCES}")
# qt5_create_translation_custom(${QM} ${INTERFACE_SRCS} ${QT_UI_FILES} ${TS})

if (APPLE)

# configure CMake to use a custom Info.plist
set_target_properties(${this_target} PROPERTIES MACOSX_BUNDLE_INFO_PLIST MacOSXBundleInfo.plist.in)

@@ -229,6 +230,13 @@ if (APPLE)
set(SCRIPTS_INSTALL_DIR "${INTERFACE_INSTALL_APP_PATH}/Contents/Resources")

# copy script files beside the executable
add_custom_command(TARGET ${TARGET_NAME} POST_BUILD
COMMAND "${CMAKE_COMMAND}" -E copy_directory
"${CMAKE_SOURCE_DIR}/scripts"
$<TARGET_FILE_DIR:${TARGET_NAME}>/../Resources/scripts
)

# call the fixup_interface macro to add required bundling commands for installation
fixup_interface()

@@ -263,6 +271,7 @@ else (APPLE)
endif (APPLE)

if (SCRIPTS_INSTALL_DIR)

# setup install of scripts beside interface executable
install(
DIRECTORY "${CMAKE_SOURCE_DIR}/scripts/"
@@ -3,9 +3,17 @@
"channels": [
{ "from": "Hydra.LY", "filters": "invert", "to": "Standard.LY" },
{ "from": "Hydra.LX", "to": "Standard.LX" },
{ "from": "Hydra.LT", "to": "Standard.LTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
},
{ "from": "Hydra.LT", "to": "Standard.LT" },
{ "from": "Hydra.RY", "filters": "invert", "to": "Standard.RY" },
{ "from": "Hydra.RX", "to": "Standard.RX" },
{ "from": "Hydra.RT", "to": "Standard.RTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
},
{ "from": "Hydra.RT", "to": "Standard.RT" },

{ "from": "Hydra.LB", "to": "Standard.LB" },
@@ -1,13 +1,17 @@
{
"name": "Oculus Touch to Standard",
"channels": [
{ "from": "OculusTouch.A", "to": "Standard.A" },
{ "from": "OculusTouch.B", "to": "Standard.B" },
{ "from": "OculusTouch.X", "to": "Standard.X" },
{ "from": "OculusTouch.Y", "to": "Standard.Y" },
{ "from": "OculusTouch.A", "to": "Standard.RightPrimaryThumb" },
{ "from": "OculusTouch.B", "to": "Standard.RightSecondaryThumb" },
{ "from": "OculusTouch.X", "to": "Standard.LeftPrimaryThumb" },
{ "from": "OculusTouch.Y", "to": "Standard.LeftSecondaryThumb" },

{ "from": "OculusTouch.LY", "filters": "invert", "to": "Standard.LY" },
{ "from": "OculusTouch.LX", "to": "Standard.LX" },
{ "from": "OculusTouch.LT", "to": "Standard.LTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
},
{ "from": "OculusTouch.LT", "to": "Standard.LT" },
{ "from": "OculusTouch.LS", "to": "Standard.LS" },
{ "from": "OculusTouch.LeftGrip", "to": "Standard.LeftGrip" },

@@ -15,6 +19,10 @@
{ "from": "OculusTouch.RY", "filters": "invert", "to": "Standard.RY" },
{ "from": "OculusTouch.RX", "to": "Standard.RX" },
{ "from": "OculusTouch.RT", "to": "Standard.RTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
},
{ "from": "OculusTouch.RT", "to": "Standard.RT" },
{ "from": "OculusTouch.RS", "to": "Standard.RS" },
{ "from": "OculusTouch.RightGrip", "to": "Standard.RightGrip" },
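The new LTClick/RTClick routes above feed the analog trigger value through a hysteresis filter with min 0.85 and max 0.9, so the click turns on only once the trigger passes the upper threshold and turns off only after it falls below the lower one, which keeps the click from flickering when the trigger hovers near a single cutoff. A standalone sketch of that behaviour follows; it illustrates the concept only and is not the engine's filter implementation.

    // Conceptual sketch of a hysteresis gate configured with min 0.85, max 0.9.
    #include <cstdio>
    #include <initializer_list>

    class HysteresisSketch {
    public:
        HysteresisSketch(float min, float max) : _min(min), _max(max) {}
        float apply(float value) {
            if (!_on && value >= _max) {
                _on = true;            // trigger pulled past the upper threshold: click engages
            } else if (_on && value <= _min) {
                _on = false;           // released below the lower threshold: click disengages
            }
            return _on ? 1.0f : 0.0f;  // values in between keep the previous state
        }
    private:
        float _min, _max;
        bool _on { false };
    };

    int main() {
        HysteresisSketch ltClick(0.85f, 0.9f);
        for (float v : { 0.3f, 0.88f, 0.95f, 0.87f, 0.84f }) {
            std::printf("trigger = %.2f -> click = %.0f\n", v, ltClick.apply(v));
        }
        return 0;
    }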
@@ -10,14 +10,7 @@
{ "from": "Standard.RB", "to": "Actions.UiNavGroup" },
{ "from": [ "Standard.A", "Standard.X" ], "to": "Actions.UiNavSelect" },
{ "from": [ "Standard.B", "Standard.Y" ], "to": "Actions.UiNavBack" },
{
"from": [ "Standard.RT", "Standard.LT" ],
"to": "Actions.UiNavSelect",
"filters": [
{ "type": "deadZone", "min": 0.5 },
"constrainToInteger"
]
},
{ "from": [ "Standard.RTClick", "Standard.LTClick" ], "to": "Actions.UiNavSelect" },
{
"from": "Standard.LX", "to": "Actions.UiNavLateral",
"filters": [
interface/resources/controllers/touchscreen.json (new file, 23 lines)

@@ -0,0 +1,23 @@
{
"name": "Touchscreen to Actions",
"channels": [
{ "from": "Touchscreen.GesturePinchOut", "to": "Actions.BoomOut", "filters": [ { "type": "scale", "scale": 0.02 } ] },
{ "from": "Touchscreen.GesturePinchIn", "to": "Actions.BoomIn", "filters": [ { "type": "scale", "scale": 0.02 } ] },

{ "from": { "makeAxis" : [
[ "Touchscreen.DragLeft" ],
[ "Touchscreen.DragRight" ]
]
},
"to": "Actions.Yaw", "filters": [ { "type": "scale", "scale": 0.12 } ]
},

{ "from": { "makeAxis" : [
[ "Touchscreen.DragUp" ],
[ "Touchscreen.DragDown" ]
]
},
"to": "Actions.Pitch", "filters": [ { "type": "scale", "scale": 0.04 } ]
}
]
}
@@ -9,6 +9,8 @@
{ "type": "deadZone", "min": 0.05 }
]
},
{ "from": "Vive.LTClick", "to": "Standard.LTClick" },

{ "from": "Vive.LeftGrip", "to": "Standard.LeftGrip" },
{ "from": "Vive.LS", "to": "Standard.LS" },
{ "from": "Vive.LSTouch", "to": "Standard.LSTouch" },

@@ -21,6 +23,8 @@
{ "type": "deadZone", "min": 0.05 }
]
},
{ "from": "Vive.RTClick", "to": "Standard.RTClick" },

{ "from": "Vive.RightGrip", "to": "Standard.RightGrip" },
{ "from": "Vive.RS", "to": "Standard.RS" },
{ "from": "Vive.RSTouch", "to": "Standard.RSTouch" },
BIN interface/resources/images/Loading-Inner-H.png (new binary file, 1.5 KiB, not shown)
BIN interface/resources/images/Loading-Outer-Ring.png (new binary file, 2.6 KiB, not shown)
BIN interface/resources/images/default-domain.gif (new binary file, 4.6 KiB, not shown)
BIN interface/resources/images/preview.png (new binary file, 110 KiB, not shown)
@@ -13,6 +13,7 @@ import QtQuick 2.4
import "controls"
import "styles"
import "windows"
import "hifi"

Window {
id: root

@@ -44,11 +45,50 @@ Window {
anchors.centerIn = parent;
}

function goCard(card) {
addressLine.text = card.userStory.name;
toggleOrGo(true);
}
property var allDomains: [];
property var suggestionChoices: [];
property var domainsBaseUrl: null;
property int cardWidth: 200;
property int cardHeight: 152;

AddressBarDialog {
id: addressBarDialog
implicitWidth: backgroundImage.width
implicitHeight: backgroundImage.height

Row {
width: backgroundImage.width;
anchors {
bottom: backgroundImage.top;
bottomMargin: 2 * hifi.layout.spacing;
right: backgroundImage.right;
rightMargin: -104; // FIXME
}
spacing: hifi.layout.spacing;
Card {
id: s0;
width: cardWidth;
height: cardHeight;
goFunction: goCard
}
Card {
id: s1;
width: cardWidth;
height: cardHeight;
goFunction: goCard
}
Card {
id: s2;
width: cardWidth;
height: cardHeight;
goFunction: goCard
}
}

Image {
id: backgroundImage
source: "../images/address-bar.svg"

@@ -130,22 +170,178 @@ Window {
}
font.pixelSize: hifi.fonts.pixelSize * root.scale * 0.75
helperText: "Go to: place, @user, /path, network address"
onTextChanged: filterChoicesByText()
}

}
}

function getRequest(url, cb) { // cb(error, responseOfCorrectContentType) of url. General for 'get' text/html/json, but without redirects.
// TODO: make available to other .qml.
var request = new XMLHttpRequest();
// QT bug: apparently doesn't handle onload. Workaround using readyState.
request.onreadystatechange = function () {
var READY_STATE_DONE = 4;
var HTTP_OK = 200;
if (request.readyState >= READY_STATE_DONE) {
var error = (request.status !== HTTP_OK) && request.status.toString() + ':' + request.statusText,
response = !error && request.responseText,
contentType = !error && request.getResponseHeader('content-type');
if (!error && contentType.indexOf('application/json') === 0) {
try {
response = JSON.parse(response);
} catch (e) {
error = e;
}
}
cb(error, response);
}
};
request.open("GET", url, true);
request.send();
}
// call iterator(element, icb) once for each element of array, and then cb(error) when icb(error) has been called by each iterator.
// short-circuits if error. Note that iterator MUST be an asynchronous function. (Use setTimeout if necessary.)
function asyncEach(array, iterator, cb) {
var count = array.length;
function icb(error) {
if (!--count || error) {
count = -1; // don't cb multiple times (e.g., if error)
cb(error);
}
}
if (!count) {
return cb();
}
array.forEach(function (element) {
iterator(element, icb);
});
}

function identity(x) {
return x;
}

function addPictureToDomain(domainInfo, cb) { // asynchronously add thumbnail and lobby to domainInfo, if available, and cb(error)
// This requests data for all the names at once, and just uses the first one to come back.
// We might change this to check one at a time, which would be less requests and more latency.
asyncEach([domainInfo.name].concat(domainInfo.names || null).filter(identity), function (name, icb) {
var url = "https://metaverse.highfidelity.com/api/v1/places/" + name;
getRequest(url, function (error, json) {
var previews = !error && json.data.place.previews;
if (previews) {
if (!domainInfo.thumbnail) { // just grab the first one
domainInfo.thumbnail = previews.thumbnail;
}
if (!domainInfo.lobby) {
domainInfo.lobby = previews.lobby;
}
}
icb(error);
});
}, cb);
}

function getDomains(options, cb) { // cb(error, arrayOfData)
if (!options.page) {
options.page = 1;
}
if (!domainsBaseUrl) {
var domainsOptions = [
'open', // published hours handle now
'active', // has at least one person connected. FIXME: really want any place that is verified accessible.
// FIXME: really want places I'm allowed in, not just open ones.
'restriction=open', // Not by whitelist, etc. FIXME: If logged in, add hifi to the restriction options, in order to include places that require login.
// FIXME add maturity
'protocol=' + encodeURIComponent(AddressManager.protocolVersion()),
'sort_by=users',
'sort_order=desc',
];
domainsBaseUrl = "https://metaverse.highfidelity.com/api/v1/domains/all?" + domainsOptions.join('&');
}
var url = domainsBaseUrl + "&page=" + options.page + "&users=" + options.minUsers + "-" + options.maxUsers;
getRequest(url, function (error, json) {
if (!error && (json.status !== 'success')) {
error = new Error("Bad response: " + JSON.stringify(json));
}
if (error) {
error.message += ' for ' + url;
return cb(error);
}
var domains = json.data.domains;
if (json.current_page < json.total_pages) {
options.page++;
return getDomains(options, function (error, others) {
cb(error, domains.concat(others));
});
}
cb(null, domains);
});
}

function filterChoicesByText() {
function fill1(target, data) {
if (!data) {
target.visible = false;
return;
}
console.log('suggestion:', JSON.stringify(data));
target.userStory = data;
target.image.source = data.lobby || target.defaultPicture;
target.placeText = data.name;
target.usersText = data.online_users + ((data.online_users === 1) ? ' user' : ' users');
target.visible = true;
}
var words = addressLine.text.toUpperCase().split(/\s+/).filter(identity);
var filtered = !words.length ? suggestionChoices : allDomains.filter(function (domain) {
var text = domain.names.concat(domain.tags).join(' ');
if (domain.description) {
text += domain.description;
}
text = text.toUpperCase();
return words.every(function (word) {
return text.indexOf(word) >= 0;
});
});
fill1(s0, filtered[0]);
fill1(s1, filtered[1]);
fill1(s2, filtered[2]);
}

function fillDestinations() {
allDomains = suggestionChoices = [];
getDomains({minUsers: 0, maxUsers: 20}, function (error, domains) {
if (error) {
console.log('domain query failed:', error);
return filterChoicesByText();
}
var here = AddressManager.hostname; // don't show where we are now.
allDomains = domains.filter(function (domain) { return domain.name !== here; });
// Whittle down suggestions to those that have at least one user, and try to get pictures.
suggestionChoices = allDomains.filter(function (domain) { return domain.online_users; });
asyncEach(domains, addPictureToDomain, function (error) {
if (error) {
console.log('place picture query failed:', error);
}
// Whittle down more by requiring a picture.
suggestionChoices = suggestionChoices.filter(function (domain) { return domain.lobby; });
filterChoicesByText();
});
});
}

onVisibleChanged: {
if (visible) {
addressLine.forceActiveFocus()
fillDestinations();
} else {
addressLine.text = ""
}
}

function toggleOrGo() {
function toggleOrGo(fromSuggestions) {
if (addressLine.text !== "") {
addressBarDialog.loadAddress(addressLine.text)
addressBarDialog.loadAddress(addressLine.text, fromSuggestions)
}
root.shown = false;
}
@@ -3,12 +3,14 @@ import QtQuick.Controls 1.2
import QtWebEngine 1.1

import "controls-uit"
import "styles" as HifiStyles
import "styles-uit"
import "windows"

ScrollingWindow {
id: root
HifiConstants { id: hifi }
HifiStyles.HifiConstants { id: hifistyles }
title: "Browser"
resizable: true
destroyOnHidden: true

@@ -46,7 +48,7 @@ ScrollingWindow {
id: back;
enabled: webview.canGoBack;
text: hifi.glyphs.backward
color: enabled ? hifi.colors.text : hifi.colors.disabledText
color: enabled ? hifistyles.colors.text : hifistyles.colors.disabledText
size: 48
MouseArea { anchors.fill: parent; onClicked: webview.goBack() }
}

@@ -55,7 +57,7 @@ ScrollingWindow {
id: forward;
enabled: webview.canGoForward;
text: hifi.glyphs.forward
color: enabled ? hifi.colors.text : hifi.colors.disabledText
color: enabled ? hifistyles.colors.text : hifistyles.colors.disabledText
size: 48
MouseArea { anchors.fill: parent; onClicked: webview.goForward() }
}

@@ -64,7 +66,7 @@ ScrollingWindow {
id: reload;
enabled: webview.canGoForward;
text: webview.loading ? hifi.glyphs.close : hifi.glyphs.reload
color: enabled ? hifi.colors.text : hifi.colors.disabledText
color: enabled ? hifistyles.colors.text : hifistyles.colors.disabledText
size: 48
MouseArea { anchors.fill: parent; onClicked: webview.goForward() }
}

@@ -105,7 +107,7 @@ ScrollingWindow {
focus: true
colorScheme: hifi.colorSchemes.dark
placeholderText: "Enter URL"
Component.onCompleted: scriptsModel.filterRegExp = new RegExp("^.*$", "i")
Component.onCompleted: ScriptDiscoveryService.scriptsModelFilter.filterRegExp = new RegExp("^.*$", "i")
Keys.onPressed: {
switch(event.key) {
case Qt.Key_Enter:
@@ -3,13 +3,16 @@ import QtQuick 2.3
import QtQuick.Controls 1.3
import QtQuick.Controls.Styles 1.3
import QtGraphicalEffects 1.0

import "controls-uit"
import "styles" as HifiStyles
import "styles-uit"
import "windows"

ScrollingWindow {
id: root
HifiConstants { id: hifi }
HifiStyles.HifiConstants { id: hifistyles }
objectName: "UpdateDialog"
width: updateDialog.implicitWidth
height: updateDialog.implicitHeight

@@ -40,22 +43,6 @@ ScrollingWindow {
width: updateDialog.contentWidth + updateDialog.borderWidth * 2
height: mainContent.height + updateDialog.borderWidth * 2 - updateDialog.closeMargin / 2

MouseArea {
width: parent.width
height: parent.height
anchors {
horizontalCenter: parent.horizontalCenter
verticalCenter: parent.verticalCenter
}
drag {
target: root
minimumX: 0
minimumY: 0
maximumX: root.parent ? root.maximumX : 0
maximumY: root.parent ? root.maximumY : 0
}
}
}

Image {

@@ -89,7 +76,7 @@ ScrollingWindow {
text: "Update Available"
font {
family: updateDialog.fontFamily
pixelSize: hifi.fonts.pixelSize * 1.5
pixelSize: hifistyles.fonts.pixelSize * 1.5
weight: Font.DemiBold
}
color: "#303030"

@@ -100,10 +87,10 @@ ScrollingWindow {
text: updateDialog.updateAvailableDetails
font {
family: updateDialog.fontFamily
pixelSize: hifi.fonts.pixelSize * 0.6
pixelSize: hifistyles.fonts.pixelSize * 0.6
letterSpacing: -0.5
}
color: hifi.colors.text
color: hifistyles.colors.text
anchors {
top: updateAvailable.bottom
}

@@ -130,12 +117,12 @@ ScrollingWindow {
Text {
id: releaseNotes
wrapMode: Text.Wrap
width: parent.width - updateDialog.closeMargin
width: parent.parent.width - updateDialog.closeMargin
text: updateDialog.releaseNotes
color: hifi.colors.text
color: hifistyles.colors.text
font {
family: updateDialog.fontFamily
pixelSize: hifi.fonts.pixelSize * 0.65
pixelSize: hifistyles.fonts.pixelSize * 0.65
}
}
}

@@ -157,7 +144,7 @@ ScrollingWindow {
color: "#0c9ab4" // Same as logo
font {
family: updateDialog.fontFamily
pixelSize: hifi.fonts.pixelSize * 1.2
pixelSize: hifistyles.fonts.pixelSize * 1.2
weight: Font.DemiBold
}
anchors {

@@ -169,7 +156,7 @@ ScrollingWindow {
MouseArea {
id: cancelButtonAction
anchors.fill: parent
onClicked: updateDialog.closeDialog()
onClicked: root.shown = false
cursorShape: "PointingHandCursor"
}
}

@@ -185,7 +172,7 @@ ScrollingWindow {
color: "#0c9ab4" // Same as logo
font {
family: updateDialog.fontFamily
pixelSize: hifi.fonts.pixelSize * 1.2
pixelSize: hifistyles.fonts.pixelSize * 1.2
weight: Font.DemiBold
}
anchors {
interface/resources/qml/controls-uit/ComboBox.qml (mode changed: Executable file → Normal file)
interface/resources/qml/controls-uit/SpinBox.qml (mode changed: Executable file → Normal file)
@@ -25,6 +25,7 @@ FocusScope {
property rect recommendedRect: Qt.rect(0,0,0,0);
property var expectedChildren;
property bool repositionLocked: true
property bool hmdHandMouseActive: false

onRepositionLockedChanged: {
if (!repositionLocked) {
96
interface/resources/qml/hifi/Card.qml
Normal file
96
interface/resources/qml/hifi/Card.qml
Normal file
|
@ -0,0 +1,96 @@
|
|||
//
|
||||
// Card.qml
|
||||
// qml/hifi
|
||||
//
|
||||
// Displays a clickable card representing a user story or destination.
|
||||
//
|
||||
// Created by Howard Stearns on 7/13/2016
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
import Hifi 1.0
|
||||
import QtQuick 2.5
|
||||
import QtGraphicalEffects 1.0
|
||||
import "../styles-uit"
|
||||
|
||||
Rectangle {
|
||||
property var goFunction: null;
|
||||
property var userStory: null;
|
||||
property alias image: lobby;
|
||||
property alias placeText: place.text;
|
||||
property alias usersText: users.text;
|
||||
property int textPadding: 20;
|
||||
property int textSize: 24;
|
||||
property string defaultPicture: "../../images/default-domain.gif";
|
||||
HifiConstants { id: hifi }
|
||||
Image {
|
||||
id: lobby;
|
||||
width: parent.width;
|
||||
height: parent.height;
|
||||
source: defaultPicture;
|
||||
fillMode: Image.PreserveAspectCrop;
|
||||
// source gets filled in later
|
||||
anchors.verticalCenter: parent.verticalCenter;
|
||||
anchors.left: parent.left;
|
||||
onStatusChanged: {
|
||||
if (status == Image.Error) {
|
||||
console.log("source: " + source + ": failed to load " + JSON.stringify(userStory));
|
||||
source = defaultPicture;
|
||||
}
|
||||
}
|
||||
}
|
||||
property int dropHorizontalOffset: 0;
|
||||
property int dropVerticalOffset: 1;
|
||||
property int dropRadius: 2;
|
||||
property int dropSamples: 9;
|
||||
property int dropSpread: 0;
|
||||
DropShadow {
|
||||
source: place;
|
||||
anchors.fill: place;
|
||||
horizontalOffset: dropHorizontalOffset;
|
||||
verticalOffset: dropVerticalOffset;
|
||||
radius: dropRadius;
|
||||
samples: dropSamples;
|
||||
color: hifi.colors.black;
|
||||
spread: dropSpread;
|
||||
}
|
||||
DropShadow {
|
||||
source: users;
|
||||
anchors.fill: users;
|
||||
horizontalOffset: dropHorizontalOffset;
|
||||
verticalOffset: dropVerticalOffset;
|
||||
radius: dropRadius;
|
||||
samples: dropSamples;
|
||||
color: hifi.colors.black;
|
||||
spread: dropSpread;
|
||||
}
|
||||
RalewaySemiBold {
|
||||
id: place;
|
||||
color: hifi.colors.white;
|
||||
size: textSize;
|
||||
anchors {
|
||||
top: parent.top;
|
||||
left: parent.left;
|
||||
margins: textPadding;
|
||||
}
|
||||
}
|
||||
RalewayRegular {
|
||||
id: users;
|
||||
size: textSize;
|
||||
color: hifi.colors.white;
|
||||
anchors {
|
||||
bottom: parent.bottom;
|
||||
right: parent.right;
|
||||
margins: textPadding;
|
||||
}
|
||||
}
|
||||
MouseArea {
|
||||
anchors.fill: parent;
|
||||
acceptedButtons: Qt.LeftButton;
|
||||
onClicked: goFunction(parent);
|
||||
hoverEnabled: true;
|
||||
}
|
||||
}
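Card.qml falls back to its default picture whenever the requested user-story image fails to load. A hypothetical sketch of that fallback in Qt-style C++, purely for illustration (the function and enum names are not part of the commit):

// Illustrative sketch, not from the commit: swap in the default picture on load failure.
#include <QString>
#include <QDebug>

enum class ImageStatus { Loading, Ready, Error };

QString resolveCardImage(ImageStatus status, const QString& requestedSource,
                         const QString& defaultPicture = "../../images/default-domain.gif") {
    if (status == ImageStatus::Error) {
        qDebug() << "failed to load" << requestedSource << "- falling back to default";
        return defaultPicture;
    }
    return requestedSource;
}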
|
0
interface/resources/qml/hifi/dialogs/AttachmentsDialog.qml
Executable file → Normal file
|
@ -7,7 +7,7 @@ PreferencesDialog {
|
|||
id: root
|
||||
objectName: "AvatarPreferencesDialog"
|
||||
title: "Avatar Settings"
|
||||
showCategories: [ "Avatar Basics", "Avatar Tuning", "Avatar Camera" ]
|
||||
showCategories: [ "Avatar Basics", "Snapshots", "Avatar Tuning", "Avatar Camera" ]
|
||||
property var settings: Settings {
|
||||
category: root.objectName
|
||||
property alias x: root.x
|
||||
|
|
|
@ -1,117 +0,0 @@
|
|||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4
|
||||
import QtQuick.Controls.Styles 1.4
|
||||
import QtQuick.XmlListModel 2.0
|
||||
|
||||
import "../../windows"
|
||||
import "../../js/Utils.js" as Utils
|
||||
import "../models"
|
||||
|
||||
Window {
|
||||
id: root
|
||||
resizable: true
|
||||
width: 516
|
||||
height: 616
|
||||
minSize: Qt.vector2d(500, 600);
|
||||
maxSize: Qt.vector2d(1000, 800);
|
||||
|
||||
property alias source: image.source
|
||||
|
||||
Rectangle {
|
||||
anchors.fill: parent
|
||||
color: "white"
|
||||
|
||||
Item {
|
||||
anchors { fill: parent; margins: 8 }
|
||||
|
||||
Image {
|
||||
id: image
|
||||
anchors { top: parent.top; left: parent.left; right: parent.right; bottom: notesLabel.top; bottomMargin: 8 }
|
||||
fillMode: Image.PreserveAspectFit
|
||||
}
|
||||
|
||||
Text {
|
||||
id: notesLabel
|
||||
anchors { left: parent.left; bottom: notes.top; bottomMargin: 8; }
|
||||
text: "Notes about this image"
|
||||
font.pointSize: 14
|
||||
font.bold: true
|
||||
color: "#666"
|
||||
}
|
||||
|
||||
TextArea {
|
||||
id: notes
|
||||
anchors { left: parent.left; bottom: parent.bottom; right: shareButton.left; rightMargin: 8 }
|
||||
height: 60
|
||||
}
|
||||
|
||||
Button {
|
||||
id: shareButton
|
||||
anchors { verticalCenter: notes.verticalCenter; right: parent.right; }
|
||||
width: 120; height: 50
|
||||
text: "Share"
|
||||
|
||||
style: ButtonStyle {
|
||||
background: Rectangle {
|
||||
implicitWidth: 120
|
||||
implicitHeight: 50
|
||||
border.width: control.activeFocus ? 2 : 1
|
||||
color: "#333"
|
||||
radius: 9
|
||||
}
|
||||
label: Text {
|
||||
color: shareButton.enabled ? "white" : "gray"
|
||||
font.pixelSize: 18
|
||||
font.bold: true
|
||||
verticalAlignment: Text.AlignVCenter
|
||||
horizontalAlignment: Text.AlignHCenter
|
||||
anchors.fill: parent
|
||||
text: shareButton.text
|
||||
}
|
||||
}
|
||||
|
||||
onClicked: {
|
||||
enabled = false;
|
||||
uploadTimer.start();
|
||||
}
|
||||
|
||||
Timer {
|
||||
id: uploadTimer
|
||||
running: false
|
||||
interval: 5
|
||||
repeat: false
|
||||
onTriggered: {
|
||||
var uploaded = SnapshotUploader.uploadSnapshot(root.source.toString())
|
||||
console.log("Uploaded result " + uploaded)
|
||||
if (!uploaded) {
|
||||
console.log("Upload failed ");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Action {
|
||||
id: shareAction
|
||||
text: qsTr("OK")
|
||||
enabled: root.result ? true : false
|
||||
shortcut: Qt.Key_Return
|
||||
onTriggered: {
|
||||
root.destroy();
|
||||
}
|
||||
}
|
||||
|
||||
Action {
|
||||
id: cancelAction
|
||||
text: qsTr("Cancel")
|
||||
shortcut: Qt.Key_Escape
|
||||
onTriggered: {
|
||||
root.destroy();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
0
interface/resources/qml/hifi/dialogs/attachments/Attachment.qml
Executable file → Normal file
|
@ -19,14 +19,13 @@ Window {
|
|||
shown: true
|
||||
width: content.width
|
||||
height: content.height
|
||||
visible: true
|
||||
// Disable this window from being able to call 'desktop.raise()' and 'desktop.showDesktop()'
|
||||
activator: Item {}
|
||||
property bool horizontal: true
|
||||
property real buttonSize: 50;
|
||||
property var buttons: []
|
||||
property var container: horizontal ? row : column
|
||||
|
||||
|
||||
Settings {
|
||||
category: "toolbar/" + window.objectName
|
||||
property alias x: window.x
|
||||
|
|
|
@ -4,48 +4,20 @@ import QtQuick.Controls 1.4
|
|||
Item {
|
||||
id: button
|
||||
property alias imageURL: image.source
|
||||
property alias alpha: button.opacity
|
||||
property alias alpha: image.opacity
|
||||
property var subImage;
|
||||
property int yOffset: 0
|
||||
property int buttonState: 0
|
||||
property int hoverOffset: 0
|
||||
property int hoverState: -1
|
||||
property int defaultState: -1
|
||||
property var toolbar;
|
||||
property real size: 50 // toolbar ? toolbar.buttonSize : 50
|
||||
width: size; height: size
|
||||
property bool pinned: false
|
||||
clip: true
|
||||
|
||||
Behavior on opacity {
|
||||
NumberAnimation {
|
||||
duration: 150
|
||||
easing.type: Easing.InOutCubic
|
||||
}
|
||||
}
|
||||
|
||||
property alias fadeTargetProperty: button.opacity
|
||||
|
||||
onFadeTargetPropertyChanged: {
|
||||
visible = (fadeTargetProperty !== 0.0);
|
||||
}
|
||||
|
||||
onVisibleChanged: {
|
||||
if ((!visible && fadeTargetProperty != 0.0) || (visible && fadeTargetProperty == 0.0)) {
|
||||
var target = visible;
|
||||
visible = !visible;
|
||||
fadeTargetProperty = target ? 1.0 : 0.0;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
function updateOffset() {
|
||||
yOffset = size * (buttonState + hoverOffset);
|
||||
}
|
||||
onButtonStateChanged: {
|
||||
hoverOffset = 0; // subtle: show the new state without hover. don't wait for mouse to be moved away
|
||||
// The above is per UX design, but ALSO avoid a subtle issue that would be a problem because
|
||||
// the hand controllers don't move the mouse when not triggered, so releasing the trigger would
|
||||
// never show unhovered.
|
||||
updateOffset();
|
||||
yOffset = size * buttonState;
|
||||
}
|
||||
|
||||
Component.onCompleted: {
|
||||
|
@ -64,18 +36,28 @@ Item {
|
|||
width: parent.width
|
||||
}
|
||||
|
||||
Timer {
|
||||
id: asyncClickSender
|
||||
interval: 10
|
||||
repeat: false
|
||||
running: false
|
||||
onTriggered: button.clicked();
|
||||
}
|
||||
|
||||
MouseArea {
|
||||
id: mouseArea
|
||||
hoverEnabled: true
|
||||
anchors.fill: parent
|
||||
onClicked: button.clicked();
|
||||
onClicked: asyncClickSender.start();
|
||||
onEntered: {
|
||||
hoverOffset = 2;
|
||||
updateOffset();
|
||||
if (hoverState >= 0) {
|
||||
buttonState = hoverState;
|
||||
}
|
||||
}
|
||||
onExited: {
|
||||
hoverOffset = 0;
|
||||
updateOffset();
|
||||
if (defaultState >= 0) {
|
||||
buttonState = defaultState;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -45,6 +45,7 @@ FocusScope {
|
|||
onVisibleChanged: recalcSize();
|
||||
onCountChanged: recalcSize();
|
||||
focus: true
|
||||
highlightMoveDuration: 0
|
||||
|
||||
highlight: Rectangle {
|
||||
anchors {
|
||||
|
|
93
interface/resources/qml/windows/Decoration.qml
Normal file
|
@ -0,0 +1,93 @@
|
|||
//
|
||||
// DefaultFrame.qml
|
||||
//
|
||||
// Created by Bradley Austin Davis on 12 Jan 2016
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
import QtQuick 2.5
|
||||
import QtGraphicalEffects 1.0
|
||||
|
||||
import "."
|
||||
import "../styles-uit"
|
||||
|
||||
Rectangle {
|
||||
HifiConstants { id: hifi }
|
||||
|
||||
signal inflateDecorations();
|
||||
signal deflateDecorations();
|
||||
|
||||
property int frameMargin: 9
|
||||
property int frameMarginLeft: frameMargin
|
||||
property int frameMarginRight: frameMargin
|
||||
property int frameMarginTop: 2 * frameMargin + iconSize
|
||||
property int frameMarginBottom: iconSize + 11
|
||||
|
||||
anchors {
|
||||
topMargin: -frameMarginTop
|
||||
leftMargin: -frameMarginLeft
|
||||
rightMargin: -frameMarginRight
|
||||
bottomMargin: -frameMarginBottom
|
||||
}
|
||||
anchors.fill: parent
|
||||
color: hifi.colors.baseGrayHighlight40
|
||||
border {
|
||||
width: hifi.dimensions.borderWidth
|
||||
color: hifi.colors.faintGray50
|
||||
}
|
||||
radius: hifi.dimensions.borderRadius
|
||||
|
||||
// Enable dragging of the window,
|
||||
// detect mouseover of the window (including decoration)
|
||||
MouseArea {
|
||||
id: decorationMouseArea
|
||||
anchors.fill: parent
|
||||
drag.target: window
|
||||
hoverEnabled: true
|
||||
onEntered: window.mouseEntered();
|
||||
onExited: {
|
||||
if (!containsMouseGlobal()) {
|
||||
window.mouseExited();
|
||||
}
|
||||
}
|
||||
|
||||
function containsMouseGlobal() {
|
||||
var reticlePos = Reticle.position;
|
||||
var globalPosition = decorationMouseArea.mapToItem(desktop, 0, 0);
|
||||
var localPosition = {
|
||||
x: reticlePos.x - globalPosition.x,
|
||||
y: reticlePos.y - globalPosition.y,
|
||||
};
|
||||
return localPosition.x >= 0 && localPosition.x <= width &&
|
||||
localPosition.y >= 0 && localPosition.y <= height;
|
||||
}
|
||||
|
||||
}
|
||||
Connections {
|
||||
target: window
|
||||
onMouseEntered: {
|
||||
if (desktop.hmdHandMouseActive) {
|
||||
root.inflateDecorations()
|
||||
}
|
||||
}
|
||||
onMouseExited: {
|
||||
root.deflateDecorations();
|
||||
}
|
||||
}
|
||||
Connections {
|
||||
target: desktop
|
||||
onHmdHandMouseActiveChanged: {
|
||||
if (desktop.hmdHandMouseActive) {
|
||||
if (decorationMouseArea.containsMouse) {
|
||||
root.inflateDecorations();
|
||||
}
|
||||
} else {
|
||||
root.deflateDecorations();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
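containsMouseGlobal() above translates the reticle's desktop-space position into the decoration's local space and tests it against the item's bounds. The same test as a standalone sketch with plain structs (illustrative only; the real code maps through the QML desktop item):

// Illustrative sketch, not from the commit: global-to-local point-in-bounds test.
struct Point { float x, y; };

bool containsGlobalPoint(Point reticle, Point itemOrigin, float width, float height) {
    // Translate the global reticle position into the item's local space,
    // then check it against the item's rectangle.
    float localX = reticle.x - itemOrigin.x;
    float localY = reticle.y - itemOrigin.y;
    return localX >= 0.0f && localX <= width &&
           localY >= 0.0f && localY <= height;
}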
|
||||
|
|
@ -16,104 +16,6 @@ import "../styles-uit"
|
|||
|
||||
Frame {
|
||||
HifiConstants { id: hifi }
|
||||
|
||||
Rectangle {
|
||||
// Dialog frame
|
||||
id: frameContent
|
||||
|
||||
readonly property int iconSize: hifi.dimensions.frameIconSize
|
||||
readonly property int frameMargin: 9
|
||||
readonly property int frameMarginLeft: frameMargin
|
||||
readonly property int frameMarginRight: frameMargin
|
||||
readonly property int frameMarginTop: 2 * frameMargin + iconSize
|
||||
readonly property int frameMarginBottom: iconSize + 11
|
||||
|
||||
anchors {
|
||||
topMargin: -frameMarginTop
|
||||
leftMargin: -frameMarginLeft
|
||||
rightMargin: -frameMarginRight
|
||||
bottomMargin: -frameMarginBottom
|
||||
}
|
||||
anchors.fill: parent
|
||||
color: hifi.colors.baseGrayHighlight40
|
||||
border {
|
||||
width: hifi.dimensions.borderWidth
|
||||
color: hifi.colors.faintGray50
|
||||
}
|
||||
radius: hifi.dimensions.borderRadius
|
||||
|
||||
// Enable dragging of the window
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
drag.target: window
|
||||
}
|
||||
|
||||
Row {
|
||||
id: controlsRow
|
||||
anchors {
|
||||
right: parent.right;
|
||||
top: parent.top;
|
||||
topMargin: frameContent.frameMargin + 1 // Move down a little to visually align with the title
|
||||
rightMargin: frameContent.frameMarginRight;
|
||||
}
|
||||
spacing: frameContent.iconSize / 4
|
||||
|
||||
HiFiGlyphs {
|
||||
// "Pin" button
|
||||
visible: window.pinnable
|
||||
text: window.pinned ? hifi.glyphs.pinInverted : hifi.glyphs.pin
|
||||
color: pinClickArea.pressed ? hifi.colors.redHighlight : hifi.colors.white
|
||||
size: frameContent.iconSize
|
||||
MouseArea {
|
||||
id: pinClickArea
|
||||
anchors.fill: parent
|
||||
hoverEnabled: true
|
||||
propagateComposedEvents: true
|
||||
onClicked: window.pinned = !window.pinned;
|
||||
}
|
||||
}
|
||||
|
||||
HiFiGlyphs {
|
||||
// "Close" button
|
||||
visible: window ? window.closable : false
|
||||
text: closeClickArea.containsPress ? hifi.glyphs.closeInverted : hifi.glyphs.close
|
||||
color: closeClickArea.containsMouse ? hifi.colors.redHighlight : hifi.colors.white
|
||||
size: frameContent.iconSize
|
||||
MouseArea {
|
||||
id: closeClickArea
|
||||
anchors.fill: parent
|
||||
hoverEnabled: true
|
||||
onClicked: window.shown = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
RalewayRegular {
|
||||
// Title
|
||||
id: titleText
|
||||
anchors {
|
||||
left: parent.left
|
||||
leftMargin: frameContent.frameMarginLeft + hifi.dimensions.contentMargin.x
|
||||
right: controlsRow.left
|
||||
rightMargin: frameContent.iconSize
|
||||
top: parent.top
|
||||
topMargin: frameContent.frameMargin
|
||||
}
|
||||
text: window ? window.title : ""
|
||||
color: hifi.colors.white
|
||||
size: hifi.fontSizes.overlayTitle
|
||||
}
|
||||
|
||||
DropShadow {
|
||||
source: titleText
|
||||
anchors.fill: titleText
|
||||
horizontalOffset: 2
|
||||
verticalOffset: 2
|
||||
samples: 2
|
||||
color: hifi.colors.baseGrayShadow60
|
||||
visible: (window && window.focus)
|
||||
cached: true
|
||||
}
|
||||
}
|
||||
DefaultFrameDecoration {}
|
||||
}
|
||||
|
||||
|
|
113
interface/resources/qml/windows/DefaultFrameDecoration.qml
Normal file
|
@ -0,0 +1,113 @@
|
|||
//
|
||||
// DefaultFrame.qml
|
||||
//
|
||||
// Created by Bradley Austin Davis on 12 Jan 2016
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
import QtQuick 2.5
|
||||
import QtGraphicalEffects 1.0
|
||||
|
||||
import "."
|
||||
import "../styles-uit"
|
||||
|
||||
Decoration {
|
||||
HifiConstants { id: hifi }
|
||||
|
||||
// Dialog frame
|
||||
id: root
|
||||
|
||||
property int iconSize: hifi.dimensions.frameIconSize
|
||||
frameMargin: 9
|
||||
frameMarginLeft: frameMargin
|
||||
frameMarginRight: frameMargin
|
||||
frameMarginTop: 2 * frameMargin + iconSize
|
||||
frameMarginBottom: iconSize + 11
|
||||
|
||||
onInflateDecorations: {
|
||||
if (!HMD.active) {
|
||||
return;
|
||||
}
|
||||
root.frameMargin = 18
|
||||
titleText.size = hifi.fontSizes.overlayTitle * 2
|
||||
root.iconSize = hifi.dimensions.frameIconSize * 2
|
||||
}
|
||||
|
||||
onDeflateDecorations: {
|
||||
root.frameMargin = 9
|
||||
titleText.size = hifi.fontSizes.overlayTitle
|
||||
root.iconSize = hifi.dimensions.frameIconSize
|
||||
}
|
||||
|
||||
|
||||
Row {
|
||||
id: controlsRow
|
||||
anchors {
|
||||
right: parent.right;
|
||||
top: parent.top;
|
||||
topMargin: root.frameMargin + 1 // Move down a little to visually align with the title
|
||||
rightMargin: root.frameMarginRight;
|
||||
}
|
||||
spacing: root.iconSize / 4
|
||||
|
||||
HiFiGlyphs {
|
||||
// "Pin" button
|
||||
visible: window.pinnable
|
||||
text: window.pinned ? hifi.glyphs.pinInverted : hifi.glyphs.pin
|
||||
color: pinClickArea.pressed ? hifi.colors.redHighlight : hifi.colors.white
|
||||
size: root.iconSize
|
||||
MouseArea {
|
||||
id: pinClickArea
|
||||
anchors.fill: parent
|
||||
hoverEnabled: true
|
||||
propagateComposedEvents: true
|
||||
onClicked: window.pinned = !window.pinned;
|
||||
}
|
||||
}
|
||||
|
||||
HiFiGlyphs {
|
||||
// "Close" button
|
||||
visible: window ? window.closable : false
|
||||
text: closeClickArea.containsPress ? hifi.glyphs.closeInverted : hifi.glyphs.close
|
||||
color: closeClickArea.containsMouse ? hifi.colors.redHighlight : hifi.colors.white
|
||||
size: root.iconSize
|
||||
MouseArea {
|
||||
id: closeClickArea
|
||||
anchors.fill: parent
|
||||
hoverEnabled: true
|
||||
onClicked: window.shown = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
RalewayRegular {
|
||||
// Title
|
||||
id: titleText
|
||||
anchors {
|
||||
left: parent.left
|
||||
leftMargin: root.frameMarginLeft + hifi.dimensions.contentMargin.x
|
||||
right: controlsRow.left
|
||||
rightMargin: root.iconSize
|
||||
top: parent.top
|
||||
topMargin: root.frameMargin
|
||||
}
|
||||
text: window ? window.title : ""
|
||||
color: hifi.colors.white
|
||||
size: hifi.fontSizes.overlayTitle
|
||||
}
|
||||
|
||||
DropShadow {
|
||||
source: titleText
|
||||
anchors.fill: titleText
|
||||
horizontalOffset: 2
|
||||
verticalOffset: 2
|
||||
samples: 2
|
||||
color: hifi.colors.baseGrayShadow60
|
||||
visible: (window && window.focus)
|
||||
cached: true
|
||||
}
|
||||
}
|
||||
|
|
@ -22,10 +22,10 @@ Item {
|
|||
|
||||
property bool gradientsSupported: desktop.gradientsSupported
|
||||
|
||||
readonly property int frameMarginLeft: frameContent.frameMarginLeft
|
||||
readonly property int frameMarginRight: frameContent.frameMarginRight
|
||||
readonly property int frameMarginTop: frameContent.frameMarginTop
|
||||
readonly property int frameMarginBottom: frameContent.frameMarginBottom
|
||||
readonly property int frameMarginLeft: frame.decoration ? frame.decoration.frameMarginLeft : 0
|
||||
readonly property int frameMarginRight: frame.decoration ? frame.decoration.frameMarginRight : 0
|
||||
readonly property int frameMarginTop: frame.decoration ? frame.decoration.frameMarginTop : 0
|
||||
readonly property int frameMarginBottom: frame.decoration ? frame.decoration.frameMarginBottom : 0
|
||||
|
||||
// Frames always fill their parents, but their decorations may extend
|
||||
// beyond the window via negative margin sizes
|
||||
|
@ -103,16 +103,14 @@ Item {
|
|||
}
|
||||
onReleased: {
|
||||
if (hid) {
|
||||
pane.visible = true
|
||||
frameContent.visible = true
|
||||
window.content.visible = true
|
||||
hid = false;
|
||||
}
|
||||
}
|
||||
onPositionChanged: {
|
||||
if (pressed) {
|
||||
if (pane.visible) {
|
||||
pane.visible = false;
|
||||
frameContent.visible = false
|
||||
if (window.content.visible) {
|
||||
window.content.visible = false;
|
||||
hid = true;
|
||||
}
|
||||
var delta = Qt.vector2d(mouseX, mouseY).minus(pressOrigin);
|
||||
|
|
|
@ -16,81 +16,11 @@ import "../styles-uit"
|
|||
|
||||
Frame {
|
||||
HifiConstants { id: hifi }
|
||||
property bool horizontalSpacers: false
|
||||
property bool verticalSpacers: false
|
||||
property alias horizontalSpacers: decoration.horizontalSpacers
|
||||
property alias verticalSpacers: decoration.verticalSpacers
|
||||
|
||||
Rectangle {
|
||||
// Dialog frame
|
||||
id: frameContent
|
||||
readonly property int frameMargin: 6
|
||||
readonly property int frameMarginLeft: frameMargin + (horizontalSpacers ? 12 : 0)
|
||||
readonly property int frameMarginRight: frameMargin + (horizontalSpacers ? 12 : 0)
|
||||
readonly property int frameMarginTop: frameMargin + (verticalSpacers ? 12 : 0)
|
||||
readonly property int frameMarginBottom: frameMargin + (verticalSpacers ? 12 : 0)
|
||||
|
||||
Rectangle {
|
||||
visible: horizontalSpacers
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 6
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
width: 8
|
||||
height: window.height
|
||||
color: "gray";
|
||||
radius: 4
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
visible: horizontalSpacers
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: 6
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
width: 8
|
||||
height: window.height
|
||||
color: "gray";
|
||||
radius: 4
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
visible: verticalSpacers
|
||||
anchors.top: parent.top
|
||||
anchors.topMargin: 6
|
||||
anchors.horizontalCenter: parent.horizontalCenter
|
||||
height: 8
|
||||
width: window.width
|
||||
color: "gray";
|
||||
radius: 4
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
visible: verticalSpacers
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.bottomMargin: 6
|
||||
anchors.horizontalCenter: parent.horizontalCenter
|
||||
height: 8
|
||||
width: window.width
|
||||
color: "gray";
|
||||
radius: 4
|
||||
}
|
||||
|
||||
anchors {
|
||||
leftMargin: -frameMarginLeft
|
||||
rightMargin: -frameMarginRight
|
||||
topMargin: -frameMarginTop
|
||||
bottomMargin: -frameMarginBottom
|
||||
}
|
||||
anchors.fill: parent
|
||||
color: hifi.colors.baseGrayHighlight40
|
||||
border {
|
||||
width: hifi.dimensions.borderWidth
|
||||
color: hifi.colors.faintGray50
|
||||
}
|
||||
radius: hifi.dimensions.borderRadius / 2
|
||||
|
||||
// Enable dragging of the window
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
drag.target: window
|
||||
}
|
||||
ToolFrameDecoration {
|
||||
id: decoration
|
||||
}
|
||||
}
|
||||
|
||||
|
|
96
interface/resources/qml/windows/ToolFrameDecoration.qml
Normal file
|
@ -0,0 +1,96 @@
|
|||
//
|
||||
// DefaultFrame.qml
|
||||
//
|
||||
// Created by Bradley Austin Davis on 12 Jan 2016
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
import QtQuick 2.5
|
||||
import QtGraphicalEffects 1.0
|
||||
|
||||
import "."
|
||||
import "../styles-uit"
|
||||
|
||||
Decoration {
|
||||
id: root
|
||||
HifiConstants { id: hifi }
|
||||
|
||||
property bool horizontalSpacers: false
|
||||
property bool verticalSpacers: false
|
||||
|
||||
// Dialog frame
|
||||
property int spacerWidth: 8
|
||||
property int spacerRadius: 4
|
||||
property int spacerMargin: 12
|
||||
frameMargin: 6
|
||||
frameMarginLeft: frameMargin + (horizontalSpacers ? spacerMargin : 0)
|
||||
frameMarginRight: frameMargin + (horizontalSpacers ? spacerMargin : 0)
|
||||
frameMarginTop: frameMargin + (verticalSpacers ? spacerMargin : 0)
|
||||
frameMarginBottom: frameMargin + (verticalSpacers ? spacerMargin : 0)
|
||||
radius: hifi.dimensions.borderRadius / 2
|
||||
|
||||
onInflateDecorations: {
|
||||
if (!HMD.active) {
|
||||
return;
|
||||
}
|
||||
root.frameMargin = 18
|
||||
root.spacerWidth = 16
|
||||
root.spacerRadius = 8
|
||||
root.spacerMargin = 8
|
||||
}
|
||||
|
||||
onDeflateDecorations: {
|
||||
root.frameMargin = 6
|
||||
root.spacerWidth = 8
|
||||
root.spacerRadius = 4
|
||||
root.spacerMargin = 12
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
visible: horizontalSpacers
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 6
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
width: root.spacerWidth
|
||||
height: decoration.height - 12
|
||||
color: "gray";
|
||||
radius: root.spacerRadius
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
visible: horizontalSpacers
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: 6
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
width: root.spacerWidth
|
||||
height: decoration.height - 12
|
||||
color: "gray";
|
||||
radius: root.spacerRadius
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
visible: verticalSpacers
|
||||
anchors.top: parent.top
|
||||
anchors.topMargin: 6
|
||||
anchors.horizontalCenter: parent.horizontalCenter
|
||||
height: root.spacerWidth
|
||||
width: decoration.width - 12
|
||||
color: "gray";
|
||||
radius: root.spacerRadius
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
visible: verticalSpacers
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.bottomMargin: 6
|
||||
anchors.horizontalCenter: parent.horizontalCenter
|
||||
height: root.spacerWidth
|
||||
width: decoration.width - 12
|
||||
color: "gray";
|
||||
radius: root.spacerRadius
|
||||
}
|
||||
}
|
||||
|
|
@ -31,6 +31,8 @@ Fadable {
|
|||
// Signals
|
||||
//
|
||||
signal windowDestroyed();
|
||||
signal mouseEntered();
|
||||
signal mouseExited();
|
||||
|
||||
//
|
||||
// Native properties
|
||||
|
@ -114,7 +116,6 @@ Fadable {
|
|||
acceptedButtons: Qt.AllButtons
|
||||
enabled: window.visible
|
||||
onPressed: {
|
||||
//console.log("Pressed on activator area");
|
||||
window.raise();
|
||||
mouse.accepted = false;
|
||||
}
|
||||
|
@ -209,6 +210,9 @@ Fadable {
|
|||
|
||||
var targetVisibility = getTargetVisibility();
|
||||
if (targetVisibility === visible) {
|
||||
if (force) {
|
||||
window.raise();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -284,4 +288,7 @@ Fadable {
|
|||
break;
|
||||
}
|
||||
}
|
||||
|
||||
onMouseEntered: console.log("Mouse entered " + window)
|
||||
onMouseExited: console.log("Mouse exited " + window)
|
||||
}
|
||||
|
|
35
interface/resources/shaders/hmd_hand_lasers.frag
Normal file
|
@ -0,0 +1,35 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2016/07/11
|
||||
// Copyright 2013-2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#version 410 core
|
||||
|
||||
uniform vec4 color = vec4(1.0, 1.0, 1.0, 1.0);
|
||||
|
||||
layout(location = 0) in vec3 inLineDistance;
|
||||
|
||||
out vec4 FragColor;
|
||||
|
||||
void main() {
|
||||
vec2 d = inLineDistance.xy;
|
||||
d.y = abs(d.y);
|
||||
d.x = abs(d.x);
|
||||
if (d.x > 1.0) {
|
||||
d.x = (d.x - 1.0) / 0.02;
|
||||
} else {
|
||||
d.x = 0.0;
|
||||
}
|
||||
float alpha = 1.0 - length(d);
|
||||
if (alpha <= 0.0) {
|
||||
discard;
|
||||
}
|
||||
alpha = pow(alpha, 10.0);
|
||||
if (alpha < 0.05) {
|
||||
discard;
|
||||
}
|
||||
FragColor = vec4(color.rgb, alpha);
|
||||
}
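The falloff above fades the beam toward its edges and end caps: the across-line distance runs -1..1, overshoot past the endpoints is remapped into a 0.02-wide margin, and the remaining alpha is sharpened with a power of 10 and discarded below 0.05. A minimal CPU-side restatement of that math, assuming a standalone helper (the function name is illustrative, not part of the commit):

// Illustrative sketch, not from the commit: the laser alpha falloff on the CPU.
#include <cmath>

// x runs along the segment (|x| > 1 past the endpoints), y runs across it (-1..1).
float laserAlpha(float x, float y) {
    x = std::fabs(x);
    y = std::fabs(y);
    // Past the end caps, remap the overshoot into the small margin the
    // geometry stage adds, so the caps fade instead of cutting off hard.
    x = (x > 1.0f) ? (x - 1.0f) / 0.02f : 0.0f;
    float alpha = 1.0f - std::sqrt(x * x + y * y);
    if (alpha <= 0.0f) {
        return 0.0f;                  // the shader discards here
    }
    alpha = std::pow(alpha, 10.0f);   // sharpen the core of the beam
    return (alpha < 0.05f) ? 0.0f : alpha;
}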
|
70
interface/resources/shaders/hmd_hand_lasers.geom
Normal file
|
@ -0,0 +1,70 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2016/07/11
|
||||
// Copyright 2013-2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#version 410 core
|
||||
#extension GL_EXT_geometry_shader4 : enable
|
||||
|
||||
layout(location = 0) out vec3 outLineDistance;
|
||||
|
||||
layout(lines) in;
|
||||
layout(triangle_strip, max_vertices = 24) out;
|
||||
|
||||
vec3[2] getOrthogonals(in vec3 n, float scale) {
|
||||
float yDot = abs(dot(n, vec3(0, 1, 0)));
|
||||
|
||||
vec3 result[2];
|
||||
if (yDot < 0.9) {
|
||||
result[0] = normalize(cross(n, vec3(0, 1, 0)));
|
||||
} else {
|
||||
result[0] = normalize(cross(n, vec3(1, 0, 0)));
|
||||
}
|
||||
// The cross of result[0] and n is orthogonal to both, which are orthogonal to each other
|
||||
result[1] = cross(result[0], n);
|
||||
result[0] *= scale;
|
||||
result[1] *= scale;
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
vec2 orthogonal(vec2 v) {
|
||||
vec2 result = v.yx;
|
||||
result.y *= -1.0;
|
||||
return result;
|
||||
}
|
||||
|
||||
void main() {
|
||||
vec2 endpoints[2];
|
||||
for (int i = 0; i < 2; ++i) {
|
||||
endpoints[i] = gl_PositionIn[i].xy / gl_PositionIn[i].w;
|
||||
}
|
||||
vec2 lineNormal = normalize(endpoints[1] - endpoints[0]);
|
||||
vec2 lineOrthogonal = orthogonal(lineNormal);
|
||||
lineNormal *= 0.02;
|
||||
lineOrthogonal *= 0.02;
|
||||
|
||||
gl_Position = gl_PositionIn[0];
|
||||
gl_Position.xy -= lineOrthogonal;
|
||||
outLineDistance = vec3(-1.02, -1, gl_Position.z);
|
||||
EmitVertex();
|
||||
|
||||
gl_Position = gl_PositionIn[0];
|
||||
gl_Position.xy += lineOrthogonal;
|
||||
outLineDistance = vec3(-1.02, 1, gl_Position.z);
|
||||
EmitVertex();
|
||||
|
||||
gl_Position = gl_PositionIn[1];
|
||||
gl_Position.xy -= lineOrthogonal;
|
||||
outLineDistance = vec3(1.02, -1, gl_Position.z);
|
||||
EmitVertex();
|
||||
|
||||
gl_Position = gl_PositionIn[1];
|
||||
gl_Position.xy += lineOrthogonal;
|
||||
outLineDistance = vec3(1.02, 1, gl_Position.z);
|
||||
EmitVertex();
|
||||
|
||||
EndPrimitive();
|
||||
}
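The geometry stage above widens each projected line segment into a quad by offsetting both endpoints along the 2D orthogonal of the line direction, then tags each corner with a line-distance coordinate for the fragment falloff. A small sketch of the offset computation, assuming plain structs in place of GLSL vectors (names are illustrative):

// Illustrative sketch, not from the commit: per-vertex width offset for a 2D segment.
#include <cmath>

struct Vec2 { float x, y; };

static Vec2 normalize(Vec2 v) {
    float len = std::sqrt(v.x * v.x + v.y * v.y);   // assumes p0 != p1
    return { v.x / len, v.y / len };
}

// Given the two projected endpoints, return the offset that widens the
// segment into a quad; the fragment shader fades across that width.
Vec2 lineWidthOffset(Vec2 p0, Vec2 p1, float halfWidth /* 0.02 in the shader */) {
    Vec2 dir = normalize({ p1.x - p0.x, p1.y - p0.y });
    // 2D orthogonal: swap components and negate one, as the shader does.
    Vec2 ortho = { dir.y, -dir.x };
    return { ortho.x * halfWidth, ortho.y * halfWidth };
}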
|
15
interface/resources/shaders/hmd_hand_lasers.vert
Normal file
|
@ -0,0 +1,15 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2016/07/11
|
||||
// Copyright 2013-2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#version 410 core
|
||||
uniform mat4 mvp = mat4(1);
|
||||
|
||||
in vec3 Position;
|
||||
|
||||
void main() {
|
||||
gl_Position = mvp * vec4(Position, 1);
|
||||
}
|
78
interface/resources/shaders/hmd_reproject.frag
Normal file
|
@ -0,0 +1,78 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2016/07/11
|
||||
// Copyright 2013-2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#version 410 core
|
||||
|
||||
uniform sampler2D sampler;
|
||||
uniform mat3 reprojection = mat3(1);
|
||||
uniform mat4 inverseProjections[2];
|
||||
uniform mat4 projections[2];
|
||||
|
||||
in vec2 vTexCoord;
|
||||
in vec3 vPosition;
|
||||
|
||||
out vec4 FragColor;
|
||||
|
||||
void main() {
|
||||
vec2 uv = vTexCoord;
|
||||
|
||||
mat4 eyeInverseProjection;
|
||||
mat4 eyeProjection;
|
||||
|
||||
float xoffset = 1.0;
|
||||
vec2 uvmin = vec2(0.0);
|
||||
vec2 uvmax = vec2(1.0);
|
||||
// determine the correct projection and inverse projection to use.
|
||||
if (vTexCoord.x < 0.5) {
|
||||
uvmax.x = 0.5;
|
||||
eyeInverseProjection = inverseProjections[0];
|
||||
eyeProjection = projections[0];
|
||||
} else {
|
||||
xoffset = -1.0;
|
||||
uvmin.x = 0.5;
|
||||
uvmax.x = 1.0;
|
||||
eyeInverseProjection = inverseProjections[1];
|
||||
eyeProjection = projections[1];
|
||||
}
|
||||
|
||||
// Account for stereo in calculating the per-eye NDC coordinates
|
||||
vec4 ndcSpace = vec4(vPosition, 1.0);
|
||||
ndcSpace.x *= 2.0;
|
||||
ndcSpace.x += xoffset;
|
||||
|
||||
// Convert from NDC to eyespace
|
||||
vec4 eyeSpace = eyeInverseProjection * ndcSpace;
|
||||
eyeSpace /= eyeSpace.w;
|
||||
|
||||
// Convert to a normalized ray
|
||||
vec3 ray = eyeSpace.xyz;
|
||||
ray = normalize(ray);
|
||||
|
||||
// Adjust the ray by the rotation
|
||||
ray = reprojection * ray;
|
||||
|
||||
// Project back on to the texture plane
|
||||
ray *= eyeSpace.z / ray.z;
|
||||
|
||||
// Update the eyespace vector
|
||||
eyeSpace.xyz = ray;
|
||||
|
||||
// Reproject back into NDC
|
||||
ndcSpace = eyeProjection * eyeSpace;
|
||||
ndcSpace /= ndcSpace.w;
|
||||
ndcSpace.x -= xoffset;
|
||||
ndcSpace.x /= 2.0;
|
||||
|
||||
// Calculate the new UV coordinates
|
||||
uv = (ndcSpace.xy / 2.0) + 0.5;
|
||||
if (any(greaterThan(uv, uvmax)) || any(lessThan(uv, uvmin))) {
|
||||
FragColor = vec4(0.0, 0.0, 0.0, 1.0);
|
||||
} else {
|
||||
FragColor = texture(sampler, uv);
|
||||
}
|
||||
}
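The reprojection above walks each pixel from per-eye NDC back into eye space, rotates the resulting view ray by the late head-pose correction, pushes it back onto the original depth plane, and re-projects to find the source texel. A rough glm restatement of that chain, with the stereo x-offset bookkeeping and eye selection omitted and an illustrative function name:

// Illustrative sketch, not from the commit: the per-pixel reprojection chain.
#include <glm/glm.hpp>

glm::vec2 reprojectUV(const glm::vec3& ndcPos,          // quad position for one eye
                      const glm::mat4& inverseProjection,
                      const glm::mat4& projection,
                      const glm::mat3& reprojection) {  // head-rotation delta
    // NDC -> eye space
    glm::vec4 eyeSpace = inverseProjection * glm::vec4(ndcPos, 1.0f);
    eyeSpace /= eyeSpace.w;

    // Rotate the view ray by the late correction, then push it back onto
    // the original depth plane.
    glm::vec3 ray = glm::normalize(glm::vec3(eyeSpace));
    ray = reprojection * ray;
    ray *= eyeSpace.z / ray.z;

    // Eye space -> NDC -> texture coordinates
    glm::vec4 ndc = projection * glm::vec4(ray, 1.0f);
    ndc /= ndc.w;
    return glm::vec2(ndc) * 0.5f + 0.5f;
}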
|
20
interface/resources/shaders/hmd_reproject.vert
Normal file
|
@ -0,0 +1,20 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2016/07/11
|
||||
// Copyright 2013-2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#version 410 core
|
||||
in vec3 Position;
|
||||
in vec2 TexCoord;
|
||||
|
||||
out vec3 vPosition;
|
||||
out vec2 vTexCoord;
|
||||
|
||||
void main() {
|
||||
gl_Position = vec4(Position, 1);
|
||||
vTexCoord = TexCoord;
|
||||
vPosition = Position;
|
||||
}
|
65
interface/resources/shaders/hmd_ui_glow.frag
Normal file
|
@ -0,0 +1,65 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2016/07/11
|
||||
// Copyright 2013-2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#version 410 core
|
||||
|
||||
uniform sampler2D sampler;
|
||||
uniform float alpha = 1.0;
|
||||
uniform vec4 glowPoints = vec4(-1);
|
||||
uniform vec4 glowColors[2];
|
||||
uniform vec2 resolution = vec2(3960.0, 1188.0);
|
||||
uniform float radius = 0.005;
|
||||
|
||||
in vec3 vPosition;
|
||||
in vec2 vTexCoord;
|
||||
in vec4 vGlowPoints;
|
||||
|
||||
out vec4 FragColor;
|
||||
|
||||
float easeInOutCubic(float f) {
|
||||
const float d = 1.0;
|
||||
const float b = 0.0;
|
||||
const float c = 1.0;
|
||||
float t = f;
|
||||
if ((t /= d / 2.0) < 1.0) return c / 2.0 * t * t * t + b;
|
||||
return c / 2.0 * ((t -= 2.0) * t * t + 2.0) + b;
|
||||
}
|
||||
|
||||
void main() {
|
||||
vec2 aspect = resolution;
|
||||
aspect /= resolution.x;
|
||||
FragColor = texture(sampler, vTexCoord);
|
||||
|
||||
float glowIntensity = 0.0;
|
||||
float dist1 = distance(vTexCoord * aspect, glowPoints.xy * aspect);
|
||||
float dist2 = distance(vTexCoord * aspect, glowPoints.zw * aspect);
|
||||
float dist = min(dist1, dist2);
|
||||
vec3 glowColor = glowColors[0].rgb;
|
||||
if (dist2 < dist1) {
|
||||
glowColor = glowColors[1].rgb;
|
||||
}
|
||||
|
||||
if (dist <= radius) {
|
||||
glowIntensity = 1.0 - (dist / radius);
|
||||
glowColor.rgb = pow(glowColor, vec3(1.0 - glowIntensity));
|
||||
glowIntensity = easeInOutCubic(glowIntensity);
|
||||
glowIntensity = pow(glowIntensity, 0.5);
|
||||
}
|
||||
|
||||
if (alpha <= 0.0) {
|
||||
if (glowIntensity <= 0.0) {
|
||||
discard;
|
||||
}
|
||||
|
||||
FragColor = vec4(glowColor, glowIntensity);
|
||||
return;
|
||||
}
|
||||
|
||||
FragColor.rgb = mix(FragColor.rgb, glowColor.rgb, glowIntensity);
|
||||
FragColor.a *= alpha;
|
||||
}
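The glow above ramps up inside a small UV radius around each hand point using a cubic ease followed by a square root, then blends the glow color over the UI texture. The ease and ramp, restated as standalone C++ (illustrative names, not part of the commit):

// Illustrative sketch, not from the commit: the glow ease and intensity ramp.
#include <cmath>

float easeInOutCubic(float f) {
    // Ease from 0 to 1 over a unit interval, matching the shader's constants.
    float t = f * 2.0f;
    if (t < 1.0f) {
        return 0.5f * t * t * t;
    }
    t -= 2.0f;
    return 0.5f * (t * t * t + 2.0f);
}

// Glow strength for a fragment at 'dist' UV units from the nearest glow point.
float glowIntensity(float dist, float radius /* 0.005 in the shader */) {
    if (dist > radius) {
        return 0.0f;
    }
    float intensity = 1.0f - (dist / radius);
    intensity = easeInOutCubic(intensity);
    return std::sqrt(intensity);    // the shader uses pow(intensity, 0.5)
}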
|
23
interface/resources/shaders/hmd_ui_glow.vert
Normal file
|
@ -0,0 +1,23 @@
|
|||
//
|
||||
// Created by Bradley Austin Davis on 2016/07/11
|
||||
// Copyright 2013-2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#version 410 core
|
||||
|
||||
uniform mat4 mvp = mat4(1);
|
||||
|
||||
in vec3 Position;
|
||||
in vec2 TexCoord;
|
||||
|
||||
out vec3 vPosition;
|
||||
out vec2 vTexCoord;
|
||||
|
||||
void main() {
|
||||
gl_Position = mvp * vec4(Position, 1);
|
||||
vTexCoord = TexCoord;
|
||||
vPosition = Position;
|
||||
}
|
|
@ -85,6 +85,7 @@
|
|||
#include <PhysicsEngine.h>
|
||||
#include <PhysicsHelpers.h>
|
||||
#include <plugins/PluginManager.h>
|
||||
#include <plugins/CodecPlugin.h>
|
||||
#include <RenderableWebEntityItem.h>
|
||||
#include <RenderShadowTask.h>
|
||||
#include <RenderDeferredTask.h>
|
||||
|
@ -97,6 +98,7 @@
|
|||
#include <Tooltip.h>
|
||||
#include <udt/PacketHeaders.h>
|
||||
#include <UserActivityLogger.h>
|
||||
#include <UsersScriptingInterface.h>
|
||||
#include <recording/Deck.h>
|
||||
#include <recording/Recorder.h>
|
||||
#include <shared/StringHelpers.h>
|
||||
|
@ -168,9 +170,18 @@ static QTimer locationUpdateTimer;
|
|||
static QTimer identityPacketTimer;
|
||||
static QTimer pingTimer;
|
||||
|
||||
static const int MAX_CONCURRENT_RESOURCE_DOWNLOADS = 16;
|
||||
|
||||
// For processing on QThreadPool, target 2 less than the ideal number of threads, leaving
|
||||
// 2 logical cores available for time sensitive tasks.
|
||||
static const int MIN_PROCESSING_THREAD_POOL_SIZE = 2;
|
||||
static const int PROCESSING_THREAD_POOL_SIZE = std::max(MIN_PROCESSING_THREAD_POOL_SIZE,
|
||||
QThread::idealThreadCount() - 2);
|
||||
|
||||
static const QString SNAPSHOT_EXTENSION = ".jpg";
|
||||
static const QString SVO_EXTENSION = ".svo";
|
||||
static const QString SVO_JSON_EXTENSION = ".svo.json";
|
||||
static const QString SVO_JSON_EXTENSION = ".svo.json";
|
||||
static const QString JSON_EXTENSION = ".json";
|
||||
static const QString JS_EXTENSION = ".js";
|
||||
static const QString FST_EXTENSION = ".fst";
|
||||
static const QString FBX_EXTENSION = ".fbx";
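The PROCESSING_THREAD_POOL_SIZE constant above targets two fewer threads than the ideal count while never dropping below a small minimum, leaving a couple of logical cores free for time-sensitive work. A minimal sketch of applying it to the global QThreadPool (the wrapper function is illustrative, not part of the commit):

// Illustrative sketch, not from the commit: size the global pool from the core count.
#include <QThread>
#include <QThreadPool>
#include <algorithm>

static const int MIN_PROCESSING_THREAD_POOL_SIZE = 2;

void configureProcessingPool() {
    int poolSize = std::max(MIN_PROCESSING_THREAD_POOL_SIZE,
                            QThread::idealThreadCount() - 2);
    QThreadPool::globalInstance()->setMaxThreadCount(poolSize);
}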
|
||||
|
@ -202,13 +213,16 @@ static const QString DESKTOP_LOCATION = QStandardPaths::writableLocation(QStanda
|
|||
|
||||
Setting::Handle<int> maxOctreePacketsPerSecond("maxOctreePPS", DEFAULT_MAX_OCTREE_PPS);
|
||||
|
||||
static const QString MARKETPLACE_CDN_HOSTNAME = "mpassets.highfidelity.com";
|
||||
|
||||
const QHash<QString, Application::AcceptURLMethod> Application::_acceptedExtensions {
|
||||
{ SNAPSHOT_EXTENSION, &Application::acceptSnapshot },
|
||||
{ SVO_EXTENSION, &Application::importSVOFromURL },
|
||||
{ SVO_JSON_EXTENSION, &Application::importSVOFromURL },
|
||||
{ AVA_JSON_EXTENSION, &Application::askToWearAvatarAttachmentUrl },
|
||||
{ JSON_EXTENSION, &Application::importJSONFromURL },
|
||||
{ JS_EXTENSION, &Application::askToLoadScript },
|
||||
{ FST_EXTENSION, &Application::askToSetAvatarUrl },
|
||||
{ AVA_JSON_EXTENSION, &Application::askToWearAvatarAttachmentUrl }
|
||||
{ FST_EXTENSION, &Application::askToSetAvatarUrl }
|
||||
};
|
||||
|
||||
class DeadlockWatchdogThread : public QThread {
|
||||
|
@ -427,6 +441,7 @@ bool setupEssentials(int& argc, char** argv) {
|
|||
DependencyManager::set<FramebufferCache>();
|
||||
DependencyManager::set<AnimationCache>();
|
||||
DependencyManager::set<ModelBlender>();
|
||||
DependencyManager::set<UsersScriptingInterface>();
|
||||
DependencyManager::set<AvatarManager>();
|
||||
DependencyManager::set<LODManager>();
|
||||
DependencyManager::set<StandAloneJSConsole>();
|
||||
|
@ -511,13 +526,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
PluginContainer* pluginContainer = dynamic_cast<PluginContainer*>(this); // set the container for any plugins that care
|
||||
PluginManager::getInstance()->setContainer(pluginContainer);
|
||||
|
||||
// FIXME this may be excessively conservative. On the other hand
|
||||
// maybe I'm used to having an 8-core machine
|
||||
// Perhaps find the ideal thread count and subtract 2 or 3
|
||||
// (main thread, present thread, random OS load)
|
||||
// More threads == faster concurrent loads, but also more concurrent
|
||||
// load on the GPU until we can serialize GPU transfers (off the main thread)
|
||||
QThreadPool::globalInstance()->setMaxThreadCount(2);
|
||||
QThreadPool::globalInstance()->setMaxThreadCount(PROCESSING_THREAD_POOL_SIZE);
|
||||
thread()->setPriority(QThread::HighPriority);
|
||||
thread()->setObjectName("Main Thread");
|
||||
|
||||
|
@ -728,7 +737,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
connect(&identityPacketTimer, &QTimer::timeout, getMyAvatar(), &MyAvatar::sendIdentityPacket);
|
||||
identityPacketTimer.start(AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS);
|
||||
|
||||
ResourceCache::setRequestLimit(3);
|
||||
ResourceCache::setRequestLimit(MAX_CONCURRENT_RESOURCE_DOWNLOADS);
|
||||
|
||||
_glWidget = new GLCanvas();
|
||||
getApplicationCompositor().setRenderingWidget(_glWidget);
|
||||
|
@ -953,8 +962,12 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
return DependencyManager::get<OffscreenUi>()->navigationFocused() ? 1 : 0;
|
||||
});
|
||||
|
||||
// Setup the keyboardMouseDevice and the user input mapper with the default bindings
|
||||
// Setup the _keyboardMouseDevice, _touchscreenDevice and the user input mapper with the default bindings
|
||||
userInputMapper->registerDevice(_keyboardMouseDevice->getInputDevice());
|
||||
// if the _touchscreenDevice is not supported it will not be registered
|
||||
if (_touchscreenDevice) {
|
||||
userInputMapper->registerDevice(_touchscreenDevice->getInputDevice());
|
||||
}
|
||||
|
||||
// force the model the look at the correct directory (weird order of operations issue)
|
||||
scriptEngines->setScriptsLocation(scriptEngines->getScriptsLocation());
|
||||
|
@ -1237,6 +1250,11 @@ QString Application::getUserAgent() {
|
|||
userAgent += " " + formatPluginName(ip->getName());
|
||||
}
|
||||
}
|
||||
// for codecs, we include all of them, even if not active
|
||||
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
|
||||
for (auto& cp : codecPlugins) {
|
||||
userAgent += " " + formatPluginName(cp->getName());
|
||||
}
|
||||
|
||||
return userAgent;
|
||||
}
|
||||
|
@ -1517,7 +1535,6 @@ void Application::initializeUi() {
|
|||
|
||||
// For some reason there is already an "Application" object in the QML context,
|
||||
// though I can't find it. Hence, "ApplicationInterface"
|
||||
rootContext->setContextProperty("SnapshotUploader", new SnapshotUploader());
|
||||
rootContext->setContextProperty("ApplicationInterface", this);
|
||||
rootContext->setContextProperty("Audio", &AudioScriptingInterface::getInstance());
|
||||
rootContext->setContextProperty("Controller", DependencyManager::get<controller::ScriptingInterface>().data());
|
||||
|
@ -1596,6 +1613,9 @@ void Application::initializeUi() {
|
|||
if (KeyboardMouseDevice::NAME == inputPlugin->getName()) {
|
||||
_keyboardMouseDevice = std::dynamic_pointer_cast<KeyboardMouseDevice>(inputPlugin);
|
||||
}
|
||||
if (TouchscreenDevice::NAME == inputPlugin->getName()) {
|
||||
_touchscreenDevice = std::dynamic_pointer_cast<TouchscreenDevice>(inputPlugin);
|
||||
}
|
||||
}
|
||||
_window->setMenuBar(new Menu());
|
||||
|
||||
|
@ -1969,7 +1989,22 @@ void Application::resizeGL() {
|
|||
}
|
||||
}
|
||||
|
||||
bool Application::importJSONFromURL(const QString& urlString) {
|
||||
// we only load files that terminate in just .json (not .svo.json and not .ava.json)
|
||||
// if they come from the High Fidelity Marketplace Assets CDN
|
||||
|
||||
QUrl jsonURL { urlString };
|
||||
|
||||
if (jsonURL.host().endsWith(MARKETPLACE_CDN_HOSTNAME)) {
|
||||
emit svoImportRequested(urlString);
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
bool Application::importSVOFromURL(const QString& urlString) {
|
||||
|
||||
emit svoImportRequested(urlString);
|
||||
return true;
|
||||
}
|
||||
|
@ -2077,6 +2112,9 @@ bool Application::event(QEvent* event) {
|
|||
case QEvent::TouchUpdate:
|
||||
touchUpdateEvent(static_cast<QTouchEvent*>(event));
|
||||
return true;
|
||||
case QEvent::Gesture:
|
||||
touchGestureEvent((QGestureEvent*)event);
|
||||
return true;
|
||||
case QEvent::Wheel:
|
||||
wheelEvent(static_cast<QWheelEvent*>(event));
|
||||
return true;
|
||||
|
@ -2708,6 +2746,9 @@ void Application::touchUpdateEvent(QTouchEvent* event) {
|
|||
if (_keyboardMouseDevice->isActive()) {
|
||||
_keyboardMouseDevice->touchUpdateEvent(event);
|
||||
}
|
||||
if (_touchscreenDevice && _touchscreenDevice->isActive()) {
|
||||
_touchscreenDevice->touchUpdateEvent(event);
|
||||
}
|
||||
}
|
||||
|
||||
void Application::touchBeginEvent(QTouchEvent* event) {
|
||||
|
@ -2726,6 +2767,9 @@ void Application::touchBeginEvent(QTouchEvent* event) {
|
|||
if (_keyboardMouseDevice->isActive()) {
|
||||
_keyboardMouseDevice->touchBeginEvent(event);
|
||||
}
|
||||
if (_touchscreenDevice && _touchscreenDevice->isActive()) {
|
||||
_touchscreenDevice->touchBeginEvent(event);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
@ -2743,10 +2787,19 @@ void Application::touchEndEvent(QTouchEvent* event) {
|
|||
if (_keyboardMouseDevice->isActive()) {
|
||||
_keyboardMouseDevice->touchEndEvent(event);
|
||||
}
|
||||
if (_touchscreenDevice && _touchscreenDevice->isActive()) {
|
||||
_touchscreenDevice->touchEndEvent(event);
|
||||
}
|
||||
|
||||
// put any application specific touch behavior below here..
|
||||
}
|
||||
|
||||
void Application::touchGestureEvent(QGestureEvent* event) {
|
||||
if (_touchscreenDevice && _touchscreenDevice->isActive()) {
|
||||
_touchscreenDevice->touchGestureEvent(event);
|
||||
}
|
||||
}
|
||||
|
||||
void Application::wheelEvent(QWheelEvent* event) const {
|
||||
_altPressed = false;
|
||||
_controllerScriptingInterface->emitWheelEvent(event); // send events to any registered scripts
|
||||
|
@ -3049,17 +3102,20 @@ bool Application::exportEntities(const QString& filename, const QVector<EntityIt
|
|||
}
|
||||
|
||||
bool Application::exportEntities(const QString& filename, float x, float y, float z, float scale) {
|
||||
glm::vec3 offset(x, y, z);
|
||||
glm::vec3 center(x, y, z);
|
||||
glm::vec3 minCorner = center - vec3(scale);
|
||||
float cubeSize = scale * 2;
|
||||
AACube boundingCube(minCorner, cubeSize);
|
||||
QVector<EntityItemPointer> entities;
|
||||
QVector<EntityItemID> ids;
|
||||
auto entityTree = getEntities()->getTree();
|
||||
entityTree->withReadLock([&] {
|
||||
entityTree->findEntities(AACube(offset, scale), entities);
|
||||
entityTree->findEntities(boundingCube, entities);
|
||||
foreach(EntityItemPointer entity, entities) {
|
||||
ids << entity->getEntityItemID();
|
||||
}
|
||||
});
|
||||
return exportEntities(filename, ids, &offset);
|
||||
return exportEntities(filename, ids, &center);
|
||||
}
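The replacement builds the search volume as a cube whose minimum corner sits one scale-length below the center on every axis, with an edge of twice the scale, so entities within the requested radius fall inside it. A small sketch of that construction using glm (the struct and function names are illustrative):

// Illustrative sketch, not from the commit: axis-aligned cube of half-extent 'scale'.
#include <glm/glm.hpp>

struct SimpleCube {
    glm::vec3 minCorner;
    float edgeLength;
};

SimpleCube cubeAround(const glm::vec3& center, float scale) {
    // Shift the corner down by 'scale' on every axis and double it for the edge,
    // matching the AACube(minCorner, cubeSize) construction above.
    return { center - glm::vec3(scale), scale * 2.0f };
}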
|
||||
|
||||
void Application::loadSettings() {
|
||||
|
@ -4436,6 +4492,9 @@ void Application::nodeActivated(SharedNodePointer node) {
|
|||
}
|
||||
}
|
||||
|
||||
if (node->getType() == NodeType::AudioMixer) {
|
||||
DependencyManager::get<AudioClient>()->negotiateAudioFormat();
|
||||
}
|
||||
}
|
||||
|
||||
void Application::nodeKilled(SharedNodePointer node) {
|
||||
|
@ -4646,6 +4705,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
|||
qScriptRegisterMetaType(scriptEngine, RayToOverlayIntersectionResultToScriptValue,
|
||||
RayToOverlayIntersectionResultFromScriptValue);
|
||||
|
||||
scriptEngine->registerGlobalObject("OffscreenFlags", DependencyManager::get<OffscreenUi>()->getFlags());
|
||||
scriptEngine->registerGlobalObject("Desktop", DependencyManager::get<DesktopScriptingInterface>().data());
|
||||
scriptEngine->registerGlobalObject("Toolbars", DependencyManager::get<ToolbarScriptingInterface>().data());
|
||||
|
||||
|
@ -4698,6 +4758,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
|||
scriptEngine->registerGlobalObject("Reticle", getApplicationCompositor().getReticleInterface());
|
||||
|
||||
scriptEngine->registerGlobalObject("UserActivityLogger", DependencyManager::get<UserActivityLoggerScriptingInterface>().data());
|
||||
scriptEngine->registerGlobalObject("Users", DependencyManager::get<UsersScriptingInterface>().data());
|
||||
}
|
||||
|
||||
bool Application::canAcceptURL(const QString& urlString) const {
|
||||
|
@ -4803,7 +4864,17 @@ bool Application::askToSetAvatarUrl(const QString& url) {
|
|||
|
||||
bool Application::askToLoadScript(const QString& scriptFilenameOrURL) {
|
||||
QMessageBox::StandardButton reply;
|
||||
QString message = "Would you like to run this script:\n" + scriptFilenameOrURL;
|
||||
|
||||
QString shortName = scriptFilenameOrURL;
|
||||
|
||||
QUrl scriptURL { scriptFilenameOrURL };
|
||||
|
||||
if (scriptURL.host().endsWith(MARKETPLACE_CDN_HOSTNAME)) {
|
||||
shortName = shortName.mid(shortName.lastIndexOf('/') + 1);
|
||||
}
|
||||
|
||||
QString message = "Would you like to run this script:\n" + shortName;
|
||||
|
||||
reply = OffscreenUi::question(getWindow(), "Run Script", message, QMessageBox::Yes | QMessageBox::No);
|
||||
|
||||
if (reply == QMessageBox::Yes) {
|
||||
|
@ -4989,16 +5060,9 @@ void Application::takeSnapshot() {
|
|||
player->setMedia(QUrl::fromLocalFile(inf.absoluteFilePath()));
|
||||
player->play();
|
||||
|
||||
QString fileName = Snapshot::saveSnapshot(getActiveDisplayPlugin()->getScreenshot());
|
||||
QString path = Snapshot::saveSnapshot(getActiveDisplayPlugin()->getScreenshot());
|
||||
|
||||
auto accountManager = DependencyManager::get<AccountManager>();
|
||||
if (!accountManager->isLoggedIn()) {
|
||||
return;
|
||||
}
|
||||
|
||||
DependencyManager::get<OffscreenUi>()->load("hifi/dialogs/SnapshotShareDialog.qml", [=](QQmlContext*, QObject* dialog) {
|
||||
dialog->setProperty("source", QUrl::fromLocalFile(fileName));
|
||||
});
|
||||
emit DependencyManager::get<WindowScriptingInterface>()->snapshotTaken(path);
|
||||
}
|
||||
|
||||
float Application::getRenderResolutionScale() const {
|
||||
|
|
|
@ -29,6 +29,7 @@
|
|||
#include <EntityEditPacketSender.h>
|
||||
#include <EntityTreeRenderer.h>
|
||||
#include <input-plugins/KeyboardMouseDevice.h>
|
||||
#include <input-plugins/TouchscreenDevice.h>
|
||||
#include <OctreeQuery.h>
|
||||
#include <PhysicalEntitySimulation.h>
|
||||
#include <PhysicsEngine.h>
|
||||
|
@ -380,6 +381,7 @@ private:
|
|||
|
||||
void displaySide(RenderArgs* renderArgs, Camera& whichCamera, bool selfAvatarOnly = false);
|
||||
|
||||
bool importJSONFromURL(const QString& urlString);
|
||||
bool importSVOFromURL(const QString& urlString);
|
||||
|
||||
bool nearbyEntitiesAreReadyForPhysics();
|
||||
|
@ -402,6 +404,7 @@ private:
|
|||
void touchBeginEvent(QTouchEvent* event);
|
||||
void touchEndEvent(QTouchEvent* event);
|
||||
void touchUpdateEvent(QTouchEvent* event);
|
||||
void touchGestureEvent(QGestureEvent* event);
|
||||
|
||||
void wheelEvent(QWheelEvent* event) const;
|
||||
void dropEvent(QDropEvent* event);
|
||||
|
@ -454,6 +457,7 @@ private:
|
|||
|
||||
std::shared_ptr<controller::StateController> _applicationStateDevice; // Default ApplicationDevice reflecting the state of different properties of the session
|
||||
std::shared_ptr<KeyboardMouseDevice> _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad
|
||||
std::shared_ptr<TouchscreenDevice> _touchscreenDevice; // the good old touchscreen
|
||||
SimpleMovingAverage _avatarSimsPerSecond {10};
|
||||
int _avatarSimsPerSecondReport {0};
|
||||
quint64 _lastAvatarSimsPerSecondUpdate {0};
|
||||
|
|
|
@ -24,9 +24,14 @@
|
|||
static const QString FILENAME_FORMAT = "hifi-log_%1_%2.txt";
|
||||
static const QString DATETIME_FORMAT = "yyyy-MM-dd_hh.mm.ss";
|
||||
static const QString LOGS_DIRECTORY = "Logs";
|
||||
static const QString IPADDR_WILDCARD = "[0-9]*.[0-9]*.[0-9]*.[0-9]*";
|
||||
static const QString DATETIME_WILDCARD = "20[0-9][0-9]-[0,1][0-9]-[0-3][0-9]_[0-2][0-9].[0-6][0-9].[0-6][0-9]";
|
||||
static const QString FILENAME_WILDCARD = "hifi-log_" + IPADDR_WILDCARD + "_" + DATETIME_WILDCARD + ".txt";
|
||||
// Max log size is 512 KB. We send log files to our crash reporter, so we want to keep this relatively
|
||||
// small so it doesn't go over the 2MB zipped limit for all of the files we send.
|
||||
static const qint64 MAX_LOG_SIZE = 512 * 1024;
|
||||
// Max log files found in the log directory is 100.
|
||||
static const qint64 MAX_LOG_DIR_SIZE = 512 * 1024 * 100;
|
||||
// Max log age is 1 hour
|
||||
static const uint64_t MAX_LOG_AGE_USECS = USECS_PER_SECOND * 3600;
|
||||
|
||||
|
@ -71,6 +76,22 @@ void FilePersistThread::rollFileIfNecessary(QFile& file, bool notifyListenersIfR
|
|||
|
||||
_lastRollTime = now;
|
||||
}
|
||||
QStringList nameFilters;
|
||||
nameFilters << FILENAME_WILDCARD;
|
||||
|
||||
QDir logQDir(FileUtils::standardPath(LOGS_DIRECTORY));
|
||||
logQDir.setNameFilters(nameFilters);
|
||||
logQDir.setSorting(QDir::Time);
|
||||
QFileInfoList filesInDir = logQDir.entryInfoList();
|
||||
qint64 totalSizeOfDir = 0;
|
||||
foreach(QFileInfo dirItm, filesInDir){
|
||||
if (totalSizeOfDir < MAX_LOG_DIR_SIZE){
|
||||
totalSizeOfDir += dirItm.size();
|
||||
} else {
|
||||
QFile file(dirItm.filePath());
|
||||
file.remove();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1085,6 +1085,15 @@ void Avatar::computeShapeInfo(ShapeInfo& shapeInfo) {
|
|||
shapeInfo.setOffset(uniformScale * _skeletonModel->getBoundingCapsuleOffset());
|
||||
}
|
||||
|
||||
void Avatar::getCapsule(glm::vec3& start, glm::vec3& end, float& radius) {
|
||||
ShapeInfo shapeInfo;
|
||||
computeShapeInfo(shapeInfo);
|
||||
glm::vec3 halfExtents = shapeInfo.getHalfExtents(); // x = radius, y = halfHeight
|
||||
start = getPosition() - glm::vec3(0, halfExtents.y, 0) + shapeInfo.getOffset();
|
||||
end = getPosition() + glm::vec3(0, halfExtents.y, 0) + shapeInfo.getOffset();
|
||||
radius = halfExtents.x;
|
||||
}
|
||||
|
||||
void Avatar::setMotionState(AvatarMotionState* motionState) {
|
||||
_motionState = motionState;
|
||||
}
|
||||
|
|
|
@ -154,6 +154,7 @@ public:
|
|||
virtual void rebuildCollisionShape();
|
||||
|
||||
virtual void computeShapeInfo(ShapeInfo& shapeInfo);
|
||||
void getCapsule(glm::vec3& start, glm::vec3& end, float& radius);
|
||||
|
||||
AvatarMotionState* getMotionState() { return _motionState; }
|
||||
|
||||
|
|
|
@ -17,11 +17,14 @@
|
|||
#include "CharacterController.h"
|
||||
|
||||
const uint16_t AvatarActionHold::holdVersion = 1;
|
||||
const int AvatarActionHold::velocitySmoothFrames = 6;
|
||||
|
||||
|
||||
AvatarActionHold::AvatarActionHold(const QUuid& id, EntityItemPointer ownerEntity) :
|
||||
ObjectActionSpring(id, ownerEntity)
|
||||
{
|
||||
_type = ACTION_TYPE_HOLD;
|
||||
_measuredLinearVelocities.resize(AvatarActionHold::velocitySmoothFrames);
|
||||
#if WANT_DEBUG
|
||||
qDebug() << "AvatarActionHold::AvatarActionHold";
|
||||
#endif
|
||||
|
@ -204,8 +207,40 @@ void AvatarActionHold::doKinematicUpdate(float deltaTimeStep) {
|
|||
}
|
||||
|
||||
withWriteLock([&]{
|
||||
if (_previousSet &&
|
||||
_positionalTarget != _previousPositionalTarget) { // don't average in a zero velocity if we get the same data
|
||||
glm::vec3 oneFrameVelocity = (_positionalTarget - _previousPositionalTarget) / deltaTimeStep;
|
||||
|
||||
_measuredLinearVelocities[_measuredLinearVelocitiesIndex++] = oneFrameVelocity;
|
||||
if (_measuredLinearVelocitiesIndex >= AvatarActionHold::velocitySmoothFrames) {
|
||||
_measuredLinearVelocitiesIndex = 0;
|
||||
}
|
||||
}
|
||||
|
||||
glm::vec3 measuredLinearVelocity;
|
||||
for (int i = 0; i < AvatarActionHold::velocitySmoothFrames; i++) {
|
||||
// there is a bit of lag between when someone releases the trigger and when the software reacts to
|
||||
// the release. we calculate the velocity from previous frames but we don't include several
|
||||
// of the most recent.
|
||||
//
|
||||
// if _measuredLinearVelocitiesIndex is
|
||||
// 0 -- ignore i of 3 4 5
|
||||
// 1 -- ignore i of 4 5 0
|
||||
// 2 -- ignore i of 5 0 1
|
||||
// 3 -- ignore i of 0 1 2
|
||||
// 4 -- ignore i of 1 2 3
|
||||
// 5 -- ignore i of 2 3 4
|
||||
if ((i + 1) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex ||
|
||||
(i + 2) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex ||
|
||||
(i + 3) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex) {
|
||||
continue;
|
||||
}
|
||||
measuredLinearVelocity += _measuredLinearVelocities[i];
|
||||
}
|
||||
measuredLinearVelocity /= (float)(AvatarActionHold::velocitySmoothFrames - 3); // 3 because of the 3 we skipped, above
|
||||
|
||||
if (_kinematicSetVelocity) {
|
||||
rigidBody->setLinearVelocity(glmToBullet(_linearVelocityTarget));
|
||||
rigidBody->setLinearVelocity(glmToBullet(measuredLinearVelocity));
|
||||
rigidBody->setAngularVelocity(glmToBullet(_angularVelocityTarget));
|
||||
}
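The averaging above smooths the thrown-object velocity over a six-entry ring buffer while skipping the three samples written most recently, so the lag between trigger release and the software noticing it does not pull the average toward zero. A standalone sketch of that averaging with illustrative names:

// Illustrative sketch, not from the commit: ring-buffer averaging that skips the
// most recent samples.
#include <glm/glm.hpp>
#include <vector>

static const int VELOCITY_SMOOTH_FRAMES = 6;
static const int SKIPPED_RECENT_FRAMES = 3;

glm::vec3 smoothedVelocity(const std::vector<glm::vec3>& samples, int newestIndex) {
    // 'newestIndex' is where the next sample will be written; the three entries
    // just before it (mod N) are the most recent measurements and are skipped.
    glm::vec3 sum(0.0f);
    for (int i = 0; i < VELOCITY_SMOOTH_FRAMES; i++) {
        bool isRecent = false;
        for (int k = 1; k <= SKIPPED_RECENT_FRAMES; k++) {
            if ((i + k) % VELOCITY_SMOOTH_FRAMES == newestIndex) {
                isRecent = true;
                break;
            }
        }
        if (!isRecent) {
            sum += samples[i];
        }
    }
    return sum / float(VELOCITY_SMOOTH_FRAMES - SKIPPED_RECENT_FRAMES);
}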
|
||||
|
||||
|
|
|
@ -64,6 +64,10 @@ private:
|
|||
glm::vec3 _palmOffsetFromRigidBody;
|
||||
// leaving this here for future reference.
|
||||
// glm::quat _palmRotationFromRigidBody;
|
||||
|
||||
static const int velocitySmoothFrames;
|
||||
QVector<glm::vec3> _measuredLinearVelocities;
|
||||
int _measuredLinearVelocitiesIndex { 0 };
|
||||
};
|
||||
|
||||
#endif // hifi_AvatarActionHold_h
|
||||
|
|
|
@ -29,6 +29,7 @@
|
|||
#include <RegisteredMetaTypes.h>
|
||||
#include <Rig.h>
|
||||
#include <SettingHandle.h>
|
||||
#include <UsersScriptingInterface.h>
|
||||
#include <UUID.h>
|
||||
|
||||
#include "Application.h"
|
||||
|
@ -69,10 +70,15 @@ AvatarManager::AvatarManager(QObject* parent) :
|
|||
// register a meta type for the weak pointer we'll use for the owning avatar mixer for each avatar
|
||||
qRegisterMetaType<QWeakPointer<Node> >("NodeWeakPointer");
|
||||
|
||||
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
auto& packetReceiver = nodeList->getPacketReceiver();
|
||||
packetReceiver.registerListener(PacketType::BulkAvatarData, this, "processAvatarDataPacket");
|
||||
packetReceiver.registerListener(PacketType::KillAvatar, this, "processKillAvatar");
|
||||
packetReceiver.registerListener(PacketType::AvatarIdentity, this, "processAvatarIdentityPacket");
|
||||
|
||||
// when we hear that the user has ignored an avatar by session UUID
|
||||
// immediately remove that avatar instead of waiting for the absence of packets from avatar mixer
|
||||
connect(nodeList.data(), &NodeList::ignoredNode, this, &AvatarManager::removeAvatar);
|
||||
}
|
||||
|
||||
AvatarManager::~AvatarManager() {
|
||||
|
@ -85,7 +91,8 @@ void AvatarManager::init() {
|
|||
_avatarHash.insert(MY_AVATAR_KEY, _myAvatar);
|
||||
}
|
||||
|
||||
connect(DependencyManager::get<SceneScriptingInterface>().data(), &SceneScriptingInterface::shouldRenderAvatarsChanged, this, &AvatarManager::updateAvatarRenderStatus, Qt::QueuedConnection);
|
||||
connect(DependencyManager::get<SceneScriptingInterface>().data(), &SceneScriptingInterface::shouldRenderAvatarsChanged,
|
||||
this, &AvatarManager::updateAvatarRenderStatus, Qt::QueuedConnection);
|
||||
|
||||
render::ScenePointer scene = qApp->getMain3DScene();
|
||||
render::PendingChanges pendingChanges;
|
||||
|
@ -398,3 +405,76 @@ AvatarSharedPointer AvatarManager::getAvatarBySessionID(const QUuid& sessionID)
|
|||
|
||||
return findAvatar(sessionID);
|
||||
}
|
||||
|
||||
RayToAvatarIntersectionResult AvatarManager::findRayIntersection(const PickRay& ray,
|
||||
const QScriptValue& avatarIdsToInclude,
|
||||
const QScriptValue& avatarIdsToDiscard) {
|
||||
RayToAvatarIntersectionResult result;
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(const_cast<AvatarManager*>(this), "findRayIntersection", Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(RayToAvatarIntersectionResult, result),
|
||||
Q_ARG(const PickRay&, ray),
|
||||
Q_ARG(const QScriptValue&, avatarIdsToInclude),
|
||||
Q_ARG(const QScriptValue&, avatarIdsToDiscard));
|
||||
return result;
|
||||
}
|
||||
|
||||
QVector<EntityItemID> avatarsToInclude = qVectorEntityItemIDFromScriptValue(avatarIdsToInclude);
|
||||
QVector<EntityItemID> avatarsToDiscard = qVectorEntityItemIDFromScriptValue(avatarIdsToDiscard);
|
||||
|
||||
glm::vec3 normDirection = glm::normalize(ray.direction);
|
||||
|
||||
for (auto avatarData : _avatarHash) {
|
||||
auto avatar = std::static_pointer_cast<Avatar>(avatarData);
|
||||
if ((avatarsToInclude.size() > 0 && !avatarsToInclude.contains(avatar->getID())) ||
|
||||
(avatarsToDiscard.size() > 0 && avatarsToDiscard.contains(avatar->getID()))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
float distance;
|
||||
BoxFace face;
|
||||
glm::vec3 surfaceNormal;
|
||||
|
||||
SkeletonModelPointer avatarModel = avatar->getSkeletonModel();
|
||||
|
||||
// It's better to intersect the ray against the avatar's actual mesh, but this is currently difficult to
|
||||
// do, because the transformed mesh data only exists over in GPU-land. As a compromise, this code
|
||||
// intersects against the avatar's capsule and then against the (T-pose) mesh. The end effect is that picking
|
||||
// against the avatar is sort-of right, but you likely won't be able to pick against the arms.
|
||||
|
||||
// TODO -- find a way to extract transformed avatar mesh data from the rendering engine.
|
||||
|
||||
// if we weren't picking against the capsule, we would want to pick against the avatarBounds...
|
||||
// AABox avatarBounds = avatarModel->getRenderableMeshBound();
|
||||
// if (!avatarBounds.findRayIntersection(ray.origin, normDirection, distance, face, surfaceNormal)) {
|
||||
// // ray doesn't intersect avatar's bounding-box
|
||||
// continue;
|
||||
// }
|
||||
|
||||
glm::vec3 start;
|
||||
glm::vec3 end;
|
||||
float radius;
|
||||
avatar->getCapsule(start, end, radius);
|
||||
bool intersects = findRayCapsuleIntersection(ray.origin, normDirection, start, end, radius, distance);
|
||||
if (!intersects) {
|
||||
// ray doesn't intersect avatar's capsule
|
||||
continue;
|
||||
}
|
||||
|
||||
QString extraInfo;
|
||||
intersects = avatarModel->findRayIntersectionAgainstSubMeshes(ray.origin, normDirection,
|
||||
distance, face, surfaceNormal, extraInfo, true);
|
||||
|
||||
if (intersects && (!result.intersects || distance < result.distance)) {
|
||||
result.intersects = true;
|
||||
result.avatarID = avatar->getID();
|
||||
result.distance = distance;
|
||||
}
|
||||
}
|
||||
|
||||
if (result.intersects) {
|
||||
result.intersection = ray.origin + normDirection * result.distance;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
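As the comment above notes, the intersection runs in two phases: a cheap capsule test rejects avatars the ray misses, then a sub-mesh test against the (T-pose) model refines the distance, and the nearest hit wins. A hedged, self-contained sketch of that loop shape, with hypothetical coarseTest/fineTest callbacks standing in for the capsule check and findRayIntersectionAgainstSubMeshes.

#include <glm/glm.hpp>
#include <functional>
#include <vector>

// Sketch only: Candidate stands in for an avatar; the callbacks are placeholders.
struct Candidate {
    std::function<bool(const glm::vec3&, const glm::vec3&, float&)> coarseTest;
    std::function<bool(const glm::vec3&, const glm::vec3&, float&)> fineTest;
};

struct PickHit {
    bool intersects { false };
    int candidateIndex { -1 };
    float distance { 0.0f };
    glm::vec3 point { 0.0f };
};

PickHit pickNearest(const glm::vec3& origin, const glm::vec3& direction,
                    const std::vector<Candidate>& candidates) {
    PickHit best;
    const glm::vec3 dir = glm::normalize(direction);
    for (int i = 0; i < (int)candidates.size(); ++i) {
        float distance = 0.0f;
        if (!candidates[i].coarseTest(origin, dir, distance)) {
            continue;   // cheap shape missed: skip the expensive mesh test
        }
        if (!candidates[i].fineTest(origin, dir, distance)) {
            continue;
        }
        if (!best.intersects || distance < best.distance) {
            best.intersects = true;
            best.candidateIndex = i;
            best.distance = distance;
        }
    }
    if (best.intersects) {
        best.point = origin + dir * best.distance;
    }
    return best;
}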
|
||||
|
|
|
@ -70,10 +70,17 @@ public:
|
|||
|
||||
void addAvatarToSimulation(Avatar* avatar);
|
||||
|
||||
Q_INVOKABLE RayToAvatarIntersectionResult findRayIntersection(const PickRay& ray,
|
||||
const QScriptValue& avatarIdsToInclude = QScriptValue(),
|
||||
const QScriptValue& avatarIdsToDiscard = QScriptValue());
|
||||
|
||||
public slots:
|
||||
void setShouldShowReceiveStats(bool shouldShowReceiveStats) { _shouldShowReceiveStats = shouldShowReceiveStats; }
|
||||
void updateAvatarRenderStatus(bool shouldRenderAvatars);
|
||||
|
||||
private slots:
|
||||
virtual void removeAvatar(const QUuid& sessionUUID) override;
|
||||
|
||||
private:
|
||||
explicit AvatarManager(QObject* parent = 0);
|
||||
explicit AvatarManager(const AvatarManager& other);
|
||||
|
@ -84,7 +91,6 @@ private:
|
|||
virtual AvatarSharedPointer newSharedAvatar() override;
|
||||
virtual AvatarSharedPointer addAvatar(const QUuid& sessionUUID, const QWeakPointer<Node>& mixerWeakPointer) override;
|
||||
|
||||
virtual void removeAvatar(const QUuid& sessionUUID) override;
|
||||
virtual void handleRemovedAvatar(const AvatarSharedPointer& removedAvatar) override;
|
||||
|
||||
QVector<AvatarSharedPointer> _avatarFades;
|
||||
|
|
|
@ -212,8 +212,8 @@ public:
|
|||
virtual void clearJointsData() override;
|
||||
|
||||
Q_INVOKABLE void useFullAvatarURL(const QUrl& fullAvatarURL, const QString& modelName = QString());
|
||||
Q_INVOKABLE const QUrl& getFullAvatarURLFromPreferences() const { return _fullAvatarURLFromPreferences; }
|
||||
Q_INVOKABLE const QString& getFullAvatarModelName() const { return _fullAvatarModelName; }
|
||||
Q_INVOKABLE QUrl getFullAvatarURLFromPreferences() const { return _fullAvatarURLFromPreferences; }
|
||||
Q_INVOKABLE QString getFullAvatarModelName() const { return _fullAvatarModelName; }
|
||||
void resetFullAvatarURL();
|
||||
|
||||
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
|
||||
#include <display-plugins/DisplayPlugin.h>
|
||||
#include <display-plugins/CompositorHelper.h>
|
||||
#include <OffscreenUi.h>
|
||||
#include <avatar/AvatarManager.h>
|
||||
#include "Application.h"
|
||||
|
||||
|
@ -110,13 +111,17 @@ QString HMDScriptingInterface::preferredAudioOutput() const {
|
|||
}
|
||||
|
||||
bool HMDScriptingInterface::setHandLasers(int hands, bool enabled, const glm::vec4& color, const glm::vec3& direction) const {
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
offscreenUi->executeOnUiThread([offscreenUi, enabled] {
|
||||
offscreenUi->getDesktop()->setProperty("hmdHandMouseActive", enabled);
|
||||
});
|
||||
return qApp->getActiveDisplayPlugin()->setHandLaser(hands,
|
||||
enabled ? DisplayPlugin::HandLaserMode::Overlay : DisplayPlugin::HandLaserMode::None,
|
||||
color, direction);
|
||||
}
|
||||
|
||||
void HMDScriptingInterface::disableHandLasers(int hands) const {
|
||||
qApp->getActiveDisplayPlugin()->setHandLaser(hands, DisplayPlugin::HandLaserMode::None);
|
||||
setHandLasers(hands, false, vec4(0), vec3(0));
|
||||
}
|
||||
|
||||
bool HMDScriptingInterface::suppressKeyboard() {
|
||||
|
|
|
@ -8,13 +8,15 @@
|
|||
|
||||
#include "ToolbarScriptingInterface.h"
|
||||
|
||||
#include <QtCore/QThread>
|
||||
|
||||
#include <OffscreenUi.h>
|
||||
|
||||
class QmlWrapper : public QObject {
|
||||
Q_OBJECT
|
||||
public:
|
||||
QmlWrapper(QObject* qmlObject, QObject* parent = nullptr)
|
||||
: QObject(parent), _qmlObject(qmlObject) {
|
||||
: QObject(parent), _qmlObject(qmlObject) {
|
||||
}
|
||||
|
||||
Q_INVOKABLE void writeProperty(QString propertyName, QVariant propertyValue) {
|
||||
|
@ -79,7 +81,11 @@ public:
|
|||
|
||||
Q_INVOKABLE QObject* addButton(const QVariant& properties) {
|
||||
QVariant resultVar;
|
||||
bool invokeResult = QMetaObject::invokeMethod(_qmlObject, "addButton", Qt::BlockingQueuedConnection, Q_RETURN_ARG(QVariant, resultVar), Q_ARG(QVariant, properties));
|
||||
Qt::ConnectionType connectionType = Qt::AutoConnection;
|
||||
if (QThread::currentThread() != _qmlObject->thread()) {
|
||||
connectionType = Qt::BlockingQueuedConnection;
|
||||
}
|
||||
bool invokeResult = QMetaObject::invokeMethod(_qmlObject, "addButton", connectionType, Q_RETURN_ARG(QVariant, resultVar), Q_ARG(QVariant, properties));
|
||||
if (!invokeResult) {
|
||||
return nullptr;
|
||||
}
|
||||
|
@ -91,14 +97,22 @@ public:
|
|||
|
||||
return new ToolbarButtonProxy(rawButton, this);
|
||||
}
|
||||
|
||||
Q_INVOKABLE void removeButton(const QVariant& name) {
|
||||
QMetaObject::invokeMethod(_qmlObject, "removeButton", Qt::AutoConnection, Q_ARG(QVariant, name));
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
QObject* ToolbarScriptingInterface::getToolbar(const QString& toolbarId) {
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
auto desktop = offscreenUi->getDesktop();
|
||||
Qt::ConnectionType connectionType = Qt::AutoConnection;
|
||||
if (QThread::currentThread() != desktop->thread()) {
|
||||
connectionType = Qt::BlockingQueuedConnection;
|
||||
}
|
||||
QVariant resultVar;
|
||||
bool invokeResult = QMetaObject::invokeMethod(desktop, "getToolbar", Qt::BlockingQueuedConnection, Q_RETURN_ARG(QVariant, resultVar), Q_ARG(QVariant, toolbarId));
|
||||
bool invokeResult = QMetaObject::invokeMethod(desktop, "getToolbar", connectionType, Q_RETURN_ARG(QVariant, resultVar), Q_ARG(QVariant, toolbarId));
|
||||
if (!invokeResult) {
|
||||
return nullptr;
|
||||
}
|
||||
|
@ -112,4 +126,4 @@ QObject* ToolbarScriptingInterface::getToolbar(const QString& toolbarId) {
|
|||
}
|
||||
|
||||
|
||||
#include "ToolbarScriptingInterface.moc"
|
||||
#include "ToolbarScriptingInterface.moc"
|
||||
|
|
|
@ -46,6 +46,7 @@ signals:
|
|||
void domainChanged(const QString& domainHostname);
|
||||
void svoImportRequested(const QString& url);
|
||||
void domainConnectionRefused(const QString& reasonMessage, int reasonCode);
|
||||
void snapshotTaken(const QString& path);
|
||||
|
||||
private slots:
|
||||
WebWindowClass* doCreateWebWindow(const QString& title, const QString& url, int width, int height);
|
||||
|
|
|
@ -40,10 +40,10 @@ AddressBarDialog::AddressBarDialog(QQuickItem* parent) : OffscreenQmlDialog(pare
|
|||
_forwardEnabled = !(DependencyManager::get<AddressManager>()->getForwardStack().isEmpty());
|
||||
}
|
||||
|
||||
void AddressBarDialog::loadAddress(const QString& address) {
|
||||
void AddressBarDialog::loadAddress(const QString& address, bool fromSuggestions) {
|
||||
qDebug() << "Called LoadAddress with address " << address;
|
||||
if (!address.isEmpty()) {
|
||||
DependencyManager::get<AddressManager>()->handleLookupString(address);
|
||||
DependencyManager::get<AddressManager>()->handleLookupString(address, fromSuggestions);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@ protected:
|
|||
void displayAddressOfflineMessage();
|
||||
void displayAddressNotFoundMessage();
|
||||
|
||||
Q_INVOKABLE void loadAddress(const QString& address);
|
||||
Q_INVOKABLE void loadAddress(const QString& address, bool fromSuggestions = false);
|
||||
Q_INVOKABLE void loadHome();
|
||||
Q_INVOKABLE void loadBack();
|
||||
Q_INVOKABLE void loadForward();
|
||||
|
|
|
@ -35,7 +35,7 @@ void setupPreferences() {
|
|||
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
static const QString AVATAR_BASICS { "Avatar Basics" };
|
||||
{
|
||||
auto getter = [=]()->QString {return myAvatar->getDisplayName(); };
|
||||
auto getter = [=]()->QString { return myAvatar->getDisplayName(); };
|
||||
auto setter = [=](const QString& value) { myAvatar->setDisplayName(value); };
|
||||
auto preference = new EditPreference(AVATAR_BASICS, "Avatar display name (optional)", getter, setter);
|
||||
preference->setPlaceholderText("Not showing a name");
|
||||
|
@ -43,7 +43,7 @@ void setupPreferences() {
|
|||
}
|
||||
|
||||
{
|
||||
auto getter = [=]()->QString {return myAvatar->getCollisionSoundURL(); };
|
||||
auto getter = [=]()->QString { return myAvatar->getCollisionSoundURL(); };
|
||||
auto setter = [=](const QString& value) { myAvatar->setCollisionSoundURL(value); };
|
||||
auto preference = new EditPreference(AVATAR_BASICS, "Avatar collision sound URL (optional)", getter, setter);
|
||||
preference->setPlaceholderText("Enter the URL of a sound to play when you bump into something");
|
||||
|
@ -56,20 +56,24 @@ void setupPreferences() {
|
|||
auto preference = new AvatarPreference(AVATAR_BASICS, "Appearance", getter, setter);
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
|
||||
{
|
||||
auto getter = [=]()->bool {return myAvatar->getSnapTurn(); };
|
||||
auto getter = [=]()->bool { return myAvatar->getSnapTurn(); };
|
||||
auto setter = [=](bool value) { myAvatar->setSnapTurn(value); };
|
||||
preferences->addPreference(new CheckPreference(AVATAR_BASICS, "Snap turn when in HMD", getter, setter));
|
||||
}
|
||||
{
|
||||
auto getter = [=]()->bool {return myAvatar->getClearOverlayWhenMoving(); };
|
||||
auto getter = [=]()->bool { return myAvatar->getClearOverlayWhenMoving(); };
|
||||
auto setter = [=](bool value) { myAvatar->setClearOverlayWhenMoving(value); };
|
||||
preferences->addPreference(new CheckPreference(AVATAR_BASICS, "Clear overlays when moving", getter, setter));
|
||||
}
|
||||
|
||||
// Snapshots
|
||||
static const QString SNAPSHOTS { "Snapshots" };
|
||||
{
|
||||
auto getter = []()->QString { return Snapshot::snapshotsLocation.get(); };
|
||||
auto setter = [](const QString& value) { Snapshot::snapshotsLocation.set(value); };
|
||||
auto preference = new BrowsePreference("Snapshots", "Put my snapshots here", getter, setter);
|
||||
auto preference = new BrowsePreference(SNAPSHOTS, "Put my snapshots here", getter, setter);
|
||||
preferences->addPreference(preference);
|
||||
}
|
||||
|
||||
|
@ -85,7 +89,7 @@ void setupPreferences() {
|
|||
}));
|
||||
|
||||
{
|
||||
auto getter = []()->bool {return !Menu::getInstance()->isOptionChecked(MenuOption::DisableActivityLogger); };
|
||||
auto getter = []()->bool { return !Menu::getInstance()->isOptionChecked(MenuOption::DisableActivityLogger); };
|
||||
auto setter = [](bool value) { Menu::getInstance()->setIsOptionChecked(MenuOption::DisableActivityLogger, !value); };
|
||||
preferences->addPreference(new CheckPreference("Privacy", "Send data", getter, setter));
|
||||
}
|
||||
|
@ -184,7 +188,7 @@ void setupPreferences() {
|
|||
|
||||
static const QString AUDIO("Audio");
|
||||
{
|
||||
auto getter = []()->bool {return DependencyManager::get<AudioClient>()->getReceivedAudioStream().getDynamicJitterBuffers(); };
|
||||
auto getter = []()->bool { return DependencyManager::get<AudioClient>()->getReceivedAudioStream().getDynamicJitterBuffers(); };
|
||||
auto setter = [](bool value) { DependencyManager::get<AudioClient>()->getReceivedAudioStream().setDynamicJitterBuffers(value); };
|
||||
preferences->addPreference(new CheckPreference(AUDIO, "Enable dynamic jitter buffers", getter, setter));
|
||||
}
|
||||
|
@ -207,7 +211,7 @@ void setupPreferences() {
|
|||
preferences->addPreference(preference);
|
||||
}
|
||||
{
|
||||
auto getter = []()->bool {return DependencyManager::get<AudioClient>()->getReceivedAudioStream().getUseStDevForJitterCalc(); };
|
||||
auto getter = []()->bool { return DependencyManager::get<AudioClient>()->getReceivedAudioStream().getUseStDevForJitterCalc(); };
|
||||
auto setter = [](bool value) { DependencyManager::get<AudioClient>()->getReceivedAudioStream().setUseStDevForJitterCalc(value); };
|
||||
preferences->addPreference(new CheckPreference(AUDIO, "Use standard deviation for dynamic jitter calc", getter, setter));
|
||||
}
|
||||
|
@ -236,7 +240,7 @@ void setupPreferences() {
|
|||
preferences->addPreference(preference);
|
||||
}
|
||||
{
|
||||
auto getter = []()->bool {return DependencyManager::get<AudioClient>()->getReceivedAudioStream().getRepetitionWithFade(); };
|
||||
auto getter = []()->bool { return DependencyManager::get<AudioClient>()->getReceivedAudioStream().getRepetitionWithFade(); };
|
||||
auto setter = [](bool value) { DependencyManager::get<AudioClient>()->getReceivedAudioStream().setRepetitionWithFade(value); };
|
||||
preferences->addPreference(new CheckPreference(AUDIO, "Repetition with fade", getter, setter));
|
||||
}
|
||||
|
@ -250,7 +254,7 @@ void setupPreferences() {
|
|||
preferences->addPreference(preference);
|
||||
}
|
||||
{
|
||||
auto getter = []()->bool {return DependencyManager::get<AudioClient>()->getOutputStarveDetectionEnabled(); };
|
||||
auto getter = []()->bool { return DependencyManager::get<AudioClient>()->getOutputStarveDetectionEnabled(); };
|
||||
auto setter = [](bool value) { DependencyManager::get<AudioClient>()->setOutputStarveDetectionEnabled(value); };
|
||||
auto preference = new CheckPreference(AUDIO, "Output starve detection (automatic buffer size increase)", getter, setter);
|
||||
preferences->addPreference(preference);
|
||||
|
|
|
@ -44,6 +44,7 @@ const QString URL = "highfidelity_url";
|
|||
|
||||
Setting::Handle<QString> Snapshot::snapshotsLocation("snapshotsLocation",
|
||||
QStandardPaths::writableLocation(QStandardPaths::DesktopLocation));
|
||||
Setting::Handle<bool> Snapshot::hasSetSnapshotsLocation("hasSetSnapshotsLocation", false);
|
||||
|
||||
SnapshotMetaData* Snapshot::parseSnapshotData(QString snapshotPath) {
|
||||
|
||||
|
@ -103,7 +104,14 @@ QFile* Snapshot::savedFileForSnapshot(QImage & shot, bool isTemporary) {
|
|||
const int IMAGE_QUALITY = 100;
|
||||
|
||||
if (!isTemporary) {
|
||||
QString snapshotFullPath = snapshotsLocation.get();
|
||||
QString snapshotFullPath;
|
||||
if (!hasSetSnapshotsLocation.get()) {
|
||||
snapshotFullPath = QFileDialog::getExistingDirectory(nullptr, "Choose Snapshots Directory", snapshotsLocation.get());
|
||||
hasSetSnapshotsLocation.set(true);
|
||||
snapshotsLocation.set(snapshotFullPath);
|
||||
} else {
|
||||
snapshotFullPath = snapshotsLocation.get();
|
||||
}
|
||||
|
||||
if (!snapshotFullPath.endsWith(QDir::separator())) {
|
||||
snapshotFullPath.append(QDir::separator());
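The added branch asks for a snapshots directory once, remembers the answer, and reuses it afterwards. The same ask-once pattern expressed with plain QSettings rather than the project's Setting::Handle wrapper; the keys and default directory below are illustrative.

#include <QFileDialog>
#include <QSettings>
#include <QStandardPaths>
#include <QString>

// Sketch only: prompt the first time, persist the choice, reuse it on later calls.
QString snapshotsDirectory(QWidget* parent = nullptr) {
    QSettings settings;   // assumes the application/organization names are already set
    if (!settings.value("hasSetSnapshotsLocation", false).toBool()) {
        QString chosen = QFileDialog::getExistingDirectory(parent, "Choose Snapshots Directory",
            QStandardPaths::writableLocation(QStandardPaths::DesktopLocation));
        settings.setValue("hasSetSnapshotsLocation", true);
        settings.setValue("snapshotsLocation", chosen);
        return chosen;
    }
    return settings.value("snapshotsLocation").toString();
}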
|
||||
|
@ -133,118 +141,3 @@ QFile* Snapshot::savedFileForSnapshot(QImage & shot, bool isTemporary) {
|
|||
return imageTempFile;
|
||||
}
|
||||
}
|
||||
|
||||
const QString FORUM_URL = "https://alphas.highfidelity.io";
|
||||
const QString FORUM_UPLOADS_URL = FORUM_URL + "/uploads";
|
||||
const QString FORUM_POST_URL = FORUM_URL + "/posts";
|
||||
const QString FORUM_REPLY_TO_TOPIC = "244";
|
||||
const QString FORUM_POST_TEMPLATE = "<img src='%1'/><p>%2</p>";
|
||||
const QString SHARE_DEFAULT_ERROR = "The server isn't responding. Please try again in a few minutes.";
|
||||
const QString SUCCESS_LABEL_TEMPLATE = "Success!!! Go check out your image ...<br/><a style='color:#333;text-decoration:none' href='%1'>%1</a>";
|
||||
|
||||
|
||||
QString SnapshotUploader::uploadSnapshot(const QUrl& fileUrl) {
|
||||
auto accountManager = DependencyManager::get<AccountManager>();
|
||||
if (accountManager->getAccountInfo().getDiscourseApiKey().isEmpty()) {
|
||||
OffscreenUi::warning(nullptr, "", "Your Discourse API key is missing, you cannot share snapshots. Please try to relog.");
|
||||
return QString();
|
||||
}
|
||||
|
||||
QHttpPart apiKeyPart;
|
||||
apiKeyPart.setHeader(QNetworkRequest::ContentDispositionHeader, QVariant("form-data; name=\"api_key\""));
|
||||
apiKeyPart.setBody(accountManager->getAccountInfo().getDiscourseApiKey().toLatin1());
|
||||
|
||||
QString filename = fileUrl.toLocalFile();
|
||||
qDebug() << filename;
|
||||
QFile* file = new QFile(filename);
|
||||
Q_ASSERT(file->exists());
|
||||
file->open(QIODevice::ReadOnly);
|
||||
|
||||
QHttpPart imagePart;
|
||||
imagePart.setHeader(QNetworkRequest::ContentTypeHeader, QVariant("image/jpeg"));
|
||||
imagePart.setHeader(QNetworkRequest::ContentDispositionHeader,
|
||||
QVariant("form-data; name=\"file\"; filename=\"" + file->fileName() + "\""));
|
||||
imagePart.setBodyDevice(file);
|
||||
|
||||
QHttpMultiPart* multiPart = new QHttpMultiPart(QHttpMultiPart::FormDataType);
|
||||
file->setParent(multiPart); // we cannot delete the file now, so delete it with the multiPart
|
||||
multiPart->append(apiKeyPart);
|
||||
multiPart->append(imagePart);
|
||||
|
||||
QUrl url(FORUM_UPLOADS_URL);
|
||||
QNetworkRequest request(url);
|
||||
request.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
|
||||
|
||||
QString result;
|
||||
QEventLoop loop;
|
||||
|
||||
QSharedPointer<QNetworkReply> reply(NetworkAccessManager::getInstance().post(request, multiPart));
|
||||
QObject::connect(reply.data(), &QNetworkReply::finished, [&] {
|
||||
loop.quit();
|
||||
|
||||
qDebug() << reply->errorString();
|
||||
for (const auto& header : reply->rawHeaderList()) {
|
||||
qDebug() << "Header " << QString(header);
|
||||
}
|
||||
auto replyResult = reply->readAll();
|
||||
qDebug() << QString(replyResult);
|
||||
QJsonDocument jsonResponse = QJsonDocument::fromJson(replyResult);
|
||||
const QJsonObject& responseObject = jsonResponse.object();
|
||||
if (!responseObject.contains("url")) {
|
||||
OffscreenUi::warning(this, "", SHARE_DEFAULT_ERROR);
|
||||
return;
|
||||
}
|
||||
result = responseObject["url"].toString();
|
||||
});
|
||||
loop.exec();
|
||||
return result;
|
||||
}
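uploadSnapshot blocks on the asynchronous reply by spinning a local QEventLoop that quits when QNetworkReply::finished fires. The same idiom in isolation, as a hedged sketch with error handling trimmed.

#include <QEventLoop>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QUrl>

// Sketch only: wait synchronously for a QNetworkReply by running a local event loop.
QByteArray fetchBlocking(const QUrl& url) {
    QNetworkAccessManager manager;
    QNetworkReply* reply = manager.get(QNetworkRequest(url));

    QEventLoop loop;
    QObject::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
    loop.exec();   // returns once finished() has been emitted

    QByteArray body = reply->readAll();
    reply->deleteLater();
    return body;
}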
|
||||
|
||||
QString SnapshotUploader::sendForumPost(const QString& snapshotPath, const QString& notes) {
|
||||
// post to Discourse forum
|
||||
QNetworkRequest request;
|
||||
request.setHeader(QNetworkRequest::UserAgentHeader, HIGH_FIDELITY_USER_AGENT);
|
||||
QUrl forumUrl(FORUM_POST_URL);
|
||||
|
||||
QUrlQuery query;
|
||||
query.addQueryItem("api_key", DependencyManager::get<AccountManager>()->getAccountInfo().getDiscourseApiKey());
|
||||
query.addQueryItem("topic_id", FORUM_REPLY_TO_TOPIC);
|
||||
query.addQueryItem("raw", FORUM_POST_TEMPLATE.arg(snapshotPath, notes));
|
||||
forumUrl.setQuery(query);
|
||||
|
||||
QByteArray postData = forumUrl.toEncoded(QUrl::RemoveFragment);
|
||||
request.setUrl(forumUrl);
|
||||
request.setHeader(QNetworkRequest::ContentTypeHeader, "application/x-www-form-urlencoded");
|
||||
|
||||
QNetworkReply* requestReply = NetworkAccessManager::getInstance().post(request, postData);
|
||||
|
||||
QEventLoop loop;
|
||||
QString result;
|
||||
connect(requestReply, &QNetworkReply::finished, [&] {
|
||||
loop.quit();
|
||||
QJsonDocument jsonResponse = QJsonDocument::fromJson(requestReply->readAll());
|
||||
requestReply->deleteLater();
|
||||
const QJsonObject& responseObject = jsonResponse.object();
|
||||
|
||||
if (!responseObject.contains("id")) {
|
||||
QString errorMessage(SHARE_DEFAULT_ERROR);
|
||||
if (responseObject.contains("errors")) {
|
||||
QJsonArray errorArray = responseObject["errors"].toArray();
|
||||
if (!errorArray.first().toString().isEmpty()) {
|
||||
errorMessage = errorArray.first().toString();
|
||||
}
|
||||
}
|
||||
OffscreenUi::warning(this, "", errorMessage);
|
||||
return;
|
||||
}
|
||||
|
||||
const QString urlTemplate = "%1/t/%2/%3/%4";
|
||||
result = urlTemplate.arg(FORUM_URL,
|
||||
responseObject["topic_slug"].toString(),
|
||||
QString::number(responseObject["topic_id"].toDouble()),
|
||||
QString::number(responseObject["post_number"].toDouble()));
|
||||
});
|
||||
loop.exec();
|
||||
return result;
|
||||
}
|
||||
|
||||
|
|
|
@ -39,16 +39,9 @@ public:
|
|||
static SnapshotMetaData* parseSnapshotData(QString snapshotPath);
|
||||
|
||||
static Setting::Handle<QString> snapshotsLocation;
|
||||
static Setting::Handle<bool> hasSetSnapshotsLocation;
|
||||
private:
|
||||
static QFile* savedFileForSnapshot(QImage & image, bool isTemporary);
|
||||
};
|
||||
|
||||
class SnapshotUploader : public QObject{
|
||||
Q_OBJECT
|
||||
public:
|
||||
SnapshotUploader(QObject* parent = nullptr) : QObject(parent) {}
|
||||
Q_INVOKABLE QString uploadSnapshot(const QUrl& fileUrl);
|
||||
Q_INVOKABLE QString sendForumPost(const QString& snapshotPath, const QString& notes);
|
||||
};
|
||||
|
||||
#endif // hifi_Snapshot_h
|
||||
|
|
|
@ -1,47 +0,0 @@
|
|||
//
|
||||
// SnapshotShareDialog.cpp
|
||||
// interface/src/ui
|
||||
//
|
||||
// Created by Stojce Slavkovski on 2/16/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#if 0
|
||||
|
||||
|
||||
#include <OffscreenUi.h>
|
||||
|
||||
const int NARROW_SNAPSHOT_DIALOG_SIZE = 500;
|
||||
const int WIDE_SNAPSHOT_DIALOG_WIDTH = 650;
|
||||
const int SUCCESS_LABEL_HEIGHT = 140;
|
||||
|
||||
const QString SHARE_BUTTON_STYLE = "border-width:0;border-radius:9px;border-radius:9px;font-family:Arial;font-size:18px;"
|
||||
"font-weight:100;color:#FFFFFF;width: 120px;height: 50px;";
|
||||
const QString SHARE_BUTTON_ENABLED_STYLE = "background-color: #333;";
|
||||
const QString SHARE_BUTTON_DISABLED_STYLE = "background-color: #999;";
|
||||
|
||||
Q_DECLARE_METATYPE(QNetworkAccessManager::Operation)
|
||||
|
||||
SnapshotShareDialog::SnapshotShareDialog(QString fileName, QWidget* parent) :
|
||||
QDialog(parent),
|
||||
_fileName(fileName)
|
||||
{
|
||||
|
||||
|
||||
_ui.snapshotWidget->setPixmap(snaphsotPixmap);
|
||||
_ui.snapshotWidget->adjustSize();
|
||||
}
|
||||
|
||||
void SnapshotShareDialog::accept() {
|
||||
// prevent multiple clicks on share button
|
||||
_ui.shareButton->setEnabled(false);
|
||||
// gray out share button
|
||||
_ui.shareButton->setStyleSheet(SHARE_BUTTON_STYLE + SHARE_BUTTON_DISABLED_STYLE);
|
||||
uploadSnapshot();
|
||||
}
|
||||
|
||||
|
||||
#endif
|
|
@ -48,14 +48,6 @@ const QString& UpdateDialog::releaseNotes() const {
|
|||
return _releaseNotes;
|
||||
}
|
||||
|
||||
void UpdateDialog::closeDialog() {
|
||||
hide();
|
||||
}
|
||||
|
||||
void UpdateDialog::hide() {
|
||||
((QQuickItem*)parent())->setVisible(false);
|
||||
}
|
||||
|
||||
void UpdateDialog::triggerUpgrade() {
|
||||
auto applicationUpdater = DependencyManager::get<AutoUpdater>();
|
||||
applicationUpdater.data()->performAutoUpdate(applicationUpdater.data()->getBuildData().lastKey());
|
||||
|
|
|
@ -21,22 +21,20 @@ class UpdateDialog : public OffscreenQmlDialog {
|
|||
Q_OBJECT
|
||||
HIFI_QML_DECL
|
||||
|
||||
Q_PROPERTY(QString updateAvailableDetails READ updateAvailableDetails)
|
||||
Q_PROPERTY(QString releaseNotes READ releaseNotes)
|
||||
Q_PROPERTY(QString updateAvailableDetails READ updateAvailableDetails CONSTANT)
|
||||
Q_PROPERTY(QString releaseNotes READ releaseNotes CONSTANT)
|
||||
|
||||
public:
|
||||
UpdateDialog(QQuickItem* parent = nullptr);
|
||||
const QString& updateAvailableDetails() const;
|
||||
const QString& releaseNotes() const;
|
||||
|
||||
|
||||
private:
|
||||
QString _updateAvailableDetails;
|
||||
QString _releaseNotes;
|
||||
|
||||
protected:
|
||||
void hide();
|
||||
Q_INVOKABLE void triggerUpgrade();
|
||||
Q_INVOKABLE void closeDialog();
|
||||
|
||||
};
|
||||
|
||||
|
|
|
@ -17,7 +17,7 @@
|
|||
|
||||
class Base3DOverlay : public Overlay {
|
||||
Q_OBJECT
|
||||
|
||||
|
||||
public:
|
||||
Base3DOverlay();
|
||||
Base3DOverlay(const Base3DOverlay* base3DOverlay);
|
||||
|
@ -27,10 +27,10 @@ public:
|
|||
const glm::vec3& getPosition() const { return _transform.getTranslation(); }
|
||||
const glm::quat& getRotation() const { return _transform.getRotation(); }
|
||||
const glm::vec3& getScale() const { return _transform.getScale(); }
|
||||
|
||||
|
||||
// TODO: consider implementing registration points in this class
|
||||
const glm::vec3& getCenter() const { return getPosition(); }
|
||||
|
||||
|
||||
float getLineWidth() const { return _lineWidth; }
|
||||
bool getIsSolid() const { return _isSolid; }
|
||||
bool getIsDashedLine() const { return _isDashedLine; }
|
||||
|
@ -43,7 +43,7 @@ public:
|
|||
void setRotation(const glm::quat& value) { _transform.setRotation(value); }
|
||||
void setScale(float value) { _transform.setScale(value); }
|
||||
void setScale(const glm::vec3& value) { _transform.setScale(value); }
|
||||
|
||||
|
||||
void setLineWidth(float lineWidth) { _lineWidth = lineWidth; }
|
||||
void setIsSolid(bool isSolid) { _isSolid = isSolid; }
|
||||
void setIsDashedLine(bool isDashedLine) { _isDashedLine = isDashedLine; }
|
||||
|
@ -55,22 +55,22 @@ public:
|
|||
void setProperties(const QVariantMap& properties) override;
|
||||
QVariant getProperty(const QString& property) override;
|
||||
|
||||
virtual bool findRayIntersection(const glm::vec3& origin, const glm::vec3& direction, float& distance,
|
||||
virtual bool findRayIntersection(const glm::vec3& origin, const glm::vec3& direction, float& distance,
|
||||
BoxFace& face, glm::vec3& surfaceNormal);
|
||||
|
||||
virtual bool findRayIntersectionExtraInfo(const glm::vec3& origin, const glm::vec3& direction,
|
||||
virtual bool findRayIntersectionExtraInfo(const glm::vec3& origin, const glm::vec3& direction,
|
||||
float& distance, BoxFace& face, glm::vec3& surfaceNormal, QString& extraInfo) {
|
||||
return findRayIntersection(origin, direction, distance, face, surfaceNormal);
|
||||
}
|
||||
|
||||
protected:
|
||||
Transform _transform;
|
||||
|
||||
|
||||
float _lineWidth;
|
||||
bool _isSolid;
|
||||
bool _isDashedLine;
|
||||
bool _ignoreRayIntersection;
|
||||
bool _drawInFront;
|
||||
};
|
||||
|
||||
|
||||
#endif // hifi_Base3DOverlay_h
|
||||
|
|
|
@ -14,30 +14,11 @@
|
|||
#include <GeometryCache.h>
|
||||
#include <RegisteredMetaTypes.h>
|
||||
|
||||
|
||||
QString const Circle3DOverlay::TYPE = "circle3d";
|
||||
|
||||
Circle3DOverlay::Circle3DOverlay() :
|
||||
_startAt(0.0f),
|
||||
_endAt(360.0f),
|
||||
_outerRadius(1.0f),
|
||||
_innerRadius(0.0f),
|
||||
_hasTickMarks(false),
|
||||
_majorTickMarksAngle(0.0f),
|
||||
_minorTickMarksAngle(0.0f),
|
||||
_majorTickMarksLength(0.0f),
|
||||
_minorTickMarksLength(0.0f),
|
||||
_quadVerticesID(GeometryCache::UNKNOWN_ID),
|
||||
_lineVerticesID(GeometryCache::UNKNOWN_ID),
|
||||
_majorTicksVerticesID(GeometryCache::UNKNOWN_ID),
|
||||
_minorTicksVerticesID(GeometryCache::UNKNOWN_ID),
|
||||
_lastStartAt(-1.0f),
|
||||
_lastEndAt(-1.0f),
|
||||
_lastOuterRadius(-1.0f),
|
||||
_lastInnerRadius(-1.0f)
|
||||
{
|
||||
_majorTickMarksColor.red = _majorTickMarksColor.green = _majorTickMarksColor.blue = (unsigned char)0;
|
||||
_minorTickMarksColor.red = _minorTickMarksColor.green = _minorTickMarksColor.blue = (unsigned char)0;
|
||||
Circle3DOverlay::Circle3DOverlay() {
|
||||
memset(&_minorTickMarksColor, 0, sizeof(_minorTickMarksColor));
|
||||
memset(&_majorTickMarksColor, 0, sizeof(_majorTickMarksColor));
|
||||
}
|
||||
|
||||
Circle3DOverlay::Circle3DOverlay(const Circle3DOverlay* circle3DOverlay) :
|
||||
|
@ -56,11 +37,7 @@ Circle3DOverlay::Circle3DOverlay(const Circle3DOverlay* circle3DOverlay) :
|
|||
_quadVerticesID(GeometryCache::UNKNOWN_ID),
|
||||
_lineVerticesID(GeometryCache::UNKNOWN_ID),
|
||||
_majorTicksVerticesID(GeometryCache::UNKNOWN_ID),
|
||||
_minorTicksVerticesID(GeometryCache::UNKNOWN_ID),
|
||||
_lastStartAt(-1.0f),
|
||||
_lastEndAt(-1.0f),
|
||||
_lastOuterRadius(-1.0f),
|
||||
_lastInnerRadius(-1.0f)
|
||||
_minorTicksVerticesID(GeometryCache::UNKNOWN_ID)
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -70,36 +47,25 @@ void Circle3DOverlay::render(RenderArgs* args) {
|
|||
}
|
||||
|
||||
float alpha = getAlpha();
|
||||
|
||||
if (alpha == 0.0f) {
|
||||
return; // do nothing if our alpha is 0, we're not visible
|
||||
}
|
||||
|
||||
// Create the circle in the coordinates origin
|
||||
float outerRadius = getOuterRadius();
|
||||
float innerRadius = getInnerRadius(); // only used in solid case
|
||||
float startAt = getStartAt();
|
||||
float endAt = getEndAt();
|
||||
|
||||
bool geometryChanged = (startAt != _lastStartAt || endAt != _lastEndAt ||
|
||||
innerRadius != _lastInnerRadius || outerRadius != _lastOuterRadius);
|
||||
|
||||
bool geometryChanged = _dirty;
|
||||
_dirty = false;
|
||||
|
||||
const float FULL_CIRCLE = 360.0f;
|
||||
const float SLICES = 180.0f; // The number of segments used to create the circle
|
||||
const float SLICE_ANGLE = FULL_CIRCLE / SLICES;
|
||||
|
||||
xColor colorX = getColor();
|
||||
const float MAX_COLOR = 255.0f;
|
||||
glm::vec4 color(colorX.red / MAX_COLOR, colorX.green / MAX_COLOR, colorX.blue / MAX_COLOR, alpha);
|
||||
|
||||
bool colorChanged = colorX.red != _lastColor.red || colorX.green != _lastColor.green || colorX.blue != _lastColor.blue;
|
||||
_lastColor = colorX;
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
Q_ASSERT(args->_batch);
|
||||
auto& batch = *args->_batch;
|
||||
if (args->_pipeline) {
|
||||
batch.setPipeline(args->_pipeline->pipeline);
|
||||
}
|
||||
|
||||
// FIXME: The line width of _lineWidth is not supported anymore; we'll need a workaround
|
||||
|
||||
|
@ -110,81 +76,89 @@ void Circle3DOverlay::render(RenderArgs* args) {
|
|||
// for our overlay, is solid means we draw a ring between the inner and outer radius of the circle, otherwise
|
||||
// we just draw a line...
|
||||
if (getIsSolid()) {
|
||||
if (_quadVerticesID == GeometryCache::UNKNOWN_ID) {
|
||||
if (!_quadVerticesID) {
|
||||
_quadVerticesID = geometryCache->allocateID();
|
||||
}
|
||||
|
||||
if (geometryChanged || colorChanged) {
|
||||
|
||||
if (geometryChanged) {
|
||||
QVector<glm::vec2> points;
|
||||
|
||||
float angle = startAt;
|
||||
float angleInRadians = glm::radians(angle);
|
||||
glm::vec2 mostRecentInnerPoint(cosf(angleInRadians) * innerRadius, sinf(angleInRadians) * innerRadius);
|
||||
glm::vec2 mostRecentOuterPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
|
||||
|
||||
while (angle < endAt) {
|
||||
angleInRadians = glm::radians(angle);
|
||||
glm::vec2 thisInnerPoint(cosf(angleInRadians) * innerRadius, sinf(angleInRadians) * innerRadius);
|
||||
glm::vec2 thisOuterPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
|
||||
|
||||
points << mostRecentInnerPoint << mostRecentOuterPoint << thisOuterPoint; // first triangle
|
||||
points << mostRecentInnerPoint << thisInnerPoint << thisOuterPoint; // second triangle
|
||||
|
||||
angle += SLICE_ANGLE;
|
||||
QVector<glm::vec4> colors;
|
||||
|
||||
mostRecentInnerPoint = thisInnerPoint;
|
||||
mostRecentOuterPoint = thisOuterPoint;
|
||||
float pulseLevel = updatePulse();
|
||||
vec4 pulseModifier = vec4(1);
|
||||
if (_alphaPulse != 0.0f) {
|
||||
pulseModifier.a = (_alphaPulse >= 0.0f) ? pulseLevel : (1.0f - pulseLevel);
|
||||
}
|
||||
|
||||
// get the last slice portion....
|
||||
angle = endAt;
|
||||
angleInRadians = glm::radians(angle);
|
||||
glm::vec2 lastInnerPoint(cosf(angleInRadians) * innerRadius, sinf(angleInRadians) * innerRadius);
|
||||
glm::vec2 lastOuterPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
|
||||
if (_colorPulse != 0.0f) {
|
||||
float pulseValue = (_colorPulse >= 0.0f) ? pulseLevel : (1.0f - pulseLevel);
|
||||
pulseModifier = vec4(vec3(pulseValue), pulseModifier.a);
|
||||
}
|
||||
vec4 innerStartColor = vec4(toGlm(_innerStartColor), _innerStartAlpha) * pulseModifier;
|
||||
vec4 outerStartColor = vec4(toGlm(_outerStartColor), _outerStartAlpha) * pulseModifier;
|
||||
vec4 innerEndColor = vec4(toGlm(_innerEndColor), _innerEndAlpha) * pulseModifier;
|
||||
vec4 outerEndColor = vec4(toGlm(_outerEndColor), _outerEndAlpha) * pulseModifier;
|
||||
|
||||
points << mostRecentInnerPoint << mostRecentOuterPoint << lastOuterPoint; // first triangle
|
||||
points << mostRecentInnerPoint << lastInnerPoint << lastOuterPoint; // second triangle
|
||||
|
||||
geometryCache->updateVertices(_quadVerticesID, points, color);
|
||||
if (_innerRadius <= 0) {
|
||||
_solidPrimitive = gpu::TRIANGLE_FAN;
|
||||
points << vec2();
|
||||
colors << innerStartColor;
|
||||
for (float angle = _startAt; angle <= _endAt; angle += SLICE_ANGLE) {
|
||||
float range = (angle - _startAt) / (_endAt - _startAt);
|
||||
float angleRadians = glm::radians(angle);
|
||||
points << glm::vec2(cosf(angleRadians) * _outerRadius, sinf(angleRadians) * _outerRadius);
|
||||
colors << glm::mix(outerStartColor, outerEndColor, range);
|
||||
}
|
||||
} else {
|
||||
_solidPrimitive = gpu::TRIANGLE_STRIP;
|
||||
for (float angle = _startAt; angle <= _endAt; angle += SLICE_ANGLE) {
|
||||
float range = (angle - _startAt) / (_endAt - _startAt);
|
||||
|
||||
float angleRadians = glm::radians(angle);
|
||||
points << glm::vec2(cosf(angleRadians) * _innerRadius, sinf(angleRadians) * _innerRadius);
|
||||
colors << glm::mix(innerStartColor, innerEndColor, range);
|
||||
|
||||
points << glm::vec2(cosf(angleRadians) * _outerRadius, sinf(angleRadians) * _outerRadius);
|
||||
colors << glm::mix(outerStartColor, outerEndColor, range);
|
||||
}
|
||||
}
|
||||
geometryCache->updateVertices(_quadVerticesID, points, colors);
|
||||
}
|
||||
|
||||
geometryCache->renderVertices(batch, gpu::TRIANGLES, _quadVerticesID);
|
||||
geometryCache->renderVertices(batch, _solidPrimitive, _quadVerticesID);
|
||||
|
||||
} else {
|
||||
if (_lineVerticesID == GeometryCache::UNKNOWN_ID) {
|
||||
if (!_lineVerticesID) {
|
||||
_lineVerticesID = geometryCache->allocateID();
|
||||
}
|
||||
|
||||
if (geometryChanged || colorChanged) {
|
||||
if (geometryChanged) {
|
||||
QVector<glm::vec2> points;
|
||||
|
||||
float angle = startAt;
|
||||
float angle = _startAt;
|
||||
float angleInRadians = glm::radians(angle);
|
||||
glm::vec2 firstPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
|
||||
glm::vec2 firstPoint(cosf(angleInRadians) * _outerRadius, sinf(angleInRadians) * _outerRadius);
|
||||
points << firstPoint;
|
||||
|
||||
while (angle < endAt) {
|
||||
while (angle < _endAt) {
|
||||
angle += SLICE_ANGLE;
|
||||
angleInRadians = glm::radians(angle);
|
||||
glm::vec2 thisPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
|
||||
glm::vec2 thisPoint(cosf(angleInRadians) * _outerRadius, sinf(angleInRadians) * _outerRadius);
|
||||
points << thisPoint;
|
||||
|
||||
if (getIsDashedLine()) {
|
||||
angle += SLICE_ANGLE / 2.0f; // short gap
|
||||
angleInRadians = glm::radians(angle);
|
||||
glm::vec2 dashStartPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
|
||||
glm::vec2 dashStartPoint(cosf(angleInRadians) * _outerRadius, sinf(angleInRadians) * _outerRadius);
|
||||
points << dashStartPoint;
|
||||
}
|
||||
}
|
||||
|
||||
// get the last slice portion....
|
||||
angle = endAt;
|
||||
angle = _endAt;
|
||||
angleInRadians = glm::radians(angle);
|
||||
glm::vec2 lastPoint(cosf(angleInRadians) * outerRadius, sinf(angleInRadians) * outerRadius);
|
||||
glm::vec2 lastPoint(cosf(angleInRadians) * _outerRadius, sinf(angleInRadians) * _outerRadius);
|
||||
points << lastPoint;
|
||||
|
||||
geometryCache->updateVertices(_lineVerticesID, points, color);
|
||||
geometryCache->updateVertices(_lineVerticesID, points, vec4(toGlm(getColor()), getAlpha()));
|
||||
}
|
||||
|
||||
if (getIsDashedLine()) {
|
||||
|
@ -214,13 +188,13 @@ void Circle3DOverlay::render(RenderArgs* args) {
|
|||
if (getMajorTickMarksAngle() > 0.0f && getMajorTickMarksLength() != 0.0f) {
|
||||
|
||||
float tickMarkAngle = getMajorTickMarksAngle();
|
||||
float angle = startAt - fmodf(startAt, tickMarkAngle) + tickMarkAngle;
|
||||
float angle = _startAt - fmodf(_startAt, tickMarkAngle) + tickMarkAngle;
|
||||
float angleInRadians = glm::radians(angle);
|
||||
float tickMarkLength = getMajorTickMarksLength();
|
||||
float startRadius = (tickMarkLength > 0.0f) ? innerRadius : outerRadius;
|
||||
float startRadius = (tickMarkLength > 0.0f) ? _innerRadius : _outerRadius;
|
||||
float endRadius = startRadius + tickMarkLength;
|
||||
|
||||
while (angle <= endAt) {
|
||||
while (angle <= _endAt) {
|
||||
angleInRadians = glm::radians(angle);
|
||||
|
||||
glm::vec2 thisPointA(cosf(angleInRadians) * startRadius, sinf(angleInRadians) * startRadius);
|
||||
|
@ -236,13 +210,13 @@ void Circle3DOverlay::render(RenderArgs* args) {
|
|||
if (getMinorTickMarksAngle() > 0.0f && getMinorTickMarksLength() != 0.0f) {
|
||||
|
||||
float tickMarkAngle = getMinorTickMarksAngle();
|
||||
float angle = startAt - fmodf(startAt, tickMarkAngle) + tickMarkAngle;
|
||||
float angle = _startAt - fmodf(_startAt, tickMarkAngle) + tickMarkAngle;
|
||||
float angleInRadians = glm::radians(angle);
|
||||
float tickMarkLength = getMinorTickMarksLength();
|
||||
float startRadius = (tickMarkLength > 0.0f) ? innerRadius : outerRadius;
|
||||
float startRadius = (tickMarkLength > 0.0f) ? _innerRadius : _outerRadius;
|
||||
float endRadius = startRadius + tickMarkLength;
|
||||
|
||||
while (angle <= endAt) {
|
||||
while (angle <= _endAt) {
|
||||
angleInRadians = glm::radians(angle);
|
||||
|
||||
glm::vec2 thisPointA(cosf(angleInRadians) * startRadius, sinf(angleInRadians) * startRadius);
|
||||
|
@ -269,17 +243,10 @@ void Circle3DOverlay::render(RenderArgs* args) {
|
|||
|
||||
geometryCache->renderVertices(batch, gpu::LINES, _minorTicksVerticesID);
|
||||
}
|
||||
|
||||
if (geometryChanged) {
|
||||
_lastStartAt = startAt;
|
||||
_lastEndAt = endAt;
|
||||
_lastInnerRadius = innerRadius;
|
||||
_lastOuterRadius = outerRadius;
|
||||
}
|
||||
}
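The solid branch above emits either a triangle fan (a disc, when the inner radius is zero) or a triangle strip (a ring) and interpolates the start and end colors along the arc. A compact sketch of just that vertex and color generation, assuming glm and collapsing the separate inner/outer gradients into one for brevity; the GeometryCache and batch plumbing are omitted.

#include <glm/glm.hpp>
#include <cmath>
#include <vector>

// Sketch only: tessellate a partial ring (or disc) with per-vertex gradient colors.
void buildRing(float startDeg, float endDeg, float innerRadius, float outerRadius,
               const glm::vec4& startColor, const glm::vec4& endColor,
               std::vector<glm::vec2>& points, std::vector<glm::vec4>& colors) {
    const float SLICE_ANGLE = 360.0f / 180.0f;   // same slice size as the overlay uses
    if (innerRadius <= 0.0f) {
        // Disc: triangle-fan layout, center vertex first.
        points.push_back(glm::vec2(0.0f));
        colors.push_back(startColor);
    }
    for (float angle = startDeg; angle <= endDeg; angle += SLICE_ANGLE) {
        float range = (angle - startDeg) / (endDeg - startDeg);
        float radians = glm::radians(angle);
        glm::vec2 unit(std::cos(radians), std::sin(radians));
        if (innerRadius > 0.0f) {
            // Ring: triangle-strip layout, alternating inner and outer vertices.
            points.push_back(unit * innerRadius);
            colors.push_back(glm::mix(startColor, endColor, range));
        }
        points.push_back(unit * outerRadius);
        colors.push_back(glm::mix(startColor, endColor, range));
    }
}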
|
||||
|
||||
const render::ShapeKey Circle3DOverlay::getShapeKey() {
|
||||
auto builder = render::ShapeKey::Builder().withoutCullFace();
|
||||
auto builder = render::ShapeKey::Builder().withoutCullFace().withUnlit();
|
||||
if (getAlpha() != 1.0f) {
|
||||
builder.withTranslucent();
|
||||
}
|
||||
|
@ -289,72 +256,102 @@ const render::ShapeKey Circle3DOverlay::getShapeKey() {
|
|||
return builder.build();
|
||||
}
|
||||
|
||||
template<typename T> T fromVariant(const QVariant& v, bool& valid) {
|
||||
valid = v.isValid();
|
||||
return qvariant_cast<T>(v);
|
||||
}
|
||||
|
||||
template<> xColor fromVariant(const QVariant& v, bool& valid) {
|
||||
return xColorFromVariant(v, valid);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
bool updateIfValid(const QVariantMap& properties, const char* key, T& output) {
|
||||
bool valid;
|
||||
T result = fromVariant<T>(properties[key], valid);
|
||||
if (!valid) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Don't signal updates if the value was already set
|
||||
if (result == output) {
|
||||
return false;
|
||||
}
|
||||
|
||||
output = result;
|
||||
return true;
|
||||
}
|
||||
|
||||
// Multicast, many outputs
|
||||
template<typename T>
|
||||
bool updateIfValid(const QVariantMap& properties, const char* key, std::initializer_list<std::reference_wrapper<T>> outputs) {
|
||||
bool valid;
|
||||
T value = fromVariant<T>(properties[key], valid);
|
||||
if (!valid) {
|
||||
return false;
|
||||
}
|
||||
bool updated = false;
|
||||
for (T& output : outputs) {
|
||||
if (output != value) {
|
||||
output = value;
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
return updated;
|
||||
}
|
||||
|
||||
// Multicast, multiple possible inputs, in order of preference
|
||||
template<typename T>
|
||||
bool updateIfValid(const QVariantMap& properties, const std::initializer_list<const char*> keys, T& output) {
|
||||
for (const char* key : keys) {
|
||||
if (updateIfValid<T>(properties, key, output)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
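These helpers only write a value when the variant is valid and the value actually changed, so _dirty is raised exactly when a rebuild is needed; the multicast overloads let one key drive several members, or several keys drive one member in preference order. A reduced, self-contained sketch of the single-output idiom; names are illustrative.

#include <QMap>
#include <QString>
#include <QVariant>

// Sketch only: update a member when the property is present, convertible, and different.
// T must provide operator==.
template <typename T>
bool updateIfChanged(const QVariantMap& properties, const char* key, T& output) {
    QVariant v = properties.value(key);
    if (!v.isValid() || !v.canConvert<T>()) {
        return false;
    }
    T value = v.value<T>();
    if (value == output) {
        return false;   // unchanged: don't raise the dirty flag
    }
    output = value;
    return true;
}

// Usage:
//   bool dirty = false;
//   dirty |= updateIfChanged(properties, "startAt", startAt);
//   dirty |= updateIfChanged(properties, "endAt", endAt);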
|
||||
|
||||
|
||||
void Circle3DOverlay::setProperties(const QVariantMap& properties) {
|
||||
Planar3DOverlay::setProperties(properties);
|
||||
_dirty |= updateIfValid<float>(properties, "alpha", { _innerStartAlpha, _innerEndAlpha, _outerStartAlpha, _outerEndAlpha });
|
||||
_dirty |= updateIfValid<float>(properties, "Alpha", { _innerStartAlpha, _innerEndAlpha, _outerStartAlpha, _outerEndAlpha });
|
||||
_dirty |= updateIfValid<float>(properties, "startAlpha", { _innerStartAlpha, _outerStartAlpha });
|
||||
_dirty |= updateIfValid<float>(properties, "endAlpha", { _innerEndAlpha, _outerEndAlpha });
|
||||
_dirty |= updateIfValid<float>(properties, "innerAlpha", { _innerStartAlpha, _innerEndAlpha });
|
||||
_dirty |= updateIfValid<float>(properties, "outerAlpha", { _outerStartAlpha, _outerEndAlpha });
|
||||
_dirty |= updateIfValid(properties, "innerStartAlpha", _innerStartAlpha);
|
||||
_dirty |= updateIfValid(properties, "innerEndAlpha", _innerEndAlpha);
|
||||
_dirty |= updateIfValid(properties, "outerStartAlpha", _outerStartAlpha);
|
||||
_dirty |= updateIfValid(properties, "outerEndAlpha", _outerEndAlpha);
|
||||
|
||||
QVariant startAt = properties["startAt"];
|
||||
if (startAt.isValid()) {
|
||||
setStartAt(startAt.toFloat());
|
||||
}
|
||||
_dirty |= updateIfValid<xColor>(properties, "color", { _innerStartColor, _innerEndColor, _outerStartColor, _outerEndColor });
|
||||
_dirty |= updateIfValid<xColor>(properties, "startColor", { _innerStartColor, _outerStartColor } );
|
||||
_dirty |= updateIfValid<xColor>(properties, "endColor", { _innerEndColor, _outerEndColor } );
|
||||
_dirty |= updateIfValid<xColor>(properties, "innerColor", { _innerStartColor, _innerEndColor } );
|
||||
_dirty |= updateIfValid<xColor>(properties, "outerColor", { _outerStartColor, _outerEndColor } );
|
||||
_dirty |= updateIfValid(properties, "innerStartColor", _innerStartColor);
|
||||
_dirty |= updateIfValid(properties, "innerEndColor", _innerEndColor);
|
||||
_dirty |= updateIfValid(properties, "outerStartColor", _outerStartColor);
|
||||
_dirty |= updateIfValid(properties, "outerEndColor", _outerEndColor);
|
||||
|
||||
QVariant endAt = properties["endAt"];
|
||||
if (endAt.isValid()) {
|
||||
setEndAt(endAt.toFloat());
|
||||
}
|
||||
_dirty |= updateIfValid(properties, "startAt", _startAt);
|
||||
_dirty |= updateIfValid(properties, "endAt", _endAt);
|
||||
|
||||
QVariant outerRadius = properties["radius"];
|
||||
if (!outerRadius.isValid()) {
|
||||
outerRadius = properties["outerRadius"];
|
||||
}
|
||||
if (outerRadius.isValid()) {
|
||||
setOuterRadius(outerRadius.toFloat());
|
||||
}
|
||||
_dirty |= updateIfValid(properties, { "radius", "outerRadius" }, _outerRadius);
|
||||
_dirty |= updateIfValid(properties, "innerRadius", _innerRadius);
|
||||
_dirty |= updateIfValid(properties, "hasTickMarks", _hasTickMarks);
|
||||
_dirty |= updateIfValid(properties, "majorTickMarksAngle", _majorTickMarksAngle);
|
||||
_dirty |= updateIfValid(properties, "minorTickMarksAngle", _minorTickMarksAngle);
|
||||
_dirty |= updateIfValid(properties, "majorTickMarksLength", _majorTickMarksLength);
|
||||
_dirty |= updateIfValid(properties, "minorTickMarksLength", _minorTickMarksLength);
|
||||
_dirty |= updateIfValid(properties, "majorTickMarksColor", _majorTickMarksColor);
|
||||
_dirty |= updateIfValid(properties, "minorTickMarksColor", _minorTickMarksColor);
|
||||
|
||||
QVariant innerRadius = properties["innerRadius"];
|
||||
if (innerRadius.isValid()) {
|
||||
setInnerRadius(innerRadius.toFloat());
|
||||
}
|
||||
|
||||
QVariant hasTickMarks = properties["hasTickMarks"];
|
||||
if (hasTickMarks.isValid()) {
|
||||
setHasTickMarks(hasTickMarks.toBool());
|
||||
}
|
||||
|
||||
QVariant majorTickMarksAngle = properties["majorTickMarksAngle"];
|
||||
if (majorTickMarksAngle.isValid()) {
|
||||
setMajorTickMarksAngle(majorTickMarksAngle.toFloat());
|
||||
}
|
||||
|
||||
QVariant minorTickMarksAngle = properties["minorTickMarksAngle"];
|
||||
if (minorTickMarksAngle.isValid()) {
|
||||
setMinorTickMarksAngle(minorTickMarksAngle.toFloat());
|
||||
}
|
||||
|
||||
QVariant majorTickMarksLength = properties["majorTickMarksLength"];
|
||||
if (majorTickMarksLength.isValid()) {
|
||||
setMajorTickMarksLength(majorTickMarksLength.toFloat());
|
||||
}
|
||||
|
||||
QVariant minorTickMarksLength = properties["minorTickMarksLength"];
|
||||
if (minorTickMarksLength.isValid()) {
|
||||
setMinorTickMarksLength(minorTickMarksLength.toFloat());
|
||||
}
|
||||
|
||||
bool valid;
|
||||
auto majorTickMarksColor = properties["majorTickMarksColor"];
|
||||
if (majorTickMarksColor.isValid()) {
|
||||
auto color = xColorFromVariant(majorTickMarksColor, valid);
|
||||
if (valid) {
|
||||
_majorTickMarksColor = color;
|
||||
}
|
||||
}
|
||||
|
||||
auto minorTickMarksColor = properties["minorTickMarksColor"];
|
||||
if (minorTickMarksColor.isValid()) {
|
||||
auto color = xColorFromVariant(majorTickMarksColor, valid);
|
||||
if (valid) {
|
||||
_minorTickMarksColor = color;
|
||||
}
|
||||
if (_innerStartAlpha < 1.0f || _innerEndAlpha < 1.0f || _outerStartAlpha < 1.0f || _outerEndAlpha < 1.0f) {
|
||||
// Force the alpha to 0.5, since we'll ignore it in the presence of these other values, but we need
|
||||
// it to be non-1 in order to get the right pipeline and non-0 in order to render at all.
|
||||
_alpha = 0.5f;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -59,28 +59,34 @@ public:
|
|||
virtual Circle3DOverlay* createClone() const override;
|
||||
|
||||
protected:
|
||||
float _startAt;
|
||||
float _endAt;
|
||||
float _outerRadius;
|
||||
float _innerRadius;
|
||||
bool _hasTickMarks;
|
||||
float _majorTickMarksAngle;
|
||||
float _minorTickMarksAngle;
|
||||
float _majorTickMarksLength;
|
||||
float _minorTickMarksLength;
|
||||
float _startAt { 0 };
|
||||
float _endAt { 360 };
|
||||
float _outerRadius { 1 };
|
||||
float _innerRadius { 0 };
|
||||
|
||||
xColor _innerStartColor;
|
||||
xColor _innerEndColor;
|
||||
xColor _outerStartColor;
|
||||
xColor _outerEndColor;
|
||||
float _innerStartAlpha;
|
||||
float _innerEndAlpha;
|
||||
float _outerStartAlpha;
|
||||
float _outerEndAlpha;
|
||||
|
||||
bool _hasTickMarks { false };
|
||||
float _majorTickMarksAngle { 0 };
|
||||
float _minorTickMarksAngle { 0 };
|
||||
float _majorTickMarksLength { 0 };
|
||||
float _minorTickMarksLength { 0 };
|
||||
xColor _majorTickMarksColor;
|
||||
xColor _minorTickMarksColor;
|
||||
|
||||
int _quadVerticesID;
|
||||
int _lineVerticesID;
|
||||
int _majorTicksVerticesID;
|
||||
int _minorTicksVerticesID;
|
||||
gpu::Primitive _solidPrimitive { gpu::TRIANGLE_FAN };
|
||||
int _quadVerticesID { 0 };
|
||||
int _lineVerticesID { 0 };
|
||||
int _majorTicksVerticesID { 0 };
|
||||
int _minorTicksVerticesID { 0 };
|
||||
|
||||
xColor _lastColor;
|
||||
float _lastStartAt;
|
||||
float _lastEndAt;
|
||||
float _lastOuterRadius;
|
||||
float _lastInnerRadius;
|
||||
bool _dirty { true };
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -55,12 +55,14 @@ void Line3DOverlay::render(RenderArgs* args) {
|
|||
batch->setModelTransform(_transform);
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
geometryCache->bindSimpleProgram(*batch, false, false, true, true);
|
||||
if (getIsDashedLine()) {
|
||||
// TODO: add support for color to renderDashedLine()
|
||||
geometryCache->bindSimpleProgram(*batch, false, false, true, true);
|
||||
geometryCache->renderDashedLine(*batch, _start, _end, colorv4, _geometryCacheID);
|
||||
} else if (_glow > 0.0f) {
|
||||
geometryCache->renderGlowLine(*batch, _start, _end, colorv4, _glow, _glowWidth, _geometryCacheID);
|
||||
} else {
|
||||
|
||||
geometryCache->bindSimpleProgram(*batch, false, false, true, true);
|
||||
geometryCache->renderLine(*batch, _start, _end, colorv4, _geometryCacheID);
|
||||
}
|
||||
}
|
||||
|
@ -68,7 +70,7 @@ void Line3DOverlay::render(RenderArgs* args) {
|
|||
|
||||
const render::ShapeKey Line3DOverlay::getShapeKey() {
|
||||
auto builder = render::ShapeKey::Builder().withOwnPipeline();
|
||||
if (getAlpha() != 1.0f) {
|
||||
if (getAlpha() != 1.0f || _glow > 0.0f) {
|
||||
builder.withTranslucent();
|
||||
}
|
||||
return builder.build();
|
||||
|
@ -94,6 +96,19 @@ void Line3DOverlay::setProperties(const QVariantMap& properties) {
|
|||
if (end.isValid()) {
|
||||
setEnd(vec3FromVariant(end));
|
||||
}
|
||||
|
||||
auto glow = properties["glow"];
|
||||
if (glow.isValid()) {
|
||||
setGlow(glow.toFloat());
|
||||
if (_glow > 0.0f) {
|
||||
_alpha = 0.5f;
|
||||
}
|
||||
}
|
||||
|
||||
auto glowWidth = properties["glowWidth"];
|
||||
if (glowWidth.isValid()) {
|
||||
setGlowWidth(glowWidth.toFloat());
|
||||
}
|
||||
}
|
||||
|
||||
QVariant Line3DOverlay::getProperty(const QString& property) {
|
||||
|
|
|
@ -30,10 +30,14 @@ public:
|
|||
// getters
|
||||
const glm::vec3& getStart() const { return _start; }
|
||||
const glm::vec3& getEnd() const { return _end; }
|
||||
const float& getGlow() const { return _glow; }
|
||||
const float& getGlowWidth() const { return _glowWidth; }
|
||||
|
||||
// setters
|
||||
void setStart(const glm::vec3& start) { _start = start; }
|
||||
void setEnd(const glm::vec3& end) { _end = end; }
|
||||
void setGlow(const float& glow) { _glow = glow; }
|
||||
void setGlowWidth(const float& glowWidth) { _glowWidth = glowWidth; }
|
||||
|
||||
void setProperties(const QVariantMap& properties) override;
|
||||
QVariant getProperty(const QString& property) override;
|
||||
|
@ -43,6 +47,8 @@ public:
|
|||
protected:
|
||||
glm::vec3 _start;
|
||||
glm::vec3 _end;
|
||||
float _glow { 0.0 };
|
||||
float _glowWidth { 0.0 };
|
||||
int _geometryCacheID;
|
||||
};
|
||||
|
||||
|
|
|
@ -19,8 +19,7 @@ QString const ModelOverlay::TYPE = "model";
|
|||
|
||||
ModelOverlay::ModelOverlay()
|
||||
: _model(std::make_shared<Model>(std::make_shared<Rig>())),
|
||||
_modelTextures(QVariantMap()),
|
||||
_updateModel(false)
|
||||
_modelTextures(QVariantMap())
|
||||
{
|
||||
_model->init();
|
||||
_isLoaded = false;
|
||||
|
@ -43,9 +42,12 @@ ModelOverlay::ModelOverlay(const ModelOverlay* modelOverlay) :
|
|||
void ModelOverlay::update(float deltatime) {
|
||||
if (_updateModel) {
|
||||
_updateModel = false;
|
||||
|
||||
_model->setSnapModelToCenter(true);
|
||||
_model->setScale(getDimensions());
|
||||
if (_scaleToFit) {
|
||||
_model->setScaleToFit(true, getScale() * getDimensions());
|
||||
} else {
|
||||
_model->setScale(getScale());
|
||||
}
|
||||
_model->setRotation(getRotation());
|
||||
_model->setTranslation(getPosition());
|
||||
_model->setURL(_url);
|
||||
|
@ -85,41 +87,47 @@ void ModelOverlay::render(RenderArgs* args) {
|
|||
}
|
||||
|
||||
void ModelOverlay::setProperties(const QVariantMap& properties) {
|
||||
auto position = getPosition();
|
||||
auto rotation = getRotation();
|
||||
auto scale = getDimensions();
|
||||
|
||||
Volume3DOverlay::setProperties(properties);
|
||||
|
||||
if (position != getPosition() || rotation != getRotation()) {
|
||||
auto origPosition = getPosition();
|
||||
auto origRotation = getRotation();
|
||||
auto origDimensions = getDimensions();
|
||||
auto origScale = getScale();
|
||||
|
||||
Base3DOverlay::setProperties(properties);
|
||||
|
||||
auto scale = properties["scale"];
|
||||
if (scale.isValid()) {
|
||||
setScale(vec3FromVariant(scale));
|
||||
}
|
||||
|
||||
auto dimensions = properties["dimensions"];
|
||||
if (dimensions.isValid()) {
|
||||
_scaleToFit = true;
|
||||
setDimensions(vec3FromVariant(dimensions));
|
||||
} else if (scale.isValid()) {
|
||||
// if "scale" property is set but "dimentions" is not.
|
||||
// do NOT scale to fit.
|
||||
_scaleToFit = false;
|
||||
}
|
||||
|
||||
if (origPosition != getPosition() || origRotation != getRotation() || origDimensions != getDimensions() || origScale != getScale()) {
|
||||
_updateModel = true;
|
||||
}
|
||||
|
||||
if (scale != getDimensions()) {
|
||||
auto newScale = getDimensions();
|
||||
if (newScale.x <= 0 || newScale.y <= 0 || newScale.z <= 0) {
|
||||
setDimensions(scale);
|
||||
} else {
|
||||
_model->setScaleToFit(true, getDimensions());
|
||||
_updateModel = true;
|
||||
}
|
||||
}
|
||||
|
||||
auto urlValue = properties["url"];
|
||||
if (urlValue.isValid() && urlValue.canConvert<QString>()) {
|
||||
_url = urlValue.toString();
|
||||
_updateModel = true;
|
||||
_isLoaded = false;
|
||||
}
|
||||
|
||||
|
||||
auto texturesValue = properties["textures"];
|
||||
if (texturesValue.isValid() && texturesValue.canConvert(QVariant::Map)) {
|
||||
QVariantMap textureMap = texturesValue.toMap();
|
||||
foreach(const QString& key, textureMap.keys()) {
|
||||
|
||||
|
||||
QUrl newTextureURL = textureMap[key].toUrl();
|
||||
qDebug() << "Updating texture named" << key << "to texture at URL" << newTextureURL;
|
||||
|
||||
|
||||
QMetaObject::invokeMethod(_model.get(), "setTextureWithNameToURL", Qt::AutoConnection,
|
||||
Q_ARG(const QString&, key),
|
||||
Q_ARG(const QUrl&, newTextureURL));
|
||||
|
@ -133,8 +141,11 @@ QVariant ModelOverlay::getProperty(const QString& property) {
|
|||
if (property == "url") {
|
||||
return _url.toString();
|
||||
}
|
||||
if (property == "dimensions" || property == "scale" || property == "size") {
|
||||
return vec3toVariant(_model->getScaleToFitDimensions());
|
||||
if (property == "dimensions" || property == "size") {
|
||||
return vec3toVariant(getDimensions());
|
||||
}
|
||||
if (property == "scale") {
|
||||
return vec3toVariant(getScale());
|
||||
}
|
||||
if (property == "textures") {
|
||||
if (_modelTextures.size() > 0) {
|
||||
|
@ -153,14 +164,14 @@ QVariant ModelOverlay::getProperty(const QString& property) {
|
|||
|
||||
bool ModelOverlay::findRayIntersection(const glm::vec3& origin, const glm::vec3& direction,
|
||||
float& distance, BoxFace& face, glm::vec3& surfaceNormal) {
|
||||
|
||||
|
||||
QString subMeshNameTemp;
|
||||
return _model->findRayIntersectionAgainstSubMeshes(origin, direction, distance, face, surfaceNormal, subMeshNameTemp);
|
||||
}
|
||||
|
||||
bool ModelOverlay::findRayIntersectionExtraInfo(const glm::vec3& origin, const glm::vec3& direction,
|
||||
float& distance, BoxFace& face, glm::vec3& surfaceNormal, QString& extraInfo) {
|
||||
|
||||
|
||||
return _model->findRayIntersectionAgainstSubMeshes(origin, direction, distance, face, surfaceNormal, extraInfo);
|
||||
}
|
||||
|
||||
|
|
|
@ -43,9 +43,10 @@ private:
|
|||
|
||||
ModelPointer _model;
|
||||
QVariantMap _modelTextures;
|
||||
|
||||
|
||||
QUrl _url;
|
||||
bool _updateModel;
|
||||
bool _updateModel = { false };
|
||||
bool _scaleToFit = { false };
|
||||
};
|
||||
|
||||
#endif // hifi_ModelOverlay_h
|
||||
|
|
|
@ -22,6 +22,7 @@
|
|||
#include "Image3DOverlay.h"
|
||||
#include "Circle3DOverlay.h"
|
||||
#include "Cube3DOverlay.h"
|
||||
#include "Shape3DOverlay.h"
|
||||
#include "ImageOverlay.h"
|
||||
#include "Line3DOverlay.h"
|
||||
#include "LocalModelsOverlay.h"
|
||||
|
@ -157,6 +158,8 @@ unsigned int Overlays::addOverlay(const QString& type, const QVariant& propertie
|
|||
thisOverlay = std::make_shared<TextOverlay>();
|
||||
} else if (type == Text3DOverlay::TYPE) {
|
||||
thisOverlay = std::make_shared<Text3DOverlay>();
|
||||
} else if (type == Shape3DOverlay::TYPE) {
|
||||
thisOverlay = std::make_shared<Shape3DOverlay>();
|
||||
} else if (type == Cube3DOverlay::TYPE) {
|
||||
thisOverlay = std::make_shared<Cube3DOverlay>();
|
||||
} else if (type == Sphere3DOverlay::TYPE) {
|
||||
|
|
130
interface/src/ui/overlays/Shape3DOverlay.cpp
Normal file
|
@ -0,0 +1,130 @@
|
|||
//
|
||||
// Shape3DOverlay.cpp
|
||||
// interface/src/ui/overlays
|
||||
//
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// include this before QGLWidget, which includes an earlier version of OpenGL
|
||||
#include "Shape3DOverlay.h"
|
||||
|
||||
#include <SharedUtil.h>
|
||||
#include <StreamUtils.h>
|
||||
#include <GeometryCache.h>
|
||||
#include <DependencyManager.h>
|
||||
|
||||
QString const Shape3DOverlay::TYPE = "shape";
|
||||
|
||||
Shape3DOverlay::Shape3DOverlay(const Shape3DOverlay* Shape3DOverlay) :
|
||||
Volume3DOverlay(Shape3DOverlay)
|
||||
{
|
||||
}
|
||||
|
||||
void Shape3DOverlay::render(RenderArgs* args) {
|
||||
if (!_visible) {
|
||||
return; // do nothing if we're not visible
|
||||
}
|
||||
|
||||
float alpha = getAlpha();
|
||||
xColor color = getColor();
|
||||
const float MAX_COLOR = 255.0f;
|
||||
glm::vec4 cubeColor(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
|
||||
|
||||
// TODO: handle registration point??
|
||||
glm::vec3 position = getPosition();
|
||||
glm::vec3 dimensions = getDimensions();
|
||||
glm::quat rotation = getRotation();
|
||||
|
||||
auto batch = args->_batch;
|
||||
|
||||
if (batch) {
|
||||
Transform transform;
|
||||
transform.setTranslation(position);
|
||||
transform.setRotation(rotation);
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
auto pipeline = args->_pipeline;
|
||||
if (!pipeline) {
|
||||
pipeline = _isSolid ? geometryCache->getShapePipeline() : geometryCache->getWireShapePipeline();
|
||||
}
|
||||
|
||||
transform.setScale(dimensions);
|
||||
batch->setModelTransform(transform);
|
||||
if (_isSolid) {
|
||||
geometryCache->renderSolidShapeInstance(*batch, _shape, cubeColor, pipeline);
|
||||
} else {
|
||||
geometryCache->renderWireShapeInstance(*batch, _shape, cubeColor, pipeline);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const render::ShapeKey Shape3DOverlay::getShapeKey() {
|
||||
auto builder = render::ShapeKey::Builder();
|
||||
if (getAlpha() != 1.0f) {
|
||||
builder.withTranslucent();
|
||||
}
|
||||
if (!getIsSolid()) {
|
||||
builder.withUnlit().withDepthBias();
|
||||
}
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
Shape3DOverlay* Shape3DOverlay::createClone() const {
|
||||
return new Shape3DOverlay(this);
|
||||
}
|
||||
|
||||
|
||||
static const std::array<QString, GeometryCache::Shape::NUM_SHAPES> shapeStrings { {
|
||||
"Line",
|
||||
"Triangle",
|
||||
"Quad",
|
||||
"Hexagon",
|
||||
"Octagon",
|
||||
"Circle",
|
||||
"Cube",
|
||||
"Sphere",
|
||||
"Tetrahedron",
|
||||
"Octahedron",
|
||||
"Dodecahedron",
|
||||
"Icosahedron",
|
||||
"Torus",
|
||||
"Cone",
|
||||
"Cylinder"
|
||||
} };
|
||||
|
||||
|
||||
void Shape3DOverlay::setProperties(const QVariantMap& properties) {
|
||||
Volume3DOverlay::setProperties(properties);
|
||||
|
||||
auto shape = properties["shape"];
|
||||
if (shape.isValid()) {
|
||||
const QString shapeStr = shape.toString();
|
||||
for (size_t i = 0; i < shapeStrings.size(); ++i) {
|
||||
if (shapeStr == shapeStrings[i]) {
|
||||
this->_shape = static_cast<GeometryCache::Shape>(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
auto borderSize = properties["borderSize"];
|
||||
|
||||
if (borderSize.isValid()) {
|
||||
float value = borderSize.toFloat();
|
||||
setBorderSize(value);
|
||||
}
|
||||
}
|
||||
|
||||
QVariant Shape3DOverlay::getProperty(const QString& property) {
|
||||
if (property == "borderSize") {
|
||||
return _borderSize;
|
||||
}
|
||||
|
||||
if (property == "shape") {
|
||||
return shapeStrings[_shape];
|
||||
}
|
||||
|
||||
return Volume3DOverlay::getProperty(property);
|
||||
}
|
46
interface/src/ui/overlays/Shape3DOverlay.h
Normal file
|
@ -0,0 +1,46 @@
|
|||
//
|
||||
// Shape3DOverlay.h
|
||||
// interface/src/ui/overlays
|
||||
//
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_Shape3DOverlay_h
|
||||
#define hifi_Shape3DOverlay_h
|
||||
|
||||
#include "Volume3DOverlay.h"
|
||||
|
||||
#include <GeometryCache.h>
|
||||
|
||||
class Shape3DOverlay : public Volume3DOverlay {
|
||||
Q_OBJECT
|
||||
|
||||
public:
|
||||
static QString const TYPE;
|
||||
virtual QString getType() const override { return TYPE; }
|
||||
|
||||
Shape3DOverlay() {}
|
||||
Shape3DOverlay(const Shape3DOverlay* Shape3DOverlay);
|
||||
|
||||
virtual void render(RenderArgs* args) override;
|
||||
virtual const render::ShapeKey getShapeKey() override;
|
||||
|
||||
virtual Shape3DOverlay* createClone() const override;
|
||||
|
||||
float getBorderSize() const { return _borderSize; }
|
||||
|
||||
void setBorderSize(float value) { _borderSize = value; }
|
||||
|
||||
void setProperties(const QVariantMap& properties) override;
|
||||
QVariant getProperty(const QString& property) override;
|
||||
|
||||
private:
|
||||
float _borderSize { 0.0f };
|
||||
GeometryCache::Shape _shape { GeometryCache::Hexagon };
|
||||
};
|
||||
|
||||
|
||||
#endif // hifi_Shape3DOverlay_h
|
|
@ -58,7 +58,7 @@ void Sphere3DOverlay::render(RenderArgs* args) {
|
|||
}
|
||||
|
||||
const render::ShapeKey Sphere3DOverlay::getShapeKey() {
|
||||
auto builder = render::ShapeKey::Builder().withOwnPipeline();
|
||||
auto builder = render::ShapeKey::Builder();
|
||||
if (getAlpha() != 1.0f) {
|
||||
builder.withTranslucent();
|
||||
}
|
||||
|
|
|
@ -22,7 +22,7 @@ AABox Volume3DOverlay::getBounds() const {
|
|||
auto extents = Extents{_localBoundingBox};
|
||||
extents.rotate(getRotation());
|
||||
extents.shiftBy(getPosition());
|
||||
|
||||
|
||||
return AABox(extents);
|
||||
}
|
||||
|
||||
|
@ -31,7 +31,7 @@ void Volume3DOverlay::setProperties(const QVariantMap& properties) {
|
|||
|
||||
auto dimensions = properties["dimensions"];
|
||||
|
||||
// if "dimensions" property was not there, check to see if they included aliases: scale
|
||||
// if "dimensions" property was not there, check to see if they included aliases: scale, size
|
||||
if (!dimensions.isValid()) {
|
||||
dimensions = properties["scale"];
|
||||
if (!dimensions.isValid()) {
|
||||
|
@ -57,7 +57,7 @@ bool Volume3DOverlay::findRayIntersection(const glm::vec3& origin, const glm::ve
|
|||
// extents is the entity relative, scaled, centered extents of the entity
|
||||
glm::mat4 worldToEntityMatrix;
|
||||
_transform.getInverseMatrix(worldToEntityMatrix);
|
||||
|
||||
|
||||
glm::vec3 overlayFrameOrigin = glm::vec3(worldToEntityMatrix * glm::vec4(origin, 1.0f));
|
||||
glm::vec3 overlayFrameDirection = glm::vec3(worldToEntityMatrix * glm::vec4(direction, 0.0f));
|
||||
|
||||
|
|
|
@ -15,13 +15,13 @@
|
|||
|
||||
class Volume3DOverlay : public Base3DOverlay {
|
||||
Q_OBJECT
|
||||
|
||||
|
||||
public:
|
||||
Volume3DOverlay() {}
|
||||
Volume3DOverlay(const Volume3DOverlay* volume3DOverlay);
|
||||
|
||||
|
||||
virtual AABox getBounds() const override;
|
||||
|
||||
|
||||
const glm::vec3& getDimensions() const { return _localBoundingBox.getDimensions(); }
|
||||
void setDimensions(float value) { _localBoundingBox.setBox(glm::vec3(-value / 2.0f), value); }
|
||||
void setDimensions(const glm::vec3& value) { _localBoundingBox.setBox(-value / 2.0f, value); }
|
||||
|
@ -29,13 +29,13 @@ public:
|
|||
void setProperties(const QVariantMap& properties) override;
|
||||
QVariant getProperty(const QString& property) override;
|
||||
|
||||
virtual bool findRayIntersection(const glm::vec3& origin, const glm::vec3& direction, float& distance,
|
||||
BoxFace& face, glm::vec3& surfaceNormal) override;
|
||||
|
||||
virtual bool findRayIntersection(const glm::vec3& origin, const glm::vec3& direction, float& distance,
|
||||
BoxFace& face, glm::vec3& surfaceNormal) override;
|
||||
|
||||
protected:
|
||||
// Centered local bounding box
|
||||
AABox _localBoundingBox{ vec3(0.0f), 1.0f };
|
||||
};
|
||||
|
||||
|
||||
|
||||
#endif // hifi_Volume3DOverlay_h
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
set(TARGET_NAME audio-client)
|
||||
setup_hifi_library(Network Multimedia)
|
||||
link_hifi_libraries(audio)
|
||||
link_hifi_libraries(audio plugins)
|
||||
|
||||
# append audio includes to our list of includes to bubble
|
||||
target_include_directories(${TARGET_NAME} PUBLIC "${HIFI_LIBRARY_DIR}/audio/src")
|
||||
|
|
|
@ -14,6 +14,8 @@
|
|||
#include <sys/stat.h>
|
||||
|
||||
#include <glm/glm.hpp>
|
||||
#include <glm/gtx/norm.hpp>
|
||||
#include <glm/gtx/vector_angle.hpp>
|
||||
|
||||
#ifdef __APPLE__
|
||||
#include <CoreAudio/AudioHardware.h>
|
||||
|
@ -34,6 +36,8 @@
|
|||
#include <QtMultimedia/QAudioOutput>
|
||||
|
||||
#include <NodeList.h>
|
||||
#include <plugins/CodecPlugin.h>
|
||||
#include <plugins/PluginManager.h>
|
||||
#include <udt/PacketHeaders.h>
|
||||
#include <PositionalAudioStream.h>
|
||||
#include <SettingHandle.h>
|
||||
|
@ -41,8 +45,6 @@
|
|||
#include <UUID.h>
|
||||
#include <Transform.h>
|
||||
|
||||
#include "AudioInjector.h"
|
||||
#include "AudioConstants.h"
|
||||
#include "PositionalAudioStream.h"
|
||||
#include "AudioClientLogging.h"
|
||||
|
||||
|
@ -102,6 +104,7 @@ AudioClient::AudioClient() :
|
|||
_reverbOptions(&_scriptReverbOptions),
|
||||
_inputToNetworkResampler(NULL),
|
||||
_networkToOutputResampler(NULL),
|
||||
_audioLimiter(AudioConstants::SAMPLE_RATE, AudioConstants::STEREO),
|
||||
_outgoingAvatarAudioSequenceNumber(0),
|
||||
_audioOutputIODevice(_receivedAudioStream, this),
|
||||
_stats(&_receivedAudioStream),
|
||||
|
@ -133,10 +136,15 @@ AudioClient::AudioClient() :
|
|||
packetReceiver.registerListener(PacketType::MixedAudio, this, "handleAudioDataPacket");
|
||||
packetReceiver.registerListener(PacketType::NoisyMute, this, "handleNoisyMutePacket");
|
||||
packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
|
||||
packetReceiver.registerListener(PacketType::SelectedAudioFormat, this, "handleSelectedAudioFormat");
|
||||
}
|
||||
|
||||
AudioClient::~AudioClient() {
|
||||
stop();
|
||||
if (_codec && _encoder) {
|
||||
_codec->releaseEncoder(_encoder);
|
||||
_encoder = nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void AudioClient::reset() {
|
||||
|
@ -503,6 +511,53 @@ void AudioClient::handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> me
|
|||
emit muteEnvironmentRequested(position, radius);
|
||||
}
|
||||
|
||||
void AudioClient::negotiateAudioFormat() {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
auto negotiateFormatPacket = NLPacket::create(PacketType::NegotiateAudioFormat);
|
||||
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
|
||||
quint8 numberOfCodecs = (quint8)codecPlugins.size();
|
||||
negotiateFormatPacket->writePrimitive(numberOfCodecs);
|
||||
for (auto& plugin : codecPlugins) {
|
||||
auto codecName = plugin->getName();
|
||||
negotiateFormatPacket->writeString(codecName);
|
||||
}
|
||||
|
||||
// grab our audio mixer from the NodeList, if it exists
|
||||
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
|
||||
|
||||
if (audioMixer) {
|
||||
// send off this mute packet
|
||||
nodeList->sendPacket(std::move(negotiateFormatPacket), *audioMixer);
|
||||
}
|
||||
}
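For readers following the wire format: the negotiate payload written above is just a one-byte codec count followed by the codec names. The sketch below reproduces that layout with plain Qt serialization; it assumes writeString() emits a 32-bit length prefix plus the UTF-8 bytes (consistent with MAX_CODEC_NAME_LENGTH_ON_WIRE further down) and it ignores the NLPacket framing that the real code relies on.

#include <QtCore/QByteArray>
#include <QtCore/QDataStream>
#include <QtCore/QIODevice>
#include <QtCore/QStringList>

// Illustrative payload builder for NegotiateAudioFormat (not the real NLPacket path).
QByteArray buildNegotiatePayload(const QStringList& codecNames) {
    QByteArray payload;
    QDataStream stream(&payload, QIODevice::WriteOnly);
    stream << quint8(codecNames.size());               // number of codecs
    for (const QString& name : codecNames) {
        QByteArray utf8 = name.toUtf8();
        stream << quint32(utf8.size());                // assumed length prefix
        stream.writeRawData(utf8.constData(), utf8.size());
    }
    return payload;
}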
|
||||
|
||||
void AudioClient::handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> message) {
|
||||
_selectedCodecName = message->readString();
|
||||
|
||||
qDebug() << "Selected Codec:" << _selectedCodecName;
|
||||
|
||||
// release any old codec encoder/decoder first...
|
||||
if (_codec && _encoder) {
|
||||
_codec->releaseEncoder(_encoder);
|
||||
_encoder = nullptr;
|
||||
_codec = nullptr;
|
||||
}
|
||||
_receivedAudioStream.cleanupCodec();
|
||||
|
||||
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
|
||||
for (auto& plugin : codecPlugins) {
|
||||
if (_selectedCodecName == plugin->getName()) {
|
||||
_codec = plugin;
|
||||
_receivedAudioStream.setupCodec(plugin, _selectedCodecName, AudioConstants::STEREO);
|
||||
_encoder = plugin->createEncoder(AudioConstants::SAMPLE_RATE, AudioConstants::MONO);
|
||||
qDebug() << "Selected Codec Plugin:" << _codec.get();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
QString AudioClient::getDefaultDeviceName(QAudio::Mode mode) {
|
||||
QAudioDeviceInfo deviceInfo = defaultAudioDeviceForMode(mode);
|
||||
return deviceInfo.deviceName();
|
||||
|
@ -770,7 +825,16 @@ void AudioClient::handleAudioInput() {
|
|||
audioTransform.setTranslation(_positionGetter());
|
||||
audioTransform.setRotation(_orientationGetter());
|
||||
// FIXME find a way to properly handle both playback audio and user audio concurrently
|
||||
emitAudioPacket(networkAudioSamples, numNetworkBytes, _outgoingAvatarAudioSequenceNumber, audioTransform, packetType);
|
||||
|
||||
QByteArray decocedBuffer(reinterpret_cast<char*>(networkAudioSamples), numNetworkBytes);
|
||||
QByteArray encodedBuffer;
|
||||
if (_encoder) {
|
||||
_encoder->encode(decocedBuffer, encodedBuffer);
|
||||
} else {
|
||||
encodedBuffer = decocedBuffer;
|
||||
}
|
||||
|
||||
emitAudioPacket(encodedBuffer.constData(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber, audioTransform, packetType, _selectedCodecName);
|
||||
_stats.sentPacket();
|
||||
}
|
||||
}
|
||||
|
@ -779,23 +843,114 @@ void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
|
|||
Transform audioTransform;
|
||||
audioTransform.setTranslation(_positionGetter());
|
||||
audioTransform.setRotation(_orientationGetter());
|
||||
|
||||
QByteArray encodedBuffer;
|
||||
if (_encoder) {
|
||||
_encoder->encode(audio, encodedBuffer);
|
||||
} else {
|
||||
encodedBuffer = audio;
|
||||
}
|
||||
|
||||
// FIXME check a flag to see if we should echo audio?
|
||||
emitAudioPacket(audio.data(), audio.size(), _outgoingAvatarAudioSequenceNumber, audioTransform, PacketType::MicrophoneAudioWithEcho);
|
||||
emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber, audioTransform, PacketType::MicrophoneAudioWithEcho, _selectedCodecName);
|
||||
}
|
||||
|
||||
void AudioClient::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer) {
|
||||
const int numNetworkOutputSamples = inputBuffer.size() / sizeof(int16_t);
|
||||
const int numDeviceOutputSamples = numNetworkOutputSamples * (_outputFormat.sampleRate() * _outputFormat.channelCount())
|
||||
/ (_desiredOutputFormat.sampleRate() * _desiredOutputFormat.channelCount());
|
||||
void AudioClient::mixLocalAudioInjectors(int16_t* inputBuffer) {
|
||||
|
||||
memset(_hrtfBuffer, 0, sizeof(_hrtfBuffer));
|
||||
QVector<AudioInjector*> injectorsToRemove;
|
||||
static const float INT16_TO_FLOAT_SCALE_FACTOR = 1/32768.0f;
|
||||
|
||||
bool injectorsHaveData = false;
|
||||
|
||||
// lock the injector vector
|
||||
Lock lock(_injectorsMutex);
|
||||
|
||||
for (AudioInjector* injector : getActiveLocalAudioInjectors()) {
|
||||
if (injector->getLocalBuffer()) {
|
||||
|
||||
qint64 samplesToRead = injector->isStereo() ?
|
||||
AudioConstants::NETWORK_FRAME_BYTES_STEREO :
|
||||
AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL;
|
||||
|
||||
// get one frame from the injector (mono or stereo)
|
||||
memset(_scratchBuffer, 0, sizeof(_scratchBuffer));
|
||||
if (0 < injector->getLocalBuffer()->readData((char*)_scratchBuffer, samplesToRead)) {
|
||||
|
||||
injectorsHaveData = true;
|
||||
|
||||
if (injector->isStereo() ) {
|
||||
for(int i=0; i<AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; i++) {
|
||||
_hrtfBuffer[i] += (float)(_scratchBuffer[i]) * INT16_TO_FLOAT_SCALE_FACTOR;
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
// calculate distance, gain and azimuth for hrtf
|
||||
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
|
||||
float distance = glm::max(glm::length(relativePosition), EPSILON);
|
||||
float gain = gainForSource(distance, injector->getVolume());
|
||||
float azimuth = azimuthForSource(relativePosition);
|
||||
|
||||
injector->getLocalHRTF().render(_scratchBuffer, _hrtfBuffer, 1, azimuth, distance, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
qDebug() << "injector has no more data, marking finished for removal";
|
||||
injector->finishLocalInjection();
|
||||
injectorsToRemove.append(injector);
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
qDebug() << "injector has no local buffer, marking as finished for removal";
|
||||
injector->finishLocalInjection();
|
||||
injectorsToRemove.append(injector);
|
||||
}
|
||||
}
|
||||
|
||||
if(injectorsHaveData) {
|
||||
|
||||
// mix network into the hrtfBuffer
|
||||
for(int i=0; i<AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; i++) {
|
||||
_hrtfBuffer[i] += (float)(inputBuffer[i]) * INT16_TO_FLOAT_SCALE_FACTOR;
|
||||
}
|
||||
|
||||
// now, use limiter to write back to the inputBuffer
|
||||
_audioLimiter.render(_hrtfBuffer, inputBuffer, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
|
||||
}
|
||||
|
||||
for(AudioInjector* injector : injectorsToRemove) {
|
||||
qDebug() << "removing injector";
|
||||
getActiveLocalAudioInjectors().removeOne(injector);
|
||||
}
|
||||
}
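The essential arithmetic of the mix above, stripped of the injector bookkeeping, is: scale the int16 network samples by 1/32768 into the float accumulation buffer, add the injector contributions, and convert back to int16. A minimal sketch follows; the hard clip is only a stand-in for the AudioLimiter used in the real code.

#include <cstdint>
#include <algorithm>

// Mix a float injector buffer with int16 network samples and convert back (sketch).
static void mixAndConvert(const int16_t* network, const float* injectorMix,
                          int16_t* out, int numSamples) {
    const float INT16_TO_FLOAT_SCALE_FACTOR = 1.0f / 32768.0f;
    for (int i = 0; i < numSamples; i++) {
        float sample = injectorMix[i] + network[i] * INT16_TO_FLOAT_SCALE_FACTOR;
        sample = std::min(1.0f, std::max(-1.0f, sample));   // stand-in for AudioLimiter
        out[i] = (int16_t)(sample * 32767.0f);
    }
}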
|
||||
|
||||
void AudioClient::processReceivedSamples(const QByteArray& decodedBuffer, QByteArray& outputBuffer) {
|
||||
const int numDecodecSamples = decodedBuffer.size() / sizeof(int16_t);
|
||||
const int numDeviceOutputSamples = _outputFrameSize;
|
||||
|
||||
Q_ASSERT(_outputFrameSize == numDecodecSamples * (_outputFormat.sampleRate() * _outputFormat.channelCount())
|
||||
/ (_desiredOutputFormat.sampleRate() * _desiredOutputFormat.channelCount()));
|
||||
|
||||
outputBuffer.resize(numDeviceOutputSamples * sizeof(int16_t));
|
||||
|
||||
const int16_t* receivedSamples = reinterpret_cast<const int16_t*>(inputBuffer.data());
|
||||
const int16_t* decodedSamples;
|
||||
int16_t* outputSamples = reinterpret_cast<int16_t*>(outputBuffer.data());
|
||||
QByteArray decodedBufferCopy = decodedBuffer;
|
||||
assert(decodedBuffer.size() == AudioConstants::NETWORK_FRAME_BYTES_STEREO);
|
||||
|
||||
if(getActiveLocalAudioInjectors().size() > 0) {
|
||||
mixLocalAudioInjectors((int16_t*)decodedBufferCopy.data());
|
||||
decodedSamples = reinterpret_cast<const int16_t*>(decodedBufferCopy.data());
|
||||
} else {
|
||||
decodedSamples = reinterpret_cast<const int16_t*>(decodedBuffer.data());
|
||||
}
|
||||
|
||||
// copy the packet from the RB to the output
|
||||
possibleResampling(_networkToOutputResampler, receivedSamples, outputSamples,
|
||||
numNetworkOutputSamples, numDeviceOutputSamples,
|
||||
possibleResampling(_networkToOutputResampler, decodedSamples, outputSamples,
|
||||
numDecodecSamples, numDeviceOutputSamples,
|
||||
_desiredOutputFormat, _outputFormat);
|
||||
|
||||
// apply stereo reverb at the listener, to the received audio
|
||||
|
@ -852,36 +1007,25 @@ void AudioClient::setIsStereoInput(bool isStereoInput) {
|
|||
|
||||
|
||||
bool AudioClient::outputLocalInjector(bool isStereo, AudioInjector* injector) {
|
||||
if (injector->getLocalBuffer()) {
|
||||
QAudioFormat localFormat = _desiredOutputFormat;
|
||||
localFormat.setChannelCount(isStereo ? 2 : 1);
|
||||
Lock lock(_injectorsMutex);
|
||||
if (injector->getLocalBuffer() && _audioInput ) {
|
||||
// just add it to the vector of active local injectors, if
|
||||
// not already there.
|
||||
// Since this is invoked with invokeMethod, there _should_ be
|
||||
// no reason to lock access to the vector of injectors.
|
||||
if (!_activeLocalAudioInjectors.contains(injector)) {
|
||||
qDebug() << "adding new injector";
|
||||
_activeLocalAudioInjectors.append(injector);
|
||||
} else {
|
||||
qDebug() << "injector exists in active list already";
|
||||
}
|
||||
|
||||
return true;
|
||||
|
||||
QAudioOutput* localOutput = new QAudioOutput(getNamedAudioDeviceForMode(QAudio::AudioOutput, _outputAudioDeviceName),
|
||||
localFormat,
|
||||
injector->getLocalBuffer());
|
||||
|
||||
// move the localOutput to the same thread as the local injector buffer
|
||||
localOutput->moveToThread(injector->getLocalBuffer()->thread());
|
||||
|
||||
// have it be stopped when that local buffer is about to close
|
||||
// We don't want to stop this localOutput and injector whenever this AudioClient singleton goes idle,
|
||||
// only when the localOutput does. But the connection is to localOutput, so that it happens on the right thread.
|
||||
connect(localOutput, &QAudioOutput::stateChanged, localOutput, [=](QAudio::State state) {
|
||||
if (state == QAudio::IdleState) {
|
||||
localOutput->stop();
|
||||
injector->stop();
|
||||
}
|
||||
});
|
||||
|
||||
connect(injector->getLocalBuffer(), &QIODevice::aboutToClose, localOutput, &QAudioOutput::stop);
|
||||
|
||||
qCDebug(audioclient) << "Starting QAudioOutput for local injector" << localOutput;
|
||||
|
||||
localOutput->start(injector->getLocalBuffer());
|
||||
return localOutput->state() == QAudio::ActiveState;
|
||||
} else {
|
||||
// no local buffer or audio
|
||||
return false;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
void AudioClient::outputFormatChanged() {
|
||||
|
@ -1134,18 +1278,61 @@ float AudioClient::getAudioOutputMsecsUnplayed() const {
|
|||
return msecsAudioOutputUnplayed;
|
||||
}
|
||||
|
||||
|
||||
float AudioClient::azimuthForSource(const glm::vec3& relativePosition) {
|
||||
// copied from AudioMixer, more or less
|
||||
glm::quat inverseOrientation = glm::inverse(_orientationGetter());
|
||||
|
||||
// compute sample delay for the 2 ears to create phase panning
|
||||
glm::vec3 rotatedSourcePosition = inverseOrientation * relativePosition;
|
||||
|
||||
// project the rotated source position vector onto x-y plane
|
||||
rotatedSourcePosition.y = 0.0f;
|
||||
|
||||
static const float SOURCE_DISTANCE_THRESHOLD = 1e-30f;
|
||||
|
||||
if (glm::length2(rotatedSourcePosition) > SOURCE_DISTANCE_THRESHOLD) {
|
||||
|
||||
// produce an oriented angle about the y-axis
|
||||
return glm::orientedAngle(glm::vec3(0.0f, 0.0f, -1.0f), glm::normalize(rotatedSourcePosition), glm::vec3(0.0f, -1.0f, 0.0f));
|
||||
} else {
|
||||
|
||||
// no azimuth if they are in same spot
|
||||
return 0.0f;
|
||||
}
|
||||
}
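A quick sanity check of the azimuth convention computed here (a sketch under the assumption that the listener faces -Z, as the reference vector above implies): a source directly to the listener's right should come out at roughly +pi/2, matching the "clockwise panning angle" documented for AudioHRTF::render().

#define GLM_ENABLE_EXPERIMENTAL   // required for gtx headers in newer GLM releases
#include <glm/glm.hpp>
#include <glm/gtx/vector_angle.hpp>
#include <cstdio>

int main() {
    // source one meter to the listener's right, y component already projected out
    glm::vec3 rotatedSourcePosition(1.0f, 0.0f, 0.0f);
    float azimuth = glm::orientedAngle(glm::vec3(0.0f, 0.0f, -1.0f),
                                       glm::normalize(rotatedSourcePosition),
                                       glm::vec3(0.0f, -1.0f, 0.0f));
    printf("azimuth = %f radians\n", azimuth);   // expected: ~ +1.5708
    return 0;
}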
|
||||
|
||||
float AudioClient::gainForSource(float distance, float volume) {
|
||||
|
||||
const float ATTENUATION_BEGINS_AT_DISTANCE = 1.0f;
|
||||
|
||||
// I'm assuming that the AudioMixer's getting of the stream's attenuation
|
||||
// factor is basically the same as getting volume
|
||||
float gain = volume;
|
||||
|
||||
// attenuate based on distance
|
||||
if (distance >= ATTENUATION_BEGINS_AT_DISTANCE) {
|
||||
gain /= (distance/ATTENUATION_BEGINS_AT_DISTANCE); // attenuation = -6dB * log2(distance)
|
||||
}
|
||||
|
||||
return gain;
|
||||
}
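Worked out numerically, this curve halves the gain each time the distance doubles past the 1 m threshold, which is exactly the -6 dB per doubling noted in the comment above. A standalone sketch:

#include <cstdio>

// Standalone re-statement of the gain curve above, for a unit-volume source.
static float illustrativeGain(float distance, float volume) {
    const float ATTENUATION_BEGINS_AT_DISTANCE = 1.0f;
    float gain = volume;
    if (distance >= ATTENUATION_BEGINS_AT_DISTANCE) {
        gain /= (distance / ATTENUATION_BEGINS_AT_DISTANCE);
    }
    return gain;
}

int main() {
    // doubling the distance halves the gain: 1.0, 0.5, 0.25, 0.125 (-6 dB per step)
    for (float d = 1.0f; d <= 8.0f; d *= 2.0f) {
        printf("distance %4.1f m -> gain %.3f\n", d, illustrativeGain(d, 1.0f));
    }
    return 0;
}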
|
||||
|
||||
qint64 AudioClient::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
|
||||
auto samplesRequested = maxSize / sizeof(int16_t);
|
||||
int samplesPopped;
|
||||
int bytesWritten;
|
||||
|
||||
|
||||
if ((samplesPopped = _receivedAudioStream.popSamples((int)samplesRequested, false)) > 0) {
|
||||
AudioRingBuffer::ConstIterator lastPopOutput = _receivedAudioStream.getLastPopOutput();
|
||||
lastPopOutput.readSamples((int16_t*)data, samplesPopped);
|
||||
bytesWritten = samplesPopped * sizeof(int16_t);
|
||||
} else {
|
||||
// nothing on network, don't grab anything from injectors, and just
|
||||
// return 0s
|
||||
memset(data, 0, maxSize);
|
||||
bytesWritten = maxSize;
|
||||
|
||||
}
|
||||
|
||||
int bytesAudioOutputUnplayed = _audio->_audioOutput->bufferSize() - _audio->_audioOutput->bytesFree();
|
||||
|
@ -1185,6 +1372,13 @@ void AudioClient::loadSettings() {
|
|||
windowSecondsForDesiredCalcOnTooManyStarves.get());
|
||||
_receivedAudioStream.setWindowSecondsForDesiredReduction(windowSecondsForDesiredReduction.get());
|
||||
_receivedAudioStream.setRepetitionWithFade(repetitionWithFade.get());
|
||||
|
||||
qDebug() << "---- Initializing Audio Client ----";
|
||||
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
|
||||
for (auto& plugin : codecPlugins) {
|
||||
qDebug() << "Codec available:" << plugin->getName();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
void AudioClient::saveSettings() {
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
#include <fstream>
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
#include <mutex>
|
||||
|
||||
#include <QtCore/qsystemdetection.h>
|
||||
#include <QtCore/QByteArray>
|
||||
|
@ -37,11 +38,17 @@
|
|||
#include <SettingHandle.h>
|
||||
#include <Sound.h>
|
||||
#include <StDev.h>
|
||||
#include <AudioHRTF.h>
|
||||
#include <AudioSRC.h>
|
||||
#include <AudioInjector.h>
|
||||
#include <AudioReverb.h>
|
||||
#include <AudioLimiter.h>
|
||||
#include <AudioConstants.h>
|
||||
|
||||
#include <plugins/CodecPlugin.h>
|
||||
|
||||
#include "AudioIOStats.h"
|
||||
#include "AudioNoiseGate.h"
|
||||
#include "AudioSRC.h"
|
||||
#include "AudioReverb.h"
|
||||
|
||||
#ifdef _WIN32
|
||||
#pragma warning( push )
|
||||
|
@ -77,6 +84,9 @@ public:
|
|||
using AudioPositionGetter = std::function<glm::vec3()>;
|
||||
using AudioOrientationGetter = std::function<glm::quat()>;
|
||||
|
||||
using Mutex = std::mutex;
|
||||
using Lock = std::unique_lock<Mutex>;
|
||||
|
||||
class AudioOutputIODevice : public QIODevice {
|
||||
public:
|
||||
AudioOutputIODevice(MixedProcessedAudioStream& receivedAudioStream, AudioClient* audio) :
|
||||
|
@ -86,7 +96,6 @@ public:
|
|||
void stop() { close(); }
|
||||
qint64 readData(char * data, qint64 maxSize);
|
||||
qint64 writeData(const char * data, qint64 maxSize) { return 0; }
|
||||
|
||||
int getRecentUnfulfilledReads() { int unfulfilledReads = _unfulfilledReads; _unfulfilledReads = 0; return unfulfilledReads; }
|
||||
private:
|
||||
MixedProcessedAudioStream& _receivedAudioStream;
|
||||
|
@ -94,6 +103,8 @@ public:
|
|||
int _unfulfilledReads;
|
||||
};
|
||||
|
||||
void negotiateAudioFormat();
|
||||
|
||||
const MixedProcessedAudioStream& getReceivedAudioStream() const { return _receivedAudioStream; }
|
||||
MixedProcessedAudioStream& getReceivedAudioStream() { return _receivedAudioStream; }
|
||||
|
||||
|
@ -124,6 +135,8 @@ public:
|
|||
|
||||
void setPositionGetter(AudioPositionGetter positionGetter) { _positionGetter = positionGetter; }
|
||||
void setOrientationGetter(AudioOrientationGetter orientationGetter) { _orientationGetter = orientationGetter; }
|
||||
|
||||
QVector<AudioInjector*>& getActiveLocalAudioInjectors() { return _activeLocalAudioInjectors; }
|
||||
|
||||
static const float CALLBACK_ACCELERATOR_RATIO;
|
||||
|
||||
|
@ -139,6 +152,7 @@ public slots:
|
|||
void handleAudioDataPacket(QSharedPointer<ReceivedMessage> message);
|
||||
void handleNoisyMutePacket(QSharedPointer<ReceivedMessage> message);
|
||||
void handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> message);
|
||||
void handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> message);
|
||||
|
||||
void sendDownstreamAudioStatsPacket() { _stats.sendDownstreamAudioStatsPacket(); }
|
||||
void handleAudioInput();
|
||||
|
@ -205,7 +219,11 @@ protected:
|
|||
|
||||
private:
|
||||
void outputFormatChanged();
|
||||
void mixLocalAudioInjectors(int16_t* inputBuffer);
|
||||
float azimuthForSource(const glm::vec3& relativePosition);
|
||||
float gainForSource(float distance, float volume);
|
||||
|
||||
Mutex _injectorsMutex;
|
||||
QByteArray firstInputFrame;
|
||||
QAudioInput* _audioInput;
|
||||
QAudioFormat _desiredInputFormat;
|
||||
|
@ -261,6 +279,11 @@ private:
|
|||
AudioSRC* _inputToNetworkResampler;
|
||||
AudioSRC* _networkToOutputResampler;
|
||||
|
||||
// for local hrtf-ing
|
||||
float _hrtfBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
|
||||
int16_t _scratchBuffer[AudioConstants::NETWORK_FRAME_SAMPLES_STEREO];
|
||||
AudioLimiter _audioLimiter;
|
||||
|
||||
// Adds Reverb
|
||||
void configureReverb();
|
||||
void updateReverbOptions();
|
||||
|
@ -291,6 +314,12 @@ private:
|
|||
void checkDevices();
|
||||
|
||||
bool _hasReceivedFirstPacket = false;
|
||||
|
||||
QVector<AudioInjector*> _activeLocalAudioInjectors;
|
||||
|
||||
CodecPluginPointer _codec;
|
||||
QString _selectedCodecName;
|
||||
Encoder* _encoder { nullptr }; // for outbound mic stream
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
set(TARGET_NAME audio)
|
||||
setup_hifi_library(Network)
|
||||
link_hifi_libraries(networking shared)
|
||||
link_hifi_libraries(networking shared plugins)
|
||||
|
|
|
@ -19,7 +19,8 @@
|
|||
|
||||
#include "AudioConstants.h"
|
||||
|
||||
void AbstractAudioInterface::emitAudioPacket(const void* audioData, size_t bytes, quint16& sequenceNumber, const Transform& transform, PacketType packetType) {
|
||||
void AbstractAudioInterface::emitAudioPacket(const void* audioData, size_t bytes, quint16& sequenceNumber,
|
||||
const Transform& transform, PacketType packetType, QString codecName) {
|
||||
static std::mutex _mutex;
|
||||
using Locker = std::unique_lock<std::mutex>;
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
@ -27,10 +28,17 @@ void AbstractAudioInterface::emitAudioPacket(const void* audioData, size_t bytes
|
|||
if (audioMixer && audioMixer->getActiveSocket()) {
|
||||
Locker lock(_mutex);
|
||||
auto audioPacket = NLPacket::create(packetType);
|
||||
|
||||
// FIXME - this is not a good way to determine stereoness with codecs....
|
||||
quint8 isStereo = bytes == AudioConstants::NETWORK_FRAME_BYTES_STEREO ? 1 : 0;
|
||||
|
||||
// write sequence number
|
||||
audioPacket->writePrimitive(sequenceNumber++);
|
||||
auto sequence = sequenceNumber++;
|
||||
audioPacket->writePrimitive(sequence);
|
||||
|
||||
// write the codec
|
||||
audioPacket->writeString(codecName);
|
||||
|
||||
if (packetType == PacketType::SilentAudioFrame) {
|
||||
// pack num silent samples
|
||||
quint16 numSilentSamples = isStereo ?
|
||||
|
@ -49,8 +57,8 @@ void AbstractAudioInterface::emitAudioPacket(const void* audioData, size_t bytes
|
|||
|
||||
if (audioPacket->getType() != PacketType::SilentAudioFrame) {
|
||||
// audio samples have already been packed (written to networkAudioSamples)
|
||||
audioPacket->setPayloadSize(audioPacket->getPayloadSize() + bytes);
|
||||
static const int leadingBytes = sizeof(quint16) + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8);
|
||||
int leadingBytes = audioPacket->getPayloadSize();
|
||||
audioPacket->setPayloadSize(leadingBytes + bytes);
|
||||
memcpy(audioPacket->getPayload() + leadingBytes, audioData, bytes);
|
||||
}
|
||||
nodeList->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::SendAudioPacket);
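The switch from a fixed leadingBytes constant to getPayloadSize() is forced by the codec name: the header is now variable-length. Below is a sketch of the assumed field sizes; the middle of the function is elided in this hunk, so the exact ordering of the stereo flag, position and orientation is an assumption made only for illustration.

#include <cstddef>
#include <cstdint>
#include <string>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Hypothetical header-size calculation for a non-silent audio packet.
static size_t audioHeaderBytes(const std::string& codecName) {
    return sizeof(uint16_t)                      // sequence number
         + sizeof(uint32_t) + codecName.size()   // length-prefixed codec name (assumed)
         + sizeof(uint8_t)                       // isStereo flag
         + sizeof(glm::vec3)                     // position
         + sizeof(glm::quat);                    // orientation
}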
|
||||
|
|
|
@ -28,7 +28,8 @@ class AbstractAudioInterface : public QObject {
|
|||
public:
|
||||
AbstractAudioInterface(QObject* parent = 0) : QObject(parent) {};
|
||||
|
||||
static void emitAudioPacket(const void* audioData, size_t bytes, quint16& sequenceNumber, const Transform& transform, PacketType packetType);
|
||||
static void emitAudioPacket(const void* audioData, size_t bytes, quint16& sequenceNumber, const Transform& transform,
|
||||
PacketType packetType, QString codecName = QString(""));
|
||||
|
||||
public slots:
|
||||
virtual bool outputLocalInjector(bool isStereo, AudioInjector* injector) = 0;
|
||||
|
|
|
@ -26,6 +26,8 @@ namespace AudioConstants {
|
|||
|
||||
inline const char* getAudioFrameName() { return "com.highfidelity.recording.Audio"; }
|
||||
|
||||
const int MAX_CODEC_NAME_LENGTH = 30;
|
||||
const int MAX_CODEC_NAME_LENGTH_ON_WIRE = MAX_CODEC_NAME_LENGTH + sizeof(uint32_t);
|
||||
const int NETWORK_FRAME_BYTES_STEREO = 1024;
|
||||
const int NETWORK_FRAME_SAMPLES_STEREO = NETWORK_FRAME_BYTES_STEREO / sizeof(AudioSample);
|
||||
const int NETWORK_FRAME_BYTES_PER_CHANNEL = 512;
|
||||
|
|
|
@ -16,6 +16,13 @@
|
|||
#include "AudioHRTF.h"
|
||||
#include "AudioHRTFData.h"
|
||||
|
||||
#ifndef MAX
|
||||
#define MAX(a,b) (((a) > (b)) ? (a) : (b))
|
||||
#endif
|
||||
#ifndef MIN
|
||||
#define MIN(a,b) (((a) < (b)) ? (a) : (b))
|
||||
#endif
|
||||
|
||||
//
|
||||
// Equal-gain crossfade
|
||||
//
|
||||
|
@ -58,6 +65,103 @@ static const float crossfadeTable[HRTF_BLOCK] = {
|
|||
0.0024846123f, 0.0019026510f, 0.0013981014f, 0.0009710421f, 0.0006215394f, 0.0003496476f, 0.0001554090f, 0.0000388538f,
|
||||
};
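For context, the equal-gain property means the two fade windows sum to 1.0 at every sample, so a steady-state signal passes through the old-to-new filter handoff unchanged. A mono sketch of how such a table is applied (the real crossfade_4x2 does the same thing on four interleaved channels and accumulates into the stereo mix):

// Equal-gain crossfade of an old-filter block into a new-filter block (sketch).
static void crossfadeMonoAccumulate(const float* oldOut, const float* newOut, float* dst,
                                    const float* window, int numFrames) {
    for (int i = 0; i < numFrames; i++) {
        // window[] ramps from 1.0 down toward 0.0 across the block, like crossfadeTable
        dst[i] += window[i] * oldOut[i] + (1.0f - window[i]) * newOut[i];
    }
}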
|
||||
|
||||
//
|
||||
// Model the frequency-dependent attenuation of sound propagation in air.
|
||||
//
|
||||
// Fit using linear regression to a log-log model of lowpass cutoff frequency vs distance,
|
||||
// loosely based on data from Handbook of Acoustics. Only the onset of significant
|
||||
// attenuation is modelled, not the filter slope.
|
||||
//
|
||||
// 1m -> -3dB @ 55kHz
|
||||
// 10m -> -3dB @ 12kHz
|
||||
// 100m -> -3dB @ 2.5kHz
|
||||
// 1km -> -3dB @ 0.6kHz
|
||||
// 10km -> -3dB @ 0.1kHz
|
||||
//
|
||||
static const int NLOWPASS = 64;
|
||||
static const float lowpassTable[NLOWPASS][5] = { // { b0, b1, b2, a1, a2 }
|
||||
// distance = 1
|
||||
{ 0.999772371f, 1.399489756f, 0.454495527f, 1.399458985f, 0.454298669f },
|
||||
{ 0.999631480f, 1.357609808f, 0.425210203f, 1.357549905f, 0.424901586f },
|
||||
{ 0.999405154f, 1.311503050f, 0.394349994f, 1.311386830f, 0.393871368f },
|
||||
{ 0.999042876f, 1.260674595f, 0.361869089f, 1.260450057f, 0.361136504f },
|
||||
// distance = 2
|
||||
{ 0.998465222f, 1.204646525f, 0.327757118f, 1.204214978f, 0.326653886f },
|
||||
{ 0.997548106f, 1.143019308f, 0.292064663f, 1.142195387f, 0.290436690f },
|
||||
{ 0.996099269f, 1.075569152f, 0.254941286f, 1.074009405f, 0.252600301f },
|
||||
{ 0.993824292f, 1.002389610f, 0.216688640f, 0.999469185f, 0.213433357f },
|
||||
// distance = 4
|
||||
{ 0.990280170f, 0.924075266f, 0.177827150f, 0.918684864f, 0.173497723f },
|
||||
{ 0.984818279f, 0.841917936f, 0.139164195f, 0.832151968f, 0.133748443f },
|
||||
{ 0.976528670f, 0.758036513f, 0.101832398f, 0.740761682f, 0.095635899f },
|
||||
{ 0.964216485f, 0.675305244f, 0.067243474f, 0.645654855f, 0.061110348f },
|
||||
// distance = 8
|
||||
{ 0.946463038f, 0.596943020f, 0.036899688f, 0.547879974f, 0.032425772f },
|
||||
{ 0.921823868f, 0.525770189f, 0.012060451f, 0.447952111f, 0.011702396f },
|
||||
{ 0.890470015f, 0.463334299f, -0.001227816f, 0.347276405f, 0.005300092f },
|
||||
{ 0.851335343f, 0.407521164f, -0.009353968f, 0.241900234f, 0.007602305f },
|
||||
// distance = 16
|
||||
{ 0.804237360f, 0.358139558f, -0.014293332f, 0.130934213f, 0.017149373f },
|
||||
{ 0.750073259f, 0.314581568f, -0.016625381f, 0.014505388f, 0.033524057f },
|
||||
{ 0.690412072f, 0.275936128f, -0.017054561f, -0.106682490f, 0.055976129f },
|
||||
{ 0.627245545f, 0.241342015f, -0.016246850f, -0.231302564f, 0.083643275f },
|
||||
// distance = 32
|
||||
{ 0.562700627f, 0.210158533f, -0.014740899f, -0.357562697f, 0.115680957f },
|
||||
{ 0.498787849f, 0.181982455f, -0.012925406f, -0.483461730f, 0.151306628f },
|
||||
{ 0.437224055f, 0.156585449f, -0.011055180f, -0.607042210f, 0.189796534f },
|
||||
{ 0.379336998f, 0.133834032f, -0.009281617f, -0.726580065f, 0.230469477f },
|
||||
// distance = 64
|
||||
{ 0.326040627f, 0.113624970f, -0.007683443f, -0.840693542f, 0.272675696f },
|
||||
{ 0.277861727f, 0.095845793f, -0.006291936f, -0.948380091f, 0.315795676f },
|
||||
{ 0.234997480f, 0.080357656f, -0.005109519f, -1.049001190f, 0.359246807f },
|
||||
{ 0.197386484f, 0.066993521f, -0.004122547f, -1.142236313f, 0.402493771f },
|
||||
// distance = 128
|
||||
{ 0.164780457f, 0.055564709f, -0.003309645f, -1.228023442f, 0.445058962f },
|
||||
{ 0.136808677f, 0.045870650f, -0.002646850f, -1.306498037f, 0.486530514f },
|
||||
{ 0.113031290f, 0.037708627f, -0.002110591f, -1.377937457f, 0.526566783f },
|
||||
{ 0.092980475f, 0.030881892f, -0.001679255f, -1.442713983f, 0.564897095f },
|
||||
// distance = 256
|
||||
{ 0.076190239f, 0.025205585f, -0.001333863f, -1.501257246f, 0.601319206f },
|
||||
{ 0.062216509f, 0.020510496f, -0.001058229f, -1.554025452f, 0.635694228f },
|
||||
{ 0.050649464f, 0.016644994f, -0.000838826f, -1.601484205f, 0.667939837f },
|
||||
{ 0.041120009f, 0.013475547f, -0.000664513f, -1.644091518f, 0.698022561f },
|
||||
// distance = 512
|
||||
{ 0.033302044f, 0.010886252f, -0.000526217f, -1.682287704f, 0.725949783f },
|
||||
{ 0.026911868f, 0.008777712f, -0.000416605f, -1.716488979f, 0.751761953f },
|
||||
{ 0.021705773f, 0.007065551f, -0.000329788f, -1.747083800f, 0.775525335f },
|
||||
{ 0.017476603f, 0.005678758f, -0.000261057f, -1.774431204f, 0.797325509f },
|
||||
// distance = 1024
|
||||
{ 0.014049828f, 0.004558012f, -0.000206658f, -1.798860530f, 0.817261711f },
|
||||
{ 0.011279504f, 0.003654067f, -0.000163610f, -1.820672082f, 0.835442043f },
|
||||
{ 0.009044384f, 0.002926264f, -0.000129544f, -1.840138412f, 0.851979516f },
|
||||
{ 0.007244289f, 0.002341194f, -0.000102586f, -1.857505967f, 0.866988864f },
|
||||
// distance = 2048
|
||||
{ 0.005796846f, 0.001871515f, -0.000081250f, -1.872996926f, 0.880584038f },
|
||||
{ 0.004634607f, 0.001494933f, -0.000064362f, -1.886811124f, 0.892876302f },
|
||||
{ 0.003702543f, 0.001193324f, -0.000050993f, -1.899127955f, 0.903972829f },
|
||||
{ 0.002955900f, 0.000951996f, -0.000040407f, -1.910108223f, 0.913975712f },
|
||||
// distance = 4096
|
||||
{ 0.002358382f, 0.000759068f, -0.000032024f, -1.919895894f, 0.922981321f },
|
||||
{ 0.001880626f, 0.000604950f, -0.000025383f, -1.928619738f, 0.931079931f },
|
||||
{ 0.001498926f, 0.000481920f, -0.000020123f, -1.936394836f, 0.938355560f },
|
||||
{ 0.001194182f, 0.000383767f, -0.000015954f, -1.943323983f, 0.944885977f },
|
||||
// distance = 8192
|
||||
{ 0.000951028f, 0.000305502f, -0.000012651f, -1.949498943f, 0.950742822f },
|
||||
{ 0.000757125f, 0.000243126f, -0.000010033f, -1.955001608f, 0.955991826f },
|
||||
{ 0.000602572f, 0.000193434f, -0.000007957f, -1.959905036f, 0.960693085f },
|
||||
{ 0.000479438f, 0.000153861f, -0.000006312f, -1.964274383f, 0.964901371f },
|
||||
// distance = 16384
|
||||
{ 0.000381374f, 0.000122359f, -0.000005007f, -1.968167752f, 0.968666478f },
|
||||
{ 0.000303302f, 0.000097288f, -0.000003972f, -1.971636944f, 0.972033562f },
|
||||
{ 0.000241166f, 0.000077342f, -0.000003151f, -1.974728138f, 0.975043493f },
|
||||
{ 0.000191726f, 0.000061475f, -0.000002500f, -1.977482493f, 0.977733194f },
|
||||
// distance = 32768
|
||||
{ 0.000152399f, 0.000048857f, -0.000001984f, -1.979936697f, 0.980135969f },
|
||||
{ 0.000121122f, 0.000038825f, -0.000001574f, -1.982123446f, 0.982281818f },
|
||||
{ 0.000096252f, 0.000030849f, -0.000001249f, -1.984071877f, 0.984197728f },
|
||||
{ 0.000076480f, 0.000024509f, -0.000000991f, -1.985807957f, 0.985907955f },
|
||||
};
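A rough way to see what this table encodes, independent of the biquad coefficients, is to interpolate the quoted -3 dB anchor points on a log-log scale. The sketch below is only an illustration of the stated fit, not the code that generated the table.

#include <cmath>
#include <cstdio>

// Piecewise log-log interpolation through the -3 dB anchor points quoted above.
static float estimateCutoffHz(float distanceMeters) {
    static const float dist[]   = { 1.0f, 10.0f, 100.0f, 1000.0f, 10000.0f };
    static const float cutoff[] = { 55000.0f, 12000.0f, 2500.0f, 600.0f, 100.0f };
    if (distanceMeters <= dist[0]) return cutoff[0];
    if (distanceMeters >= dist[4]) return cutoff[4];
    for (int i = 0; i < 4; i++) {
        if (distanceMeters <= dist[i + 1]) {
            float t = (std::log10(distanceMeters) - std::log10(dist[i]))
                    / (std::log10(dist[i + 1]) - std::log10(dist[i]));
            return std::pow(10.0f, std::log10(cutoff[i])
                                   + t * (std::log10(cutoff[i + 1]) - std::log10(cutoff[i])));
        }
    }
    return cutoff[4];
}

int main() {
    printf("~%.0f Hz at 30 m\n", estimateCutoffHz(30.0f));   // lands between 12 kHz and 2.5 kHz
    return 0;
}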
|
||||
|
||||
static const float TWOPI = 6.283185307f;
|
||||
|
||||
//
|
||||
|
@ -162,40 +266,68 @@ static void interleave_4x4(float* src0, float* src1, float* src2, float* src3, f
|
|||
}
|
||||
}
|
||||
|
||||
// 4 channels (interleaved)
|
||||
static void biquad_4x4(float* src, float* dst, float coef[5][4], float state[2][4], int numFrames) {
|
||||
// process 2 cascaded biquads on 4 channels (interleaved)
|
||||
// biquads computed in parallel, by adding one sample of delay
|
||||
static void biquad2_4x4(float* src, float* dst, float coef[5][8], float state[3][8], int numFrames) {
|
||||
|
||||
// enable flush-to-zero mode to prevent denormals
|
||||
unsigned int ftz = _MM_GET_FLUSH_ZERO_MODE();
|
||||
_MM_SET_FLUSH_ZERO_MODE(_MM_FLUSH_ZERO_ON);
|
||||
|
||||
__m128 w1 = _mm_loadu_ps(state[0]);
|
||||
__m128 w2 = _mm_loadu_ps(state[1]);
|
||||
// restore state
|
||||
__m128 y00 = _mm_loadu_ps(&state[0][0]);
|
||||
__m128 w10 = _mm_loadu_ps(&state[1][0]);
|
||||
__m128 w20 = _mm_loadu_ps(&state[2][0]);
|
||||
|
||||
__m128 b0 = _mm_loadu_ps(coef[0]);
|
||||
__m128 b1 = _mm_loadu_ps(coef[1]);
|
||||
__m128 b2 = _mm_loadu_ps(coef[2]);
|
||||
__m128 a1 = _mm_loadu_ps(coef[3]);
|
||||
__m128 a2 = _mm_loadu_ps(coef[4]);
|
||||
__m128 y01;
|
||||
__m128 w11 = _mm_loadu_ps(&state[1][4]);
|
||||
__m128 w21 = _mm_loadu_ps(&state[2][4]);
|
||||
|
||||
// first biquad coefs
|
||||
__m128 b00 = _mm_loadu_ps(&coef[0][0]);
|
||||
__m128 b10 = _mm_loadu_ps(&coef[1][0]);
|
||||
__m128 b20 = _mm_loadu_ps(&coef[2][0]);
|
||||
__m128 a10 = _mm_loadu_ps(&coef[3][0]);
|
||||
__m128 a20 = _mm_loadu_ps(&coef[4][0]);
|
||||
|
||||
// second biquad coefs
|
||||
__m128 b01 = _mm_loadu_ps(&coef[0][4]);
|
||||
__m128 b11 = _mm_loadu_ps(&coef[1][4]);
|
||||
__m128 b21 = _mm_loadu_ps(&coef[2][4]);
|
||||
__m128 a11 = _mm_loadu_ps(&coef[3][4]);
|
||||
__m128 a21 = _mm_loadu_ps(&coef[4][4]);
|
||||
|
||||
for (int i = 0; i < numFrames; i++) {
|
||||
|
||||
__m128 x00 = _mm_loadu_ps(&src[4*i]);
|
||||
__m128 x01 = y00; // first biquad output
|
||||
|
||||
// transposed Direct Form II
|
||||
__m128 x0 = _mm_loadu_ps(&src[4*i]);
|
||||
__m128 y0;
|
||||
y00 = _mm_add_ps(w10, _mm_mul_ps(x00, b00));
|
||||
y01 = _mm_add_ps(w11, _mm_mul_ps(x01, b01));
|
||||
|
||||
y0 = _mm_add_ps(w1, _mm_mul_ps(x0, b0));
|
||||
w1 = _mm_add_ps(w2, _mm_mul_ps(x0, b1));
|
||||
w2 = _mm_mul_ps(x0, b2);
|
||||
w1 = _mm_sub_ps(w1, _mm_mul_ps(y0, a1));
|
||||
w2 = _mm_sub_ps(w2, _mm_mul_ps(y0, a2));
|
||||
w10 = _mm_add_ps(w20, _mm_mul_ps(x00, b10));
|
||||
w11 = _mm_add_ps(w21, _mm_mul_ps(x01, b11));
|
||||
|
||||
_mm_storeu_ps(&dst[4*i], y0);
|
||||
w20 = _mm_mul_ps(x00, b20);
|
||||
w21 = _mm_mul_ps(x01, b21);
|
||||
|
||||
w10 = _mm_sub_ps(w10, _mm_mul_ps(y00, a10));
|
||||
w11 = _mm_sub_ps(w11, _mm_mul_ps(y01, a11));
|
||||
|
||||
w20 = _mm_sub_ps(w20, _mm_mul_ps(y00, a20));
|
||||
w21 = _mm_sub_ps(w21, _mm_mul_ps(y01, a21));
|
||||
|
||||
_mm_storeu_ps(&dst[4*i], y01); // second biquad output
|
||||
}
|
||||
|
||||
// save state
|
||||
_mm_storeu_ps(state[0], w1);
|
||||
_mm_storeu_ps(state[1], w2);
|
||||
_mm_storeu_ps(&state[0][0], y00);
|
||||
_mm_storeu_ps(&state[1][0], w10);
|
||||
_mm_storeu_ps(&state[2][0], w20);
|
||||
|
||||
_mm_storeu_ps(&state[1][4], w11);
|
||||
_mm_storeu_ps(&state[2][4], w21);
|
||||
|
||||
_MM_SET_FLUSH_ZERO_MODE(ftz);
|
||||
}
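For readers less fluent in intrinsics, the per-lane math above is a plain transposed Direct Form II biquad; the cascade comes from feeding the first biquad's previous output into the second group of lanes one sample later. A single-channel reference version of one lane (a sketch, not used by the engine):

// Reference single-channel transposed Direct Form II biquad, the scalar
// equivalent of one lane of the vectorized kernel above (illustrative only).
static void biquadMono(const float* src, float* dst, const float coef[5],
                       float state[2], int numFrames) {
    float b0 = coef[0], b1 = coef[1], b2 = coef[2], a1 = coef[3], a2 = coef[4];
    float w1 = state[0], w2 = state[1];
    for (int i = 0; i < numFrames; i++) {
        float x = src[i];
        float y = b0 * x + w1;
        w1 = b1 * x - a1 * y + w2;
        w2 = b2 * x - a2 * y;
        dst[i] = y;
    }
    state[0] = w1;
    state[1] = w2;
}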
|
||||
|
@ -345,56 +477,105 @@ static void interleave_4x4(float* src0, float* src1, float* src2, float* src3, f
|
|||
}
|
||||
}
|
||||
|
||||
// 4 channels (interleaved)
|
||||
static void biquad_4x4(float* src, float* dst, float coef[5][4], float state[2][4], int numFrames) {
|
||||
// process 2 cascaded biquads on 4 channels (interleaved)
|
||||
// biquads are computed in parallel, by adding one sample of delay
|
||||
static void biquad2_4x4(float* src, float* dst, float coef[5][8], float state[3][8], int numFrames) {
|
||||
|
||||
// channel 0
|
||||
float w10 = state[0][0];
|
||||
float w20 = state[1][0];
|
||||
// restore state
|
||||
float y00 = state[0][0];
|
||||
float w10 = state[1][0];
|
||||
float w20 = state[2][0];
|
||||
|
||||
float y01 = state[0][1];
|
||||
float w11 = state[1][1];
|
||||
float w21 = state[2][1];
|
||||
|
||||
float y02 = state[0][2];
|
||||
float w12 = state[1][2];
|
||||
float w22 = state[2][2];
|
||||
|
||||
float y03 = state[0][3];
|
||||
float w13 = state[1][3];
|
||||
float w23 = state[2][3];
|
||||
|
||||
float y04;
|
||||
float w14 = state[1][4];
|
||||
float w24 = state[2][4];
|
||||
|
||||
float y05;
|
||||
float w15 = state[1][5];
|
||||
float w25 = state[2][5];
|
||||
|
||||
float y06;
|
||||
float w16 = state[1][6];
|
||||
float w26 = state[2][6];
|
||||
|
||||
float y07;
|
||||
float w17 = state[1][7];
|
||||
float w27 = state[2][7];
|
||||
|
||||
// first biquad coefs
|
||||
float b00 = coef[0][0];
|
||||
float b10 = coef[1][0];
|
||||
float b20 = coef[2][0];
|
||||
float a10 = coef[3][0];
|
||||
float a20 = coef[4][0];
|
||||
|
||||
// channel 1
|
||||
float w11 = state[0][1];
|
||||
float w21 = state[1][1];
|
||||
|
||||
float b01 = coef[0][1];
|
||||
float b11 = coef[1][1];
|
||||
float b21 = coef[2][1];
|
||||
float a11 = coef[3][1];
|
||||
float a21 = coef[4][1];
|
||||
|
||||
// channel 2
|
||||
float w12 = state[0][2];
|
||||
float w22 = state[1][2];
|
||||
|
||||
float b02 = coef[0][2];
|
||||
float b12 = coef[1][2];
|
||||
float b22 = coef[2][2];
|
||||
float a12 = coef[3][2];
|
||||
float a22 = coef[4][2];
|
||||
|
||||
// channel 3
|
||||
float w13 = state[0][3];
|
||||
float w23 = state[1][3];
|
||||
|
||||
float b03 = coef[0][3];
|
||||
float b13 = coef[1][3];
|
||||
float b23 = coef[2][3];
|
||||
float a13 = coef[3][3];
|
||||
float a23 = coef[4][3];
|
||||
|
||||
// second biquad coefs
|
||||
float b04 = coef[0][4];
|
||||
float b14 = coef[1][4];
|
||||
float b24 = coef[2][4];
|
||||
float a14 = coef[3][4];
|
||||
float a24 = coef[4][4];
|
||||
|
||||
float b05 = coef[0][5];
|
||||
float b15 = coef[1][5];
|
||||
float b25 = coef[2][5];
|
||||
float a15 = coef[3][5];
|
||||
float a25 = coef[4][5];
|
||||
|
||||
float b06 = coef[0][6];
|
||||
float b16 = coef[1][6];
|
||||
float b26 = coef[2][6];
|
||||
float a16 = coef[3][6];
|
||||
float a26 = coef[4][6];
|
||||
|
||||
float b07 = coef[0][7];
|
||||
float b17 = coef[1][7];
|
||||
float b27 = coef[2][7];
|
||||
float a17 = coef[3][7];
|
||||
float a27 = coef[4][7];
|
||||
|
||||
for (int i = 0; i < numFrames; i++) {
|
||||
|
||||
// first biquad input
|
||||
float x00 = src[4*i+0] + 1.0e-20f; // prevent denormals
|
||||
float x01 = src[4*i+1] + 1.0e-20f;
|
||||
float x02 = src[4*i+2] + 1.0e-20f;
|
||||
float x03 = src[4*i+3] + 1.0e-20f;
|
||||
float y00, y01, y02, y03;
|
||||
// second biquad input is previous output
|
||||
float x04 = y00;
|
||||
float x05 = y01;
|
||||
float x06 = y02;
|
||||
float x07 = y03;
|
||||
|
||||
// transposed Direct Form II
|
||||
y00 = b00 * x00 + w10;
|
||||
|
@ -413,24 +594,57 @@ static void biquad_4x4(float* src, float* dst, float coef[5][4], float state[2][
|
|||
w13 = b13 * x03 - a13 * y03 + w23;
|
||||
w23 = b23 * x03 - a23 * y03;
|
||||
|
||||
dst[4*i+0] = y00;
|
||||
dst[4*i+1] = y01;
|
||||
dst[4*i+2] = y02;
|
||||
dst[4*i+3] = y03;
|
||||
// transposed Direct Form II
|
||||
y04 = b04 * x04 + w14;
|
||||
w14 = b14 * x04 - a14 * y04 + w24;
|
||||
w24 = b24 * x04 - a24 * y04;
|
||||
|
||||
y05 = b05 * x05 + w15;
|
||||
w15 = b15 * x05 - a15 * y05 + w25;
|
||||
w25 = b25 * x05 - a25 * y05;
|
||||
|
||||
y06 = b06 * x06 + w16;
|
||||
w16 = b16 * x06 - a16 * y06 + w26;
|
||||
w26 = b26 * x06 - a26 * y06;
|
||||
|
||||
y07 = b07 * x07 + w17;
|
||||
w17 = b17 * x07 - a17 * y07 + w27;
|
||||
w27 = b27 * x07 - a27 * y07;
|
||||
|
||||
dst[4*i+0] = y04; // second biquad output
|
||||
dst[4*i+1] = y05;
|
||||
dst[4*i+2] = y06;
|
||||
dst[4*i+3] = y07;
|
||||
}
|
||||
|
||||
// save state
|
||||
state[0][0] = w10;
|
||||
state[1][0] = w20;
|
||||
state[0][0] = y00;
|
||||
state[1][0] = w10;
|
||||
state[2][0] = w20;
|
||||
|
||||
state[0][1] = w11;
|
||||
state[1][1] = w21;
|
||||
state[0][1] = y01;
|
||||
state[1][1] = w11;
|
||||
state[2][1] = w21;
|
||||
|
||||
state[0][2] = w12;
|
||||
state[1][2] = w22;
|
||||
state[0][2] = y02;
|
||||
state[1][2] = w12;
|
||||
state[2][2] = w22;
|
||||
|
||||
state[0][3] = w13;
|
||||
state[1][3] = w23;
|
||||
state[0][3] = y03;
|
||||
state[1][3] = w13;
|
||||
state[2][3] = w23;
|
||||
|
||||
state[1][4] = w14;
|
||||
state[2][4] = w24;
|
||||
|
||||
state[1][5] = w15;
|
||||
state[2][5] = w25;
|
||||
|
||||
state[1][6] = w16;
|
||||
state[2][6] = w26;
|
||||
|
||||
state[1][7] = w17;
|
||||
state[2][7] = w27;
|
||||
}
|
||||
|
||||
// crossfade 4 inputs into 2 outputs with accumulation (interleaved)
|
||||
|
@ -468,9 +682,63 @@ static void ThiranBiquad(float f, float& b0, float& b1, float& b2, float& a1, fl
|
|||
b2 = 1.0f;
|
||||
}
|
||||
|
||||
// compute new filters for a given azimuth and gain
|
||||
static void setAzimuthAndGain(float firCoef[4][HRTF_TAPS], float bqCoef[5][4], int delay[4],
|
||||
int index, float azimuth, float gain, int channel) {
|
||||
// split x into exponent and fraction (0.0f to 1.0f)
|
||||
static void splitf(float x, int& expn, float& frac) {
|
||||
|
||||
union { float f; int i; } mant, bits = { x };
|
||||
const int IEEE754_MANT_BITS = 23;
|
||||
const int IEEE754_EXPN_BIAS = 127;
|
||||
|
||||
mant.i = bits.i & ((1 << IEEE754_MANT_BITS) - 1);
|
||||
mant.i |= (IEEE754_EXPN_BIAS << IEEE754_MANT_BITS);
|
||||
|
||||
frac = mant.f - 1.0f;
|
||||
expn = (bits.i >> IEEE754_MANT_BITS) - IEEE754_EXPN_BIAS;
|
||||
}
|
||||
|
||||
static void distanceBiquad(float distance, float& b0, float& b1, float& b2, float& a1, float& a2) {
|
||||
|
||||
//
|
||||
// Computed from a lookup table quantized to distance = 2^(N/4)
|
||||
// and reconstructed by piecewise linear interpolation.
|
||||
// Approximation error < 0.25dB
|
||||
//
|
||||
|
||||
float x = distance;
|
||||
x = MIN(MAX(x, 1.0f), 1<<30);
|
||||
x *= x;
|
||||
x *= x; // x = distance^4
|
||||
|
||||
// split x into e and frac, such that x = 2^(e+0) + frac * (2^(e+1) - 2^(e+0))
|
||||
int e;
|
||||
float frac;
|
||||
splitf(x, e, frac);
|
||||
|
||||
// clamp to table limits
|
||||
if (e < 0) {
|
||||
e = 0;
|
||||
frac = 0.0f;
|
||||
}
|
||||
if (e > NLOWPASS-2) {
|
||||
e = NLOWPASS-2;
|
||||
frac = 1.0f;
|
||||
}
|
||||
assert(frac >= 0.0f);
|
||||
assert(frac <= 1.0f);
|
||||
assert(e+0 >= 0);
|
||||
assert(e+1 < NLOWPASS);
|
||||
|
||||
// piecewise linear interpolation
|
||||
b0 = lowpassTable[e+0][0] + frac * (lowpassTable[e+1][0] - lowpassTable[e+0][0]);
|
||||
b1 = lowpassTable[e+0][1] + frac * (lowpassTable[e+1][1] - lowpassTable[e+0][1]);
|
||||
b2 = lowpassTable[e+0][2] + frac * (lowpassTable[e+1][2] - lowpassTable[e+0][2]);
|
||||
a1 = lowpassTable[e+0][3] + frac * (lowpassTable[e+1][3] - lowpassTable[e+0][3]);
|
||||
a2 = lowpassTable[e+0][4] + frac * (lowpassTable[e+1][4] - lowpassTable[e+0][4]);
|
||||
}
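To see why x is raised to the fourth power: the table rows are quantized at distance = 2^(N/4), so distance^4 lands exactly on 2^N and splitf's exponent becomes the row index, with frac covering everything in between. A small self-contained check for the exact powers of two (using frexp in place of splitf so it compiles on its own):

#include <cmath>
#include <cstdio>

int main() {
    // distances 1, 2, 4 and 8 metres should select lowpassTable rows 0, 4, 8 and 12
    for (int n = 0; n <= 12; n += 4) {
        float distance = std::ldexp(1.0f, n / 4);   // 2^(n/4), exact for n divisible by 4
        float x = distance * distance;
        x *= x;                                     // x = distance^4
        int e;
        std::frexp(x, &e);                          // x = m * 2^e, with m in [0.5, 1)
        printf("distance = %g m -> table row %d\n", distance, e - 1);
    }
    return 0;
}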
|
||||
|
||||
// compute new filters for a given azimuth, distance and gain
|
||||
static void setFilters(float firCoef[4][HRTF_TAPS], float bqCoef[5][8], int delay[4],
                       int index, float azimuth, float distance, float gain, int channel) {

    // convert from radians to table units
    azimuth *= HRTF_AZIMUTHS / TWOPI;

@@ -551,9 +819,26 @@ static void setAzimuthAndGain(float firCoef[4][HRTF_TAPS], float bqCoef[5][4], i
    bqCoef[4][channel+1] = a2;
    delay[channel+1] = itdi;
}

    //
    // Second biquad implements the distance filter.
    //
    distanceBiquad(distance, b0, b1, b2, a1, a2);

    bqCoef[0][channel+4] = b0;
    bqCoef[1][channel+4] = b1;
    bqCoef[2][channel+4] = b2;
    bqCoef[3][channel+4] = a1;
    bqCoef[4][channel+4] = a2;

    bqCoef[0][channel+5] = b0;
    bqCoef[1][channel+5] = b1;
    bqCoef[2][channel+5] = b2;
    bqCoef[3][channel+5] = a1;
    bqCoef[4][channel+5] = a2;
}

void AudioHRTF::render(int16_t* input, float* output, int index, float azimuth, float gain, int numFrames) {
void AudioHRTF::render(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames) {

    assert(index >= 0);
    assert(index < HRTF_TABLES);

@@ -562,18 +847,19 @@ void AudioHRTF::render(int16_t* input, float* output, int index, float azimuth,
    float in[HRTF_TAPS + HRTF_BLOCK];               // mono
    float firCoef[4][HRTF_TAPS];                    // 4-channel
    float firBuffer[4][HRTF_DELAY + HRTF_BLOCK];    // 4-channel
    float bqCoef[5][4];                             // 4-channel (interleaved)
    float bqCoef[5][8];                             // 4-channel (interleaved)
    float bqBuffer[4 * HRTF_BLOCK];                 // 4-channel (interleaved)
    int delay[4];                                   // 4-channel (interleaved)

    // to avoid polluting the cache, old filters are recomputed instead of stored
    setAzimuthAndGain(firCoef, bqCoef, delay, index, _azimuthState, _gainState, L0);
    setFilters(firCoef, bqCoef, delay, index, _azimuthState, _distanceState, _gainState, L0);

    // compute new filters
    setAzimuthAndGain(firCoef, bqCoef, delay, index, azimuth, gain, L1);
    setFilters(firCoef, bqCoef, delay, index, azimuth, distance, gain, L1);

    // new parameters become old
    _azimuthState = azimuth;
    _distanceState = distance;
    _gainState = gain;

    // convert mono input to float

@@ -611,14 +897,25 @@ void AudioHRTF::render(int16_t* input, float* output, int index, float azimuth,
                  &firBuffer[R1][HRTF_DELAY] - delay[R1],
                  bqBuffer, HRTF_BLOCK);

    // process old/new fractional delay
    biquad_4x4(bqBuffer, bqBuffer, bqCoef, _bqState, HRTF_BLOCK);
    // process old/new biquads
    biquad2_4x4(bqBuffer, bqBuffer, bqCoef, _bqState, HRTF_BLOCK);

    // new state becomes old
    _bqState[0][L0] = _bqState[0][L1];
    _bqState[1][L0] = _bqState[1][L1];
    _bqState[2][L0] = _bqState[2][L1];

    _bqState[0][R0] = _bqState[0][R1];
    _bqState[1][R0] = _bqState[1][R1];
    _bqState[2][R0] = _bqState[2][R1];

    _bqState[0][L2] = _bqState[0][L3];
    _bqState[1][L2] = _bqState[1][L3];
    _bqState[2][L2] = _bqState[2][L3];

    _bqState[0][R2] = _bqState[0][R3];
    _bqState[1][R2] = _bqState[1][R3];
    _bqState[2][R2] = _bqState[2][R3];

    // crossfade old/new output and accumulate
    crossfade_4x2(bqBuffer, output, crossfadeTable, HRTF_BLOCK);

@@ -626,15 +923,16 @@ void AudioHRTF::render(int16_t* input, float* output, int index, float azimuth,
    _silentState = false;
}

void AudioHRTF::renderSilent(int16_t* input, float* output, int index, float azimuth, float gain, int numFrames) {
void AudioHRTF::renderSilent(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames) {

    // process the first silent block, to flush internal state
    if (!_silentState) {
        render(input, output, index, azimuth, gain, numFrames);
        render(input, output, index, azimuth, distance, gain, numFrames);
    }

    // new parameters become old
    _azimuthState = azimuth;
    _distanceState = distance;
    _gainState = gain;

    _silentState = true;
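
The hunk above widens bqCoef to [5][8] so each channel now carries two cascaded biquads: the existing azimuth/ITD filter, and the new distance filter whose coefficients land in columns channel+4 and channel+5. As a reference for what one coefficient column (b0, b1, b2, a1, a2) and its state mean, here is a minimal scalar direct-form-I biquad. It is not the SIMD biquad2_4x4() kernel, and BiquadState is an illustrative name rather than the codebase's _bqState layout.

    // Illustrative only: a scalar direct-form-I biquad using one column of
    // coefficients (b0, b1, b2, a1, a2). The real biquad2_4x4() processes
    // four channels of two cascaded biquads with SIMD; this is just the math.
    #include <cstddef>

    struct BiquadState {
        float x1 = 0.0f, x2 = 0.0f;   // previous inputs
        float y1 = 0.0f, y2 = 0.0f;   // previous outputs
    };

    static void biquadScalar(const float* input, float* output, size_t numFrames,
                             float b0, float b1, float b2, float a1, float a2,
                             BiquadState& s) {
        for (size_t n = 0; n < numFrames; n++) {
            float x = input[n];
            float y = b0 * x + b1 * s.x1 + b2 * s.x2 - a1 * s.y1 - a2 * s.y2;
            s.x2 = s.x1; s.x1 = x;
            s.y2 = s.y1; s.y1 = y;
            output[n] = y;
        }
    }
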
@@ -21,7 +21,7 @@ static const int HRTF_TABLES = 25; // number of HRTF subjects
static const int HRTF_DELAY = 24;   // max ITD in samples (1.0ms at 24KHz)
static const int HRTF_BLOCK = 256;  // block processing size

static const float HRTF_GAIN = 0.5f;    // HRTF global gain adjustment
static const float HRTF_GAIN = 1.0f;    // HRTF global gain adjustment

class AudioHRTF {

@@ -33,15 +33,16 @@ public:
    // output: interleaved stereo mix buffer (accumulates into existing output)
    // index: HRTF subject index
    // azimuth: clockwise panning angle in radians
    // distance: source distance in meters
    // gain: gain factor for distance attenuation
    // numFrames: must be HRTF_BLOCK in this version
    //
    void render(int16_t* input, float* output, int index, float azimuth, float gain, int numFrames);
    void render(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames);

    //
    // Fast path when input is known to be silent
    //
    void renderSilent(int16_t* input, float* output, int index, float azimuth, float gain, int numFrames);
    void renderSilent(int16_t* input, float* output, int index, float azimuth, float distance, float gain, int numFrames);

private:
    AudioHRTF(const AudioHRTF&) = delete;

@@ -49,10 +50,10 @@ private:

    // SIMD channel assignments
    enum Channel {
        L0,
        R0,
        L1,
        R1
        L0, R0,
        L1, R1,
        L2, R2,
        L3, R3
    };

    // For best cache utilization when processing thousands of instances, only

@@ -64,11 +65,12 @@ private:
    // integer delay history
    float _delayState[4][HRTF_DELAY] = {};

    // fractional delay history
    float _bqState[2][4] = {};
    // biquad history
    float _bqState[3][8] = {};

    // parameter history
    float _azimuthState = 0.0f;
    float _distanceState = 0.0f;
    float _gainState = 0.0f;

    bool _silentState = false;
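
A hedged sketch of calling the updated API declared in the header hunk above. Only the render() signature and HRTF_BLOCK come from this diff; the caller function, the buffer names, and the inverse-distance gain shown are illustrative assumptions, not how the mixer actually computes attenuation.

    // Sketch of a caller mixing one mono source into a stereo accumulation
    // buffer with the updated signature. Names here are illustrative.
    #include <algorithm>
    #include <cstdint>

    void mixOneSource(AudioHRTF& hrtf, int16_t* monoFrame /* HRTF_BLOCK samples */,
                      float* stereoAccumulator /* 2 * HRTF_BLOCK floats */,
                      int subjectIndex, float azimuthRadians, float distanceMeters) {
        // gain is still passed separately from distance; a simple
        // inverse-distance falloff is shown purely as an example.
        float gain = 1.0f / std::max(distanceMeters, 1.0f);
        hrtf.render(monoFrame, stereoAccumulator, subjectIndex,
                    azimuthRadians, distanceMeters, gain, HRTF_BLOCK);
    }
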
@@ -26,6 +26,17 @@

#include "AudioInjector.h"

int audioInjectorPtrMetaTypeId = qRegisterMetaType<AudioInjector*>();

AudioInjectorState operator& (AudioInjectorState lhs, AudioInjectorState rhs) {
    return static_cast<AudioInjectorState>(static_cast<uint8_t>(lhs) & static_cast<uint8_t>(rhs));
};

AudioInjectorState& operator|= (AudioInjectorState& lhs, AudioInjectorState rhs) {
    lhs = static_cast<AudioInjectorState>(static_cast<uint8_t>(lhs) | static_cast<uint8_t>(rhs));
    return lhs;
};

AudioInjector::AudioInjector(QObject* parent) :
    QObject(parent)
{

@@ -41,14 +52,42 @@ AudioInjector::AudioInjector(const Sound& sound, const AudioInjectorOptions& inj

AudioInjector::AudioInjector(const QByteArray& audioData, const AudioInjectorOptions& injectorOptions) :
    _audioData(audioData),
    _options(injectorOptions)
    _options(injectorOptions)
{

}

bool AudioInjector::stateHas(AudioInjectorState state) const {
    return (_state & state) == state;
}

void AudioInjector::setOptions(const AudioInjectorOptions& options) {
    // since options.stereo is computed from the audio stream,
    // we need to copy it from existing options just in case.
    bool currentlyStereo = _options.stereo;
    _options = options;
    _options.stereo = currentlyStereo;
}

void AudioInjector::finishNetworkInjection() {
    _state |= AudioInjectorState::NetworkInjectionFinished;

    // if we are already finished with local
    // injection, then we are finished
    if(stateHas(AudioInjectorState::LocalInjectionFinished)) {
        finish();
    }
}

void AudioInjector::finishLocalInjection() {
    _state |= AudioInjectorState::LocalInjectionFinished;
    if(_options.localOnly || stateHas(AudioInjectorState::NetworkInjectionFinished)) {
        finish();
    }
}

void AudioInjector::finish() {
    bool shouldDelete = (_state == State::NotFinishedWithPendingDelete);
    _state = State::Finished;
    _state |= AudioInjectorState::Finished;

    emit finished();

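
The operators and stateHas() above treat AudioInjectorState as a uint8_t-backed bit field, but the enum itself is declared in AudioInjector.h and is not part of this hunk. The values below are assumed purely for illustration of how the flags compose; the real definition may differ.

    // Hypothetical sketch of the flag enum these operators act on -- the real
    // declaration lives in AudioInjector.h and its exact values are not shown
    // in this diff.
    #include <cstdint>

    enum class AudioInjectorState : uint8_t {
        NotFinished              = 0,
        Finished                 = 1,
        PendingDelete            = 2,
        LocalInjectionFinished   = 4,
        NetworkInjectionFinished = 8
    };

    // With operator|= and operator& defined as in the hunk above, injector code
    // can set and test flags independently, for example:
    //     _state |= AudioInjectorState::LocalInjectionFinished;
    //     if (stateHas(AudioInjectorState::NetworkInjectionFinished)) { finish(); }
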
@@ -58,7 +97,7 @@ void AudioInjector::finish() {
        _localBuffer = NULL;
    }

    if (shouldDelete) {
    if (stateHas(AudioInjectorState::PendingDelete)) {
        // we've been asked to delete after finishing, trigger a deleteLater here
        deleteLater();
    }

@@ -110,23 +149,27 @@ void AudioInjector::restart() {
    _hasSentFirstFrame = false;

    // check our state to decide if we need extra handling for the restart request
    if (_state == State::Finished) {
    if (stateHas(AudioInjectorState::Finished)) {
        // we finished playing, need to reset state so we can get going again
        _hasSetup = false;
        _shouldStop = false;
        _state = State::NotFinished;

        _state = AudioInjectorState::NotFinished;

        // call inject audio to start injection over again
        setupInjection();

        // if we're a local injector call inject locally to start injecting again
        if (_options.localOnly) {
            injectLocally();
        } else {
            // wake the AudioInjectorManager back up if it's stuck waiting
            if (!injectorManager->restartFinishedInjector(this)) {
                _state = State::Finished; // we're not playing, so reset the state used by isPlaying.
        // inject locally
        if(injectLocally()) {

            // if not localOnly, wake the AudioInjectorManager back up if it is stuck waiting
            if (!_options.localOnly) {

                if (!injectorManager->restartFinishedInjector(this)) {
                    _state = AudioInjectorState::Finished; // we're not playing, so reset the state used by isPlaying.
                }
            }
        } else {
            _state = AudioInjectorState::Finished; // we failed to play, so we are finished again
        }
    }
}

@@ -145,7 +188,8 @@ bool AudioInjector::injectLocally() {
            // give our current send position to the local buffer
            _localBuffer->setCurrentOffset(_currentSendOffset);

            success = _localAudioInterface->outputLocalInjector(_options.stereo, this);
            // call this function on the AudioClient's thread
            success = QMetaObject::invokeMethod(_localAudioInterface, "outputLocalInjector", Q_ARG(bool, _options.stereo), Q_ARG(AudioInjector*, this));

            if (!success) {
                qCDebug(audio) << "AudioInjector::injectLocally could not output locally via _localAudioInterface";

@@ -170,8 +214,16 @@ const uchar MAX_INJECTOR_VOLUME = 0xFF;
static const int64_t NEXT_FRAME_DELTA_ERROR_OR_FINISHED = -1;
static const int64_t NEXT_FRAME_DELTA_IMMEDIATELY = 0;

qint64 writeStringToStream(const QString& string, QDataStream& stream) {
    QByteArray data = string.toUtf8();
    uint32_t length = data.length();
    stream << static_cast<quint32>(length);
    stream << data;
    return length + sizeof(uint32_t);
}

int64_t AudioInjector::injectNextFrame() {
    if (_state == AudioInjector::State::Finished) {
    if (stateHas(AudioInjectorState::NetworkInjectionFinished)) {
        qDebug() << "AudioInjector::injectNextFrame called but AudioInjector has finished and was not restarted. Returning.";
        return NEXT_FRAME_DELTA_ERROR_OR_FINISHED;
    }

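
The injectLocally() change above swaps a direct call for QMetaObject::invokeMethod so that outputLocalInjector runs on the AudioClient's thread. A minimal sketch of that general Qt pattern follows; Worker, doWork, and callAcrossThreads are illustrative names and not part of this codebase. This is also why AudioInjector* is registered with qRegisterMetaType earlier in this diff: queued invocations have to copy their arguments through the meta-type system.

    // General Qt pattern: invoke a slot on an object that may live on another
    // thread. With the default Qt::AutoConnection, the call is queued when the
    // target's thread differs from the caller's.
    #include <QMetaObject>
    #include <QObject>

    class Worker : public QObject {
        Q_OBJECT
    public slots:
        void doWork(bool flag, QObject* context) { /* runs on Worker's thread */ }
    };

    bool callAcrossThreads(Worker* worker, QObject* context) {
        // Returns false if no matching invokable method is found; a queued
        // call executes later on the worker's thread.
        return QMetaObject::invokeMethod(worker, "doWork",
                                         Q_ARG(bool, true),
                                         Q_ARG(QObject*, context));
    }
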
@@ -216,14 +268,20 @@ int64_t AudioInjector::injectNextFrame() {
        // pack some placeholder sequence number for now
        audioPacketStream << (quint16) 0;

        // current injectors don't use codecs, so pack in the unknown codec name
        QString noCodecForInjectors("");
        writeStringToStream(noCodecForInjectors, audioPacketStream);

        // pack stream identifier (a generated UUID)
        audioPacketStream << QUuid::createUuid();

        // pack the stereo/mono type of the stream
        audioPacketStream << _options.stereo;

        // pack the flag for loopback
        uchar loopbackFlag = (uchar)true;
        // pack the flag for loopback. Now, we don't loopback
        // and _always_ play locally, so loopbackFlag should be
        // false always.
        uchar loopbackFlag = (uchar)false;
        audioPacketStream << loopbackFlag;

        // pack the position for injected audio

@@ -289,16 +347,23 @@ int64_t AudioInjector::injectNextFrame() {

    _currentPacket->seek(audioDataOffset);

    // This code is copying bytes from the _audioData directly into the packet, handling looping appropriately.
    // Might be a reasonable place to do the encode step here.
    QByteArray decodedAudio;
    while (totalBytesLeftToCopy > 0) {
        int bytesToCopy = std::min(totalBytesLeftToCopy, _audioData.size() - _currentSendOffset);

        _currentPacket->write(_audioData.data() + _currentSendOffset, bytesToCopy);
        decodedAudio.append(_audioData.data() + _currentSendOffset, bytesToCopy);
        _currentSendOffset += bytesToCopy;
        totalBytesLeftToCopy -= bytesToCopy;
        if (_options.loop && _currentSendOffset >= _audioData.size()) {
            _currentSendOffset = 0;
        }
    }
    // FIXME -- good place to call codec encode here. We need to figure out how to tell the AudioInjector which
    // codec to use... possible through AbstractAudioInterface.
    QByteArray encodedAudio = decodedAudio;
    _currentPacket->write(encodedAudio.data(), encodedAudio.size());

    // set the correct size used for this packet
    _currentPacket->setPayloadSize(_currentPacket->pos());

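
A hedged sketch of how the FIXME above might be resolved once injectors know which codec to use. The Encoder interface and the maybeEncode helper are hypothetical and are not taken from this codebase; a null encoder preserves today's pass-through behaviour where encodedAudio is simply decodedAudio.

    // Hypothetical pass-through encode step, assuming some codec-provided
    // Encoder interface; not the codebase's API.
    #include <QByteArray>

    class Encoder {
    public:
        virtual ~Encoder() = default;
        virtual void encode(const QByteArray& decoded, QByteArray& encoded) = 0;
    };

    static QByteArray maybeEncode(Encoder* encoder, const QByteArray& decodedAudio) {
        if (!encoder) {
            // no codec negotiated: send the raw PCM, as the current code does
            return decodedAudio;
        }
        QByteArray encodedAudio;
        encoder->encode(decodedAudio, encodedAudio);
        return encodedAudio;
    }
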
@@ -314,7 +379,7 @@ int64_t AudioInjector::injectNextFrame() {
    }

    if (_currentSendOffset >= _audioData.size() && !_options.loop) {
        finish();
        finishNetworkInjection();
        return NEXT_FRAME_DELTA_ERROR_OR_FINISHED;
    }

@@ -353,10 +418,10 @@ void AudioInjector::triggerDeleteAfterFinish() {
        return;
    }

    if (_state == State::Finished) {
    if (_state == AudioInjectorState::Finished) {
        stopAndDeleteLater();
    } else {
        _state = State::NotFinishedWithPendingDelete;
        _state |= AudioInjectorState::PendingDelete;
    }
}

@@ -402,7 +467,7 @@ AudioInjector* AudioInjector::playSoundAndDelete(const QByteArray& buffer, const
    AudioInjector* sound = playSound(buffer, options, localInterface);

    if (sound) {
        sound->_state = AudioInjector::State::NotFinishedWithPendingDelete;
        sound->_state |= AudioInjectorState::PendingDelete;
    }

    return sound;

@@ -419,21 +484,23 @@ AudioInjector* AudioInjector::playSound(const QByteArray& buffer, const AudioInj
    // setup parameters required for injection
    injector->setupInjection();

    if (options.localOnly) {
        if (injector->injectLocally()) {
            // local injection succeeded, return the pointer to injector
            return injector;
        } else {
            // unable to inject locally, return a nullptr
            return nullptr;
        }
    } else {
        // attempt to thread the new injector
        if (injectorManager->threadInjector(injector)) {
            return injector;
        } else {
            // we failed to thread the new injector (we are at the max number of injector threads)
            return nullptr;
        }
    // we always inject locally
    //
    if (!injector->injectLocally()) {
        // failed, so don't bother sending to server
        qDebug() << "AudioInjector::playSound failed to inject locally";
        return nullptr;
    }
    // if localOnly, we are done, just return injector.
    if (options.localOnly) {
        return injector;
    }

    // send off to server for everyone else
    if (!injectorManager->threadInjector(injector)) {
        // we failed to thread the new injector (we are at the max number of injector threads)
        qDebug() << "AudioInjector::playSound failed to thread injector";
    }
    return injector;

}
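
A usage sketch of the reworked playSound() flow above, which now always injects locally first and only hands the injector to the AudioInjectorManager for network playback when localOnly is false. playClip is an illustrative caller; it assumes the third playSound() argument is the local AbstractAudioInterface pointer, as the localInterface name in playSoundAndDelete() suggests, and only sets the option fields that appear in this diff.

    // Illustrative caller; AudioInjector, AudioInjectorOptions, and
    // AbstractAudioInterface come from the surrounding codebase.
    #include <QByteArray>

    AudioInjector* playClip(const QByteArray& pcmBuffer, AbstractAudioInterface* audioInterface) {
        AudioInjectorOptions options;
        options.localOnly = false;   // also send to the audio mixer
        options.loop = false;        // play once, then finishNetworkInjection()

        AudioInjector* injector = AudioInjector::playSound(pcmBuffer, options, audioInterface);
        if (!injector) {
            // local injection failed, so nothing was sent to the server either
            return nullptr;
        }
        return injector;
    }
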