Mirror of https://github.com/overte-org/overte.git

Merge branch 'master' of https://github.com/highfidelity/hifi into avatarMixerLoopImprovements

Commit ced6f43923: 18 changed files with 292 additions and 37 deletions
@@ -57,9 +57,9 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
     auto& packetReceiver = nodeList->getPacketReceiver();

     packetReceiver.registerListenerForTypes({ PacketType::MicrophoneAudioNoEcho, PacketType::MicrophoneAudioWithEcho,
-                                              PacketType::InjectAudio, PacketType::SilentAudioFrame,
-                                              PacketType::AudioStreamStats },
-                                              this, "handleNodeAudioPacket");
+                                              PacketType::InjectAudio, PacketType::AudioStreamStats },
+                                              this, "handleAudioPacket");
+    packetReceiver.registerListenerForTypes({ PacketType::SilentAudioFrame }, this, "handleSilentAudioPacket");
     packetReceiver.registerListener(PacketType::NegotiateAudioFormat, this, "handleNegotiateAudioFormat");
     packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
     packetReceiver.registerListener(PacketType::NodeIgnoreRequest, this, "handleNodeIgnoreRequestPacket");

@@ -72,7 +72,13 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
     connect(nodeList.data(), &NodeList::nodeKilled, this, &AudioMixer::handleNodeKilled);
 }

-void AudioMixer::handleNodeAudioPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
+void AudioMixer::handleAudioPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
     getOrCreateClientData(sendingNode.data());
     DependencyManager::get<NodeList>()->updateNodeWithDataFromPacket(message, sendingNode);
 }
+
+void AudioMixer::handleSilentAudioPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
+    _numSilentPackets++;
+    getOrCreateClientData(sendingNode.data());
+    DependencyManager::get<NodeList>()->updateNodeWithDataFromPacket(message, sendingNode);
+}

@@ -300,6 +306,8 @@ void AudioMixer::sendStatsPacket() {
     statsObject["avg_streams_per_frame"] = (float)_stats.sumStreams / (float)_numStatFrames;
     statsObject["avg_listeners_per_frame"] = (float)_stats.sumListeners / (float)_numStatFrames;

+    statsObject["silent_packets_per_frame"] = (float)_numSilentPackets / (float)_numStatFrames;
+
     // timing stats
     QJsonObject timingStats;

@@ -338,7 +346,7 @@ void AudioMixer::sendStatsPacket() {
     statsObject["mix_stats"] = mixStats;

-    _numStatFrames = 0;
+    _numStatFrames = _numSilentPackets = 0;
     _stats.reset();

     // add stats for each listerner
@@ -56,7 +56,8 @@ public slots:

 private slots:
     // packet handlers
-    void handleNodeAudioPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
+    void handleAudioPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
+    void handleSilentAudioPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
     void handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
     void handleNegotiateAudioFormat(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
     void handleNodeKilled(SharedNodePointer killedNode);

@@ -87,6 +88,8 @@ private:
     float _trailingMixRatio { 0.0f };
     float _throttlingRatio { 0.0f };

+    int _numSilentPackets { 0 };
+
     int _numStatFrames { 0 };
     AudioMixerStats _stats;
@@ -37,8 +37,10 @@ void MessagesMixer::nodeKilled(SharedNodePointer killedNode) {

 void MessagesMixer::handleMessages(QSharedPointer<ReceivedMessage> receivedMessage, SharedNodePointer senderNode) {
     QString channel, message;
+    QByteArray data;
     QUuid senderID;
-    MessagesClient::decodeMessagesPacket(receivedMessage, channel, message, senderID);
+    bool isText;
+    MessagesClient::decodeMessagesPacket(receivedMessage, channel, isText, message, data, senderID);

     auto nodeList = DependencyManager::get<NodeList>();

@@ -47,7 +49,8 @@ void MessagesMixer::handleMessages(QSharedPointer<ReceivedMessage> receivedMessa
             return node->getActiveSocket() && _channelSubscribers[channel].contains(node->getUUID());
         },
         [&](const SharedNodePointer& node) {
-            auto packetList = MessagesClient::encodeMessagesPacket(channel, message, senderID);
+            auto packetList = isText ? MessagesClient::encodeMessagesPacket(channel, message, senderID) :
+                                       MessagesClient::encodeMessagesDataPacket(channel, data, senderID);
             nodeList->sendPacketList(std::move(packetList), *node);
         });
 }
@@ -399,7 +399,7 @@ void ImageReader::run() {
     int originalHeight = imageHeight;
     imageWidth = (int)(scaleFactor * (float)imageWidth + 0.5f);
     imageHeight = (int)(scaleFactor * (float)imageHeight + 0.5f);
-    QImage newImage = image.scaled(QSize(imageWidth, imageHeight), Qt::IgnoreAspectRatio);
+    QImage newImage = image.scaled(QSize(imageWidth, imageHeight), Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
     image.swap(newImage);
     qCDebug(modelnetworking) << "Downscale image" << _url
                              << "from" << originalWidth << "x" << originalHeight
@@ -74,7 +74,7 @@ QImage processSourceImage(const QImage& srcImage, bool cubemap) {
     if (targetSize != srcImageSize) {
         PROFILE_RANGE(resource_parse, "processSourceImage Rectify");
         qCDebug(modelLog) << "Resizing texture from " << srcImageSize.x << "x" << srcImageSize.y << " to " << targetSize.x << "x" << targetSize.y;
-        return srcImage.scaled(fromGlm(targetSize));
+        return srcImage.scaled(fromGlm(targetSize), Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
     }

     return srcImage;

@@ -202,14 +202,19 @@ const QImage& image, bool isLinear, bool doCompress) {

 #define CPU_MIPMAPS 1

-void generateMips(gpu::Texture* texture, QImage& image, gpu::Element formatMip) {
+void generateMips(gpu::Texture* texture, QImage& image, gpu::Element formatMip, bool fastResize) {
 #if CPU_MIPMAPS
     PROFILE_RANGE(resource_parse, "generateMips");
     auto numMips = texture->evalNumMips();
     for (uint16 level = 1; level < numMips; ++level) {
         QSize mipSize(texture->evalMipWidth(level), texture->evalMipHeight(level));
-        image = image.scaled(mipSize);
-        texture->assignStoredMip(level, formatMip, image.byteCount(), image.constBits());
+        if (fastResize) {
+            image = image.scaled(mipSize);
+            texture->assignStoredMip(level, formatMip, image.byteCount(), image.constBits());
+        } else {
+            QImage mipImage = image.scaled(mipSize, Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
+            texture->assignStoredMip(level, formatMip, mipImage.byteCount(), mipImage.constBits());
+        }
     }
 #else
     texture->autoGenerateMips(-1);

@@ -222,8 +227,8 @@ void generateFaceMips(gpu::Texture* texture, QImage& image, gpu::Element formatM
     auto numMips = texture->evalNumMips();
     for (uint16 level = 1; level < numMips; ++level) {
         QSize mipSize(texture->evalMipWidth(level), texture->evalMipHeight(level));
-        image = image.scaled(mipSize);
-        texture->assignStoredMipFace(level, formatMip, image.byteCount(), image.constBits(), face);
+        QImage mipImage = image.scaled(mipSize, Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
+        texture->assignStoredMipFace(level, formatMip, mipImage.byteCount(), mipImage.constBits(), face);
     }
 #else
     texture->autoGenerateMips(-1);

@@ -257,7 +262,7 @@ gpu::Texture* TextureUsage::process2DTextureColorFromImage(const QImage& srcImag
         theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());

         if (generateMips) {
-            ::generateMips(theTexture, image, formatMip);
+            ::generateMips(theTexture, image, formatMip, false);
         }
     }

@@ -300,7 +305,7 @@ gpu::Texture* TextureUsage::createNormalTextureFromNormalImage(const QImage& src
         theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
         theTexture->setSource(srcImageName);
         theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
-        generateMips(theTexture, image, formatMip);
+        generateMips(theTexture, image, formatMip, true);
     }

     return theTexture;

@@ -386,7 +391,7 @@ gpu::Texture* TextureUsage::createNormalTextureFromBumpImage(const QImage& srcIm
         theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
         theTexture->setSource(srcImageName);
         theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
-        generateMips(theTexture, image, formatMip);
+        generateMips(theTexture, image, formatMip, true);
     }

     return theTexture;

@@ -419,7 +424,7 @@ gpu::Texture* TextureUsage::createRoughnessTextureFromImage(const QImage& srcIma
         theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
         theTexture->setSource(srcImageName);
         theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
-        generateMips(theTexture, image, formatMip);
+        generateMips(theTexture, image, formatMip, true);

         // FIXME queue for transfer to GPU and block on completion
     }

@@ -458,7 +463,7 @@ gpu::Texture* TextureUsage::createRoughnessTextureFromGlossImage(const QImage& s
         theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
         theTexture->setSource(srcImageName);
         theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
-        generateMips(theTexture, image, formatMip);
+        generateMips(theTexture, image, formatMip, true);

         // FIXME queue for transfer to GPU and block on completion
     }

@@ -494,7 +499,7 @@ gpu::Texture* TextureUsage::createMetallicTextureFromImage(const QImage& srcImag
         theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
         theTexture->setSource(srcImageName);
         theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
-        generateMips(theTexture, image, formatMip);
+        generateMips(theTexture, image, formatMip, true);

         // FIXME queue for transfer to GPU and block on completion
     }
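For reference, the new fastResize flag above selects between two Qt scaling behaviours: the fast path replaces the working image each iteration, so every mip is derived from the previous one using QImage::scaled()'s default Qt::FastTransformation, while the quality path scales a temporary copy with Qt::SmoothTransformation and leaves the source image untouched. A minimal standalone illustration of the two calls follows; it is not the engine code, and the helper names are made up:

    #include <QImage>
    #include <QSize>

    // Fast path: scale the working image in place; QImage::scaled() defaults
    // to Qt::FastTransformation (nearest-neighbour style sampling).
    QImage fastMip(QImage& working, const QSize& mipSize) {
        working = working.scaled(mipSize);
        return working;
    }

    // Quality path: scale from the untouched source with smooth (filtered)
    // sampling, as the non-fastResize branch and generateFaceMips now do.
    QImage smoothMip(const QImage& source, const QSize& mipSize) {
        return source.scaled(mipSize, Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
    }

Color textures keep the quality path (fastResize = false), while normal, bump, roughness, gloss, and metallic textures opt into the faster in-place resize.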
@@ -36,16 +36,23 @@ void MessagesClient::init() {
     }
 }

-void MessagesClient::decodeMessagesPacket(QSharedPointer<ReceivedMessage> receivedMessage, QString& channel, QString& message, QUuid& senderID) {
+void MessagesClient::decodeMessagesPacket(QSharedPointer<ReceivedMessage> receivedMessage, QString& channel,
+                                          bool& isText, QString& message, QByteArray& data, QUuid& senderID) {
     quint16 channelLength;
     receivedMessage->readPrimitive(&channelLength);
     auto channelData = receivedMessage->read(channelLength);
     channel = QString::fromUtf8(channelData);

-    quint16 messageLength;
+    receivedMessage->readPrimitive(&isText);
+
+    quint32 messageLength;
     receivedMessage->readPrimitive(&messageLength);
     auto messageData = receivedMessage->read(messageLength);
-    message = QString::fromUtf8(messageData);
+    if (isText) {
+        message = QString::fromUtf8(messageData);
+    } else {
+        data = messageData;
+    }

     QByteArray bytesSenderID = receivedMessage->read(NUM_BYTES_RFC4122_UUID);
     if (bytesSenderID.length() == NUM_BYTES_RFC4122_UUID) {

@@ -64,8 +71,11 @@ std::unique_ptr<NLPacketList> MessagesClient::encodeMessagesPacket(QString chann
     packetList->writePrimitive(channelLength);
     packetList->write(channelUtf8);

+    bool isTextMessage = true;
+    packetList->writePrimitive(isTextMessage);
+
     auto messageUtf8 = message.toUtf8();
-    quint16 messageLength = messageUtf8.length();
+    quint32 messageLength = messageUtf8.length();
     packetList->writePrimitive(messageLength);
     packetList->write(messageUtf8);

@@ -74,12 +84,38 @@ std::unique_ptr<NLPacketList> MessagesClient::encodeMessagesPacket(QString chann
     return packetList;
 }

+std::unique_ptr<NLPacketList> MessagesClient::encodeMessagesDataPacket(QString channel, QByteArray data, QUuid senderID) {
+    auto packetList = NLPacketList::create(PacketType::MessagesData, QByteArray(), true, true);
+
+    auto channelUtf8 = channel.toUtf8();
+    quint16 channelLength = channelUtf8.length();
+    packetList->writePrimitive(channelLength);
+    packetList->write(channelUtf8);
+
+    bool isTextMessage = false;
+    packetList->writePrimitive(isTextMessage);
+
+    quint32 dataLength = data.length();
+    packetList->writePrimitive(dataLength);
+    packetList->write(data);
+
+    packetList->write(senderID.toRfc4122());
+
+    return packetList;
+}
+
 void MessagesClient::handleMessagesPacket(QSharedPointer<ReceivedMessage> receivedMessage, SharedNodePointer senderNode) {
     QString channel, message;
+    QByteArray data;
+    bool isText { false };
     QUuid senderID;
-    decodeMessagesPacket(receivedMessage, channel, message, senderID);
-    emit messageReceived(channel, message, senderID, false);
+    decodeMessagesPacket(receivedMessage, channel, isText, message, data, senderID);
+    if (isText) {
+        emit messageReceived(channel, message, senderID, false);
+    } else {
+        emit dataReceived(channel, data, senderID, false);
+    }
 }

 void MessagesClient::sendMessage(QString channel, QString message, bool localOnly) {

@@ -98,6 +134,22 @@ void MessagesClient::sendMessage(QString channel, QString message, bool localOnl
     }
 }

+void MessagesClient::sendData(QString channel, QByteArray data, bool localOnly) {
+    auto nodeList = DependencyManager::get<NodeList>();
+    if (localOnly) {
+        QUuid senderID = nodeList->getSessionUUID();
+        emit dataReceived(channel, data, senderID, true);
+    } else {
+        SharedNodePointer messagesMixer = nodeList->soloNodeOfType(NodeType::MessagesMixer);
+
+        if (messagesMixer) {
+            QUuid senderID = nodeList->getSessionUUID();
+            auto packetList = encodeMessagesDataPacket(channel, data, senderID);
+            nodeList->sendPacketList(std::move(packetList), *messagesMixer);
+        }
+    }
+}
+
 void MessagesClient::sendLocalMessage(QString channel, QString message) {
     sendMessage(channel, message, true);
 }
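For reference, the encode/decode changes above imply the following payload layout for both text and binary message packets: a quint16 channel length, the UTF-8 channel name, a bool isText flag, a quint32 payload length (widened from quint16), the payload bytes, and a 16-byte RFC 4122 sender UUID. The sketch below is illustrative only; decodeSketch is a hypothetical helper, not a source identifier, the real code reads through ReceivedMessage rather than QDataStream, and it assumes the little-endian host byte order that raw readPrimitive()/writePrimitive() calls effectively use on typical hardware:

    #include <QByteArray>
    #include <QDataStream>
    #include <QString>
    #include <QUuid>

    // Hypothetical standalone decoder mirroring the field order written by
    // encodeMessagesPacket / encodeMessagesDataPacket above.
    void decodeSketch(const QByteArray& payload, QString& channel, bool& isText,
                      QString& message, QByteArray& data, QUuid& senderID) {
        QDataStream stream(payload);
        stream.setByteOrder(QDataStream::LittleEndian); // assumption: little-endian host order

        quint16 channelLength;
        stream >> channelLength;
        QByteArray channelBytes(channelLength, '\0');
        stream.readRawData(channelBytes.data(), channelLength);
        channel = QString::fromUtf8(channelBytes);

        stream >> isText;                               // flag added by this change

        quint32 payloadLength;                          // widened from quint16
        stream >> payloadLength;
        QByteArray payloadBytes(payloadLength, '\0');
        stream.readRawData(payloadBytes.data(), payloadLength);
        if (isText) {
            message = QString::fromUtf8(payloadBytes);
        } else {
            data = payloadBytes;
        }

        QByteArray uuidBytes(16, '\0');                 // NUM_BYTES_RFC4122_UUID
        stream.readRawData(uuidBytes.data(), 16);
        senderID = QUuid::fromRfc4122(uuidBytes);
    }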
@@ -14,6 +14,7 @@
 #define hifi_MessagesClient_h

 #include <QString>
+#include <QByteArray>

 #include <DependencyManager.h>

@@ -31,15 +32,19 @@ public:

     Q_INVOKABLE void sendMessage(QString channel, QString message, bool localOnly = false);
     Q_INVOKABLE void sendLocalMessage(QString channel, QString message);
+    Q_INVOKABLE void sendData(QString channel, QByteArray data, bool localOnly = false);
     Q_INVOKABLE void subscribe(QString channel);
     Q_INVOKABLE void unsubscribe(QString channel);

-    static void decodeMessagesPacket(QSharedPointer<ReceivedMessage> receivedMessage, QString& channel, QString& message, QUuid& senderID);
-    static std::unique_ptr<NLPacketList> encodeMessagesPacket(QString channel, QString message, QUuid senderID);
+    static void decodeMessagesPacket(QSharedPointer<ReceivedMessage> receivedMessage, QString& channel,
+                                     bool& isText, QString& message, QByteArray& data, QUuid& senderID);
+
+    static std::unique_ptr<NLPacketList> encodeMessagesPacket(QString channel, QString message, QUuid senderID);
+    static std::unique_ptr<NLPacketList> encodeMessagesDataPacket(QString channel, QByteArray data, QUuid senderID);

 signals:
     void messageReceived(QString channel, QString message, QUuid senderUUID, bool localOnly);
+    void dataReceived(QString channel, QByteArray data, QUuid senderUUID, bool localOnly);

 private slots:
     void handleMessagesPacket(QSharedPointer<ReceivedMessage> receivedMessage, SharedNodePointer senderNode);
@@ -57,6 +57,8 @@ PacketVersion versionForPacketType(PacketType packetType) {
         case PacketType::BulkAvatarData:
         case PacketType::KillAvatar:
             return static_cast<PacketVersion>(AvatarMixerPacketVersion::VariableAvatarData);
+        case PacketType::MessagesData:
+            return static_cast<PacketVersion>(MessageDataVersion::TextOrBinaryData);
         case PacketType::ICEServerHeartbeat:
             return 18; // ICE Server Heartbeat signing
         case PacketType::AssetGetInfo:
@@ -263,4 +263,8 @@ enum class AudioVersion : PacketVersion {
     HighDynamicRangeVolume,
 };

+enum class MessageDataVersion : PacketVersion {
+    TextOrBinaryData = 18
+};
+
 #endif // hifi_PacketHeaders_h
@@ -63,7 +63,8 @@ float fetchRoughnessMap(vec2 uv) {
 <@if withNormal@>
 uniform sampler2D normalMap;
 vec3 fetchNormalMap(vec2 uv) {
-    return texture(normalMap, uv).xyz;
+    // unpack normal, swizzle to get into hifi tangent space with Y axis pointing out
+    return normalize(texture(normalMap, uv).xzy -vec3(0.5, 0.5, 0.5));
 }
 <@endif@>

@@ -148,11 +149,23 @@ vec3 fetchLightmapMap(vec2 uv) {
     vec3 normalizedNormal = normalize(<$interpolatedNormal$>.xyz);
     vec3 normalizedTangent = normalize(<$interpolatedTangent$>.xyz);
     vec3 normalizedBitangent = normalize(cross(normalizedNormal, normalizedTangent));
-    vec3 localNormal = normalize(<$fetchedNormal$> - vec3(0.5, 0.5, 0.5));
+    vec3 localNormal = <$fetchedNormal$>;
     <$normal$> = vec3(normalizedTangent * localNormal.x + normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z);
 }
 <@endfunc@>

+<@func tangentToViewSpaceLOD(fragPos, fetchedNormal, interpolatedNormal, interpolatedTangent, normal)@>
+{
+    vec3 normalizedNormal = normalize(<$interpolatedNormal$>.xyz);
+    vec3 normalizedTangent = normalize(<$interpolatedTangent$>.xyz);
+    vec3 normalizedBitangent = normalize(cross(normalizedNormal, normalizedTangent));
+    // attenuate the normal map divergence from the mesh normal based on distance
+    // THe attenuation range [20,100] meters from the eye is arbitrary for now
+    vec3 localNormal = mix(<$fetchedNormal$>, vec3(0.0, 1.0, 0.0), smoothstep(20, 100, (-<$fragPos$>).z));
+    <$normal$> = vec3(normalizedTangent * localNormal.x + normalizedNormal * localNormal.y + normalizedBitangent * localNormal.z);
+}
+<@endfunc@>
+
 <@func evalMaterialAlbedo(fetchedAlbedo, materialAlbedo, matKey, albedo)@>
 {
     <$albedo$>.xyz = (((<$matKey$> & ALBEDO_VAL_BIT) != 0) ? <$materialAlbedo$> : vec3(1.0));
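The new tangentToViewSpaceLOD macro above fades the fetched tangent-space normal toward the flat mesh normal (0, 1, 0) as the fragment moves away from the eye, using smoothstep over a 20 m to 100 m range of view-space depth. A rough C++ transcription of that attenuation weight, for illustration only (the shader itself uses GLSL's built-in mix and smoothstep, and the function names here are made up):

    #include <algorithm>

    // smoothstep(edge0, edge1, x) as defined by GLSL
    float smoothstepf(float edge0, float edge1, float x) {
        float t = std::min(std::max((x - edge0) / (edge1 - edge0), 0.0f), 1.0f);
        return t * t * (3.0f - 2.0f * t);
    }

    // Weight used to blend the normal-map normal toward the mesh normal:
    // 0 at 20 m or closer (full normal map detail), 1 at 100 m or farther.
    // viewSpaceDepth corresponds to -fragPos.z in the shader.
    float normalMapFadeWeight(float viewSpaceDepth) {
        return smoothstepf(20.0f, 100.0f, viewSpaceDepth);
    }

The fragment shaders below switch from tangentToViewSpace to this LOD-aware variant.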
@@ -47,7 +47,7 @@ void main(void) {
     <$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;

     vec3 viewNormal;
-    <$tangentToViewSpace(normalTex, _normal, _tangent, viewNormal)$>
+    <$tangentToViewSpaceLOD(_position, normalTex, _normal, _tangent, viewNormal)$>

     float scattering = getMaterialScattering(mat);
     <$evalMaterialScattering(scatteringTex, scattering, matKey, scattering)$>;

@@ -47,7 +47,7 @@ void main(void) {
     <$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;

     vec3 viewNormal;
-    <$tangentToViewSpace(normalTex, _normal, _tangent, viewNormal)$>
+    <$tangentToViewSpaceLOD(_position, normalTex, _normal, _tangent, viewNormal)$>

     float metallic = getMaterialMetallic(mat);
     <$evalMaterialMetallic(metallicTex, metallic, matKey, metallic)$>;

@@ -34,7 +34,7 @@ void main(void) {
     <$fetchMaterialTexturesCoord1(matKey, _texCoord1, _SCRIBE_NULL, lightmapVal)$>

     vec3 viewNormal;
-    <$tangentToViewSpace(normalTexel, _normal, _tangent, viewNormal)$>
+    <$tangentToViewSpaceLOD(_position, normalTexel, _normal, _tangent, viewNormal)$>

     packDeferredFragmentLightmap(
         normalize(viewNormal.xyz),

@@ -34,7 +34,7 @@ void main(void) {
     <$fetchMaterialTexturesCoord1(matKey, _texCoord1, _SCRIBE_NULL, lightmapVal)$>

     vec3 viewNormal;
-    <$tangentToViewSpace(normalTexel, _normal, _tangent, viewNormal)$>
+    <$tangentToViewSpaceLOD(_position, normalTexel, _normal, _tangent, viewNormal)$>

     packDeferredFragmentLightmap(
         normalize(viewNormal.xyz),

@@ -47,7 +47,7 @@ void main(void) {
     <$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;

     vec3 viewNormal;
-    <$tangentToViewSpace(normalTex, _normal, _tangent, viewNormal)$>
+    <$tangentToViewSpaceLOD(_position, normalTex, _normal, _tangent, viewNormal)$>

     float scattering = getMaterialScattering(mat);
     <$evalMaterialScattering(scatteringTex, scattering, matKey, scattering)$>;

@@ -47,7 +47,7 @@ void main(void) {
     <$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;

     vec3 viewNormal;
-    <$tangentToViewSpace(normalTex, _normal, _tangent, viewNormal)$>
+    <$tangentToViewSpaceLOD(_position, normalTex, _normal, _tangent, viewNormal)$>

     float metallic = getMaterialMetallic(mat);
     <$evalMaterialMetallic(metallicTex, metallic, matKey, metallic)$>;
script-archive/displayLastEditedBy.js (new file, 122 lines)

//
// displayLastEditedBy.js
//
// Created by Si Fi Faye Li on 2 December, 2016
//
// Draws a line from each entity to the user in the current session who last changed a property, if any, as recorded
// by the lastEditedBy property.

(function () {
    var SHOW_LAST_EDITED_BY_ME = true;
    var SEARCH_RADIUS = 40;
    // in meter, if the entities is too far away(out of search radius), we won't display its last edited by

    var LINE_COLOR = { red: 0, green: 255, blue: 255};
    var LINE_EXPRIRATION_TIME = 3000; // in ms
    var UPDATE_INTERVAL = 1 / 60; // 60fps
    var myHashMap = {}; // stores {entityID of target entity : overlayID of the line}

    var timer = 0;
    var lastUpdateTime = 0;
    function update(deltaTime) {
        timer += deltaTime;
        if (timer - lastUpdateTime > UPDATE_INTERVAL) {
            var targetEntityIDs = Entities.findEntities(MyAvatar.position,SEARCH_RADIUS);

            targetEntityIDs.forEach(function(targetEntityID){
                var targetEntityProps = Entities.getEntityProperties(targetEntityID);

                // don't draw lines for entities that were last edited long time ago
                if (targetEntityProps.hasOwnProperty("lastEdited")) {
                    var currentTime = new Date().getTime();
                    // lastEdited is in usec while JS date object returns msec
                    var timeDiff = currentTime - targetEntityProps.lastEdited/1000;
                    if (timeDiff > LINE_EXPRIRATION_TIME) {
                        if (myHashMap.hasOwnProperty(targetEntityID)) {
                            var overlayID = myHashMap[targetEntityID];
                            Overlays.deleteOverlay(overlayID);
                        }
                        return;
                    }
                }

                var targetAvatarUUID = targetEntityProps.lastEditedBy;

                // don't draw lines for entities last edited by myself
                // you may set SHOW_LAST_EDITED_BY_ME to true if you want to see these lines
                if (targetAvatarUUID === MyAvatar.sessionUUID && !SHOW_LAST_EDITED_BY_ME) {
                    if (myHashMap.hasOwnProperty(targetEntityID)) {
                        var overlayID = myHashMap[targetEntityID];
                        Overlays.deleteOverlay(overlayID);
                    }
                    return;
                }
                // don't draw lines for entities with no last edited by
                if (targetAvatarUUID === "{00000000-0000-0000-0000-000000000000}") {
                    if (myHashMap.hasOwnProperty(targetEntityID)) {
                        var overlayID = myHashMap[targetEntityID];
                        Overlays.deleteOverlay(overlayID);
                    }
                    return;
                }

                var targetAvatar = AvatarList.getAvatar(targetAvatarUUID);

                // skip adding overlay if the avatar can't be found
                if (targetAvatar === null) {
                    // delete overlay if the avatar was found before but no long here
                    if (myHashMap.hasOwnProperty(targetEntityID)) {
                        var overlayID = myHashMap[targetEntityID];
                        Overlays.deleteOverlay(overlayID);
                    }
                    return;
                }

                var props = {
                    start: targetEntityProps.position,
                    end: targetAvatar.position,
                    color: LINE_COLOR,
                    alpha: 1,
                    ignoreRayIntersection: true,
                    visible: true,
                    solid: true,
                    drawInFront: true
                };

                if (myHashMap.hasOwnProperty(targetEntityID)) {
                    var overlayID = myHashMap[targetEntityID];
                    Overlays.editOverlay(overlayID, props);
                } else {
                    var newOverlayID = Overlays.addOverlay("line3d", props);
                    myHashMap[targetEntityID] = newOverlayID;
                }
            });

            // remove lines for entities no longer within search radius
            for (var key in myHashMap) {
                if (myHashMap.hasOwnProperty(key)) {
                    if (targetEntityIDs.indexOf(key) === -1) {
                        var overlayID = myHashMap[key];
                        Overlays.deleteOverlay(overlayID);
                        delete myHashMap[key];
                    }
                }
            }

            lastUpdateTime = timer;
        }
    }
    Script.update.connect(update);

    function cleanup() {
        for (var key in myHashMap) {
            if (myHashMap.hasOwnProperty(key)) {
                var overlayID = myHashMap[key];
                Overlays.deleteOverlay(overlayID);
            }
        }
    }
    Script.scriptEnding.connect(cleanup);
})();
scripts/developer/tests/messagesTests.js (new file, 38 lines)

var channelName = "com.highfidelity.example.dataMessages";

Messages.subscribe(channelName);

//messageReceived(QString channel, QString message, QUuid senderUUID, bool localOnly);
Messages.messageReceived.connect(function(channel, message, sender, local) {
    print("message recieved on ", channel, " message:", message, " from:", sender, " local:", local);
});

Messages.dataReceived.connect(function(channel, data, sender, local) {
    var int8data = new Int8Array(data);
    var dataAsString = "";
    for (var i = 0; i < int8data.length; i++) {
        if (i > 0) {
            dataAsString += ", ";
        }
        dataAsString += int8data[i];
    }
    print("data recieved on ", channel, " from:", sender, " local:", local, "length of data:", int8data.length, " data:", dataAsString);
});

var counter = 0;
Script.update.connect(function(){
    counter++;
    if (counter == 100) {
        Messages.sendMessage(channelName, "foo");
    } else if (counter == 200) {
        var data = new Int8Array([0,1,10,2,20,3,30]);
        print("about to call sendData() data.length:", data.length);
        Messages.sendData(channelName, data.buffer);
        counter = 0;
    }
});

Script.scriptEnding.connect(function(){
    Messages.unsubscribe(channelName);
});