Mirror of https://github.com/lubosz/overte.git, synced 2025-04-16 09:29:16 +02:00
Merge branch 'master' of github.com:highfidelity/hifi into avatar-as-child-fixes
Commit 3d7633a11f
13 changed files with 181 additions and 30 deletions
@@ -57,9 +57,9 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
     auto& packetReceiver = nodeList->getPacketReceiver();
     packetReceiver.registerListenerForTypes({ PacketType::MicrophoneAudioNoEcho, PacketType::MicrophoneAudioWithEcho,
-        PacketType::InjectAudio, PacketType::SilentAudioFrame,
-        PacketType::AudioStreamStats },
-        this, "handleNodeAudioPacket");
+        PacketType::InjectAudio, PacketType::AudioStreamStats },
+        this, "handleAudioPacket");
+    packetReceiver.registerListenerForTypes({ PacketType::SilentAudioFrame }, this, "handleSilentAudioPacket");
     packetReceiver.registerListener(PacketType::NegotiateAudioFormat, this, "handleNegotiateAudioFormat");
     packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
     packetReceiver.registerListener(PacketType::NodeIgnoreRequest, this, "handleNodeIgnoreRequestPacket");
@@ -72,7 +72,13 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
     connect(nodeList.data(), &NodeList::nodeKilled, this, &AudioMixer::handleNodeKilled);
 }
 
-void AudioMixer::handleNodeAudioPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
+void AudioMixer::handleAudioPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
     getOrCreateClientData(sendingNode.data());
     DependencyManager::get<NodeList>()->updateNodeWithDataFromPacket(message, sendingNode);
 }
+
+void AudioMixer::handleSilentAudioPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
+    _numSilentPackets++;
+    getOrCreateClientData(sendingNode.data());
+    DependencyManager::get<NodeList>()->updateNodeWithDataFromPacket(message, sendingNode);
+}
@@ -300,6 +306,8 @@ void AudioMixer::sendStatsPacket() {
     statsObject["avg_streams_per_frame"] = (float)_stats.sumStreams / (float)_numStatFrames;
     statsObject["avg_listeners_per_frame"] = (float)_stats.sumListeners / (float)_numStatFrames;
 
+    statsObject["silent_packets_per_frame"] = (float)_numSilentPackets / (float)_numStatFrames;
+
     // timing stats
     QJsonObject timingStats;
@@ -338,7 +346,7 @@ void AudioMixer::sendStatsPacket() {
 
     statsObject["mix_stats"] = mixStats;
 
-    _numStatFrames = 0;
+    _numStatFrames = _numSilentPackets = 0;
     _stats.reset();
 
     // add stats for each listerner
@@ -56,7 +56,8 @@ public slots:
 
 private slots:
     // packet handlers
-    void handleNodeAudioPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
+    void handleAudioPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
+    void handleSilentAudioPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
     void handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
     void handleNegotiateAudioFormat(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
     void handleNodeKilled(SharedNodePointer killedNode);
@@ -87,6 +88,8 @@ private:
     float _trailingMixRatio { 0.0f };
     float _throttlingRatio { 0.0f };
 
+    int _numSilentPackets { 0 };
+
     int _numStatFrames { 0 };
     AudioMixerStats _stats;
@@ -399,7 +399,7 @@ void ImageReader::run() {
     int originalHeight = imageHeight;
     imageWidth = (int)(scaleFactor * (float)imageWidth + 0.5f);
     imageHeight = (int)(scaleFactor * (float)imageHeight + 0.5f);
-    QImage newImage = image.scaled(QSize(imageWidth, imageHeight), Qt::IgnoreAspectRatio);
+    QImage newImage = image.scaled(QSize(imageWidth, imageHeight), Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
     image.swap(newImage);
     qCDebug(modelnetworking) << "Downscale image" << _url
         << "from" << originalWidth << "x" << originalHeight
@@ -74,7 +74,7 @@ QImage processSourceImage(const QImage& srcImage, bool cubemap) {
     if (targetSize != srcImageSize) {
         PROFILE_RANGE(resource_parse, "processSourceImage Rectify");
         qCDebug(modelLog) << "Resizing texture from " << srcImageSize.x << "x" << srcImageSize.y << " to " << targetSize.x << "x" << targetSize.y;
-        return srcImage.scaled(fromGlm(targetSize));
+        return srcImage.scaled(fromGlm(targetSize), Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
     }
 
     return srcImage;
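Note: both resize hunks above hinge on the optional third argument of QImage::scaled. It defaults to Qt::FastTransformation (cheap nearest-neighbour sampling); Qt::SmoothTransformation requests bilinear filtering, which costs more but avoids the blocky artifacts on downscaled textures. A minimal standalone sketch of the difference follows; the function names are illustrative and not part of this commit.

    #include <QImage>
    #include <QSize>

    // Scale the same source two ways; the smooth result is filtered, the fast one is not.
    QImage downscaleFast(const QImage& source, const QSize& target) {
        return source.scaled(target, Qt::IgnoreAspectRatio);   // Qt::FastTransformation is the default
    }

    QImage downscaleSmooth(const QImage& source, const QSize& target) {
        return source.scaled(target, Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
    }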
@@ -202,14 +202,19 @@ const QImage& image, bool isLinear, bool doCompress) {
 
 #define CPU_MIPMAPS 1
 
-void generateMips(gpu::Texture* texture, QImage& image, gpu::Element formatMip) {
+void generateMips(gpu::Texture* texture, QImage& image, gpu::Element formatMip, bool fastResize) {
 #if CPU_MIPMAPS
     PROFILE_RANGE(resource_parse, "generateMips");
     auto numMips = texture->evalNumMips();
     for (uint16 level = 1; level < numMips; ++level) {
         QSize mipSize(texture->evalMipWidth(level), texture->evalMipHeight(level));
-        image = image.scaled(mipSize);
-        texture->assignStoredMip(level, formatMip, image.byteCount(), image.constBits());
+        if (fastResize) {
+            image = image.scaled(mipSize);
+            texture->assignStoredMip(level, formatMip, image.byteCount(), image.constBits());
+        } else {
+            QImage mipImage = image.scaled(mipSize, Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
+            texture->assignStoredMip(level, formatMip, mipImage.byteCount(), mipImage.constBits());
+        }
     }
 #else
     texture->autoGenerateMips(-1);
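Note: the new fastResize flag picks between two genuinely different mip chains. The fast branch overwrites image on every iteration, so each level is a cheap rescale of the previous level; the smooth branch never touches image, so every level is filtered straight from the level-0 data. Below is a self-contained sketch of that distinction in plain Qt; halving the size per level is an assumption made for illustration, whereas the engine asks the texture for evalMipWidth/evalMipHeight.

    #include <QImage>
    #include <QSize>
    #include <QVector>
    #include <QtGlobal>

    // Fast chain: each level is derived from the previous one (cheapest, but errors accumulate).
    QVector<QImage> buildMipsFast(QImage working, int levels) {
        QVector<QImage> mips;
        for (int level = 1; level < levels; ++level) {
            QSize next(qMax(1, working.width() / 2), qMax(1, working.height() / 2));
            working = working.scaled(next); // Qt::FastTransformation by default
            mips.push_back(working);
        }
        return mips;
    }

    // Smooth chain: every level is filtered straight from the full-resolution source.
    QVector<QImage> buildMipsSmooth(const QImage& source, int levels) {
        QVector<QImage> mips;
        QSize next = source.size();
        for (int level = 1; level < levels; ++level) {
            next = QSize(qMax(1, next.width() / 2), qMax(1, next.height() / 2));
            mips.push_back(source.scaled(next, Qt::IgnoreAspectRatio, Qt::SmoothTransformation));
        }
        return mips;
    }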
@@ -222,8 +227,8 @@ void generateFaceMips(gpu::Texture* texture, QImage& image, gpu::Element formatM
     auto numMips = texture->evalNumMips();
     for (uint16 level = 1; level < numMips; ++level) {
         QSize mipSize(texture->evalMipWidth(level), texture->evalMipHeight(level));
-        image = image.scaled(mipSize);
-        texture->assignStoredMipFace(level, formatMip, image.byteCount(), image.constBits(), face);
+        QImage mipImage = image.scaled(mipSize, Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
+        texture->assignStoredMipFace(level, formatMip, mipImage.byteCount(), mipImage.constBits(), face);
     }
 #else
     texture->autoGenerateMips(-1);
@@ -257,7 +262,7 @@ gpu::Texture* TextureUsage::process2DTextureColorFromImage(const QImage& srcImag
         theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
 
         if (generateMips) {
-            ::generateMips(theTexture, image, formatMip);
+            ::generateMips(theTexture, image, formatMip, false);
         }
     }
 
@@ -300,7 +305,7 @@ gpu::Texture* TextureUsage::createNormalTextureFromNormalImage(const QImage& src
         theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
         theTexture->setSource(srcImageName);
         theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
-        generateMips(theTexture, image, formatMip);
+        generateMips(theTexture, image, formatMip, true);
     }
 
     return theTexture;
@@ -386,7 +391,7 @@ gpu::Texture* TextureUsage::createNormalTextureFromBumpImage(const QImage& srcIm
         theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
         theTexture->setSource(srcImageName);
         theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
-        generateMips(theTexture, image, formatMip);
+        generateMips(theTexture, image, formatMip, true);
     }
 
     return theTexture;
@@ -419,7 +424,7 @@ gpu::Texture* TextureUsage::createRoughnessTextureFromImage(const QImage& srcIma
         theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
         theTexture->setSource(srcImageName);
         theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
-        generateMips(theTexture, image, formatMip);
+        generateMips(theTexture, image, formatMip, true);
 
         // FIXME queue for transfer to GPU and block on completion
     }
@@ -458,7 +463,7 @@ gpu::Texture* TextureUsage::createRoughnessTextureFromGlossImage(const QImage& s
         theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
         theTexture->setSource(srcImageName);
         theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
-        generateMips(theTexture, image, formatMip);
+        generateMips(theTexture, image, formatMip, true);
 
         // FIXME queue for transfer to GPU and block on completion
     }
@@ -494,7 +499,7 @@ gpu::Texture* TextureUsage::createMetallicTextureFromImage(const QImage& srcImag
         theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
         theTexture->setSource(srcImageName);
         theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
-        generateMips(theTexture, image, formatMip);
+        generateMips(theTexture, image, formatMip, true);
 
         // FIXME queue for transfer to GPU and block on completion
     }
@@ -45,7 +45,7 @@ void MessagesClient::decodeMessagesPacket(QSharedPointer<ReceivedMessage> receiv
 
     receivedMessage->readPrimitive(&isText);
 
-    quint16 messageLength;
+    quint32 messageLength;
     receivedMessage->readPrimitive(&messageLength);
     auto messageData = receivedMessage->read(messageLength);
     if (isText) {
@@ -75,7 +75,7 @@ std::unique_ptr<NLPacketList> MessagesClient::encodeMessagesPacket(QString chann
     packetList->writePrimitive(isTextMessage);
 
     auto messageUtf8 = message.toUtf8();
-    quint16 messageLength = messageUtf8.length();
+    quint32 messageLength = messageUtf8.length();
     packetList->writePrimitive(messageLength);
     packetList->write(messageUtf8);
 
@@ -95,7 +95,7 @@ std::unique_ptr<NLPacketList> MessagesClient::encodeMessagesDataPacket(QString c
     bool isTextMessage = false;
     packetList->writePrimitive(isTextMessage);
 
-    quint16 dataLength = data.length();
+    quint32 dataLength = data.length();
     packetList->writePrimitive(dataLength);
     packetList->write(data);
 
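Note: the three MessagesClient hunks widen the length prefix from quint16 (at most 65,535 bytes) to quint32, and the same width has to be used on the encode and decode sides or the byte stream falls out of sync. The sketch below shows the same length-prefixed framing idea using plain QDataStream; it is illustrative only and does not use the NLPacketList/ReceivedMessage API from the diff.

    #include <QByteArray>
    #include <QDataStream>
    #include <QIODevice>
    #include <QtGlobal>

    // Length-prefixed framing: the reader must use the same integer width as the writer.
    QByteArray encodeFrame(const QByteArray& payload) {
        QByteArray frame;
        QDataStream out(&frame, QIODevice::WriteOnly);
        quint32 length = payload.size();   // was quint16, which caps payloads at 65535 bytes
        out << length;
        frame.append(payload);
        return frame;
    }

    QByteArray decodeFrame(const QByteArray& frame) {
        QDataStream in(frame);
        quint32 length = 0;                // must match the width written by encodeFrame()
        in >> length;
        return frame.mid(int(sizeof(quint32)), int(length));
    }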
@@ -63,7 +63,8 @@ float fetchRoughnessMap(vec2 uv) {
 <@if withNormal@>
 uniform sampler2D normalMap;
 vec3 fetchNormalMap(vec2 uv) {
-    return texture(normalMap, uv).xyz;
+    // unpack normal, swizzle to get into hifi tangent space with Y axis pointing out
+    return normalize(texture(normalMap, uv).xzy -vec3(0.5, 0.5, 0.5));
 }
 <@endif@>
 
@@ -148,11 +149,23 @@ vec3 fetchLightmapMap(vec2 uv) {
     vec3 normalizedNormal = normalize(<$interpolatedNormal$>.xyz);
     vec3 normalizedTangent = normalize(<$interpolatedTangent$>.xyz);
     vec3 normalizedBitangent = normalize(cross(normalizedNormal, normalizedTangent));
-    vec3 localNormal = normalize(<$fetchedNormal$> - vec3(0.5, 0.5, 0.5));
+    vec3 localNormal = <$fetchedNormal$>;
     <$normal$> = vec3(normalizedTangent * localNormal.x + normalizedBitangent * localNormal.y + normalizedNormal * localNormal.z);
 }
 <@endfunc@>
 
+<@func tangentToViewSpaceLOD(fragPos, fetchedNormal, interpolatedNormal, interpolatedTangent, normal)@>
+{
+    vec3 normalizedNormal = normalize(<$interpolatedNormal$>.xyz);
+    vec3 normalizedTangent = normalize(<$interpolatedTangent$>.xyz);
+    vec3 normalizedBitangent = normalize(cross(normalizedNormal, normalizedTangent));
+    // attenuate the normal map divergence from the mesh normal based on distance
+    // THe attenuation range [20,100] meters from the eye is arbitrary for now
+    vec3 localNormal = mix(<$fetchedNormal$>, vec3(0.0, 1.0, 0.0), smoothstep(20, 100, (-<$fragPos$>).z));
+    <$normal$> = vec3(normalizedTangent * localNormal.x + normalizedNormal * localNormal.y + normalizedBitangent * localNormal.z);
+}
+<@endfunc@>
+
 <@func evalMaterialAlbedo(fetchedAlbedo, materialAlbedo, matKey, albedo)@>
 {
     <$albedo$>.xyz = (((<$matKey$> & ALBEDO_VAL_BIT) != 0) ? <$materialAlbedo$> : vec3(1.0));
@@ -47,7 +47,7 @@ void main(void) {
     <$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
 
     vec3 viewNormal;
-    <$tangentToViewSpace(normalTex, _normal, _tangent, viewNormal)$>
+    <$tangentToViewSpaceLOD(_position, normalTex, _normal, _tangent, viewNormal)$>
 
     float scattering = getMaterialScattering(mat);
     <$evalMaterialScattering(scatteringTex, scattering, matKey, scattering)$>;
@@ -47,7 +47,7 @@ void main(void) {
     <$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
 
     vec3 viewNormal;
-    <$tangentToViewSpace(normalTex, _normal, _tangent, viewNormal)$>
+    <$tangentToViewSpaceLOD(_position, normalTex, _normal, _tangent, viewNormal)$>
 
     float metallic = getMaterialMetallic(mat);
     <$evalMaterialMetallic(metallicTex, metallic, matKey, metallic)$>;
@@ -34,7 +34,7 @@ void main(void) {
     <$fetchMaterialTexturesCoord1(matKey, _texCoord1, _SCRIBE_NULL, lightmapVal)$>
 
     vec3 viewNormal;
-    <$tangentToViewSpace(normalTexel, _normal, _tangent, viewNormal)$>
+    <$tangentToViewSpaceLOD(_position, normalTexel, _normal, _tangent, viewNormal)$>
 
     packDeferredFragmentLightmap(
         normalize(viewNormal.xyz),
@@ -34,7 +34,7 @@ void main(void) {
     <$fetchMaterialTexturesCoord1(matKey, _texCoord1, _SCRIBE_NULL, lightmapVal)$>
 
     vec3 viewNormal;
-    <$tangentToViewSpace(normalTexel, _normal, _tangent, viewNormal)$>
+    <$tangentToViewSpaceLOD(_position, normalTexel, _normal, _tangent, viewNormal)$>
 
     packDeferredFragmentLightmap(
         normalize(viewNormal.xyz),
@@ -47,7 +47,7 @@ void main(void) {
     <$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
 
     vec3 viewNormal;
-    <$tangentToViewSpace(normalTex, _normal, _tangent, viewNormal)$>
+    <$tangentToViewSpaceLOD(_position, normalTex, _normal, _tangent, viewNormal)$>
 
     float scattering = getMaterialScattering(mat);
     <$evalMaterialScattering(scatteringTex, scattering, matKey, scattering)$>;
@@ -47,7 +47,7 @@ void main(void) {
     <$evalMaterialEmissive(emissiveTex, emissive, matKey, emissive)$>;
 
     vec3 viewNormal;
-    <$tangentToViewSpace(normalTex, _normal, _tangent, viewNormal)$>
+    <$tangentToViewSpaceLOD(_position, normalTex, _normal, _tangent, viewNormal)$>
 
     float metallic = getMaterialMetallic(mat);
     <$evalMaterialMetallic(metallicTex, metallic, matKey, metallic)$>;
script-archive/displayLastEditedBy.js (new file, 122 lines)
@@ -0,0 +1,122 @@
+//
+//  displayLastEditedBy.js
+//
+//  Created by Si Fi Faye Li on 2 December, 2016
+//
+//  Draws a line from each entity to the user in the current session who last changed a property, if any, as recorded
+//  by the lastEditedBy property.
+
+(function () {
+    var SHOW_LAST_EDITED_BY_ME = true;
+    var SEARCH_RADIUS = 40;
+    // in meter, if the entities is too far away(out of search radius), we won't display its last edited by
+
+    var LINE_COLOR = { red: 0, green: 255, blue: 255};
+    var LINE_EXPRIRATION_TIME = 3000; // in ms
+    var UPDATE_INTERVAL = 1 / 60; // 60fps
+    var myHashMap = {}; // stores {entityID of target entity : overlayID of the line}
+
+    var timer = 0;
+    var lastUpdateTime = 0;
+    function update(deltaTime) {
+        timer += deltaTime;
+        if (timer - lastUpdateTime > UPDATE_INTERVAL) {
+            var targetEntityIDs = Entities.findEntities(MyAvatar.position,SEARCH_RADIUS);
+
+            targetEntityIDs.forEach(function(targetEntityID){
+                var targetEntityProps = Entities.getEntityProperties(targetEntityID);
+
+                // don't draw lines for entities that were last edited long time ago
+                if (targetEntityProps.hasOwnProperty("lastEdited")) {
+                    var currentTime = new Date().getTime();
+                    // lastEdited is in usec while JS date object returns msec
+                    var timeDiff = currentTime - targetEntityProps.lastEdited/1000;
+                    if (timeDiff > LINE_EXPRIRATION_TIME) {
+                        if (myHashMap.hasOwnProperty(targetEntityID)) {
+                            var overlayID = myHashMap[targetEntityID];
+                            Overlays.deleteOverlay(overlayID);
+                        }
+                        return;
+                    }
+                }
+
+                var targetAvatarUUID = targetEntityProps.lastEditedBy;
+
+                // don't draw lines for entities last edited by myself
+                // you may set SHOW_LAST_EDITED_BY_ME to true if you want to see these lines
+                if (targetAvatarUUID === MyAvatar.sessionUUID && !SHOW_LAST_EDITED_BY_ME) {
+                    if (myHashMap.hasOwnProperty(targetEntityID)) {
+                        var overlayID = myHashMap[targetEntityID];
+                        Overlays.deleteOverlay(overlayID);
+                    }
+                    return;
+                }
+                // don't draw lines for entities with no last edited by
+                if (targetAvatarUUID === "{00000000-0000-0000-0000-000000000000}") {
+                    if (myHashMap.hasOwnProperty(targetEntityID)) {
+                        var overlayID = myHashMap[targetEntityID];
+                        Overlays.deleteOverlay(overlayID);
+                    }
+                    return;
+                }
+
+                var targetAvatar = AvatarList.getAvatar(targetAvatarUUID);
+
+                // skip adding overlay if the avatar can't be found
+                if (targetAvatar === null) {
+                    // delete overlay if the avatar was found before but no long here
+                    if (myHashMap.hasOwnProperty(targetEntityID)) {
+                        var overlayID = myHashMap[targetEntityID];
+                        Overlays.deleteOverlay(overlayID);
+                    }
+                    return;
+                }
+
+                var props = {
+                    start: targetEntityProps.position,
+                    end: targetAvatar.position,
+                    color: LINE_COLOR,
+                    alpha: 1,
+                    ignoreRayIntersection: true,
+                    visible: true,
+                    solid: true,
+                    drawInFront: true
+                };
+
+                if (myHashMap.hasOwnProperty(targetEntityID)) {
+                    var overlayID = myHashMap[targetEntityID];
+                    Overlays.editOverlay(overlayID, props);
+                } else {
+                    var newOverlayID = Overlays.addOverlay("line3d", props);
+                    myHashMap[targetEntityID] = newOverlayID;
+                }
+
+            });
+
+            // remove lines for entities no longer within search radius
+            for (var key in myHashMap) {
+                if (myHashMap.hasOwnProperty(key)) {
+                    if (targetEntityIDs.indexOf(key) === -1) {
+                        var overlayID = myHashMap[key];
+                        Overlays.deleteOverlay(overlayID);
+                        delete myHashMap[key];
+                    }
+                }
+            }
+
+            lastUpdateTime = timer;
+        }
+    }
+    Script.update.connect(update);
+
+    function cleanup() {
+        for (var key in myHashMap) {
+            if (myHashMap.hasOwnProperty(key)) {
+                var overlayID = myHashMap[key];
+                Overlays.deleteOverlay(overlayID);
+            }
+        }
+    }
+    Script.scriptEnding.connect(cleanup);
+})();