Merge branch 'master' of https://github.com/highfidelity/hifi into fix-duplicate-teleport-target-overlays

This commit is contained in:
Thijs Wenker 2016-07-20 03:12:02 +02:00
commit b3b1bf7884
41 changed files with 600 additions and 307 deletions

View file

@ -525,7 +525,6 @@ void AudioMixer::handleNegotiateAudioFormat(QSharedPointer<ReceivedMessage> mess
} }
} }
auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData()); auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
// FIXME - why would we not have client data at this point?? // FIXME - why would we not have client data at this point??
@ -539,14 +538,7 @@ void AudioMixer::handleNegotiateAudioFormat(QSharedPointer<ReceivedMessage> mess
clientData->setupCodec(selectedCodec, selectedCodecName); clientData->setupCodec(selectedCodec, selectedCodecName);
qDebug() << "selectedCodecName:" << selectedCodecName; qDebug() << "selectedCodecName:" << selectedCodecName;
clientData->sendSelectAudioFormat(sendingNode, selectedCodecName);
auto replyPacket = NLPacket::create(PacketType::SelectedAudioFormat);
// write them to our packet
replyPacket->writeString(selectedCodecName);
auto nodeList = DependencyManager::get<NodeList>();
nodeList->sendPacket(std::move(replyPacket), *sendingNode);
} }
void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) { void AudioMixer::handleNodeKilled(SharedNodePointer killedNode) {
@ -769,13 +761,18 @@ void AudioMixer::broadcastMixes() {
std::unique_ptr<NLPacket> mixPacket; std::unique_ptr<NLPacket> mixPacket;
if (mixHasAudio) { if (mixHasAudio) {
int mixPacketBytes = sizeof(quint16) + AudioConstants::NETWORK_FRAME_BYTES_STEREO; int mixPacketBytes = sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE
+ AudioConstants::NETWORK_FRAME_BYTES_STEREO;
mixPacket = NLPacket::create(PacketType::MixedAudio, mixPacketBytes); mixPacket = NLPacket::create(PacketType::MixedAudio, mixPacketBytes);
// pack sequence number // pack sequence number
quint16 sequence = nodeData->getOutgoingSequenceNumber(); quint16 sequence = nodeData->getOutgoingSequenceNumber();
mixPacket->writePrimitive(sequence); mixPacket->writePrimitive(sequence);
// write the codec
QString codecInPacket = nodeData->getCodecName();
mixPacket->writeString(codecInPacket);
QByteArray decodedBuffer(reinterpret_cast<char*>(_clampedSamples), AudioConstants::NETWORK_FRAME_BYTES_STEREO); QByteArray decodedBuffer(reinterpret_cast<char*>(_clampedSamples), AudioConstants::NETWORK_FRAME_BYTES_STEREO);
QByteArray encodedBuffer; QByteArray encodedBuffer;
nodeData->encode(decodedBuffer, encodedBuffer); nodeData->encode(decodedBuffer, encodedBuffer);
@ -783,13 +780,17 @@ void AudioMixer::broadcastMixes() {
// pack mixed audio samples // pack mixed audio samples
mixPacket->write(encodedBuffer.constData(), encodedBuffer.size()); mixPacket->write(encodedBuffer.constData(), encodedBuffer.size());
} else { } else {
int silentPacketBytes = sizeof(quint16) + sizeof(quint16); int silentPacketBytes = sizeof(quint16) + sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE;
mixPacket = NLPacket::create(PacketType::SilentAudioFrame, silentPacketBytes); mixPacket = NLPacket::create(PacketType::SilentAudioFrame, silentPacketBytes);
// pack sequence number // pack sequence number
quint16 sequence = nodeData->getOutgoingSequenceNumber(); quint16 sequence = nodeData->getOutgoingSequenceNumber();
mixPacket->writePrimitive(sequence); mixPacket->writePrimitive(sequence);
// write the codec
QString codecInPacket = nodeData->getCodecName();
mixPacket->writeString(codecInPacket);
// pack number of silent audio samples // pack number of silent audio samples
quint16 numSilentSamples = AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; quint16 numSilentSamples = AudioConstants::NETWORK_FRAME_SAMPLES_STEREO;
mixPacket->writePrimitive(numSilentSamples); mixPacket->writePrimitive(numSilentSamples);

View file

@ -113,6 +113,8 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
avatarAudioStream->setupCodec(_codec, _selectedCodecName, AudioConstants::MONO); avatarAudioStream->setupCodec(_codec, _selectedCodecName, AudioConstants::MONO);
qDebug() << "creating new AvatarAudioStream... codec:" << _selectedCodecName; qDebug() << "creating new AvatarAudioStream... codec:" << _selectedCodecName;
connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec, this, &AudioMixerClientData::sendSelectAudioFormat);
auto emplaced = _audioStreams.emplace( auto emplaced = _audioStreams.emplace(
QUuid(), QUuid(),
std::unique_ptr<PositionalAudioStream> { avatarAudioStream } std::unique_ptr<PositionalAudioStream> { avatarAudioStream }
@ -128,7 +130,6 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
isMicStream = true; isMicStream = true;
} else if (packetType == PacketType::InjectAudio) { } else if (packetType == PacketType::InjectAudio) {
// this is injected audio // this is injected audio
// grab the stream identifier for this injected audio // grab the stream identifier for this injected audio
message.seek(sizeof(quint16)); message.seek(sizeof(quint16));
QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID)); QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
@ -344,6 +345,14 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
return result; return result;
} }
void AudioMixerClientData::sendSelectAudioFormat(SharedNodePointer node, const QString& selectedCodecName) {
auto replyPacket = NLPacket::create(PacketType::SelectedAudioFormat);
replyPacket->writeString(selectedCodecName);
auto nodeList = DependencyManager::get<NodeList>();
nodeList->sendPacket(std::move(replyPacket), *node);
}
void AudioMixerClientData::setupCodec(CodecPluginPointer codec, const QString& codecName) { void AudioMixerClientData::setupCodec(CodecPluginPointer codec, const QString& codecName) {
cleanupCodec(); // cleanup any previously allocated coders first cleanupCodec(); // cleanup any previously allocated coders first
_codec = codec; _codec = codec;

View file

@ -78,9 +78,14 @@ public:
} }
} }
QString getCodecName() { return _selectedCodecName; }
signals: signals:
void injectorStreamFinished(const QUuid& streamIdentifier); void injectorStreamFinished(const QUuid& streamIdentifier);
public slots:
void sendSelectAudioFormat(SharedNodePointer node, const QString& selectedCodecName);
private: private:
QReadWriteLock _streamsLock; QReadWriteLock _streamsLock;
AudioStreamMap _audioStreams; // microphone stream from avatar is stored under key of null UUID AudioStreamMap _audioStreams; // microphone stream from avatar is stored under key of null UUID

View file

@ -1,19 +1,31 @@
include(ExternalProject) include(ExternalProject)
include(SelectLibraryConfigurations) include(SelectLibraryConfigurations)
set(EXTERNAL_NAME HiFiAudioCodec) set(EXTERNAL_NAME hifiAudioCodec)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER) string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
ExternalProject_Add( if (WIN32 OR APPLE)
${EXTERNAL_NAME} ExternalProject_Add(
URL https://s3.amazonaws.com/hifi-public/dependencies/codecSDK-1.zip ${EXTERNAL_NAME}
URL_MD5 23ec3fe51eaa155ea159a4971856fc13 URL http://s3.amazonaws.com/hifi-public/dependencies/codecSDK-1.zip
CONFIGURE_COMMAND "" URL_MD5 23ec3fe51eaa155ea159a4971856fc13
BUILD_COMMAND "" CONFIGURE_COMMAND ""
INSTALL_COMMAND "" BUILD_COMMAND ""
LOG_DOWNLOAD 1 INSTALL_COMMAND ""
) LOG_DOWNLOAD 1
)
elseif(NOT ANDROID)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://s3.amazonaws.com/hifi-public/dependencies/codecSDK-linux.zip
URL_MD5 7d37914a18aa4de971d2f45dd3043bde
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
)
endif()
# Hide this external target (for ide users) # Hide this external target (for ide users)
set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals") set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
@ -23,11 +35,9 @@ ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include CACHE TYPE INTERNAL) set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/include CACHE TYPE INTERNAL)
if (WIN32) if (WIN32)
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/Release/audio.lib CACHE TYPE INTERNAL) set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/Release/audio.lib CACHE TYPE INTERNAL)
elseif(APPLE) elseif(APPLE)
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/Release/libaudio.a CACHE TYPE INTERNAL) set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/Release/libaudio.a CACHE TYPE INTERNAL)
elseif(NOT ANDROID) elseif(NOT ANDROID)
# FIXME need to account for different architectures set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/Release/libaudio.a CACHE TYPE INTERNAL)
#set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/lib/linux64/audio.so CACHE TYPE INTERNAL)
endif() endif()

View file

@ -43,4 +43,4 @@ macro(ADD_DEPENDENCY_EXTERNAL_PROJECTS)
endforeach() endforeach()
endmacro() endmacro()

View file

@ -37,7 +37,7 @@ macro(SETUP_HIFI_CLIENT_SERVER_PLUGIN)
${CLIENT_PLUGIN_FULL_PATH} ${CLIENT_PLUGIN_FULL_PATH}
) )
# copy the client plugin binaries # copy the client plugin binaries
add_custom_command(TARGET ${DIR} POST_BUILD add_custom_command(TARGET ${TARGET_NAME} POST_BUILD
COMMAND "${CMAKE_COMMAND}" -E copy COMMAND "${CMAKE_COMMAND}" -E copy
"$<TARGET_FILE:${TARGET_NAME}>" "$<TARGET_FILE:${TARGET_NAME}>"
${CLIENT_PLUGIN_FULL_PATH} ${CLIENT_PLUGIN_FULL_PATH}
@ -50,7 +50,7 @@ macro(SETUP_HIFI_CLIENT_SERVER_PLUGIN)
${SERVER_PLUGIN_FULL_PATH} ${SERVER_PLUGIN_FULL_PATH}
) )
# copy the server plugin binaries # copy the server plugin binaries
add_custom_command(TARGET ${DIR} POST_BUILD add_custom_command(TARGET ${TARGET_NAME} POST_BUILD
COMMAND "${CMAKE_COMMAND}" -E copy COMMAND "${CMAKE_COMMAND}" -E copy
"$<TARGET_FILE:${TARGET_NAME}>" "$<TARGET_FILE:${TARGET_NAME}>"
${SERVER_PLUGIN_FULL_PATH} ${SERVER_PLUGIN_FULL_PATH}

View file

@ -1083,9 +1083,11 @@ void DomainServer::sendHeartbeatToMetaverse(const QString& networkAddress) {
// Setup the domain object to send to the data server // Setup the domain object to send to the data server
QJsonObject domainObject; QJsonObject domainObject;
// add the version // add the versions
static const QString VERSION_KEY = "version"; static const QString VERSION_KEY = "version";
domainObject[VERSION_KEY] = BuildInfo::VERSION; domainObject[VERSION_KEY] = BuildInfo::VERSION;
static const QString PROTOCOL_KEY = "protocol";
domainObject[PROTOCOL_KEY] = protocolVersionsSignatureBase64();
// add networking // add networking
if (!networkAddress.isEmpty()) { if (!networkAddress.isEmpty()) {
@ -1119,7 +1121,12 @@ void DomainServer::sendHeartbeatToMetaverse(const QString& networkAddress) {
QString domainUpdateJSON = QString("{\"domain\":%1}").arg(QString(QJsonDocument(domainObject).toJson(QJsonDocument::Compact))); QString domainUpdateJSON = QString("{\"domain\":%1}").arg(QString(QJsonDocument(domainObject).toJson(QJsonDocument::Compact)));
static const QString DOMAIN_UPDATE = "/api/v1/domains/%1"; static const QString DOMAIN_UPDATE = "/api/v1/domains/%1";
DependencyManager::get<AccountManager>()->sendRequest(DOMAIN_UPDATE.arg(uuidStringWithoutCurlyBraces(getID())), QString path = DOMAIN_UPDATE.arg(uuidStringWithoutCurlyBraces(getID()));
#if DEV_BUILD || PR_BUILD
qDebug() << "Domain metadata sent to" << path;
qDebug() << "Domain metadata update:" << domainUpdateJSON;
#endif
DependencyManager::get<AccountManager>()->sendRequest(path,
AccountManagerAuth::Optional, AccountManagerAuth::Optional,
QNetworkAccessManager::PutOperation, QNetworkAccessManager::PutOperation,
JSONCallbackParameters(nullptr, QString(), this, "handleMetaverseHeartbeatError"), JSONCallbackParameters(nullptr, QString(), this, "handleMetaverseHeartbeatError"),

View file

@ -58,6 +58,7 @@ set(INTERFACE_SRCS ${INTERFACE_SRCS} "${QT_UI_HEADERS}" "${QT_RESOURCES}")
# qt5_create_translation_custom(${QM} ${INTERFACE_SRCS} ${QT_UI_FILES} ${TS}) # qt5_create_translation_custom(${QM} ${INTERFACE_SRCS} ${QT_UI_FILES} ${TS})
if (APPLE) if (APPLE)
# configure CMake to use a custom Info.plist # configure CMake to use a custom Info.plist
set_target_properties(${this_target} PROPERTIES MACOSX_BUNDLE_INFO_PLIST MacOSXBundleInfo.plist.in) set_target_properties(${this_target} PROPERTIES MACOSX_BUNDLE_INFO_PLIST MacOSXBundleInfo.plist.in)
@ -229,6 +230,13 @@ if (APPLE)
set(SCRIPTS_INSTALL_DIR "${INTERFACE_INSTALL_APP_PATH}/Contents/Resources") set(SCRIPTS_INSTALL_DIR "${INTERFACE_INSTALL_APP_PATH}/Contents/Resources")
# copy script files beside the executable
add_custom_command(TARGET ${TARGET_NAME} POST_BUILD
COMMAND "${CMAKE_COMMAND}" -E copy_directory
"${CMAKE_SOURCE_DIR}/scripts"
$<TARGET_FILE_DIR:${TARGET_NAME}>/../Resources/scripts
)
# call the fixup_interface macro to add required bundling commands for installation # call the fixup_interface macro to add required bundling commands for installation
fixup_interface() fixup_interface()
@ -263,6 +271,7 @@ else (APPLE)
endif (APPLE) endif (APPLE)
if (SCRIPTS_INSTALL_DIR) if (SCRIPTS_INSTALL_DIR)
# setup install of scripts beside interface executable # setup install of scripts beside interface executable
install( install(
DIRECTORY "${CMAKE_SOURCE_DIR}/scripts/" DIRECTORY "${CMAKE_SOURCE_DIR}/scripts/"

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.6 KiB

View file

@ -13,6 +13,7 @@ import QtQuick 2.4
import "controls" import "controls"
import "styles" import "styles"
import "windows" import "windows"
import "hifi"
Window { Window {
id: root id: root
@ -44,11 +45,50 @@ Window {
anchors.centerIn = parent; anchors.centerIn = parent;
} }
function goCard(card) {
addressLine.text = card.userStory.name;
toggleOrGo(true);
}
property var allDomains: [];
property var suggestionChoices: [];
property var domainsBaseUrl: null;
property int cardWidth: 200;
property int cardHeight: 152;
AddressBarDialog { AddressBarDialog {
id: addressBarDialog id: addressBarDialog
implicitWidth: backgroundImage.width implicitWidth: backgroundImage.width
implicitHeight: backgroundImage.height implicitHeight: backgroundImage.height
Row {
width: backgroundImage.width;
anchors {
bottom: backgroundImage.top;
bottomMargin: 2 * hifi.layout.spacing;
right: backgroundImage.right;
rightMargin: -104; // FIXME
}
spacing: hifi.layout.spacing;
Card {
id: s0;
width: cardWidth;
height: cardHeight;
goFunction: goCard
}
Card {
id: s1;
width: cardWidth;
height: cardHeight;
goFunction: goCard
}
Card {
id: s2;
width: cardWidth;
height: cardHeight;
goFunction: goCard
}
}
Image { Image {
id: backgroundImage id: backgroundImage
source: "../images/address-bar.svg" source: "../images/address-bar.svg"
@ -130,22 +170,178 @@ Window {
} }
font.pixelSize: hifi.fonts.pixelSize * root.scale * 0.75 font.pixelSize: hifi.fonts.pixelSize * root.scale * 0.75
helperText: "Go to: place, @user, /path, network address" helperText: "Go to: place, @user, /path, network address"
onTextChanged: filterChoicesByText()
} }
} }
} }
function getRequest(url, cb) { // cb(error, responseOfCorrectContentType) of url. General for 'get' text/html/json, but without redirects.
// TODO: make available to other .qml.
var request = new XMLHttpRequest();
// QT bug: apparently doesn't handle onload. Workaround using readyState.
request.onreadystatechange = function () {
var READY_STATE_DONE = 4;
var HTTP_OK = 200;
if (request.readyState >= READY_STATE_DONE) {
var error = (request.status !== HTTP_OK) && request.status.toString() + ':' + request.statusText,
response = !error && request.responseText,
contentType = !error && request.getResponseHeader('content-type');
if (!error && contentType.indexOf('application/json') === 0) {
try {
response = JSON.parse(response);
} catch (e) {
error = e;
}
}
cb(error, response);
}
};
request.open("GET", url, true);
request.send();
}
// call iterator(element, icb) once for each element of array, and then cb(error) when icb(error) has been called by each iterator.
// short-circuits if error. Note that iterator MUST be an asynchronous function. (Use setTimeout if necessary.)
function asyncEach(array, iterator, cb) {
var count = array.length;
function icb(error) {
if (!--count || error) {
count = -1; // don't cb multiple times (e.g., if error)
cb(error);
}
}
if (!count) {
return cb();
}
array.forEach(function (element) {
iterator(element, icb);
});
}
function identity(x) {
return x;
}
function addPictureToDomain(domainInfo, cb) { // asynchronously add thumbnail and lobby to domainInfo, if available, and cb(error)
// This requests data for all the names at once, and just uses the first one to come back.
// We might change this to check one at a time, which would be less requests and more latency.
asyncEach([domainInfo.name].concat(domainInfo.names || null).filter(identity), function (name, icb) {
var url = "https://metaverse.highfidelity.com/api/v1/places/" + name;
getRequest(url, function (error, json) {
var previews = !error && json.data.place.previews;
if (previews) {
if (!domainInfo.thumbnail) { // just grab the first one
domainInfo.thumbnail = previews.thumbnail;
}
if (!domainInfo.lobby) {
domainInfo.lobby = previews.lobby;
}
}
icb(error);
});
}, cb);
}
function getDomains(options, cb) { // cb(error, arrayOfData)
if (!options.page) {
options.page = 1;
}
if (!domainsBaseUrl) {
var domainsOptions = [
'open', // published hours handle now
'active', // has at least one person connected. FIXME: really want any place that is verified accessible.
// FIXME: really want places I'm allowed in, not just open ones.
'restriction=open', // Not by whitelist, etc. FIXME: If logged in, add hifi to the restriction options, in order to include places that require login.
// FIXME add maturity
'protocol=' + encodeURIComponent(AddressManager.protocolVersion()),
'sort_by=users',
'sort_order=desc',
];
domainsBaseUrl = "https://metaverse.highfidelity.com/api/v1/domains/all?" + domainsOptions.join('&');
}
var url = domainsBaseUrl + "&page=" + options.page + "&users=" + options.minUsers + "-" + options.maxUsers;
getRequest(url, function (error, json) {
if (!error && (json.status !== 'success')) {
error = new Error("Bad response: " + JSON.stringify(json));
}
if (error) {
error.message += ' for ' + url;
return cb(error);
}
var domains = json.data.domains;
if (json.current_page < json.total_pages) {
options.page++;
return getDomains(options, function (error, others) {
cb(error, domains.concat(others));
});
}
cb(null, domains);
});
}
function filterChoicesByText() {
function fill1(target, data) {
if (!data) {
target.visible = false;
return;
}
console.log('suggestion:', JSON.stringify(data));
target.userStory = data;
target.image.source = data.lobby || target.defaultPicture;
target.placeText = data.name;
target.usersText = data.online_users + ((data.online_users === 1) ? ' user' : ' users');
target.visible = true;
}
var words = addressLine.text.toUpperCase().split(/\s+/).filter(identity);
var filtered = !words.length ? suggestionChoices : allDomains.filter(function (domain) {
var text = domain.names.concat(domain.tags).join(' ');
if (domain.description) {
text += domain.description;
}
text = text.toUpperCase();
return words.every(function (word) {
return text.indexOf(word) >= 0;
});
});
fill1(s0, filtered[0]);
fill1(s1, filtered[1]);
fill1(s2, filtered[2]);
}
function fillDestinations() {
allDomains = suggestionChoices = [];
getDomains({minUsers: 0, maxUsers: 20}, function (error, domains) {
if (error) {
console.log('domain query failed:', error);
return filterChoicesByText();
}
var here = AddressManager.hostname; // don't show where we are now.
allDomains = domains.filter(function (domain) { return domain.name !== here; });
// Whittle down suggestions to those that have at least one user, and try to get pictures.
suggestionChoices = allDomains.filter(function (domain) { return domain.online_users; });
asyncEach(domains, addPictureToDomain, function (error) {
if (error) {
console.log('place picture query failed:', error);
}
// Whittle down more by requiring a picture.
suggestionChoices = suggestionChoices.filter(function (domain) { return domain.lobby; });
filterChoicesByText();
});
});
}
onVisibleChanged: { onVisibleChanged: {
if (visible) { if (visible) {
addressLine.forceActiveFocus() addressLine.forceActiveFocus()
fillDestinations();
} else { } else {
addressLine.text = "" addressLine.text = ""
} }
} }
function toggleOrGo() { function toggleOrGo(fromSuggestions) {
if (addressLine.text !== "") { if (addressLine.text !== "") {
addressBarDialog.loadAddress(addressLine.text) addressBarDialog.loadAddress(addressLine.text, fromSuggestions)
} }
root.shown = false; root.shown = false;
} }

View file

@ -0,0 +1,96 @@
//
// Card.qml
// qml/hifi
//
// Displays a clickable card representing a user story or destination.
//
// Created by Howard Stearns on 7/13/2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import Hifi 1.0
import QtQuick 2.5
import QtGraphicalEffects 1.0
import "../styles-uit"
Rectangle {
property var goFunction: null;
property var userStory: null;
property alias image: lobby;
property alias placeText: place.text;
property alias usersText: users.text;
property int textPadding: 20;
property int textSize: 24;
property string defaultPicture: "../../images/default-domain.gif";
HifiConstants { id: hifi }
Image {
id: lobby;
width: parent.width;
height: parent.height;
source: defaultPicture;
fillMode: Image.PreserveAspectCrop;
// source gets filled in later
anchors.verticalCenter: parent.verticalCenter;
anchors.left: parent.left;
onStatusChanged: {
if (status == Image.Error) {
console.log("source: " + source + ": failed to load " + JSON.stringify(userStory));
source = defaultPicture;
}
}
}
property int dropHorizontalOffset: 0;
property int dropVerticalOffset: 1;
property int dropRadius: 2;
property int dropSamples: 9;
property int dropSpread: 0;
DropShadow {
source: place;
anchors.fill: place;
horizontalOffset: dropHorizontalOffset;
verticalOffset: dropVerticalOffset;
radius: dropRadius;
samples: dropSamples;
color: hifi.colors.black;
spread: dropSpread;
}
DropShadow {
source: users;
anchors.fill: users;
horizontalOffset: dropHorizontalOffset;
verticalOffset: dropVerticalOffset;
radius: dropRadius;
samples: dropSamples;
color: hifi.colors.black;
spread: dropSpread;
}
RalewaySemiBold {
id: place;
color: hifi.colors.white;
size: textSize;
anchors {
top: parent.top;
left: parent.left;
margins: textPadding;
}
}
RalewayRegular {
id: users;
size: textSize;
color: hifi.colors.white;
anchors {
bottom: parent.bottom;
right: parent.right;
margins: textPadding;
}
}
MouseArea {
anchors.fill: parent;
acceptedButtons: Qt.LeftButton;
onClicked: goFunction(parent);
hoverEnabled: true;
}
}

View file

@ -2746,7 +2746,7 @@ void Application::touchUpdateEvent(QTouchEvent* event) {
if (_keyboardMouseDevice->isActive()) { if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->touchUpdateEvent(event); _keyboardMouseDevice->touchUpdateEvent(event);
} }
if (_touchscreenDevice->isActive()) { if (_touchscreenDevice && _touchscreenDevice->isActive()) {
_touchscreenDevice->touchUpdateEvent(event); _touchscreenDevice->touchUpdateEvent(event);
} }
} }
@ -2767,7 +2767,7 @@ void Application::touchBeginEvent(QTouchEvent* event) {
if (_keyboardMouseDevice->isActive()) { if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->touchBeginEvent(event); _keyboardMouseDevice->touchBeginEvent(event);
} }
if (_touchscreenDevice->isActive()) { if (_touchscreenDevice && _touchscreenDevice->isActive()) {
_touchscreenDevice->touchBeginEvent(event); _touchscreenDevice->touchBeginEvent(event);
} }
@ -2787,7 +2787,7 @@ void Application::touchEndEvent(QTouchEvent* event) {
if (_keyboardMouseDevice->isActive()) { if (_keyboardMouseDevice->isActive()) {
_keyboardMouseDevice->touchEndEvent(event); _keyboardMouseDevice->touchEndEvent(event);
} }
if (_touchscreenDevice->isActive()) { if (_touchscreenDevice && _touchscreenDevice->isActive()) {
_touchscreenDevice->touchEndEvent(event); _touchscreenDevice->touchEndEvent(event);
} }
@ -2795,7 +2795,7 @@ void Application::touchEndEvent(QTouchEvent* event) {
} }
void Application::touchGestureEvent(QGestureEvent* event) { void Application::touchGestureEvent(QGestureEvent* event) {
if (_touchscreenDevice->isActive()) { if (_touchscreenDevice && _touchscreenDevice->isActive()) {
_touchscreenDevice->touchGestureEvent(event); _touchscreenDevice->touchGestureEvent(event);
} }
} }

View file

@ -207,8 +207,10 @@ void AvatarActionHold::doKinematicUpdate(float deltaTimeStep) {
} }
withWriteLock([&]{ withWriteLock([&]{
if (_previousSet) { if (_previousSet &&
_positionalTarget != _previousPositionalTarget) { // don't average in a zero velocity if we get the same data
glm::vec3 oneFrameVelocity = (_positionalTarget - _previousPositionalTarget) / deltaTimeStep; glm::vec3 oneFrameVelocity = (_positionalTarget - _previousPositionalTarget) / deltaTimeStep;
_measuredLinearVelocities[_measuredLinearVelocitiesIndex++] = oneFrameVelocity; _measuredLinearVelocities[_measuredLinearVelocitiesIndex++] = oneFrameVelocity;
if (_measuredLinearVelocitiesIndex >= AvatarActionHold::velocitySmoothFrames) { if (_measuredLinearVelocitiesIndex >= AvatarActionHold::velocitySmoothFrames) {
_measuredLinearVelocitiesIndex = 0; _measuredLinearVelocitiesIndex = 0;
@ -228,9 +230,9 @@ void AvatarActionHold::doKinematicUpdate(float deltaTimeStep) {
// 3 -- ignore i of 0 1 2 // 3 -- ignore i of 0 1 2
// 4 -- ignore i of 1 2 3 // 4 -- ignore i of 1 2 3
// 5 -- ignore i of 2 3 4 // 5 -- ignore i of 2 3 4
if ((i + 1) % 6 == _measuredLinearVelocitiesIndex || if ((i + 1) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex ||
(i + 2) % 6 == _measuredLinearVelocitiesIndex || (i + 2) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex ||
(i + 3) % 6 == _measuredLinearVelocitiesIndex) { (i + 3) % AvatarActionHold::velocitySmoothFrames == _measuredLinearVelocitiesIndex) {
continue; continue;
} }
measuredLinearVelocity += _measuredLinearVelocities[i]; measuredLinearVelocity += _measuredLinearVelocities[i];

View file

@ -40,10 +40,10 @@ AddressBarDialog::AddressBarDialog(QQuickItem* parent) : OffscreenQmlDialog(pare
_forwardEnabled = !(DependencyManager::get<AddressManager>()->getForwardStack().isEmpty()); _forwardEnabled = !(DependencyManager::get<AddressManager>()->getForwardStack().isEmpty());
} }
void AddressBarDialog::loadAddress(const QString& address) { void AddressBarDialog::loadAddress(const QString& address, bool fromSuggestions) {
qDebug() << "Called LoadAddress with address " << address; qDebug() << "Called LoadAddress with address " << address;
if (!address.isEmpty()) { if (!address.isEmpty()) {
DependencyManager::get<AddressManager>()->handleLookupString(address); DependencyManager::get<AddressManager>()->handleLookupString(address, fromSuggestions);
} }
} }

View file

@ -34,7 +34,7 @@ protected:
void displayAddressOfflineMessage(); void displayAddressOfflineMessage();
void displayAddressNotFoundMessage(); void displayAddressNotFoundMessage();
Q_INVOKABLE void loadAddress(const QString& address); Q_INVOKABLE void loadAddress(const QString& address, bool fromSuggestions = false);
Q_INVOKABLE void loadHome(); Q_INVOKABLE void loadHome();
Q_INVOKABLE void loadBack(); Q_INVOKABLE void loadBack();
Q_INVOKABLE void loadForward(); Q_INVOKABLE void loadForward();

View file

@ -834,7 +834,7 @@ void AudioClient::handleAudioInput() {
encodedBuffer = decocedBuffer; encodedBuffer = decocedBuffer;
} }
emitAudioPacket(encodedBuffer.constData(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber, audioTransform, packetType); emitAudioPacket(encodedBuffer.constData(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber, audioTransform, packetType, _selectedCodecName);
_stats.sentPacket(); _stats.sentPacket();
} }
} }
@ -852,7 +852,7 @@ void AudioClient::handleRecordedAudioInput(const QByteArray& audio) {
} }
// FIXME check a flag to see if we should echo audio? // FIXME check a flag to see if we should echo audio?
emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber, audioTransform, PacketType::MicrophoneAudioWithEcho); emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber, audioTransform, PacketType::MicrophoneAudioWithEcho, _selectedCodecName);
} }
void AudioClient::mixLocalAudioInjectors(int16_t* inputBuffer) { void AudioClient::mixLocalAudioInjectors(int16_t* inputBuffer) {
@ -1015,7 +1015,6 @@ bool AudioClient::outputLocalInjector(bool isStereo, AudioInjector* injector) {
// no reason to lock access to the vector of injectors. // no reason to lock access to the vector of injectors.
if (!_activeLocalAudioInjectors.contains(injector)) { if (!_activeLocalAudioInjectors.contains(injector)) {
qDebug() << "adding new injector"; qDebug() << "adding new injector";
_activeLocalAudioInjectors.append(injector); _activeLocalAudioInjectors.append(injector);
} else { } else {
qDebug() << "injector exists in active list already"; qDebug() << "injector exists in active list already";

View file

@ -19,7 +19,8 @@
#include "AudioConstants.h" #include "AudioConstants.h"
void AbstractAudioInterface::emitAudioPacket(const void* audioData, size_t bytes, quint16& sequenceNumber, const Transform& transform, PacketType packetType) { void AbstractAudioInterface::emitAudioPacket(const void* audioData, size_t bytes, quint16& sequenceNumber,
const Transform& transform, PacketType packetType, QString codecName) {
static std::mutex _mutex; static std::mutex _mutex;
using Locker = std::unique_lock<std::mutex>; using Locker = std::unique_lock<std::mutex>;
auto nodeList = DependencyManager::get<NodeList>(); auto nodeList = DependencyManager::get<NodeList>();
@ -27,10 +28,17 @@ void AbstractAudioInterface::emitAudioPacket(const void* audioData, size_t bytes
if (audioMixer && audioMixer->getActiveSocket()) { if (audioMixer && audioMixer->getActiveSocket()) {
Locker lock(_mutex); Locker lock(_mutex);
auto audioPacket = NLPacket::create(packetType); auto audioPacket = NLPacket::create(packetType);
// FIXME - this is not a good way to determine stereoness with codecs....
quint8 isStereo = bytes == AudioConstants::NETWORK_FRAME_BYTES_STEREO ? 1 : 0; quint8 isStereo = bytes == AudioConstants::NETWORK_FRAME_BYTES_STEREO ? 1 : 0;
// write sequence number // write sequence number
audioPacket->writePrimitive(sequenceNumber++); auto sequence = sequenceNumber++;
audioPacket->writePrimitive(sequence);
// write the codec
audioPacket->writeString(codecName);
if (packetType == PacketType::SilentAudioFrame) { if (packetType == PacketType::SilentAudioFrame) {
// pack num silent samples // pack num silent samples
quint16 numSilentSamples = isStereo ? quint16 numSilentSamples = isStereo ?
@ -49,8 +57,8 @@ void AbstractAudioInterface::emitAudioPacket(const void* audioData, size_t bytes
if (audioPacket->getType() != PacketType::SilentAudioFrame) { if (audioPacket->getType() != PacketType::SilentAudioFrame) {
// audio samples have already been packed (written to networkAudioSamples) // audio samples have already been packed (written to networkAudioSamples)
audioPacket->setPayloadSize(audioPacket->getPayloadSize() + bytes); int leadingBytes = audioPacket->getPayloadSize();
static const int leadingBytes = sizeof(quint16) + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8); audioPacket->setPayloadSize(leadingBytes + bytes);
memcpy(audioPacket->getPayload() + leadingBytes, audioData, bytes); memcpy(audioPacket->getPayload() + leadingBytes, audioData, bytes);
} }
nodeList->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::SendAudioPacket); nodeList->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::SendAudioPacket);

View file

@ -28,7 +28,8 @@ class AbstractAudioInterface : public QObject {
public: public:
AbstractAudioInterface(QObject* parent = 0) : QObject(parent) {}; AbstractAudioInterface(QObject* parent = 0) : QObject(parent) {};
static void emitAudioPacket(const void* audioData, size_t bytes, quint16& sequenceNumber, const Transform& transform, PacketType packetType); static void emitAudioPacket(const void* audioData, size_t bytes, quint16& sequenceNumber, const Transform& transform,
PacketType packetType, QString codecName = QString(""));
public slots: public slots:
virtual bool outputLocalInjector(bool isStereo, AudioInjector* injector) = 0; virtual bool outputLocalInjector(bool isStereo, AudioInjector* injector) = 0;

View file

@ -26,6 +26,8 @@ namespace AudioConstants {
inline const char* getAudioFrameName() { return "com.highfidelity.recording.Audio"; } inline const char* getAudioFrameName() { return "com.highfidelity.recording.Audio"; }
const int MAX_CODEC_NAME_LENGTH = 30;
const int MAX_CODEC_NAME_LENGTH_ON_WIRE = MAX_CODEC_NAME_LENGTH + sizeof(uint32_t);
const int NETWORK_FRAME_BYTES_STEREO = 1024; const int NETWORK_FRAME_BYTES_STEREO = 1024;
const int NETWORK_FRAME_SAMPLES_STEREO = NETWORK_FRAME_BYTES_STEREO / sizeof(AudioSample); const int NETWORK_FRAME_SAMPLES_STEREO = NETWORK_FRAME_BYTES_STEREO / sizeof(AudioSample);
const int NETWORK_FRAME_BYTES_PER_CHANNEL = 512; const int NETWORK_FRAME_BYTES_PER_CHANNEL = 512;

View file

@ -165,14 +165,10 @@ void AudioInjector::restart() {
if (!_options.localOnly) { if (!_options.localOnly) {
if (!injectorManager->restartFinishedInjector(this)) { if (!injectorManager->restartFinishedInjector(this)) {
// TODO: this logic seems to remove the pending delete,
// which makes me wonder about the deleteLater calls
_state = AudioInjectorState::Finished; // we're not playing, so reset the state used by isPlaying. _state = AudioInjectorState::Finished; // we're not playing, so reset the state used by isPlaying.
} }
} }
} else { } else {
// TODO: this logic seems to remove the pending delete,
// which makes me wonder about the deleteLater calls
_state = AudioInjectorState::Finished; // we failed to play, so we are finished again _state = AudioInjectorState::Finished; // we failed to play, so we are finished again
} }
} }
@ -218,6 +214,14 @@ const uchar MAX_INJECTOR_VOLUME = 0xFF;
static const int64_t NEXT_FRAME_DELTA_ERROR_OR_FINISHED = -1; static const int64_t NEXT_FRAME_DELTA_ERROR_OR_FINISHED = -1;
static const int64_t NEXT_FRAME_DELTA_IMMEDIATELY = 0; static const int64_t NEXT_FRAME_DELTA_IMMEDIATELY = 0;
qint64 writeStringToStream(const QString& string, QDataStream& stream) {
QByteArray data = string.toUtf8();
uint32_t length = data.length();
stream << static_cast<quint32>(length);
stream << data;
return length + sizeof(uint32_t);
}
int64_t AudioInjector::injectNextFrame() { int64_t AudioInjector::injectNextFrame() {
if (stateHas(AudioInjectorState::NetworkInjectionFinished)) { if (stateHas(AudioInjectorState::NetworkInjectionFinished)) {
qDebug() << "AudioInjector::injectNextFrame called but AudioInjector has finished and was not restarted. Returning."; qDebug() << "AudioInjector::injectNextFrame called but AudioInjector has finished and was not restarted. Returning.";
@ -264,6 +268,10 @@ int64_t AudioInjector::injectNextFrame() {
// pack some placeholder sequence number for now // pack some placeholder sequence number for now
audioPacketStream << (quint16) 0; audioPacketStream << (quint16) 0;
// current injectors don't use codecs, so pack in the unknown codec name
QString noCodecForInjectors("");
writeStringToStream(noCodecForInjectors, audioPacketStream);
// pack stream identifier (a generated UUID) // pack stream identifier (a generated UUID)
audioPacketStream << QUuid::createUuid(); audioPacketStream << QUuid::createUuid();

View file

@ -34,11 +34,11 @@ class AudioInjectorManager;
enum class AudioInjectorState : uint8_t { enum class AudioInjectorState : uint8_t {
NotFinished = 1, NotFinished = 0,
Finished = 2, Finished = 1,
PendingDelete = 4, PendingDelete = 2,
LocalInjectionFinished = 8, LocalInjectionFinished = 4,
NetworkInjectionFinished = 16 NetworkInjectionFinished = 8
}; };
AudioInjectorState operator& (AudioInjectorState lhs, AudioInjectorState rhs); AudioInjectorState operator& (AudioInjectorState lhs, AudioInjectorState rhs);
@ -50,12 +50,6 @@ class AudioInjector : public QObject {
Q_OBJECT Q_OBJECT
public: public:
static const uint8_t NotFinished = 1;
static const uint8_t Finished = 2;
static const uint8_t PendingDelete = 4;
static const uint8_t LocalInjectionFinished = 8;
static const uint8_t NetworkInjectionFinished = 16;
AudioInjector(QObject* parent); AudioInjector(QObject* parent);
AudioInjector(const Sound& sound, const AudioInjectorOptions& injectorOptions); AudioInjector(const Sound& sound, const AudioInjectorOptions& injectorOptions);
AudioInjector(const QByteArray& audioData, const AudioInjectorOptions& injectorOptions); AudioInjector(const QByteArray& audioData, const AudioInjectorOptions& injectorOptions);
@ -90,7 +84,7 @@ public slots:
void setOptions(const AudioInjectorOptions& options); void setOptions(const AudioInjectorOptions& options);
float getLoudness() const { return _loudness; } float getLoudness() const { return _loudness; }
bool isPlaying() const { return stateHas(AudioInjectorState::NotFinished); } bool isPlaying() const { return !stateHas(AudioInjectorState::Finished); }
void finish(); void finish();
void finishLocalInjection(); void finishLocalInjection();
void finishNetworkInjection(); void finishNetworkInjection();

View file

@ -13,6 +13,7 @@
#include <NLPacket.h> #include <NLPacket.h>
#include <Node.h> #include <Node.h>
#include <NodeList.h>
#include "InboundAudioStream.h" #include "InboundAudioStream.h"
@ -58,6 +59,7 @@ void InboundAudioStream::reset() {
_isStarved = true; _isStarved = true;
_hasStarted = false; _hasStarted = false;
resetStats(); resetStats();
cleanupCodec();
} }
void InboundAudioStream::resetStats() { void InboundAudioStream::resetStats() {
@ -99,12 +101,12 @@ void InboundAudioStream::perSecondCallbackForUpdatingStats() {
} }
int InboundAudioStream::parseData(ReceivedMessage& message) { int InboundAudioStream::parseData(ReceivedMessage& message) {
// parse sequence number and track it // parse sequence number and track it
quint16 sequence; quint16 sequence;
message.readPrimitive(&sequence); message.readPrimitive(&sequence);
SequenceNumberStats::ArrivalInfo arrivalInfo = _incomingSequenceNumberStats.sequenceNumberReceived(sequence, SequenceNumberStats::ArrivalInfo arrivalInfo = _incomingSequenceNumberStats.sequenceNumberReceived(sequence,
message.getSourceID()); message.getSourceID());
QString codecInPacket = message.readString();
packetReceivedUpdateTimingStats(); packetReceivedUpdateTimingStats();
@ -114,7 +116,7 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
int prePropertyPosition = message.getPosition(); int prePropertyPosition = message.getPosition();
int propertyBytes = parseStreamProperties(message.getType(), message.readWithoutCopy(message.getBytesLeftToRead()), networkSamples); int propertyBytes = parseStreamProperties(message.getType(), message.readWithoutCopy(message.getBytesLeftToRead()), networkSamples);
message.seek(prePropertyPosition + propertyBytes); message.seek(prePropertyPosition + propertyBytes);
// handle this packet based on its arrival status. // handle this packet based on its arrival status.
switch (arrivalInfo._status) { switch (arrivalInfo._status) {
case SequenceNumberStats::Early: { case SequenceNumberStats::Early: {
@ -129,9 +131,22 @@ int InboundAudioStream::parseData(ReceivedMessage& message) {
case SequenceNumberStats::OnTime: { case SequenceNumberStats::OnTime: {
// Packet is on time; parse its data to the ringbuffer // Packet is on time; parse its data to the ringbuffer
if (message.getType() == PacketType::SilentAudioFrame) { if (message.getType() == PacketType::SilentAudioFrame) {
// FIXME - Some codecs need to know about these silent frames... and can produce better output
writeDroppableSilentSamples(networkSamples); writeDroppableSilentSamples(networkSamples);
} else { } else {
parseAudioData(message.getType(), message.readWithoutCopy(message.getBytesLeftToRead())); // note: PCM and no codec are identical
bool selectedPCM = _selectedCodecName == "pcm" || _selectedCodecName == "";
bool packetPCM = codecInPacket == "pcm" || codecInPacket == "";
if (codecInPacket == _selectedCodecName || (packetPCM && selectedPCM)) {
auto afterProperties = message.readWithoutCopy(message.getBytesLeftToRead());
parseAudioData(message.getType(), afterProperties);
} else {
qDebug() << "Codec mismatch: expected" << _selectedCodecName << "got" << codecInPacket << "writing silence";
writeDroppableSilentSamples(networkSamples);
// inform others of the mismatch
auto sendingNode = DependencyManager::get<NodeList>()->nodeWithUUID(message.getSourceID());
emit mismatchedAudioCodec(sendingNode, _selectedCodecName);
}
} }
break; break;
} }

View file

@ -12,6 +12,7 @@
#ifndef hifi_InboundAudioStream_h #ifndef hifi_InboundAudioStream_h
#define hifi_InboundAudioStream_h #define hifi_InboundAudioStream_h
#include <Node.h>
#include <NodeData.h> #include <NodeData.h>
#include <NumericalConstants.h> #include <NumericalConstants.h>
#include <udt/PacketHeaders.h> #include <udt/PacketHeaders.h>
@ -180,6 +181,9 @@ public:
void setupCodec(CodecPluginPointer codec, const QString& codecName, int numChannels); void setupCodec(CodecPluginPointer codec, const QString& codecName, int numChannels);
void cleanupCodec(); void cleanupCodec();
signals:
void mismatchedAudioCodec(SharedNodePointer sendingNode, const QString& desiredCodec);
public slots: public slots:
/// This function should be called every second for all the stats to function properly. If dynamic jitter buffers /// This function should be called every second for all the stats to function properly. If dynamic jitter buffers
/// is enabled, those stats are used to calculate _desiredJitterBufferFrames. /// is enabled, those stats are used to calculate _desiredJitterBufferFrames.

View file

@ -33,6 +33,7 @@ const uchar MAX_INJECTOR_VOLUME = 255;
int InjectedAudioStream::parseStreamProperties(PacketType type, int InjectedAudioStream::parseStreamProperties(PacketType type,
const QByteArray& packetAfterSeqNum, const QByteArray& packetAfterSeqNum,
int& numAudioSamples) { int& numAudioSamples) {
// setup a data stream to read from this packet // setup a data stream to read from this packet
QDataStream packetStream(packetAfterSeqNum); QDataStream packetStream(packetAfterSeqNum);

View file

@ -36,7 +36,15 @@ OffscreenGLCanvas::~OffscreenGLCanvas() {
delete _logger; delete _logger;
_logger = nullptr; _logger = nullptr;
} }
_context->doneCurrent(); _context->doneCurrent();
delete _context;
_context = nullptr;
_offscreenSurface->destroy();
delete _offscreenSurface;
_offscreenSurface = nullptr;
} }
bool OffscreenGLCanvas::create(QOpenGLContext* sharedContext) { bool OffscreenGLCanvas::create(QOpenGLContext* sharedContext) {

View file

@ -34,8 +34,8 @@ public:
protected: protected:
std::once_flag _reportOnce; std::once_flag _reportOnce;
QOpenGLContext* _context; QOpenGLContext* _context{ nullptr };
QOffscreenSurface* _offscreenSurface; QOffscreenSurface* _offscreenSurface{ nullptr };
QOpenGLDebugLogger* _logger{ nullptr }; QOpenGLDebugLogger* _logger{ nullptr };
}; };

View file

@ -28,16 +28,17 @@ QOpenGLContext* QOpenGLContextWrapper::currentContext() {
return QOpenGLContext::currentContext(); return QOpenGLContext::currentContext();
} }
QOpenGLContextWrapper::QOpenGLContextWrapper() : QOpenGLContextWrapper::QOpenGLContextWrapper() :
_context(new QOpenGLContext) _ownContext(true), _context(new QOpenGLContext) { }
{
}
QOpenGLContextWrapper::QOpenGLContextWrapper(QOpenGLContext* context) : QOpenGLContextWrapper::QOpenGLContextWrapper(QOpenGLContext* context) :
_context(context) _context(context) { }
{
QOpenGLContextWrapper::~QOpenGLContextWrapper() {
if (_ownContext) {
delete _context;
_context = nullptr;
}
} }
void QOpenGLContextWrapper::setFormat(const QSurfaceFormat& format) { void QOpenGLContextWrapper::setFormat(const QSurfaceFormat& format) {

View file

@ -23,6 +23,7 @@ class QOpenGLContextWrapper {
public: public:
QOpenGLContextWrapper(); QOpenGLContextWrapper();
QOpenGLContextWrapper(QOpenGLContext* context); QOpenGLContextWrapper(QOpenGLContext* context);
virtual ~QOpenGLContextWrapper();
void setFormat(const QSurfaceFormat& format); void setFormat(const QSurfaceFormat& format);
bool create(); bool create();
void swapBuffers(QSurface* surface); void swapBuffers(QSurface* surface);
@ -40,6 +41,7 @@ public:
private: private:
bool _ownContext { false };
QOpenGLContext* _context { nullptr }; QOpenGLContext* _context { nullptr };
}; };

View file

@ -187,7 +187,11 @@ GLTexture::~GLTexture() {
} }
} }
Backend::decrementTextureGPUCount(); if (_id) {
glDeleteTextures(1, &_id);
const_cast<GLuint&>(_id) = 0;
Backend::decrementTextureGPUCount();
}
Backend::updateTextureGPUMemoryUsage(_size, 0); Backend::updateTextureGPUMemoryUsage(_size, 0);
Backend::updateTextureGPUVirtualMemoryUsage(_virtualSize, 0); Backend::updateTextureGPUVirtualMemoryUsage(_virtualSize, 0);
} }

View file

@ -24,6 +24,7 @@
#include "NodeList.h" #include "NodeList.h"
#include "NetworkLogging.h" #include "NetworkLogging.h"
#include "UserActivityLogger.h" #include "UserActivityLogger.h"
#include "udt/PacketHeaders.h"
const QString ADDRESS_MANAGER_SETTINGS_GROUP = "AddressManager"; const QString ADDRESS_MANAGER_SETTINGS_GROUP = "AddressManager";
@ -37,6 +38,10 @@ AddressManager::AddressManager() :
} }
QString AddressManager::protocolVersion() {
return protocolVersionsSignatureBase64();
}
bool AddressManager::isConnected() { bool AddressManager::isConnected() {
return DependencyManager::get<NodeList>()->getDomainHandler().isConnected(); return DependencyManager::get<NodeList>()->getDomainHandler().isConnected();
} }
@ -221,7 +226,7 @@ bool AddressManager::handleUrl(const QUrl& lookupUrl, LookupTrigger trigger) {
return false; return false;
} }
void AddressManager::handleLookupString(const QString& lookupString) { void AddressManager::handleLookupString(const QString& lookupString, bool fromSuggestions) {
if (!lookupString.isEmpty()) { if (!lookupString.isEmpty()) {
// make this a valid hifi URL and handle it off to handleUrl // make this a valid hifi URL and handle it off to handleUrl
QString sanitizedString = lookupString.trimmed(); QString sanitizedString = lookupString.trimmed();
@ -236,7 +241,7 @@ void AddressManager::handleLookupString(const QString& lookupString) {
lookupURL = QUrl(lookupString); lookupURL = QUrl(lookupString);
} }
handleUrl(lookupURL); handleUrl(lookupURL, fromSuggestions ? Suggestions : UserInput);
} }
} }

View file

@ -39,6 +39,7 @@ class AddressManager : public QObject, public Dependency {
Q_PROPERTY(QString hostname READ getHost) Q_PROPERTY(QString hostname READ getHost)
Q_PROPERTY(QString pathname READ currentPath) Q_PROPERTY(QString pathname READ currentPath)
public: public:
Q_INVOKABLE QString protocolVersion();
using PositionGetter = std::function<glm::vec3()>; using PositionGetter = std::function<glm::vec3()>;
using OrientationGetter = std::function<glm::quat()>; using OrientationGetter = std::function<glm::quat()>;
@ -49,7 +50,8 @@ public:
StartupFromSettings, StartupFromSettings,
DomainPathResponse, DomainPathResponse,
Internal, Internal,
AttemptedRefresh AttemptedRefresh,
Suggestions
}; };
bool isConnected(); bool isConnected();
@ -77,7 +79,7 @@ public:
std::function<void()> localSandboxNotRunningDoThat); std::function<void()> localSandboxNotRunningDoThat);
public slots: public slots:
void handleLookupString(const QString& lookupString); void handleLookupString(const QString& lookupString, bool fromSuggestions = false);
// we currently expect this to be called from NodeList once handleLookupString has been called with a path // we currently expect this to be called from NodeList once handleLookupString has been called with a path
bool goToViewpointForPath(const QString& viewpointString, const QString& pathString) bool goToViewpointForPath(const QString& viewpointString, const QString& pathString)

View file

@ -34,7 +34,7 @@
#include "NetworkLogging.h" #include "NetworkLogging.h"
#include "udt/Packet.h" #include "udt/Packet.h"
const char SOLO_NODE_TYPES[2] = { const std::set<NodeType_t> SOLO_NODE_TYPES = {
NodeType::AvatarMixer, NodeType::AvatarMixer,
NodeType::AudioMixer NodeType::AudioMixer
}; };
@ -534,7 +534,7 @@ SharedNodePointer LimitedNodeList::addOrUpdateNode(const QUuid& uuid, NodeType_t
if (it != _nodeHash.end()) { if (it != _nodeHash.end()) {
SharedNodePointer& matchingNode = it->second; SharedNodePointer& matchingNode = it->second;
matchingNode->setPublicSocket(publicSocket); matchingNode->setPublicSocket(publicSocket);
matchingNode->setLocalSocket(localSocket); matchingNode->setLocalSocket(localSocket);
matchingNode->setPermissions(permissions); matchingNode->setPermissions(permissions);
@ -551,7 +551,33 @@ SharedNodePointer LimitedNodeList::addOrUpdateNode(const QUuid& uuid, NodeType_t
SharedNodePointer newNodePointer(newNode, &QObject::deleteLater); SharedNodePointer newNodePointer(newNode, &QObject::deleteLater);
// if this is a solo node type, we assume that the DS has replaced its assignment and we should kill the previous node
if (SOLO_NODE_TYPES.count(newNode->getType())) {
// while we still have the read lock, see if there is a previous solo node we'll need to remove
auto previousSoloIt = std::find_if(_nodeHash.cbegin(), _nodeHash.cend(), [newNode](const UUIDNodePair& nodePair){
return nodePair.second->getType() == newNode->getType();
});
if (previousSoloIt != _nodeHash.cend()) {
// we have a previous solo node, switch to a write lock so we can remove it
readLocker.unlock();
QWriteLocker writeLocker(&_nodeMutex);
auto oldSoloNode = previousSoloIt->second;
_nodeHash.unsafe_erase(previousSoloIt);
handleNodeKill(oldSoloNode);
// convert the current lock back to a read lock for insertion of new node
writeLocker.unlock();
readLocker.relock();
}
}
// insert the new node and release our read lock
_nodeHash.insert(UUIDNodePair(newNode->getUUID(), newNodePointer)); _nodeHash.insert(UUIDNodePair(newNode->getUUID(), newNodePointer));
readLocker.unlock();
qCDebug(networking) << "Added" << *newNode; qCDebug(networking) << "Added" << *newNode;

View file

@ -16,6 +16,7 @@
#include <stdint.h> #include <stdint.h>
#include <iterator> #include <iterator>
#include <memory> #include <memory>
#include <set>
#include <unordered_map> #include <unordered_map>
#ifndef _WIN32 #ifndef _WIN32
@ -46,7 +47,7 @@
const quint64 NODE_SILENCE_THRESHOLD_MSECS = 5 * 1000; const quint64 NODE_SILENCE_THRESHOLD_MSECS = 5 * 1000;
extern const char SOLO_NODE_TYPES[2]; extern const std::set<NodeType_t> SOLO_NODE_TYPES;
const char DEFAULT_ASSIGNMENT_SERVER_HOSTNAME[] = "localhost"; const char DEFAULT_ASSIGNMENT_SERVER_HOSTNAME[] = "localhost";

View file

@ -178,6 +178,9 @@ void UserActivityLogger::wentTo(AddressManager::LookupTrigger lookupTrigger, QSt
case AddressManager::StartupFromSettings: case AddressManager::StartupFromSettings:
trigger = "StartupFromSettings"; trigger = "StartupFromSettings";
break; break;
case AddressManager::Suggestions:
trigger = "Suggesions";
break;
default: default:
return; return;
} }

View file

@ -154,8 +154,7 @@ qint64 BasePacket::writeString(const QString& string) {
QByteArray data = string.toUtf8(); QByteArray data = string.toUtf8();
uint32_t length = data.length(); uint32_t length = data.length();
writePrimitive(length); writePrimitive(length);
writeData(data.constData(), data.length()); write(data.constData(), data.length());
seek(pos() + length);
return length + sizeof(uint32_t); return length + sizeof(uint32_t);
} }
@ -176,7 +175,6 @@ bool BasePacket::reset() {
} }
qint64 BasePacket::writeData(const char* data, qint64 maxSize) { qint64 BasePacket::writeData(const char* data, qint64 maxSize) {
Q_ASSERT_X(maxSize <= bytesAvailableForWrite(), "BasePacket::writeData", "not enough space for write"); Q_ASSERT_X(maxSize <= bytesAvailableForWrite(), "BasePacket::writeData", "not enough space for write");
// make sure we have the space required to write this block // make sure we have the space required to write this block

View file

@ -72,6 +72,13 @@ PacketVersion versionForPacketType(PacketType packetType) {
case PacketType::DomainServerAddedNode: case PacketType::DomainServerAddedNode:
return static_cast<PacketVersion>(DomainServerAddedNodeVersion::PermissionsGrid); return static_cast<PacketVersion>(DomainServerAddedNodeVersion::PermissionsGrid);
case PacketType::MixedAudio:
case PacketType::SilentAudioFrame:
case PacketType::InjectAudio:
case PacketType::MicrophoneAudioNoEcho:
case PacketType::MicrophoneAudioWithEcho:
return static_cast<PacketVersion>(AudioVersion::CodecNameInAudioPackets);
default: default:
return 17; return 17;
} }
@ -99,8 +106,9 @@ void sendWrongProtocolVersionsSignature(bool sendWrongVersion) {
} }
#endif #endif
QByteArray protocolVersionsSignature() { static QByteArray protocolVersionSignature;
static QByteArray protocolVersionSignature; static QString protocolVersionSignatureBase64;
static void ensureProtocolVersionsSignature() {
static std::once_flag once; static std::once_flag once;
std::call_once(once, [&] { std::call_once(once, [&] {
QByteArray buffer; QByteArray buffer;
@ -114,8 +122,11 @@ QByteArray protocolVersionsSignature() {
QCryptographicHash hash(QCryptographicHash::Md5); QCryptographicHash hash(QCryptographicHash::Md5);
hash.addData(buffer); hash.addData(buffer);
protocolVersionSignature = hash.result(); protocolVersionSignature = hash.result();
protocolVersionSignatureBase64 = protocolVersionSignature.toBase64();
}); });
}
QByteArray protocolVersionsSignature() {
ensureProtocolVersionsSignature();
#if (PR_BUILD || DEV_BUILD) #if (PR_BUILD || DEV_BUILD)
if (sendWrongProtocolVersion) { if (sendWrongProtocolVersion) {
return QByteArray("INCORRECTVERSION"); // only for debugging version checking return QByteArray("INCORRECTVERSION"); // only for debugging version checking
@ -124,3 +135,7 @@ QByteArray protocolVersionsSignature() {
return protocolVersionSignature; return protocolVersionSignature;
} }
QString protocolVersionsSignatureBase64() {
ensureProtocolVersionsSignature();
return protocolVersionSignatureBase64;
}

View file

@ -113,6 +113,7 @@ extern const QSet<PacketType> NON_SOURCED_PACKETS;
PacketVersion versionForPacketType(PacketType packetType); PacketVersion versionForPacketType(PacketType packetType);
QByteArray protocolVersionsSignature(); /// returns a unqiue signature for all the current protocols QByteArray protocolVersionsSignature(); /// returns a unqiue signature for all the current protocols
QString protocolVersionsSignatureBase64();
#if (PR_BUILD || DEV_BUILD) #if (PR_BUILD || DEV_BUILD)
void sendWrongProtocolVersionsSignature(bool sendWrongVersion); /// for debugging version negotiation void sendWrongProtocolVersionsSignature(bool sendWrongVersion); /// for debugging version negotiation
@ -213,4 +214,9 @@ enum class DomainListVersion : PacketVersion {
PermissionsGrid PermissionsGrid
}; };
enum class AudioVersion : PacketVersion {
HasCompressedAudio = 17,
CodecNameInAudioPackets
};
#endif // hifi_PacketHeaders_h #endif // hifi_PacketHeaders_h

View file

@ -6,15 +6,11 @@
# See the accompanying file LICENSE or http:#www.apache.org/licenses/LICENSE-2.0.html # See the accompanying file LICENSE or http:#www.apache.org/licenses/LICENSE-2.0.html
# #
if (WIN32 OR APPLE) set(TARGET_NAME hifiCodec)
set(TARGET_NAME hifiCodec) setup_hifi_client_server_plugin()
setup_hifi_client_server_plugin() link_hifi_libraries(audio shared plugins)
add_dependency_external_projects(hifiAudioCodec)
link_hifi_libraries(audio shared plugins) target_include_directories(${TARGET_NAME} PRIVATE ${HIFIAUDIOCODEC_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${HIFIAUDIOCODEC_LIBRARIES})
add_dependency_external_projects(HiFiAudioCodec) install_beside_console()
target_include_directories(${TARGET_NAME} PRIVATE ${HIFIAUDIOCODEC_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${HIFIAUDIOCODEC_LIBRARIES})
install_beside_console()
endif()

View file

@ -34,6 +34,7 @@ var OVERLAY_DATA_HMD = {
color: {red: 255, green: 255, blue: 255}, color: {red: 255, green: 255, blue: 255},
alpha: 1, alpha: 1,
scale: 2, scale: 2,
emissive: true,
isFacingAvatar: true, isFacingAvatar: true,
drawInFront: true drawInFront: true
}; };

View file

@ -202,8 +202,7 @@ CONTROLLER_STATE_MACHINE[STATE_NEAR_GRABBING] = {
CONTROLLER_STATE_MACHINE[STATE_HOLD] = { CONTROLLER_STATE_MACHINE[STATE_HOLD] = {
name: "hold", name: "hold",
enterMethod: "nearGrabbingEnter", enterMethod: "nearGrabbingEnter",
updateMethod: "nearGrabbing", updateMethod: "nearGrabbing"
exitMethod: "holdExit"
}; };
CONTROLLER_STATE_MACHINE[STATE_NEAR_TRIGGER] = { CONTROLLER_STATE_MACHINE[STATE_NEAR_TRIGGER] = {
name: "trigger", name: "trigger",
@ -228,7 +227,7 @@ function colorPow(color, power) {
return { return {
red: Math.pow(color.red / 255.0, power) * 255, red: Math.pow(color.red / 255.0, power) * 255,
green: Math.pow(color.green / 255.0, power) * 255, green: Math.pow(color.green / 255.0, power) * 255,
blue: Math.pow(color.blue / 255.0, power) * 255, blue: Math.pow(color.blue / 255.0, power) * 255
}; };
} }
@ -271,14 +270,12 @@ function propsArePhysical(props) {
return isPhysical; return isPhysical;
} }
// currently disabled. var USE_ATTACH_POINT_SETTINGS = true;
var USE_ATTACH_POINT_SETTINGS = false;
var ATTACH_POINT_SETTINGS = "io.highfidelity.attachPoints"; var ATTACH_POINT_SETTINGS = "io.highfidelity.attachPoints";
function getAttachPointSettings() { function getAttachPointSettings() {
try { try {
var str = Settings.getValue(ATTACH_POINT_SETTINGS); var str = Settings.getValue(ATTACH_POINT_SETTINGS);
print("getAttachPointSettings = " + str);
if (str === "false") { if (str === "false") {
return {}; return {};
} else { } else {
@ -291,7 +288,6 @@ function getAttachPointSettings() {
} }
function setAttachPointSettings(attachPointSettings) { function setAttachPointSettings(attachPointSettings) {
var str = JSON.stringify(attachPointSettings); var str = JSON.stringify(attachPointSettings);
print("setAttachPointSettings = " + str);
Settings.setValue(ATTACH_POINT_SETTINGS, str); Settings.setValue(ATTACH_POINT_SETTINGS, str);
} }
function getAttachPointForHotspotFromSettings(hotspot, hand) { function getAttachPointForHotspotFromSettings(hotspot, hand) {
@ -765,9 +761,8 @@ function MyController(hand) {
} }
}; };
var SEARCH_SPHERE_ALPHA = 0.5;
this.searchSphereOn = function (location, size, color) { this.searchSphereOn = function (location, size, color) {
var rotation = Quat.lookAt(location, Camera.getPosition(), Vec3.UP); var rotation = Quat.lookAt(location, Camera.getPosition(), Vec3.UP);
var brightColor = colorPow(color, 0.06); var brightColor = colorPow(color, 0.06);
if (this.searchSphere === null) { if (this.searchSphere === null) {
@ -790,7 +785,7 @@ function MyController(hand) {
position: location, position: location,
rotation: rotation, rotation: rotation,
innerColor: brightColor, innerColor: brightColor,
outerColor: color, outerColor: color,
innerAlpha: 1.0, innerAlpha: 1.0,
outerAlpha: 0.0, outerAlpha: 0.0,
outerRadius: size * 1.2, outerRadius: size * 1.2,
@ -1592,13 +1587,16 @@ function MyController(hand) {
this.clearEquipHaptics(); this.clearEquipHaptics();
// controller pose is in avatar frame // controller pose is in avatar frame
var avatarControllerPose = var device = (this.hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
Controller.getPoseValue((this.hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand); var avatarControllerPose = Controller.getPoseValue(device);
// transform it into world frame // transform it into world frame
var controllerPositionVSAvatar = Vec3.multiplyQbyV(MyAvatar.orientation, avatarControllerPose.translation); var worldControllerPosition = Vec3.sum(MyAvatar.position,
var controllerPosition = Vec3.sum(MyAvatar.position, controllerPositionVSAvatar); Vec3.multiplyQbyV(MyAvatar.orientation, avatarControllerPose.translation));
var controllerRotation = Quat.multiply(MyAvatar.orientation, avatarControllerPose.rotation);
// also transform the position into room space
var worldToSensorMat = Mat4.inverse(MyAvatar.getSensorToWorldMatrix());
var roomControllerPosition = Mat4.transformPoint(worldToSensorMat, worldControllerPosition);
var grabbedProperties = Entities.getEntityProperties(this.grabbedEntity, GRABBABLE_PROPERTIES); var grabbedProperties = Entities.getEntityProperties(this.grabbedEntity, GRABBABLE_PROPERTIES);
var now = Date.now(); var now = Date.now();
@ -1609,7 +1607,7 @@ function MyController(hand) {
this.currentObjectTime = now; this.currentObjectTime = now;
this.currentCameraOrientation = Camera.orientation; this.currentCameraOrientation = Camera.orientation;
this.grabRadius = Vec3.distance(this.currentObjectPosition, controllerPosition); this.grabRadius = Vec3.distance(this.currentObjectPosition, worldControllerPosition);
this.grabRadialVelocity = 0.0; this.grabRadialVelocity = 0.0;
// compute a constant based on the initial conditions which we use below to exagerate hand motion onto the held object // compute a constant based on the initial conditions which we use below to exagerate hand motion onto the held object
@ -1644,8 +1642,7 @@ function MyController(hand) {
this.turnOffVisualizations(); this.turnOffVisualizations();
this.previousControllerPositionVSAvatar = controllerPositionVSAvatar; this.previousRoomControllerPosition = roomControllerPosition;
this.previousControllerRotation = controllerRotation;
}; };
this.distanceHolding = function (deltaTime, timestamp) { this.distanceHolding = function (deltaTime, timestamp) {
@ -1658,13 +1655,17 @@ function MyController(hand) {
this.heartBeat(this.grabbedEntity); this.heartBeat(this.grabbedEntity);
// controller pose is in avatar frame // controller pose is in avatar frame
var avatarControllerPose = Controller.getPoseValue((this.hand === RIGHT_HAND) ? var device = (this.hand === RIGHT_HAND) ? Controller.Standard.RightHand : Controller.Standard.LeftHand;
Controller.Standard.RightHand : Controller.Standard.LeftHand); var avatarControllerPose = Controller.getPoseValue(device);
// transform it into world frame // transform it into world frame
var controllerPositionVSAvatar = Vec3.multiplyQbyV(MyAvatar.orientation, avatarControllerPose.translation); var worldControllerPosition = Vec3.sum(MyAvatar.position,
var controllerPosition = Vec3.sum(MyAvatar.position, controllerPositionVSAvatar); Vec3.multiplyQbyV(MyAvatar.orientation, avatarControllerPose.translation));
var controllerRotation = Quat.multiply(MyAvatar.orientation, avatarControllerPose.rotation); var worldControllerRotation = Quat.multiply(MyAvatar.orientation, avatarControllerPose.rotation);
// also transform the position into room space
var worldToSensorMat = Mat4.inverse(MyAvatar.getSensorToWorldMatrix());
var roomControllerPosition = Mat4.transformPoint(worldToSensorMat, worldControllerPosition);
var grabbedProperties = Entities.getEntityProperties(this.grabbedEntity, GRABBABLE_PROPERTIES); var grabbedProperties = Entities.getEntityProperties(this.grabbedEntity, GRABBABLE_PROPERTIES);
@ -1673,26 +1674,16 @@ function MyController(hand) {
this.currentObjectTime = now; this.currentObjectTime = now;
// the action was set up when this.distanceHolding was called. update the targets. // the action was set up when this.distanceHolding was called. update the targets.
var radius = Vec3.distance(this.currentObjectPosition, controllerPosition) * var radius = Vec3.distance(this.currentObjectPosition, worldControllerPosition) *
this.radiusScalar * DISTANCE_HOLDING_RADIUS_FACTOR; this.radiusScalar * DISTANCE_HOLDING_RADIUS_FACTOR;
if (radius < 1.0) { if (radius < 1.0) {
radius = 1.0; radius = 1.0;
} }
// scale delta controller hand movement by radius. var roomHandDelta = Vec3.subtract(roomControllerPosition, this.previousRoomControllerPosition);
var handMoved = Vec3.multiply(Vec3.subtract(controllerPositionVSAvatar, this.previousControllerPositionVSAvatar), var worldHandDelta = Mat4.transformVector(MyAvatar.getSensorToWorldMatrix(), roomHandDelta);
radius); var handMoved = Vec3.multiply(worldHandDelta, radius);
/// double delta controller rotation
// var DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR = 2.0; // object rotates this much more than hand did
// var handChange = Quat.multiply(Quat.slerp(this.previousControllerRotation,
// controllerRotation,
// DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR),
// Quat.inverse(this.previousControllerRotation));
// update the currentObject position and rotation.
this.currentObjectPosition = Vec3.sum(this.currentObjectPosition, handMoved); this.currentObjectPosition = Vec3.sum(this.currentObjectPosition, handMoved);
// this.currentObjectRotation = Quat.multiply(handChange, this.currentObjectRotation);
this.callEntityMethodOnGrabbed("continueDistantGrab"); this.callEntityMethodOnGrabbed("continueDistantGrab");
@ -1703,10 +1694,9 @@ function MyController(hand) {
var handControllerData = getEntityCustomData('handControllerKey', this.grabbedEntity, defaultMoveWithHeadData); var handControllerData = getEntityCustomData('handControllerKey', this.grabbedEntity, defaultMoveWithHeadData);
// Update radialVelocity // Update radialVelocity
var lastVelocity = Vec3.subtract(controllerPositionVSAvatar, this.previousControllerPositionVSAvatar); var lastVelocity = Vec3.multiply(worldHandDelta, 1.0 / deltaObjectTime);
lastVelocity = Vec3.multiply(lastVelocity, 1.0 / deltaObjectTime); var delta = Vec3.normalize(Vec3.subtract(grabbedProperties.position, worldControllerPosition));
var newRadialVelocity = Vec3.dot(lastVelocity, var newRadialVelocity = Vec3.dot(lastVelocity, delta);
Vec3.normalize(Vec3.subtract(grabbedProperties.position, controllerPosition)));
var VELOCITY_AVERAGING_TIME = 0.016; var VELOCITY_AVERAGING_TIME = 0.016;
this.grabRadialVelocity = (deltaObjectTime / VELOCITY_AVERAGING_TIME) * newRadialVelocity + this.grabRadialVelocity = (deltaObjectTime / VELOCITY_AVERAGING_TIME) * newRadialVelocity +
@ -1718,9 +1708,8 @@ function MyController(hand) {
this.grabRadius * RADIAL_GRAB_AMPLIFIER); this.grabRadius * RADIAL_GRAB_AMPLIFIER);
} }
var newTargetPosition = Vec3.multiply(this.grabRadius, Quat.getUp(controllerRotation)); var newTargetPosition = Vec3.multiply(this.grabRadius, Quat.getUp(worldControllerRotation));
newTargetPosition = Vec3.sum(newTargetPosition, controllerPosition); newTargetPosition = Vec3.sum(newTargetPosition, worldControllerPosition);
var objectToAvatar = Vec3.subtract(this.currentObjectPosition, MyAvatar.position); var objectToAvatar = Vec3.subtract(this.currentObjectPosition, MyAvatar.position);
if (handControllerData.disableMoveWithHead !== true) { if (handControllerData.disableMoveWithHead !== true) {
@ -1776,8 +1765,7 @@ function MyController(hand) {
print("continueDistanceHolding -- updateAction failed"); print("continueDistanceHolding -- updateAction failed");
} }
this.previousControllerPositionVSAvatar = controllerPositionVSAvatar; this.previousRoomControllerPosition = roomControllerPosition;
this.previousControllerRotation = controllerRotation;
}; };
this.setupHoldAction = function () { this.setupHoldAction = function () {
@ -1961,12 +1949,12 @@ function MyController(hand) {
this.currentObjectRotation = grabbedProperties.rotation; this.currentObjectRotation = grabbedProperties.rotation;
this.currentVelocity = ZERO_VEC; this.currentVelocity = ZERO_VEC;
this.currentAngularVelocity = ZERO_VEC; this.currentAngularVelocity = ZERO_VEC;
this.prevDropDetected = false;
}; };
this.nearGrabbing = function (deltaTime, timestamp) { this.nearGrabbing = function (deltaTime, timestamp) {
var dropDetected = this.dropGestureProcess(deltaTime);
if (this.state == STATE_NEAR_GRABBING && this.triggerSmoothedReleased()) { if (this.state == STATE_NEAR_GRABBING && this.triggerSmoothedReleased()) {
this.callEntityMethodOnGrabbed("releaseGrab"); this.callEntityMethodOnGrabbed("releaseGrab");
this.setState(STATE_OFF, "trigger released"); this.setState(STATE_OFF, "trigger released");
@ -1975,6 +1963,16 @@ function MyController(hand) {
if (this.state == STATE_HOLD) { if (this.state == STATE_HOLD) {
var dropDetected = this.dropGestureProcess(deltaTime);
if (this.triggerSmoothedReleased()) {
this.waitForTriggerRelease = false;
}
if (dropDetected && this.prevDropDetected != dropDetected) {
this.waitForTriggerRelease = true;
}
// highlight the grabbed hotspot when the dropGesture is detected. // highlight the grabbed hotspot when the dropGesture is detected.
if (dropDetected) { if (dropDetected) {
entityPropertiesCache.addEntity(this.grabbedHotspot.entityID); entityPropertiesCache.addEntity(this.grabbedHotspot.entityID);
@ -1982,17 +1980,24 @@ function MyController(hand) {
equipHotspotBuddy.highlightHotspot(this.grabbedHotspot); equipHotspotBuddy.highlightHotspot(this.grabbedHotspot);
} }
if (dropDetected && this.triggerSmoothedGrab()) { if (dropDetected && !this.waitForTriggerRelease && this.triggerSmoothedGrab()) {
this.callEntityMethodOnGrabbed("releaseEquip"); this.callEntityMethodOnGrabbed("releaseEquip");
this.setState(STATE_OFF, "drop gesture detected");
return;
}
if (this.thumbPressed()) { // store the offset attach points into preferences.
this.callEntityMethodOnGrabbed("releaseEquip"); if (USE_ATTACH_POINT_SETTINGS && this.grabbedHotspot && this.grabbedEntity) {
this.setState(STATE_OFF, "drop via thumb press"); var props = Entities.getEntityProperties(this.grabbedEntity, ["localPosition", "localRotation"]);
if (props && props.localPosition && props.localRotation) {
storeAttachPointForHotspotInSettings(this.grabbedHotspot, this.hand, props.localPosition, props.localRotation);
}
}
var grabbedEntity = this.grabbedEntity;
this.release();
this.grabbedEntity = grabbedEntity;
this.setState(STATE_NEAR_GRABBING, "drop gesture detected");
return; return;
} }
this.prevDropDetected = dropDetected;
} }
this.heartBeat(this.grabbedEntity); this.heartBeat(this.grabbedEntity);
@ -2088,22 +2093,6 @@ function MyController(hand) {
} }
}; };
this.holdExit = function () {
// store the offset attach points into preferences.
if (USE_ATTACH_POINT_SETTINGS && this.grabbedHotspot && this.grabbedEntity) {
entityPropertiesCache.addEntity(this.grabbedEntity);
var props = entityPropertiesCache.getProps(this.grabbedEntity);
var entityXform = new Xform(props.rotation, props.position);
var avatarXform = new Xform(MyAvatar.orientation, MyAvatar.position);
var handRot = (this.hand === RIGHT_HAND) ? MyAvatar.getRightPalmRotation() : MyAvatar.getLeftPalmRotation();
var avatarHandPos = (this.hand === RIGHT_HAND) ? MyAvatar.rightHandPosition : MyAvatar.leftHandPosition;
var palmXform = new Xform(handRot, avatarXform.xformPoint(avatarHandPos));
var offsetXform = Xform.mul(palmXform.inv(), entityXform);
storeAttachPointForHotspotInSettings(this.grabbedHotspot, this.hand, offsetXform.pos, offsetXform.rot);
}
};
this.nearTriggerEnter = function () { this.nearTriggerEnter = function () {
this.clearEquipHaptics(); this.clearEquipHaptics();

View file

@ -1,131 +0,0 @@
//
// handControllerMouse.js
// examples/controllers
//
// Created by Brad Hefta-Gaub on 2015/12/15
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Master switch for debugPrint() diagnostics.
var DEBUGGING = false;
var angularVelocityTrailingAverage = 0.0; // Global trailing average used to decide whether to move reticle at all
var lastX = 0; // last reticle x we moved to; used to skip redundant updates
var lastY = 0; // last reticle y we moved to; used to skip redundant updates
Math.clamp=function(a,b,c) {
return Math.max(b,Math.min(c,a));
}
// Euclidean distance between two 2D points; only the x and y fields are read.
function length(posA, posB) {
    var deltaX = posA.x - posB.x;
    var deltaY = posA.y - posB.y;
    return Math.sqrt(deltaX * deltaX + deltaY * deltaY);
}
// Warp the reticle to absolute screen coordinates (x, y).
// Reads the current position object first so any extra fields it carries
// are preserved when it is written back.
function moveReticleAbsolute(x, y) {
    var position = Reticle.getPosition();
    position.x = x;
    position.y = y;
    Reticle.setPosition(position);
}
// Route controller buttons to the ReticleClick action so the listed buttons
// act like a mouse click while this script is running.
var MAPPING_NAME = "com.highfidelity.testing.reticleWithHandRotation";
var mapping = Controller.newMapping(MAPPING_NAME);
// Only wire up hardware that is actually present.
// NOTE(review): .peek() presumably observes the button without consuming it,
// so other mappings still see the press — confirm against the Controller API.
if (Controller.Hardware.Hydra !== undefined) {
    mapping.from(Controller.Hardware.Hydra.L3).peek().to(Controller.Actions.ReticleClick);
    mapping.from(Controller.Hardware.Hydra.R4).peek().to(Controller.Actions.ReticleClick);
}
if (Controller.Hardware.Vive !== undefined) {
    mapping.from(Controller.Hardware.Vive.LeftPrimaryThumb).peek().to(Controller.Actions.ReticleClick);
    mapping.from(Controller.Hardware.Vive.RightPrimaryThumb).peek().to(Controller.Actions.ReticleClick);
}
mapping.enable();
// Print `message` only when the file-level DEBUGGING flag is set.
function debugPrint(message) {
    if (!DEBUGGING) {
        return;
    }
    print(message);
}
// Running average of (right - left) angular hand speed; positive means the
// right hand has been spinning faster and should own the reticle.
var leftRightBias = 0.0;
// Velocity-filtered hand-facing vectors, seeded pointing straight down.
var filteredRotatedLeft = Vec3.UNIT_NEG_Y;
var filteredRotatedRight = Vec3.UNIT_NEG_Y;
// 1 when the right hand last controlled the reticle, 0 for the left;
// used to apply hysteresis when choosing the controlling hand.
var lastAlpha = 0;
// Per-frame handler: aim the reticle with whichever hand controller is
// rotating faster. Applies hysteresis so control doesn't flap between hands,
// velocity-filters the aim vector for easier fine targeting, and suppresses
// reticle motion entirely while both hands are nearly stationary.
Script.update.connect(function(deltaTime) {
    // avatar frame
    var poseRight = Controller.getPoseValue(Controller.Standard.RightHand);
    var poseLeft = Controller.getPoseValue(Controller.Standard.LeftHand);

    // NOTE: hack for now
    var screenSize = Reticle.maximumPosition;
    var screenSizeX = screenSize.x;
    var screenSizeY = screenSize.y;

    // transform hand facing vectors from avatar frame into sensor frame.
    var worldToSensorMatrix = Mat4.inverse(MyAvatar.sensorToWorldMatrix);
    var rotatedRight = Mat4.transformVector(worldToSensorMatrix,
        Vec3.multiplyQbyV(MyAvatar.orientation, Vec3.multiplyQbyV(poseRight.rotation, Vec3.UNIT_NEG_Y)));
    var rotatedLeft = Mat4.transformVector(worldToSensorMatrix,
        Vec3.multiplyQbyV(MyAvatar.orientation, Vec3.multiplyQbyV(poseLeft.rotation, Vec3.UNIT_NEG_Y)));
    // FIX: removed the dead `lastRotatedRight = rotatedRight;` line — it
    // assigned to an undeclared (implicit global) variable that nothing reads.

    // Decide which hand should be controlling the pointer
    // by comparing which one is moving more, and by
    // tending to stay with the one moving more.
    if (deltaTime > 0.001) {
        // leftRightBias is a running average of the difference in angular hand speed.
        // a positive leftRightBias indicates the right hand is spinning faster than the left hand.
        // a negative leftRightBias indicates the left hand is spinning faster.
        var BIAS_ADJUST_PERIOD = 1.0;
        var tau = Math.clamp(deltaTime / BIAS_ADJUST_PERIOD, 0, 1);
        // FIX: declared with `var` — this was an accidental implicit global.
        var newLeftRightBias = Vec3.length(poseRight.angularVelocity) - Vec3.length(poseLeft.angularVelocity);
        leftRightBias = (1 - tau) * leftRightBias + tau * newLeftRightBias;
    }

    // add a bit of hysteresis to prevent control flopping back and forth
    // between hands when they are both mostly stationary.
    var alpha;
    var HYSTERESIS_OFFSET = 0.25;
    if (lastAlpha > 0.5) {
        // prefer right hand over left
        alpha = leftRightBias > -HYSTERESIS_OFFSET ? 1 : 0;
    } else {
        alpha = leftRightBias > HYSTERESIS_OFFSET ? 1 : 0;
    }
    lastAlpha = alpha;

    // Velocity filter the hand rotation used to position reticle so that it is
    // easier to target small things with the hand controllers: the faster the
    // hand rotates, the more of the new sample we blend in.
    var VELOCITY_FILTER_GAIN = 0.5;
    filteredRotatedLeft = Vec3.mix(filteredRotatedLeft, rotatedLeft,
        Math.clamp(Vec3.length(poseLeft.angularVelocity) * VELOCITY_FILTER_GAIN, 0.0, 1.0));
    filteredRotatedRight = Vec3.mix(filteredRotatedRight, rotatedRight,
        Math.clamp(Vec3.length(poseRight.angularVelocity) * VELOCITY_FILTER_GAIN, 0.0, 1.0));
    var rotated = Vec3.mix(filteredRotatedLeft, filteredRotatedRight, alpha);

    var absolutePitch = rotated.y; // from 1 down to -1 up ... but note: if you rotate down "too far" it starts to go up again...
    var absoluteYaw = -rotated.x; // from -1 left to 1 right

    var x = Math.clamp(screenSizeX * (absoluteYaw + 0.5), 0, screenSizeX);
    // NOTE(review): y is scaled by screenSizeX, not screenSizeY. That keeps
    // vertical sensitivity equal to horizontal, but it reads like a typo —
    // confirm intent before changing; behavior preserved here.
    var y = Math.clamp(screenSizeX * absolutePitch, 0, screenSizeY);

    // don't move the reticle with the hand controllers unless the controllers are actually being moved
    // take a time average of angular velocity, and don't move mouse at all if it's below threshold
    var AVERAGING_INTERVAL = 0.95;
    var MINIMUM_CONTROLLER_ANGULAR_VELOCITY = 0.03;
    var angularVelocityMagnitude = Vec3.length(poseLeft.angularVelocity) * (1.0 - alpha) +
        Vec3.length(poseRight.angularVelocity) * alpha;
    angularVelocityTrailingAverage = angularVelocityTrailingAverage * AVERAGING_INTERVAL +
        angularVelocityMagnitude * (1.0 - AVERAGING_INTERVAL);

    if ((angularVelocityTrailingAverage > MINIMUM_CONTROLLER_ANGULAR_VELOCITY) && ((x != lastX) || (y != lastY))) {
        moveReticleAbsolute(x, y);
        lastX = x;
        lastY = y;
    }
});
// Tear down the ReticleClick mapping when the script is stopped or reloaded,
// so the buttons stop acting as mouse clicks.
Script.scriptEnding.connect(function(){
    mapping.disable();
});