Merge remote-tracking branch 'upstream/master' into feature/quest

Brad Davis 2019-01-30 10:19:46 -08:00
commit 10a7403d44
11 changed files with 318 additions and 47 deletions


@@ -1215,7 +1215,7 @@ Rectangle {
if (userIndex !== -1) {
['userName', 'admin', 'connection', 'profileUrl', 'placeName'].forEach(function (name) {
var value = message.params[name];
if (value === undefined) {
if (value === undefined || value == "") {
return;
}
nearbyUserModel.setProperty(userIndex, name, value);


@@ -21,11 +21,11 @@ import "../../../../controls" as HifiControls
import "../" as HifiCommerceCommon
import "qrc:////qml//hifi//models" as HifiModels // Absolute path so the same code works everywhere.
Item {
Rectangle {
HifiConstants { id: hifi; }
id: root;
color: hifi.colors.baseGray
property int parentAppTitleBarHeight;
property int parentAppNavBarHeight;
property string currentActiveView: "sendAssetHome";


@@ -53,7 +53,7 @@ bool ShapeEntityRenderer::needsRenderUpdate() const {
}
auto mat = _materials.find("0");
if (mat != _materials.end() && mat->second.needsUpdate()) {
if (mat != _materials.end() && (mat->second.needsUpdate() || mat->second.areTexturesLoading())) {
return true;
}
@@ -188,7 +188,7 @@ bool ShapeEntityRenderer::useMaterialPipeline(const graphics::MultiMaterial& mat
ShapeKey ShapeEntityRenderer::getShapeKey() {
auto mat = _materials.find("0");
if (mat != _materials.end() && mat->second.needsUpdate()) {
if (mat != _materials.end() && (mat->second.needsUpdate() || mat->second.areTexturesLoading())) {
RenderPipelines::updateMultiMaterial(mat->second);
}
@@ -256,7 +256,7 @@ void ShapeEntityRenderer::doRender(RenderArgs* args) {
batch.setModelTransform(_renderTransform); // use a transform with scale, rotation, registration point and translation
materials = _materials["0"];
auto& schema = materials.getSchemaBuffer().get<graphics::MultiMaterial::Schema>();
outColor = glm::vec4(schema._albedo, schema._opacity);
outColor = glm::vec4(ColorUtils::tosRGBVec3(schema._albedo), schema._opacity);
outColor = EntityRenderer::calculatePulseColor(outColor, _pulseProperties, _created);
if (_procedural.isReady()) {
outColor = _procedural.getColor(outColor);
@@ -309,7 +309,7 @@ scriptable::ScriptableModelBase ShapeEntityRenderer::getScriptableModel() {
result.appendMaterials(_materials);
auto materials = _materials.find("0");
if (materials != _materials.end()) {
vertexColor = materials->second.getSchemaBuffer().get<graphics::MultiMaterial::Schema>()._albedo;
vertexColor = ColorUtils::tosRGBVec3(materials->second.getSchemaBuffer().get<graphics::MultiMaterial::Schema>()._albedo);
}
}
if (auto mesh = geometryCache->meshFromShape(geometryShape, vertexColor)) {


@@ -409,11 +409,11 @@ void Geometry::setTextures(const QVariantMap& textureMap) {
material->setTextures(textureMap);
_areTexturesLoaded = false;
// If we only use cached textures, they should all be loaded
areTexturesLoaded();
}
}
// If we only use cached textures, they should all be loaded
areTexturesLoaded();
} else {
qCWarning(modelnetworking) << "Ignoring setTextures(); geometry not ready";
}
@@ -422,10 +422,7 @@ void Geometry::setTextures(const QVariantMap& textureMap) {
bool Geometry::areTexturesLoaded() const {
if (!_areTexturesLoaded) {
for (auto& material : _materials) {
// Check if material textures are loaded
bool materialMissingTexture = material->isMissingTexture();
if (materialMissingTexture) {
if (material->isMissingTexture()) {
return false;
}


@@ -83,7 +83,7 @@ void MeshPartPayload::updateKey(const render::ItemKey& key) {
ItemKey::Builder builder(key);
builder.withTypeShape();
if (_drawMaterials.needsUpdate()) {
if (_drawMaterials.needsUpdate() || _drawMaterials.areTexturesLoading()) {
RenderPipelines::updateMultiMaterial(_drawMaterials);
}
@@ -329,7 +329,7 @@ void ModelMeshPartPayload::updateKey(const render::ItemKey& key) {
builder.withDeformed();
}
if (_drawMaterials.needsUpdate()) {
if (_drawMaterials.needsUpdate() || _drawMaterials.areTexturesLoading()) {
RenderPipelines::updateMultiMaterial(_drawMaterials);
}
@@ -347,7 +347,7 @@ void ModelMeshPartPayload::setShapeKey(bool invalidateShapeKey, PrimitiveMode pr
return;
}
if (_drawMaterials.needsUpdate()) {
if (_drawMaterials.needsUpdate() || _drawMaterials.areTexturesLoading()) {
RenderPipelines::updateMultiMaterial(_drawMaterials);
}


@@ -319,7 +319,7 @@ void batchSetter(const ShapePipeline& pipeline, gpu::Batch& batch, RenderArgs* a
graphics::MultiMaterial::Schema schema;
graphics::MaterialKey schemaKey;
schema._albedo = vec3(1.0f);
schema._albedo = ColorUtils::sRGBToLinearVec3(vec3(1.0f));
schema._opacity = 1.0f;
schema._metallic = 0.1f;
schema._roughness = 0.9f;
@@ -482,6 +482,7 @@ void RenderPipelines::updateMultiMaterial(graphics::MultiMaterial& multiMaterial
auto itr = textureMaps.find(graphics::MaterialKey::ALBEDO_MAP);
if (itr != textureMaps.end()) {
if (itr->second->isDefined()) {
material->resetOpacityMap();
drawMaterialTextures->setTexture(gr::Texture::MaterialAlbedo, itr->second->getTextureView());
wasSet = true;
} else {
@@ -492,8 +493,8 @@ void RenderPipelines::updateMultiMaterial(graphics::MultiMaterial& multiMaterial
forceDefault = true;
}
schemaKey.setAlbedoMap(true);
schemaKey.setOpacityMaskMap(materialKey.isOpacityMaskMap());
schemaKey.setTranslucentMap(materialKey.isTranslucentMap());
schemaKey.setOpacityMaskMap(material->getKey().isOpacityMaskMap());
schemaKey.setTranslucentMap(material->getKey().isTranslucentMap());
}
break;
case graphics::MaterialKey::METALLIC_MAP_BIT:


@@ -1608,30 +1608,42 @@ function sortSelectedEntities(selected) {
return sortedEntities;
}
function recursiveDelete(entities, childrenList, deletedIDs) {
function recursiveDelete(entities, childrenList, deletedIDs, entityHostType) {
var wantDebug = false;
var entitiesLength = entities.length;
for (var i = 0; i < entitiesLength; i++) {
var initialPropertySets = Entities.getMultipleEntityProperties(entities);
var entityHostTypes = Entities.getMultipleEntityProperties(entities, 'entityHostType');
for (var i = 0; i < entitiesLength; ++i) {
var entityID = entities[i];
if (entityHostTypes[i].entityHostType !== entityHostType) {
if (wantDebug) {
console.log("Skipping deletion of entity " + entityID + " with conflicting entityHostType: " +
entityHostTypes[i].entityHostType);
}
continue;
}
var children = Entities.getChildrenIDs(entityID);
var grandchildrenList = [];
recursiveDelete(children, grandchildrenList, deletedIDs);
var initialProperties = Entities.getEntityProperties(entityID);
recursiveDelete(children, grandchildrenList, deletedIDs, entityHostType);
childrenList.push({
entityID: entityID,
properties: initialProperties,
properties: initialPropertySets[i],
children: grandchildrenList
});
deletedIDs.push(entityID);
Entities.deleteEntity(entityID);
}
}
function unparentSelectedEntities() {
if (SelectionManager.hasSelection()) {
var selectedEntities = selectionManager.selections;
var parentCheck = false;
if (selectedEntities.length < 1) {
Window.notifyEditError("You must have an entity selected inorder to unparent it.");
Window.notifyEditError("You must have an entity selected in order to unparent it.");
return;
}
selectedEntities.forEach(function (id, index) {
@@ -1694,21 +1706,24 @@ function deleteSelectedEntities() {
SelectionManager.saveProperties();
var savedProperties = [];
var newSortedSelection = sortSelectedEntities(selectionManager.selections);
for (var i = 0; i < newSortedSelection.length; i++) {
var entityHostTypes = Entities.getMultipleEntityProperties(newSortedSelection, 'entityHostType');
for (var i = 0; i < newSortedSelection.length; ++i) {
var entityID = newSortedSelection[i];
var initialProperties = SelectionManager.savedProperties[entityID];
if (!initialProperties.locked) {
var children = Entities.getChildrenIDs(entityID);
var childList = [];
recursiveDelete(children, childList, deletedIDs);
savedProperties.push({
entityID: entityID,
properties: initialProperties,
children: childList
});
deletedIDs.push(entityID);
Entities.deleteEntity(entityID);
if (initialProperties.locked ||
(initialProperties.avatarEntity && initialProperties.owningAvatarID !== MyAvatar.sessionUUID)) {
continue;
}
var children = Entities.getChildrenIDs(entityID);
var childList = [];
recursiveDelete(children, childList, deletedIDs, entityHostTypes[i].entityHostType);
savedProperties.push({
entityID: entityID,
properties: initialProperties,
children: childList
});
deletedIDs.push(entityID);
Entities.deleteEntity(entityID);
}
if (savedProperties.length > 0) {


@@ -193,14 +193,25 @@ SelectionManager = (function() {
that._update(true, caller);
};
that.addChildrenEntities = function(parentEntityID, entityList) {
that.addChildrenEntities = function(parentEntityID, entityList, entityHostType) {
var wantDebug = false;
var children = Entities.getChildrenIDs(parentEntityID);
var entityHostTypes = Entities.getMultipleEntityProperties(children, 'entityHostType');
for (var i = 0; i < children.length; i++) {
var childID = children[i];
if (entityHostTypes[i].entityHostType !== entityHostType) {
if (wantDebug) {
console.log("Skipping addition of entity " + childID + " with conflicting entityHostType: " +
entityHostTypes[i].entityHostType);
}
continue;
}
if (entityList.indexOf(childID) < 0) {
entityList.push(childID);
}
that.addChildrenEntities(childID, entityList);
that.addChildrenEntities(childID, entityList, entityHostType);
}
};
@@ -250,12 +261,15 @@ SelectionManager = (function() {
SelectionManager.saveProperties();
// build list of entities to duplicate by including any unselected children of selected parent entities
Object.keys(that.savedProperties).forEach(function(originalEntityID) {
if (entitiesToDuplicate.indexOf(originalEntityID) < 0) {
var originalEntityIDs = Object.keys(that.savedProperties);
var entityHostTypes = Entities.getMultipleEntityProperties(originalEntityIDs, 'entityHostType');
for (var i = 0; i < originalEntityIDs.length; i++) {
var originalEntityID = originalEntityIDs[i];
if (entitiesToDuplicate.indexOf(originalEntityID) === -1) {
entitiesToDuplicate.push(originalEntityID);
}
that.addChildrenEntities(originalEntityID, entitiesToDuplicate);
});
that.addChildrenEntities(originalEntityID, entitiesToDuplicate, entityHostTypes[i].entityHostType);
}
// duplicate entities from above and store their original to new entity mappings and children needing re-parenting
for (var i = 0; i < entitiesToDuplicate.length; i++) {
@@ -319,7 +333,7 @@ SelectionManager = (function() {
};
// Create the entities in entityProperties, maintaining parent-child relationships.
// @param entityPropertites {array} - Array of entity property objects
// @param entityProperties {array} - Array of entity property objects
that.createEntities = function(entityProperties) {
var entitiesToCreate = [];
var createdEntityIDs = [];
@@ -362,15 +376,27 @@ SelectionManager = (function() {
that.copySelectedEntities = function() {
var entityProperties = Entities.getMultipleEntityProperties(that.selections);
var entityHostTypes = Entities.getMultipleEntityProperties(that.selections, 'entityHostType');
var entities = {};
entityProperties.forEach(function(props) {
entities[props.id] = props;
});
function appendChildren(entityID, entities) {
function appendChildren(entityID, entities, entityHostType) {
var wantDebug = false;
var childrenIDs = Entities.getChildrenIDs(entityID);
var entityHostTypes = Entities.getMultipleEntityProperties(childrenIDs, 'entityHostType');
for (var i = 0; i < childrenIDs.length; ++i) {
var id = childrenIDs[i];
if (entityHostTypes[i].entityHostType !== entityHostType) {
if (wantDebug) {
console.warn("Skipping deletion of entity " + id + " with conflicting entityHostType: " +
entityHostTypes[i].entityHostType);
}
continue;
}
if (!(id in entities)) {
entities[id] = Entities.getEntityProperties(id);
appendChildren(id, entities);
@@ -380,7 +406,7 @@ SelectionManager = (function() {
var len = entityProperties.length;
for (var i = 0; i < len; ++i) {
appendChildren(entityProperties[i].id, entities);
appendChildren(entityProperties[i].id, entities, entityHostTypes[i].entityHostType);
}
for (var id in entities) {

tools/scripts/Readme.md (new file, 13 lines)

@@ -0,0 +1,13 @@
## Setup

Run the following command to install all the dependencies:

```
pip install -r requirements.txt
```

## Usage

```
./rc-branches.py check v0.76.1
./rc-branches.py create v0.77.0
```
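
For context, the version argument drives a fixed branch-naming scheme, implemented by `VersionParser` in `rc-branches.py` below. A minimal sketch of that mapping, assuming that scheme; the helper name `rc_branch_names` is illustrative and not part of the script:

```
# Sketch of the branch names rc-branches.py derives from a version string,
# assuming the scheme in its VersionParser; rc_branch_names is illustrative.
def rc_branch_names(version, remote="upstream"):
    major, minor, patch = (int(x) for x in version.lstrip("v").split("."))
    version = "v{}.{}.{}".format(major, minor, patch)
    if patch != 0:   # patch release: branch off the previous patch's RC
        previous = "v{}.{}.{}".format(major, minor, patch - 1)
    else:            # minor release: branch off the previous minor's RC
        previous = "v{}.{}.0".format(major, minor - 1)
    return {
        "previous_rc": "{}/{}-rc".format(remote, previous),
        "base_branch": "{}-rc-base".format(version),
        "rc_branch": "{}-rc".format(version),
    }

print(rc_branch_names("v0.76.1"))
# {'previous_rc': 'upstream/v0.76.0-rc', 'base_branch': 'v0.76.1-rc-base', 'rc_branch': 'v0.76.1-rc'}
```

The `check` and `create` commands then resolve these names against the remote given by `--remote` (default `upstream`).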

tools/scripts/rc-branches.py (new executable file, 218 lines)

@@ -0,0 +1,218 @@
#!/usr/bin/env python
import logging
import os
import re
import sys
import argparse
from git import Repo
FORMAT = '[%(levelname)s] %(message)s'
logging.basicConfig(format=FORMAT, level=logging.DEBUG)
remote_name = "upstream"
remote_master_branch = "{}/master".format(remote_name)
class VersionParser:
"""A parser for version numbers"""
def __init__(self, versionString):
# Validate that the user passed a valid version
VERSION_RE = re.compile(r"^v?(\d+)\.(\d+)\.(\d+)$")
match = VERSION_RE.match(versionString)
if not match:
raise ValueError("Invalid version (should be X.Y.Z)")
# Parse the version component and build proper version strings
self.major = int(match.group(1))
self.minor = int(match.group(2))
self.patch = int(match.group(3))
self.version = "v{}.{}.{}".format(self.major, self.minor, self.patch) # clean up version
self.is_major_release = False
self.is_minor_release = False
self.is_patch_release = False
if self.patch != 0:
self.is_patch_release = True
self.previous_version = "v{}.{}.{}".format(self.major, self.minor, self.patch - 1)
elif self.minor != 0:
self.is_minor_release = True
self.previous_version = "v{}.{}.{}".format(self.major, self.minor - 1, 0)
else:
self.is_major_release = True
self.previous_version = "v{}.{}.{}".format(self.major - 1, 0, 0)
raise ValueError("Major releases not yet supported")
# Build the branch names
self.previous_rc_branch = "{}-rc".format(self.previous_version)
self.base_branch = "{}-rc-base".format(self.version)
self.rc_branch = "{}-rc".format(self.version)
self.remote_previous_rc_branch = "{}/{}".format(remote_name, self.previous_rc_branch)
self.remote_base_branch = "{}/{}".format(remote_name, self.base_branch)
self.remote_rc_branch = "{}/{}".format(remote_name, self.rc_branch)
def checkVersionBranches(version):
"""Check that the branches for a given version were created properly."""
parser = VersionParser(version)
repo = Repo(os.getcwd(), search_parent_directories=True)
assert not repo.bare
# Verify the branches' existence
if parser.remote_previous_rc_branch not in repo.refs:
raise ValueError("Previous RC branch not found: {}".format(parser.remote_previous_rc_branch))
if parser.remote_base_branch not in repo.refs:
raise ValueError("Base branch not found: {}".format(parser.remote_base_branch))
if parser.remote_rc_branch not in repo.refs:
raise ValueError("RC branch not found: {}".format(parser.remote_rc_branch))
previous_rc = repo.refs[parser.remote_previous_rc_branch]
current_rc_base = repo.refs[parser.remote_base_branch]
current_rc = repo.refs[parser.remote_rc_branch]
master = repo.refs[remote_master_branch]
# Check the base branch is an ancestor of the rc branch
if not repo.is_ancestor(current_rc_base, current_rc):
raise ValueError("{} is not an ancesctor of {}".format(current_rc_base, current_rc))
# Check that the base branch is the merge base of the previous and current RCs
merge_base = repo.merge_base(previous_rc, current_rc)
if current_rc_base.commit not in merge_base:
raise ValueError("Base branch is not the merge base between {} and {}".format(previous_rc, current_rc))
# For patch releases, warn if the base commit is not the previous RC commit
if parser.is_patch_release:
if parser.previous_version not in repo.tags:
logging.warning("The tag {0} does not exist, which suggests {0} has not been released.".format(parser.previous_version))
if current_rc_base.commit != previous_rc.commit:
logging.warning("Previous version has commits not in this patch");
logging.warning("Type \"git diff {}..{}\" to see the commit list".format(current_rc_base, previous_rc));
# Check base branch is part of the previous RC
previous_rc_base_commit = repo.merge_base(previous_rc, master)
if repo.is_ancestor(current_rc_base, previous_rc_base_commit):
raise ValueError("{} is older than {}".format(current_rc_base, previous_rc))
print("[SUCCESS] Checked {}".format(parser.version))
def createVersionBranches(version):
"""Create the branches for a given version."""
parser = VersionParser(version)
repo = Repo(os.getcwd(), search_parent_directories=True)
assert not repo.bare
# Validate the user is on a local branch that has the right merge base
if repo.head.is_detached:
raise ValueError("You must not run this script in a detached HEAD state")
# Validate the user has no pending changes
if repo.is_dirty():
raise ValueError("Your working tree has pending changes. You must have a clean working tree before proceeding.")
# Make sure the remote is up to date
remote = repo.remotes[remote_name]
remote.fetch(prune=True)
# Verify the previous RC branch exists
if parser.remote_previous_rc_branch not in repo.refs:
raise ValueError("Previous RC branch not found: {}".format(parser.remote_previous_rc_branch))
# Verify the branches don't already exist
if parser.remote_base_branch in repo.refs:
raise ValueError("Base branch already exists: {}".format(parser.remote_base_branch))
if parser.remote_rc_branch in repo.refs:
raise ValueError("RC branch already exists: {}".format(parser.remote_rc_branch))
if parser.base_branch in repo.refs:
raise ValueError("Base branch already exists locally: {}".format(parser.base_branch))
if parser.rc_branch in repo.refs:
raise ValueError("RC branch already exists locally: {}".format(parser.rc_branch))
# Save current branch name
current_branch_name = repo.active_branch
# Create the RC branches
if parser.is_patch_release:
# Check the tag exists; if it doesn't, print a warning and ask for confirmation
if parser.previous_version not in repo.tags:
logging.warning("The tag {0} does not exist, which suggests {0} has not yet been released.".format(parser.previous_version))
logging.warning("Creating the branches now means that {0} will diverge from {1} if anything is merged into {1}.".format(parser.version, parser.previous_version))
logging.warning("This is not recommended unless necessary.")
validAnswer = False
askCount = 0
while not validAnswer and askCount < 3:
answer = input("Are you sure you want to do this? [y/n] ").strip().lower()
askCount += 1
validAnswer = answer == "y" or answer == "n"
if not validAnswer:
raise ValueError("Did not understand response")
if answer == "n":
print("Aborting")
return
else:
print("Creating branches")
previous_rc = repo.refs[parser.remote_previous_rc_branch]
repo.create_head(parser.base_branch, previous_rc)
remote.push("{0}:{0}".format(parser.base_branch))
repo.create_head(parser.rc_branch, previous_rc)
remote.push("{0}:{0}".format(parser.rc_branch))
else:
previous_rc = repo.refs[parser.remote_previous_rc_branch]
master = repo.refs[remote_master_branch]
merge_base = repo.merge_base(previous_rc, master)
repo.create_head(parser.base_branch, merge_base[0])
remote.push("{0}:{0}".format(parser.base_branch))
repo.create_head(parser.rc_branch, master)
remote.push("{0}:{0}".format(parser.rc_branch))
print("[SUCCESS] Created {} and {}".format(parser.base_branch, parser.rc_branch))
print("[SUCCESS] You can make the PR from the following webpage:")
print("[SUCCESS] https://github.com/highfidelity/hifi/compare/{}...{}".format(parser.base_branch, parser.rc_branch))
if parser.is_patch_release:
print("[SUCCESS] NOTE: You will have to wait for the first fix to be merged into the RC branch to be able to create the PR")
def main():
"""Execute Main entry point."""
global remote_name
parser = argparse.ArgumentParser(description='RC branches tool',
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog='''Example commands you can run:\n%(prog)s check 0.75.0\n%(prog)s create 0.75.1''')
parser.add_argument("command", help="command to execute", choices=["check", "create"])
parser.add_argument("version", help="version of the form \"X.Y.Z\"")
parser.add_argument("--remote", dest="remote_name", default=remote_name,
help="git remote to use as reference")
args = parser.parse_args()
remote_name = args.remote_name
try:
if args.command == "check":
checkVersionBranches(args.version)
elif args.command == "create":
createVersionBranches(args.version)
else:
parser.print_help()
except ValueError as ex:
logging.error(ex)
sys.exit(1)
if __name__ == "__main__":
main()
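
To summarize the intent of the `check` command: it validates, via the same GitPython calls used above, that the `-rc-base` branch is an ancestor of the RC branch and sits at the merge base of the previous and current RC branches. A minimal sketch with illustrative branch names, not a replacement for the script:

```
# Sketch: vX.Y.Z-rc-base must be an ancestor of vX.Y.Z-rc and must be the merge
# base of the previous RC and the current RC (branch names here are illustrative).
from git import Repo

repo = Repo(".", search_parent_directories=True)
base = repo.refs["upstream/v0.77.0-rc-base"]
rc = repo.refs["upstream/v0.77.0-rc"]
previous_rc = repo.refs["upstream/v0.76.0-rc"]

assert repo.is_ancestor(base, rc), "base branch is not an ancestor of the RC branch"
assert base.commit in repo.merge_base(previous_rc, rc), "base branch is not the merge base of the two RCs"
```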


@@ -0,0 +1 @@
GitPython