Added lookAt rendering bubble

Philip Rosedale 2014-05-05 16:19:09 -07:00
parent ab0e854336
commit 691b1d6069
9 changed files with 58 additions and 36 deletions

View file

@@ -37,6 +37,7 @@ var radiusMinimum = 0.05;
var radiusMaximum = 0.5;
var modelURLs = [
"http://s3.amazonaws.com/converter.tipodean.com/hifi/gun/Raygun2.fbx",
"http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/Feisar_Ship.FBX",
"http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/birarda/birarda_head.fbx",
"http://highfidelity-public.s3-us-west-1.amazonaws.com/meshes/pug.fbx",
@@ -51,6 +52,12 @@ var currentModelURL = 1;
var numModels = modelURLs.length;
function getNewVoxelPosition() {
var camera = Camera.getPosition();
var forwardVector = Quat.getFront(MyAvatar.orientation);
var newPosition = Vec3.sum(camera, Vec3.multiply(forwardVector, 2.0));
return newPosition;
}
function keyPressEvent(event) {
//print("event.text=" + event.text);
@@ -63,6 +70,18 @@ function keyPressEvent(event) {
rightRecentlyDeleted = false;
rightModelAlreadyInHand = false;
}
} else if (event.text == "m") {
var URL = Window.prompt("Model URL", "Enter URL, e.g. http://foo.com/model.fbx");
Window.alert("Your response was: " + prompt);
var modelPosition = getNewVoxelPosition();
var properties = { position: { x: modelPosition.x,
y: modelPosition.y,
z: modelPosition.z },
radius: modelRadius,
modelURL: URL
};
newModel = Models.addModel(properties);
} else if (event.text == "DELETE") {
if (leftModelAlreadyInHand) {
print("want to delete leftHandModel=" + leftHandModel);

View file

@@ -479,6 +479,7 @@ void Menu::loadSettings(QSettings* settings) {
_audioJitterBufferSamples = loadSetting(settings, "audioJitterBufferSamples", 0);
_fieldOfView = loadSetting(settings, "fieldOfView", DEFAULT_FIELD_OF_VIEW_DEGREES);
_realWorldFieldOfView = loadSetting(settings, "realWorldFieldOfView", DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES);
_faceshiftEyeDeflection = loadSetting(settings, "faceshiftEyeDeflection", DEFAULT_FACESHIFT_EYE_DEFLECTION);
_maxVoxels = loadSetting(settings, "maxVoxels", DEFAULT_MAX_VOXELS_PER_SYSTEM);
_maxVoxelPacketsPerSecond = loadSetting(settings, "maxVoxelsPPS", DEFAULT_MAX_VOXEL_PPS);

View file

@@ -84,6 +84,9 @@ public:
void setAudioJitterBufferSamples(float audioJitterBufferSamples) { _audioJitterBufferSamples = audioJitterBufferSamples; }
float getFieldOfView() const { return _fieldOfView; }
void setFieldOfView(float fieldOfView) { _fieldOfView = fieldOfView; }
float getRealWorldFieldOfView() const { return _realWorldFieldOfView; }
void setRealWorldFieldOfView(float realWorldFieldOfView) { _realWorldFieldOfView = realWorldFieldOfView; }
float getFaceshiftEyeDeflection() const { return _faceshiftEyeDeflection; }
void setFaceshiftEyeDeflection(float faceshiftEyeDeflection) { _faceshiftEyeDeflection = faceshiftEyeDeflection; }
QString getSnapshotsLocation() const;
@@ -228,6 +231,7 @@ private:
int _audioJitterBufferSamples; /// number of extra samples to wait before starting audio playback
BandwidthDialog* _bandwidthDialog;
float _fieldOfView; /// in Degrees, doesn't apply to HMD like Oculus
float _realWorldFieldOfView; // The actual FOV set by the user's monitor size and view distance
float _faceshiftEyeDeflection;
FrustumDrawMode _frustumDrawMode;
ViewFrustumOffset _viewFrustumOffset;

View file

@@ -232,6 +232,16 @@ void Avatar::render(const glm::vec3& cameraPosition, RenderMode renderMode) {
_skeletonModel.renderBoundingCollisionShapes(0.7f);
}
}
// If this is the avatar being looked at, render a little ball above their head
if (_isLookAtTarget) {
const float LOOK_AT_INDICATOR_RADIUS = 0.25f;
const float LOOK_AT_INDICATOR_HEIGHT = 0.65f;
glPushMatrix();
glColor4f(0.0f, 1.0f, 1.0f, 0.5f);
glTranslatef(_position.x, _position.y + (getSkeletonHeight() * LOOK_AT_INDICATOR_HEIGHT), _position.z);
glutSolidSphere(LOOK_AT_INDICATOR_RADIUS, 15, 15);
glPopMatrix();
}
// quick check before falling into the code below:
// (a 10 degree breadth of an almost 2 meter avatar kicks in at about 12m)

View file

@@ -79,7 +79,7 @@ public:
//setters
void setDisplayingLookatVectors(bool displayingLookatVectors) { getHead()->setRenderLookatVectors(displayingLookatVectors); }
void setMouseRay(const glm::vec3 &origin, const glm::vec3 &direction);
void setIsLookAtTarget(const bool isLookAtTarget) { _isLookAtTarget = isLookAtTarget; }
//getters
bool isInitialized() const { return _initialized; }
SkeletonModel& getSkeletonModel() { return _skeletonModel; }
@@ -195,6 +195,7 @@ private:
bool _initialized;
QScopedPointer<Texture> _billboardTexture;
bool _shouldRenderBillboard;
bool _isLookAtTarget;
void renderBillboard();

View file

@@ -317,13 +317,16 @@ void MyAvatar::updateFromGyros(float deltaTime) {
}
// Set the rotation of the avatar's head (as seen by others, not affecting view frustum)
// to be scaled. Pitch is greater to emphasize nodding behavior / synchrony.
const float AVATAR_HEAD_PITCH_MAGNIFY = 1.0f;
const float AVATAR_HEAD_YAW_MAGNIFY = 1.0f;
const float AVATAR_HEAD_ROLL_MAGNIFY = 1.0f;
// to be scaled such that when the user's physical head is pointing at the edge of the screen, the
// avatar head is at the edge of the in-world view frustum. So while a real person may move
// their head only 30 degrees or so, this may correspond to a 90 degree field of view.
// Note that roll is magnified by a constant because it is not related to field of view.
const float AVATAR_HEAD_ROLL_MAGNIFY = 2.0f;
float magnifyFieldOfView = Menu::getInstance()->getFieldOfView() / Menu::getInstance()->getRealWorldFieldOfView();
Head* head = getHead();
head->setDeltaPitch(estimatedRotation.x * AVATAR_HEAD_PITCH_MAGNIFY);
head->setDeltaYaw(estimatedRotation.y * AVATAR_HEAD_YAW_MAGNIFY);
head->setDeltaPitch(estimatedRotation.x * magnifyFieldOfView);
head->setDeltaYaw(estimatedRotation.y * magnifyFieldOfView);
head->setDeltaRoll(estimatedRotation.z * AVATAR_HEAD_ROLL_MAGNIFY);
// Update torso lean distance based on accelerometer data
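For intuition about the new scaling (an illustrative sketch, not part of the commit): with the defaults introduced here, magnifyFieldOfView = 90 / 30 = 3, so a 10 degree physical head yaw drives a 30 degree avatar head yaw, while roll keeps the fixed 2x constant. A standalone C++ sketch of the arithmetic, using made-up input angles:

// Sketch of the FOV-based head magnification added in this commit.
// Constants mirror DEFAULT_FIELD_OF_VIEW_DEGREES, DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES
// and AVATAR_HEAD_ROLL_MAGNIFY; the estimated angles are illustrative, not real sensor data.
#include <cstdio>

int main() {
    const float fieldOfView = 90.0f;          // in-world FOV
    const float realWorldFieldOfView = 30.0f; // physical FOV implied by monitor size and view distance
    const float AVATAR_HEAD_ROLL_MAGNIFY = 2.0f;

    float magnifyFieldOfView = fieldOfView / realWorldFieldOfView; // 3.0

    float estimatedPitch = 10.0f; // degrees from Faceshift / gyros (illustrative)
    float estimatedYaw   = 10.0f;
    float estimatedRoll  = 5.0f;

    printf("deltaPitch = %.1f\n", estimatedPitch * magnifyFieldOfView);      // 30.0
    printf("deltaYaw   = %.1f\n", estimatedYaw * magnifyFieldOfView);        // 30.0
    printf("deltaRoll  = %.1f\n", estimatedRoll * AVATAR_HEAD_ROLL_MAGNIFY); // 10.0
    return 0;
}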
@@ -407,18 +410,15 @@ void MyAvatar::renderHeadMouse(int screenWidth, int screenHeight) const {
Faceshift* faceshift = Application::getInstance()->getFaceshift();
float pixelsPerDegree = screenHeight / Menu::getInstance()->getFieldOfView();
// Display small target box at center of head mouse target that can also be used to measure LOD
float headPitch = getHead()->getFinalPitch();
float headYaw = getHead()->getFinalYaw();
//
// It should be noted that the following constant is a function of both
// how far the viewer's head is from the screen and the size of the screen,
// which are both things we cannot know without adding a calibration phase.
//
const float PIXELS_PER_VERTICAL_DEGREE = 20.0f;
float aspectRatio = (float) screenWidth / (float) screenHeight;
int headMouseX = screenWidth / 2.f - headYaw * aspectRatio * PIXELS_PER_VERTICAL_DEGREE;
int headMouseY = screenHeight / 2.f - headPitch * PIXELS_PER_VERTICAL_DEGREE;
int headMouseX = screenWidth / 2.f - headYaw * aspectRatio * pixelsPerDegree;
int headMouseY = screenHeight / 2.f - headPitch * pixelsPerDegree;
glColor3f(1.0f, 1.0f, 1.0f);
glDisable(GL_LINE_SMOOTH);
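A quick sanity check on this replacement (illustrative numbers, not from the commit): the hard-coded PIXELS_PER_VERTICAL_DEGREE of 20 becomes pixelsPerDegree = screenHeight / fieldOfView, so on a 1080-pixel-tall window with the default 90 degree field of view the head mouse moves 1080 / 90 = 12 pixels per degree of yaw or pitch, and the value now tracks both the window size and the user's FOV setting instead of assuming a fixed screen geometry.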
@@ -435,8 +435,8 @@ void MyAvatar::renderHeadMouse(int screenWidth, int screenHeight) const {
float avgEyePitch = faceshift->getEstimatedEyePitch();
float avgEyeYaw = faceshift->getEstimatedEyeYaw();
int eyeTargetX = (screenWidth / 2) - avgEyeYaw * aspectRatio * PIXELS_PER_VERTICAL_DEGREE;
int eyeTargetY = (screenHeight / 2) - avgEyePitch * PIXELS_PER_VERTICAL_DEGREE;
int eyeTargetX = (screenWidth / 2) - avgEyeYaw * aspectRatio * pixelsPerDegree;
int eyeTargetY = (screenHeight / 2) - avgEyePitch * pixelsPerDegree;
glColor3f(0.0f, 1.0f, 1.0f);
glDisable(GL_LINE_SMOOTH);
@@ -537,23 +537,6 @@ void MyAvatar::sendKillAvatar() {
NodeList::getInstance()->broadcastToNodes(killPacket, NodeSet() << NodeType::AvatarMixer);
}
void MyAvatar::orbit(const glm::vec3& position, int deltaX, int deltaY) {
// first orbit horizontally
glm::quat orientation = getOrientation();
const float ANGULAR_SCALE = 0.5f;
glm::quat rotation = glm::angleAxis(glm::radians(- deltaX * ANGULAR_SCALE), orientation * IDENTITY_UP);
setPosition(position + rotation * (getPosition() - position));
orientation = rotation * orientation;
setOrientation(orientation);
// then vertically
float oldPitch = getHead()->getBasePitch();
getHead()->setBasePitch(oldPitch - deltaY * ANGULAR_SCALE);
rotation = glm::angleAxis(glm::radians((getHead()->getBasePitch() - oldPitch)), orientation * IDENTITY_RIGHT);
setPosition(position + rotation * (getPosition() - position));
}
void MyAvatar::updateLookAtTargetAvatar() {
//
// Look at the avatar whose eyes are closest to the ray in direction of my avatar's head
@@ -564,6 +547,7 @@ void MyAvatar::updateLookAtTargetAvatar() {
float smallestAngleTo = MIN_LOOKAT_ANGLE;
foreach (const AvatarSharedPointer& avatarPointer, Application::getInstance()->getAvatarManager().getAvatarHash()) {
Avatar* avatar = static_cast<Avatar*>(avatarPointer.data());
avatar->setIsLookAtTarget(false);
if (!avatar->isMyAvatar()) {
float angleTo = glm::angle(getHead()->getFinalOrientation() * glm::vec3(0.0f, 0.0f, -1.0f),
glm::normalize(avatar->getHead()->getEyePosition() - getHead()->getEyePosition()));
@@ -574,6 +558,9 @@
}
}
}
if (_lookAtTargetAvatar) {
static_cast<Avatar*>(_lookAtTargetAvatar.data())->setIsLookAtTarget(true);
}
}
void MyAvatar::clearLookAtTargetAvatar() {

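Putting the pieces together (an editor's sketch, not code from the commit): every frame, MyAvatar::updateLookAtTargetAvatar() clears _isLookAtTarget on every avatar, finds the avatar whose eyes are closest in angle to the head's look direction (within MIN_LOOKAT_ANGLE), stores it as _lookAtTargetAvatar, and re-sets the flag on that single avatar; Avatar::render() then draws the cyan bubble above whichever avatar has the flag. A simplified, self-contained version of that selection step might look like this (the Avatar struct is a stand-in and the threshold is taken in radians for simplicity; the engine's actual types and units may differ):

#include <cmath>
#include <vector>
#include <glm/glm.hpp>

// Minimal stand-in for the engine's Avatar; only what this sketch needs.
struct Avatar {
    glm::vec3 eyePosition;
    bool isLookAtTarget = false;
    bool isMyAvatar = false;
};

// Pick the avatar closest in angle to lookDirection, as seen from myEyePosition,
// and flag it so the renderer can draw the look-at bubble above its head.
Avatar* pickLookAtTarget(std::vector<Avatar>& avatars,
                         const glm::vec3& myEyePosition,
                         const glm::vec3& lookDirection,
                         float minLookAtAngleRadians) {
    Avatar* target = nullptr;
    float smallestAngle = minLookAtAngleRadians;
    for (Avatar& avatar : avatars) {
        avatar.isLookAtTarget = false;                 // clear last frame's flag
        if (avatar.isMyAvatar) {
            continue;
        }
        glm::vec3 toAvatar = glm::normalize(avatar.eyePosition - myEyePosition);
        float cosAngle = glm::clamp(glm::dot(glm::normalize(lookDirection), toAvatar), -1.0f, 1.0f);
        float angle = std::acos(cosAngle);
        if (angle < smallestAngle) {
            smallestAngle = angle;
            target = &avatar;
        }
    }
    if (target) {
        target->isLookAtTarget = true;                 // render() draws the bubble for this one
    }
    return target;
}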
View file

@@ -77,8 +77,6 @@ public:
static void sendKillAvatar();
void orbit(const glm::vec3& position, int deltaX, int deltaY);
Q_INVOKABLE glm::vec3 getTargetAvatarPosition() const { return _targetAvatarPosition; }
AvatarData* getLookAtTargetAvatar() const { return _lookAtTargetAvatar.data(); }
void updateLookAtTargetAvatar();

View file

@@ -486,6 +486,7 @@ void GeometryReader::run() {
return;
}
try {
qDebug() << "Reading " << _url;
QMetaObject::invokeMethod(geometry.data(), "setGeometry", Q_ARG(const FBXGeometry&,
_url.path().toLower().endsWith(".svo") ? readSVO(_reply->readAll()) : readFBX(_reply->readAll(), _mapping)));

View file

@@ -24,6 +24,7 @@
const float DEFAULT_KEYHOLE_RADIUS = 3.0f;
const float DEFAULT_FIELD_OF_VIEW_DEGREES = 90.0f;
const float DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES = 30.f;
const float DEFAULT_ASPECT_RATIO = 16.f/9.f;
const float DEFAULT_NEAR_CLIP = 0.08f;
const float DEFAULT_FAR_CLIP = 50.0f * TREE_SCALE;