mirror of
https://github.com/overte-org/overte.git
synced 2025-04-21 09:24:00 +02:00
Removed some debugging prints and drawing
This commit is contained in:
parent
561b712b5b
commit
158c1c6aa8
4 changed files with 0 additions and 61 deletions
|
@@ -964,15 +964,6 @@ void Application::paintGL() {
|
|||
} else {
|
||||
_myCamera.setRotation(glm::quat_cast(_myAvatar->getSensorToWorldMatrix() * getHMDSensorPose()));
|
||||
}
|
||||
|
||||
/*
|
||||
qCDebug(interfaceapp, "paintGL");
|
||||
glm::vec3 cameraPos = _myCamera.getPosition();
|
||||
glm::quat cameraRot = _myCamera.getRotation();
|
||||
qCDebug(interfaceapp, "\tcamera pos = (%.5f, %.5f, %.5f)", cameraPos.x, cameraPos.y, cameraPos.z);
|
||||
qCDebug(interfaceapp, "\tcamera rot = (%.5f, %.5f, %.5f, %.5f)", cameraRot.x, cameraRot.y, cameraRot.z, cameraRot.w);
|
||||
*/
|
||||
|
||||
} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
|
||||
if (isHMDMode()) {
|
||||
_myCamera.setRotation(_myAvatar->getWorldAlignedOrientation());
|
||||
|
@@ -2783,11 +2774,6 @@ void Application::update(float deltaTime) {
|
|||
}
|
||||
}
|
||||
|
||||
// AJT: hack for debug drawing.
|
||||
extern const int NUM_MARKERS;
|
||||
extern glm::mat4 markerMats[];
|
||||
extern glm::vec4 markerColors[];
|
||||
|
||||
void Application::setPalmData(Hand* hand, UserInputMapper::PoseValue pose, int index) {
|
||||
PalmData* palm;
|
||||
bool foundHand = false;
|
||||
|
@@ -2818,9 +2804,6 @@ void Application::setPalmData(Hand* hand, UserInputMapper::PoseValue pose, int i
|
|||
|
||||
palm->setRawPosition(extractTranslation(objectPose));
|
||||
palm->setRawRotation(glm::quat_cast(objectPose));
|
||||
|
||||
// AJT: Hack for debug drawing.
|
||||
//markerMats[index] = sensorToWorldMat * poseMat;
|
||||
}
|
||||
|
||||
void Application::emulateMouse(Hand* hand, float click, float shift, int index) {
|
||||
|
|
|
@@ -33,13 +26,6 @@
|
|||
|
||||
using namespace std;
|
||||
|
||||
// AJT HACK, I'm using these markers for debugging.
|
||||
// extern them and set them in other cpp files and they will be rendered
|
||||
// with the world box.
|
||||
const int NUM_MARKERS = 4;
|
||||
glm::mat4 markerMats[NUM_MARKERS];
|
||||
glm::vec4 markerColors[NUM_MARKERS] = {{1, 0, 0, 1}, {0, 1, 0, 1}, {0, 0, 1, 1}, {0, 1, 1, 1}};
|
||||
|
||||
void renderWorldBox(gpu::Batch& batch) {
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
|
@@ -78,26 +71,6 @@ void renderWorldBox(gpu::Batch& batch) {
|
|||
transform.setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, MARKER_DISTANCE));
|
||||
batch.setModelTransform(transform);
|
||||
geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, grey);
|
||||
|
||||
// draw marker spheres
|
||||
for (int i = 0; i < NUM_MARKERS; i++) {
|
||||
transform.setTranslation(extractTranslation(markerMats[i]));
|
||||
batch.setModelTransform(transform);
|
||||
geometryCache->renderSphere(batch, 0.02f, 10, 10, markerColors[i]);
|
||||
}
|
||||
|
||||
// draw marker axes
|
||||
auto identity = Transform{};
|
||||
batch.setModelTransform(identity);
|
||||
for (int i = 0; i < NUM_MARKERS; i++) {
|
||||
glm::vec3 base = extractTranslation(markerMats[i]);
|
||||
glm::vec3 xAxis = transformPoint(markerMats[i], glm::vec3(1, 0, 0));
|
||||
glm::vec3 yAxis = transformPoint(markerMats[i], glm::vec3(0, 1, 0));
|
||||
glm::vec3 zAxis = transformPoint(markerMats[i], glm::vec3(0, 0, 1));
|
||||
geometryCache->renderLine(batch, base, xAxis, red);
|
||||
geometryCache->renderLine(batch, base, yAxis, green);
|
||||
geometryCache->renderLine(batch, base, zAxis, blue);
|
||||
}
|
||||
}
|
||||
|
||||
// Return a random vector of average length 1
|
||||
|
|
|
@@ -1699,7 +1699,6 @@ void MyAvatar::relayDriveKeysToCharacterController() {
|
|||
}
|
||||
}
|
||||
|
||||
// overridden, because they must move the sensor mat, so that the avatar will be at the given location.
|
||||
void MyAvatar::setPosition(const glm::vec3 position, bool overideReferential) {
|
||||
|
||||
// update the sensor mat so that the body position will end up in the desired
|
||||
|
@@ -1710,7 +1709,6 @@ void MyAvatar::setPosition(const glm::vec3 position, bool overideReferential) {
|
|||
Avatar::setPosition(position);
|
||||
}
|
||||
|
||||
// overridden, because they must move the sensor mat, so that the avatar will face the given orientation.
|
||||
void MyAvatar::setOrientation(const glm::quat& orientation, bool overideReferential) {
|
||||
|
||||
// update the sensor mat so that the body position will end up in the desired
|
||||
|
|
|
@@ -357,13 +357,6 @@ void DynamicCharacterController::preSimulation(btScalar timeStep) {
|
|||
glm::vec3 position = _avatarData->getPosition() + rotation * _shapeLocalOffset;
|
||||
_rigidBody->setWorldTransform(btTransform(glmToBullet(rotation), glmToBullet(position)));
|
||||
|
||||
/*
|
||||
qCDebug(physics, "preSimulation()");
|
||||
qCDebug(physics, "\trigidbody position = (%.5f, %.5f, %.5f)", position.x, position.y, position.z);
|
||||
glm::vec3 p = _avatarData->getPosition();
|
||||
qCDebug(physics, "\tavatar position = (%.5f, %.5f, %.5f)", p.x, p.y, p.z);
|
||||
*/
|
||||
|
||||
// the rotation is dictated by AvatarData
|
||||
btTransform xform = _rigidBody->getWorldTransform();
|
||||
xform.setRotation(glmToBullet(rotation));
|
||||
|
@@ -419,13 +412,5 @@ void DynamicCharacterController::postSimulation() {
|
|||
_avatarData->setOrientation(rotation);
|
||||
_avatarData->setPosition(position - rotation * _shapeLocalOffset);
|
||||
_avatarData->setVelocity(bulletToGLM(_rigidBody->getLinearVelocity()));
|
||||
|
||||
/*
|
||||
qCDebug(physics, "postSimulation()");
|
||||
qCDebug(physics, "\trigidbody position = (%.5f, %.5f, %.5f)", position.x, position.y, position.z);
|
||||
glm::vec3 p = position - rotation * _shapeLocalOffset;
|
||||
qCDebug(physics, "\tavatar position = (%.5f, %.5f, %.5f)", p.x, p.y, p.z);
|
||||
qCDebug(physics, "\t_shapeLocalOffset = (%.5f, %.5f, %.5f)", _shapeLocalOffset.x, _shapeLocalOffset.y, _shapeLocalOffset.z);
|
||||
*/
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in a new issue