Remove unnecessary code

vladest 2017-09-12 19:00:00 +02:00
parent 00038664a6
commit 58c0141f27
5 changed files with 26 additions and 294 deletions

View file

@@ -17,9 +17,6 @@ endforeach()
find_package(Qt5LinguistTools REQUIRED)
find_package(Qt5LinguistToolsMacros)
find_package(Qt5Gui)
include_directories(${Qt5Gui_PRIVATE_INCLUDE_DIRS})
if (WIN32)
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} -bigobj")
add_definitions(-D_USE_MATH_DEFINES) # apparently needed to get M_PI and other defines from cmath/math.h

View file

@@ -6,7 +6,7 @@ import QtQuick.Controls 2.2
import "../styles-uit" as StylesUIt
Flickable {
Item {
id: flick
property alias url: _webview.url
@@ -14,17 +14,17 @@ Flickable {
property alias webViewCore: _webview
property alias webViewCoreProfile: _webview.profile
interactive: true
property bool interactive: false
property string userScriptUrl: ""
property string urlTag: "noDownload=false";
signal newViewRequestedCallback(var request)
signal loadingChangedCallback(var loadRequest)
pressDelay: 300
//pressDelay: 300
property real previousHeight: height
boundsBehavior: Flickable.StopAtBounds
//boundsBehavior: Flickable.StopAtBounds
StylesUIt.HifiConstants {
id: hifi
@@ -38,17 +38,17 @@ Flickable {
// }
// }
ScrollBar.vertical: ScrollBar {
id: scrollBar
visible: flick.contentHeight > flick.height
// ScrollBar.vertical: ScrollBar {
// id: scrollBar
// visible: flick.contentHeight > flick.height
contentItem: Rectangle {
opacity: 0.75
implicitWidth: hifi.dimensions.scrollbarHandleWidth
radius: height / 2
color: hifi.colors.tableScrollHandleDark
}
}
// contentItem: Rectangle {
// opacity: 0.75
// implicitWidth: hifi.dimensions.scrollbarHandleWidth
// radius: height / 2
// color: hifi.colors.tableScrollHandleDark
// }
// }
function onLoadingChanged(loadRequest) {
if (WebEngineView.LoadStartedStatus === loadRequest.status) {
@@ -152,7 +152,7 @@ Flickable {
property string newUrl: ""
Component.onCompleted: {
width = Qt.binding(function() { return flick.width; });
//width = Qt.binding(function() { return flick.width; });
webChannel.registerObject("eventBridge", eventBridge);
webChannel.registerObject("eventBridgeWrapper", eventBridgeWrapper);
// Ensure the JS from the web-engine makes it to our logging
@@ -196,10 +196,10 @@ Flickable {
z: 10000
}
MouseArea {
anchors.fill: parent
onWheel: {
flick.flick(0, wheel.angleDelta.y*10)
}
}
// MouseArea {
// anchors.fill: parent
// onWheel: {
// flick.flick(0, wheel.angleDelta.y*10)
// }
// }
}

View file

@@ -281,34 +281,6 @@ private:
Q_LOGGING_CATEGORY(trace_app_input_mouse, "trace.app.input.mouse")
namespace utils {
inline bool isTouchEvent(const QEvent* event)
{
switch (event->type()) {
case QEvent::TouchBegin:
case QEvent::TouchUpdate:
case QEvent::TouchEnd:
return true;
default:
return false;
}
}
inline bool isMouseEvent(const QEvent* event)
{
switch (event->type()) {
case QEvent::MouseButtonPress:
case QEvent::MouseMove:
case QEvent::MouseButtonRelease:
case QEvent::MouseButtonDblClick:
return true;
default:
return false;
}
}
}
using namespace std;
static QTimer locationUpdateTimer;
@@ -3091,228 +3063,9 @@ bool Application::eventFilter(QObject* object, QEvent* event) {
}
}
// We try to be smart: if we received a real touch event, we are probably on a device
// with a touch screen, and we should not have touch mocking.
if (!event->spontaneous() || m_realTouchEventReceived)
return false;
if (utils::isTouchEvent(event)) {
if (m_pendingFakeTouchEventCount)
--m_pendingFakeTouchEventCount;
else
m_realTouchEventReceived = true;
return false;
}
QQuickWindow* window = qobject_cast<QQuickWindow*>(object);
if (!window) {
return false;
}
m_holdingControl = QGuiApplication::keyboardModifiers().testFlag(Qt::ControlModifier);
if (event->type() == QEvent::KeyRelease && static_cast<QKeyEvent*>(event)->key() == Qt::Key_Control) {
foreach (int id, m_heldTouchPoints)
if (m_touchPoints.contains(id) && !QGuiApplication::mouseButtons().testFlag(Qt::MouseButton(id))) {
m_touchPoints[id].setState(Qt::TouchPointReleased);
m_heldTouchPoints.remove(id);
} else
m_touchPoints[id].setState(Qt::TouchPointStationary);
sendTouchEvent(window, m_heldTouchPoints.isEmpty() ? QEvent::TouchEnd : QEvent::TouchUpdate, static_cast<QKeyEvent*>(event)->timestamp());
}
if (utils::isMouseEvent(event)) {
const QMouseEvent* const mouseEvent = static_cast<QMouseEvent*>(event);
QTouchEvent::TouchPoint touchPoint;
touchPoint.setPressure(1);
QEvent::Type touchType = QEvent::None;
switch (mouseEvent->type()) {
case QEvent::MouseButtonPress:
touchPoint.setId(mouseEvent->button());
if (m_touchPoints.contains(touchPoint.id())) {
touchPoint.setState(Qt::TouchPointMoved);
touchType = QEvent::TouchUpdate;
} else {
touchPoint.setState(Qt::TouchPointPressed);
// Check if more buttons are held down than just the event triggering one.
if (mouseEvent->buttons() > mouseEvent->button())
touchType = QEvent::TouchUpdate;
else
touchType = QEvent::TouchBegin;
}
break;
case QEvent::MouseMove:
if (!mouseEvent->buttons()) {
// We have to swallow the event instead of propagating it,
// since we avoid sending the mouse release events and if the
// Flickable is the mouse grabber it would receive the event
// and would move the content.
event->accept();
return true;
}
touchType = QEvent::TouchUpdate;
touchPoint.setId(mouseEvent->buttons());
touchPoint.setState(Qt::TouchPointMoved);
break;
case QEvent::MouseButtonRelease:
// Check if any buttons are still held down after this event.
if (mouseEvent->buttons())
touchType = QEvent::TouchUpdate;
else
touchType = QEvent::TouchEnd;
touchPoint.setId(mouseEvent->button());
touchPoint.setState(Qt::TouchPointReleased);
break;
case QEvent::MouseButtonDblClick:
// Eat double-clicks, their accompanying press event is all we need.
event->accept();
return true;
default:
Q_ASSERT_X(false, "multi-touch mocking", "unhandled event type");
}
// A move can have resulted in multiple buttons, so we need to check them individually.
if (touchPoint.id() & Qt::LeftButton)
updateTouchPoint(mouseEvent, touchPoint, Qt::LeftButton);
if (touchPoint.id() & Qt::MidButton)
updateTouchPoint(mouseEvent, touchPoint, Qt::MidButton);
if (touchPoint.id() & Qt::RightButton)
updateTouchPoint(mouseEvent, touchPoint, Qt::RightButton);
if (m_holdingControl && touchPoint.state() == Qt::TouchPointReleased) {
// We avoid sending the release event because the Flickable is
// listening to mouse events and would start a bounce-back
// animation if it received a mouse release.
event->accept();
return true;
}
// Update states for all other touch-points
for (QHash<int, QTouchEvent::TouchPoint>::iterator it = m_touchPoints.begin(), end = m_touchPoints.end(); it != end; ++it) {
if (!(it.value().id() & touchPoint.id()))
it.value().setState(Qt::TouchPointStationary);
}
Q_ASSERT(touchType != QEvent::None);
if (!sendTouchEvent(window, touchType, mouseEvent->timestamp()))
return false;
event->accept();
return true;
}
return false;
}
static inline QRectF touchRectForPosition(QPointF centerPoint) {
QRectF touchRect(0, 0, 40, 40);
touchRect.moveCenter(centerPoint);
return touchRect;
}
void Application::updateTouchPoint(const QMouseEvent* mouseEvent, QTouchEvent::TouchPoint touchPoint, Qt::MouseButton mouseButton)
{
// Ignore inserting additional touch points if Ctrl isn't held because it produces
// inconsistent touch events and results in asserts in the gesture recognizers.
if (!m_holdingControl && m_touchPoints.size() && !m_touchPoints.contains(mouseButton))
return;
if (m_holdingControl && touchPoint.state() == Qt::TouchPointReleased) {
m_heldTouchPoints.insert(mouseButton);
return;
}
// Gesture recognition uses the screen position for the initial threshold
// but since the canvas translates touch events we actually need to pass
// the screen position as the scene position to deliver the appropriate
// coordinates to the target.
touchPoint.setRect(touchRectForPosition(mouseEvent->localPos()));
touchPoint.setSceneRect(touchRectForPosition(mouseEvent->screenPos()));
if (touchPoint.state() == Qt::TouchPointPressed)
touchPoint.setStartScenePos(mouseEvent->screenPos());
else {
const QTouchEvent::TouchPoint& oldTouchPoint = m_touchPoints[mouseButton];
touchPoint.setStartScenePos(oldTouchPoint.startScenePos());
touchPoint.setLastPos(oldTouchPoint.pos());
touchPoint.setLastScenePos(oldTouchPoint.scenePos());
}
// Update current touch-point.
touchPoint.setId(mouseButton);
m_touchPoints.insert(mouseButton, touchPoint);
}
bool Application::sendTouchEvent(QQuickWindow* window, QEvent::Type type, ulong timestamp)
{
static QTouchDevice* device = 0;
if (!device) {
device = new QTouchDevice;
device->setType(QTouchDevice::TouchScreen);
QWindowSystemInterface::registerTouchDevice(device);
}
m_pendingFakeTouchEventCount++;
const QList<QTouchEvent::TouchPoint>& currentTouchPoints = m_touchPoints.values();
Qt::TouchPointStates touchPointStates = 0;
foreach (const QTouchEvent::TouchPoint& touchPoint, currentTouchPoints)
touchPointStates |= touchPoint.state();
QTouchEvent event(type, device, Qt::NoModifier, touchPointStates, currentTouchPoints);
event.setTimestamp(timestamp);
event.setAccepted(false);
QGuiApplication::notify(window, &event);
//updateVisualMockTouchPoints(window,m_holdingControl ? currentTouchPoints : QList<QTouchEvent::TouchPoint>());
// Get rid of touch-points that are no longer valid
foreach (const QTouchEvent::TouchPoint& touchPoint, currentTouchPoints) {
if (touchPoint.state() == Qt::TouchPointReleased)
m_touchPoints.remove(touchPoint.id());
}
return event.isAccepted();
}
//void Application::updateVisualMockTouchPoints(QQuickWindow* window, const QList<QTouchEvent::TouchPoint>& touchPoints)
//{
// if (touchPoints.isEmpty()) {
// // Hide all touch indicator items.
// foreach (QQuickItem* item, m_activeMockComponents.values())
// item->setProperty("pressed", false);
// return;
// }
// foreach (const QTouchEvent::TouchPoint& touchPoint, touchPoints) {
// QQuickItem* mockTouchPointItem = m_activeMockComponents.value(touchPoint.id());
// if (!mockTouchPointItem) {
// QQmlComponent touchMockPointComponent(window->engine(), QUrl("qrc:///qml/MockTouchPoint.qml"));
// mockTouchPointItem = qobject_cast<QQuickItem*>(touchMockPointComponent.create());
// Q_ASSERT(mockTouchPointItem);
// m_activeMockComponents.insert(touchPoint.id(), mockTouchPointItem);
// mockTouchPointItem->setProperty("pointId", QVariant(touchPoint.id()));
// mockTouchPointItem->setParent(window->rootObject());
// mockTouchPointItem->setParentItem(window->rootObject());
// }
// QRectF touchRect = touchPoint.rect();
// mockTouchPointItem->setX(touchRect.center().x());
// mockTouchPointItem->setY(touchRect.center().y());
// mockTouchPointItem->setWidth(touchRect.width());
// mockTouchPointItem->setHeight(touchRect.height());
// mockTouchPointItem->setProperty("pressed", QVariant(touchPoint.state() != Qt::TouchPointReleased));
// }
//}
static bool _altPressed{ false };
void Application::keyPressEvent(QKeyEvent* event) {

View file

@@ -25,8 +25,6 @@
#include <QtWidgets/QApplication>
#include <QtWidgets/QUndoStack>
#include <qpa/qwindowsysteminterface.h>
#include <ThreadHelpers.h>
#include <AbstractScriptingServicesInterface.h>
#include <AbstractViewStateInterface.h>
@@ -518,10 +516,6 @@ private:
void maybeToggleMenuVisible(QMouseEvent* event) const;
void toggleTabletUI(bool shouldOpen = false) const;
void updateTouchPoint(const QMouseEvent *event, QTouchEvent::TouchPoint, Qt::MouseButton);
bool sendTouchEvent(QQuickWindow* window, QEvent::Type, ulong timestamp);
//void updateVisualMockTouchPoints(QQuickWindow *window,const QList<QTouchEvent::TouchPoint> &touchPoints);
MainWindow* _window;
QElapsedTimer& _sessionRunTimer;
@@ -711,20 +705,6 @@ private:
RayPickManager _rayPickManager;
LaserPointerManager _laserPointerManager;
//touch mocking
bool m_realTouchEventReceived { false };
int m_pendingFakeTouchEventCount { 0 };
QPointF m_lastPos;
QPointF m_lastScreenPos;
QPointF m_startScreenPos;
QHash<int, QTouchEvent::TouchPoint> m_touchPoints;
QSet<int> m_heldTouchPoints;
QHash<int, QQuickItem*> m_activeMockComponents;
bool m_holdingControl { false };
friend class RenderEventHandler;
};
#endif // hifi_Application_h

View file

@@ -67,7 +67,6 @@ Web3DOverlay::Web3DOverlay() : _dpi(DPI) {
_touchDevice.setType(QTouchDevice::TouchScreen);
_touchDevice.setName("RenderableWebEntityItemTouchDevice");
_touchDevice.setMaximumTouchPoints(4);
QWindowSystemInterface::registerTouchDevice(&_touchDevice);
_geometryId = DependencyManager::get<GeometryCache>()->allocateID();
connect(this, &Web3DOverlay::requestWebSurface, this, &Web3DOverlay::buildWebSurface);
connect(this, &Web3DOverlay::releaseWebSurface, this, &Web3DOverlay::destroyWebSurface);
@@ -419,12 +418,14 @@ void Web3DOverlay::handlePointerEventAsTouch(const PointerEvent& event) {
}
static QTouchEvent::TouchPoint oldTouchPoint;
{
QTouchEvent::TouchPoint point;
point.setId(event.getID());
point.setState(state);
point.setPos(windowPoint);
point.setScreenPos(windowPoint);
point.setPressure(1);
// Gesture recognition uses the screen position for the initial threshold
@@ -446,8 +447,6 @@ void Web3DOverlay::handlePointerEventAsTouch(const PointerEvent& event) {
_activeTouchPoints[event.getID()] = point;
}
QTouchEvent touchEvent(touchType, &_touchDevice, event.getKeyboardModifiers());
{
QList<QTouchEvent::TouchPoint> touchPoints;
@@ -485,6 +484,9 @@ void Web3DOverlay::handlePointerEventAsTouch(const PointerEvent& event) {
}
#endif
touchEvent.setTimestamp((ulong)QDateTime::currentMSecsSinceEpoch());
touchEvent.setAccepted(false);
if (touchType == QEvent::TouchBegin) {
_touchBeginAccepted = QCoreApplication::sendEvent(_webSurface->getWindow(), &touchEvent);
} else if (_touchBeginAccepted) {