Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
quiview.mm
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3// Qt-Security score:significant reason:default
4
5#include "quiview.h"
6
7#include "qiosglobal.h"
11#include "qiosscreen.h"
12#include "qioswindow.h"
14#include "quiwindow.h"
15#ifndef Q_OS_TVOS
16#include "qiosmenu.h"
17#endif
18
19#include <QtCore/qmath.h>
20#include <QtGui/qpointingdevice.h>
21#include <QtGui/private/qguiapplication_p.h>
22#include <QtGui/private/qwindow_p.h>
23#include <QtGui/private/qapplekeymapper_p.h>
24#include <QtGui/private/qpointingdevice_p.h>
25#include <qpa/qwindowsysteminterface_p.h>
26
27Q_LOGGING_CATEGORY(lcQpaTablet, "qt.qpa.input.tablet")
28Q_LOGGING_CATEGORY(lcQpaInputEvents, "qt.qpa.input.events")
29
namespace {
// Converts a UIEvent's timestamp (seconds since boot) into the millisecond
// timestamp format expected by the QWindowSystemInterface event handlers.
inline ulong getTimeStamp(UIEvent *event)
{
#if TARGET_OS_SIMULATOR == 1
    // We currently build Qt for simulator using X86_64, even on ARM based macs.
    // This results in the simulator running on ARM, while the app is running
    // inside it using Rosetta. And with this combination, the event.timestamp, which is
    // documented to be in seconds, looks to be something else, and is not progressing
    // in sync with a normal clock.
    // Sending out mouse events with a timestamp that doesn't follow normal clock time
    // will cause problems for mouse-, and pointer handlers that uses them to e.g calculate
    // the time between a press and release, and to decide if the user is performing a tap
    // or a drag.
    // For that reason, we choose to ignore UIEvent.timestamp under the mentioned condition, and
    // instead rely on NSProcessInfo. Note that if we force the whole simulator to use Rosetta
    // (and not only the Qt app), the timestamps will progress normally.
#if defined(Q_PROCESSOR_ARM)
    #warning The timestamp work-around for x86_64 can (probably) be removed when building for ARM
#endif
    return ulong(NSProcessInfo.processInfo.systemUptime * 1000);
#endif

    return ulong(event.timestamp * 1000);
}
}
55
@implementation QUIView {
    // Active (non-pencil) touch points, keyed by the UITouch object's hash
    QHash<NSUInteger, QWindowSystemInterface::TouchPoint> m_activeTouches;
    // The Apple Pencil touch currently in progress, if any (at most one)
    UITouch *m_activePencilTouch;
    // Accessibility elements exposed to UIKit (see quiview_accessibility.mm)
    NSMutableArray<UIAccessibilityElement *> *m_accessibleElements;
    // Recognizes scroll gestures (trackpad/mouse) and maps them to wheel events
    UIPanGestureRecognizer *m_scrollGestureRecognizer;
    // Cursor position captured when the current scroll gesture began
    CGPoint m_lastScrollCursorPos;
    // Accumulated pan translation seen at the previous scroll callback
    CGPoint m_lastScrollDelta;
#if QT_CONFIG(tabletevent)
    // Recognizes Apple Pencil hover and maps it to tablet events
    UIHoverGestureRecognizer *m_hoverGestureRecognizer;
#endif
}
67
// Backs the view with an OpenGL-capable layer when Qt is built with OpenGL
// support; otherwise uses the superclass default layer.
+ (Class)layerClass
{
#if QT_CONFIG(opengl)
    return [CAEAGLLayer class];
#endif
    return [super layerClass];
}
75
// Creates the UIView backing the given QIOSWindow: positions the view at the
// window's geometry, installs the scroll (and, when tablet events are enabled,
// pencil-hover) gesture recognizers, and configures the backing layer.
- (instancetype)initWithQIOSWindow:(QT_PREPEND_NAMESPACE(QIOSWindow) *)window
{
    if (self = [self initWithFrame:window->geometry().toCGRect()]) {
        self.platformWindow = window;

        // Start out hidden; NOTE(review): presumably visibility is then driven
        // by the platform window via QWindow::setVisible() — confirm with QIOSWindow.
        if (isQtApplication())
            self.hidden = YES;

        m_accessibleElements = [[NSMutableArray<UIAccessibilityElement *> alloc] init];

#ifndef Q_OS_TVOS
        self.multipleTouchEnabled = YES;
#endif

        m_scrollGestureRecognizer = [[UIPanGestureRecognizer alloc]
                                    initWithTarget:self
                                    action:@selector(handleScroll:)];
        // The gesture recognizer should only care about scroll gestures (for now)
        // Set allowedTouchTypes to empty array here to not interfere with touch events
        // handled by the UIView. Scroll gestures, even those coming from touch devices,
        // such as trackpads will still be received as they are not touch events
        m_scrollGestureRecognizer.allowedTouchTypes = [NSArray array];
        if (@available(ios 13.4, *)) {
            m_scrollGestureRecognizer.allowedScrollTypesMask = UIScrollTypeMaskAll;
        }
        m_scrollGestureRecognizer.maximumNumberOfTouches = 0;
        m_lastScrollDelta = CGPointZero;
        m_lastScrollCursorPos = CGPointZero;
        [self addGestureRecognizer:m_scrollGestureRecognizer];

#if QT_CONFIG(tabletevent)
        m_hoverGestureRecognizer = [[UIHoverGestureRecognizer alloc]
                                   initWithTarget:self
                                   action:@selector(handleHover:)];
        [self addGestureRecognizer:m_hoverGestureRecognizer];
#endif

        // Set up layer
        if ([self.layer isKindOfClass:CAMetalLayer.class]) {
            // Propagate the window's requested color space to the Metal layer
            QWindow *window = self.platformWindow->window();
            if (QColorSpace colorSpace = window->format().colorSpace(); colorSpace.isValid()) {
                QCFType<CFDataRef> iccData = colorSpace.iccProfile().toCFData();
                QCFType<CGColorSpaceRef> cgColorSpace = CGColorSpaceCreateWithICCData(iccData);
                CAMetalLayer *metalLayer = static_cast<CAMetalLayer *>(self.layer);
                metalLayer.colorspace = cgColorSpace;
                qCDebug(lcQpaWindow) << "Set" << self << "color space to" << metalLayer.colorspace;
            }
        }
#if QT_CONFIG(opengl)
        else if ([self.layer isKindOfClass:[CAEAGLLayer class]]) {
            CAEAGLLayer *eaglLayer = static_cast<CAEAGLLayer *>(self.layer);
            eaglLayer.opaque = TRUE;
            eaglLayer.drawableProperties = @{
                kEAGLDrawablePropertyRetainedBacking: @(YES),
                kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8
            };
        }
#endif

#if defined(Q_OS_VISIONOS)
        // Although the "Drawing sharp layer-based content in visionOS" docs
        // claim that by default a CALayer rasterizes at a 2x scale this does
        // not seem to be the case in practice. So we explicitly set the view's
        // scale factor based on the screen, where we hard-code it to 2.0.
        self.contentScaleFactor = self.platformWindow->screen()->devicePixelRatio();
#endif
    }

    return self;
}
146
// This file is compiled without ARC (note [super dealloc] below), so every
// object alloc'ed in the initializer must be released here explicitly.
- (void)dealloc
{
    [m_accessibleElements release];
    [m_scrollGestureRecognizer release];
#if QT_CONFIG(tabletevent)
    // Balance the alloc in initWithQIOSWindow:. The view's own retain from
    // addGestureRecognizer: is released by UIKit; without this release the
    // recognizer leaked.
    [m_hoverGestureRecognizer release];
#endif

    [super dealloc];
}
154
// Augments the default UIView description with the platform window's debug
// description, to aid logging (qCDebug streams the view in several places).
- (NSString *)description
{
    NSMutableString *description = [NSMutableString stringWithString:[super description]];

#ifndef QT_NO_DEBUG_STREAM
    QString platformWindowDescription;
    QDebug debug(&platformWindowDescription);
    debug.nospace() << "; " << self.platformWindow << ">";
    // Replace the trailing '>' of the UIView description with
    // "; <platform window description>>"
    NSRange lastCharacter = [description rangeOfComposedCharacterSequenceAtIndex:description.length - 1];
    [description replaceCharactersInRange:lastCharacter withString:platformWindowDescription.toNSString()];
#endif

    return description;
}
169
#if !defined(Q_OS_VISIONOS)
// Keeps the view's scale factor in sync with the screen it is about to be
// shown on (falling back to the main screen when detached).
- (void)willMoveToWindow:(UIWindow *)newWindow
{
    // UIKIt will normally set the scale factor of a view to match the corresponding
    // screen scale factor, but views backed by CAEAGLLayers need to do this manually.
    self.contentScaleFactor = newWindow && newWindow.screen ?
        newWindow.screen.scale : [[UIScreen mainScreen] scale];

    // FIXME: Allow the scale factor to be customized through QSurfaceFormat.
}
#endif
181
// Once a child Qt window is added, clip our content to the view bounds so
// the child windows are confined to their parent.
- (void)didAddSubview:(UIView *)subview
{
    if (![subview isKindOfClass:QUIView.class])
        return;

    self.clipsToBounds = YES;
}
187
// When the last child Qt window is removed we no longer need to clip,
// mirroring the clipsToBounds enablement in didAddSubview:.
- (void)willRemoveSubview:(UIView *)subview
{
    BOOL hasOtherQtSubviews = NO;
    for (UIView *sibling in self.subviews) {
        if (sibling == subview)
            continue;
        if ([sibling isKindOfClass:QUIView.class]) {
            hasOtherQtSubviews = YES;
            break;
        }
    }

    if (!hasOtherQtSubviews)
        self.clipsToBounds = NO;
}
197
// Forwards display invalidation to the backing layer as well.
- (void)setNeedsDisplay
{
    [super setNeedsDisplay];

    // We didn't implement drawRect: so we have to manually
    // mark the layer as needing display.
    [self.layer setNeedsDisplay];
}
206
// Propagates geometry changes from UIKit to the QWindow, and triggers an
// expose on resize.
- (void)layoutSubviews
{
    // This method is the de facto way to know that view has been resized,
    // or otherwise needs invalidation of its buffers. Note though that we
    // do not get this callback when the view just changes its position, so
    // the position of our QWindow (and platform window) will only get updated
    // when the size is also changed.

    if (!CGAffineTransformIsIdentity(self.transform))
        qWarning() << self << "has a transform set. This is not supported.";

    QWindow *window = self.platformWindow->window();
    QRect lastReportedGeometry = qt_window_private(window)->geometry;
    QRect currentGeometry = QRectF::fromCGRect(self.frame).toRect();
    qCDebug(lcQpaWindow) << self.platformWindow << "new geometry is" << currentGeometry;
    QWindowSystemInterface::handleGeometryChange(window, currentGeometry);

    if (currentGeometry.size() != lastReportedGeometry.size()) {
        // Trigger expose event on resize
        [self setNeedsDisplay];

        // A new size means we also need to resize the FBO's corresponding buffers,
        // but we defer that to when the application calls makeCurrent.
    }
}
232
// CALayerDelegate callback: the layer needs its content (re)drawn, which we
// translate into a Qt expose event.
- (void)displayLayer:(CALayer *)layer
{
    Q_UNUSED(layer);
    Q_ASSERT(layer == self.layer);

    // The view may outlive the platform window during teardown
    if (!self.platformWindow)
        return;

    [self sendUpdatedExposeEvent];
}

// Sends an expose event covering the full layer bounds when the platform
// window is exposed, or an empty region (de-expose) when it is not.
- (void)sendUpdatedExposeEvent
{
    QRegion region;

    if (self.platformWindow->isExposed()) {
        QSize bounds = QRectF::fromCGRect(self.layer.bounds).toRect().size();

        Q_ASSERT(self.platformWindow->geometry().size() == bounds);
        Q_ASSERT(self.hidden == !self.platformWindow->window()->isVisible());

        region = QRect(QPoint(), bounds);
    }

    qCDebug(lcQpaWindow) << self.platformWindow << region << "isExposed" << self.platformWindow->isExposed();
    QWindowSystemInterface::handleExposeEvent(self.platformWindow->window(), region);
}
260
// Forwards safe-area inset changes to Qt so QWindow::safeAreaMargins()
// consumers get notified.
- (void)safeAreaInsetsDidChange
{
    QWindowSystemInterface::handleSafeAreaMarginsChanged(self.platformWindow->window());
}
265
266// -------------------------------------------------------------------------
267
// A view may only become first responder (and thereby the active Qt window)
// when its window accepts focus and is not transparent for input.
- (BOOL)canBecomeFirstResponder
{
    const Qt::WindowFlags flags = self.platformWindow->window()->flags();
    if (flags & Qt::WindowDoesNotAcceptFocus)
        return NO;
    if (flags & Qt::WindowTransparentForInput)
        return NO;
    return YES;
}
273
// Becomes first responder and, if needed, activates the corresponding Qt
// window. Returns NO if UIKit refuses the responder change.
- (BOOL)becomeFirstResponder
{
    {
        // Scope for the duration of becoming first responder only, as the window
        // activation event may trigger new responders, which we don't want to be
        // blocked by this guard.
        FirstResponderCandidate firstResponderCandidate(self);

        qImDebug() << "self:" << self << "first:" << [UIResponder qt_currentFirstResponder];

        if (![super becomeFirstResponder]) {
            qImDebug() << self << "was not allowed to become first responder";
            return NO;
        }

        qImDebug() << self << "became first responder";
    }

    if (qGuiApp->focusWindow() != self.platformWindow->window())
        QWindowSystemInterface::handleFocusWindowChanged(self.platformWindow->window(), Qt::ActiveWindowFocusReason);
    else
        qImDebug() << self.platformWindow->window() << "already active, not sending window activation";

    return YES;
}
299
// Decides whether the given incoming first responder should cause the Qt
// window to be deactivated. Returns NO when the new responder is (or acts
// on behalf of) another Qt window.
- (BOOL)responderShouldTriggerWindowDeactivation:(UIResponder *)responder
{
    // We don't want to send window deactivation in case the resign
    // was a result of another Qt window becoming first responder.
    if ([responder isKindOfClass:QUIView.class])
        return NO;

    // Nor do we want to deactivate the Qt window if the new responder
    // is temporarily handling text input on behalf of a Qt window.
    if (![responder isKindOfClass:QIOSTextResponder.class])
        return YES;

    for (UIResponder *next = responder.nextResponder; next; next = next.nextResponder) {
        if ([next isKindOfClass:QUIView.class])
            return NO;
    }

    return YES;
}
318
// Resigns first responder and deactivates the Qt focus window unless the
// responder change was triggered by another Qt view.
- (BOOL)resignFirstResponder
{
    qImDebug() << "self:" << self << "first:" << [UIResponder qt_currentFirstResponder];

    if (![super resignFirstResponder])
        return NO;

    qImDebug() << self << "resigned first responder";

    // qGuiApp may already be gone during application shutdown
    if (qGuiApp) {
        UIResponder *newResponder = FirstResponderCandidate::currentCandidate();
        if ([self responderShouldTriggerWindowDeactivation:newResponder])
            QWindowSystemInterface::handleFocusWindowChanged(nullptr, Qt::ActiveWindowFocusReason);
    }

    return YES;
}
336
// Reports whether this view represents the active Qt window.
// Normally this is determined exclusively by being firstResponder, but since
// we employ a separate first responder for text input we also treat the case
// where that text responder (sitting in front of us in the responder chain)
// is first responder as this view being active.
- (BOOL)isActiveWindow
{
    if (self.isFirstResponder)
        return YES;

    UIResponder *currentResponder = [UIResponder qt_currentFirstResponder];
    const BOOL activeViaTextResponder =
        [currentResponder isKindOfClass:QIOSTextInputResponder.class]
        && currentResponder.nextResponder == self;

    return activeViaTextResponder;
}
353
354// -------------------------------------------------------------------------
355
// Updates the Qt touch device's pressure capability to reflect whether the
// current trait collection reports 3D Touch (force) availability.
- (void)traitCollectionDidChange:(UITraitCollection *)previousTraitCollection
{
    [super traitCollectionDidChange: previousTraitCollection];

    QPointingDevice *touchDevice = QIOSIntegration::instance()->touchDevice();
    auto *devicePriv = QPointingDevicePrivate::get(touchDevice);

    auto capabilities = touchDevice->capabilities();
    capabilities.setFlag(QPointingDevice::Capability::Pressure,
        (self.traitCollection.forceTouchCapability == UIForceTouchCapabilityAvailable));
    devicePriv->setCapabilities(capabilities);
}
368
// Hit testing: windows that are transparent for input must never claim a
// point, so touches fall through to whatever is behind them.
-(BOOL)pointInside:(CGPoint)point withEvent:(UIEvent *)event
{
    const bool transparentForInput =
        self.platformWindow->window()->flags().testFlag(Qt::WindowTransparentForInput);
    if (transparentForInput)
        return NO;

    return [super pointInside:point withEvent:event];
}
375
#if QT_CONFIG(tabletevent)
// Translates a single Apple Pencil sample into a Qt tablet event: maps the
// view-local position to global coordinates, normalizes force to pressure,
// and converts the azimuth/altitude pair into Qt's x/y tilt degrees.
- (void)handlePencilEventForLocationInView:(CGPoint)locationInView withState:(QEventPoint::State)state withTimestamp:(ulong)timeStamp
        withForce:(CGFloat)force withMaximumPossibleForce:(CGFloat)maximumPossibleForce withZOffset:(CGFloat)zOffset
        withAzimuthUnitVector:(CGVector)azimuth withAltitudeAngleRadian:(CGFloat)altitudeAngleRadian
{
    QIOSIntegration *iosIntegration = QIOSIntegration::instance();

    // Map through the integer-based mapToGlobal, then re-apply the
    // sub-pixel fraction to keep the precise pencil location.
    QPointF localViewPosition = QPointF::fromCGPoint(locationInView);
    QPoint localViewPositionI = localViewPosition.toPoint();
    QPointF globalScreenPosition = self.platformWindow->mapToGlobal(localViewPositionI) +
            (localViewPosition - localViewPositionI);
    qreal pressure = 0;
    if (force != 0 && maximumPossibleForce != 0)
        pressure = force / maximumPossibleForce;
    // azimuth unit vector: +x to the right, +y going downwards
    // altitudeAngleRadian given in radians, pi / 2 is with the stylus perpendicular to the iPad, smaller values mean more tilted, but never negative.
    // Convert to degrees with zero being perpendicular.
    qreal altitudeAngle = 90 - qRadiansToDegrees(altitudeAngleRadian);
    qreal xTilt = qBound(-60.0, altitudeAngle * azimuth.dx, 60.0);
    qreal yTilt = qBound(-60.0, altitudeAngle * azimuth.dy, 60.0);

    qCDebug(lcQpaTablet) << ":" << timeStamp << localViewPosition << pressure << state << "azimuth" << azimuth.dx << azimuth.dy
            << "altitude" << altitudeAngleRadian << "xTilt" << xTilt << "yTilt" << yTilt;
    QWindowSystemInterface::handleTabletEvent(self.platformWindow->window(), timeStamp,
            // device, local, global
            iosIntegration->pencilDevice(), localViewPosition, globalScreenPosition,
            // buttons
            state == QEventPoint::State::Released ? Qt::NoButton : Qt::LeftButton,
            // pressure, xTilt, yTilt, tangentialPressure, rotation, z, modifiers
            pressure, xTilt, yTilt, 0, 0, zOffset, Qt::NoModifier);
}
#endif
408
// Updates the active touch points from a UIKit touch callback and delivers
// the resulting Qt touch event. Pencil touches are routed to the tablet
// event path instead; touches not present in this callback are marked
// Stationary so the full point set is always reported.
- (void)handleTouches:(NSSet *)touches withEvent:(UIEvent *)event withState:(QEventPoint::State)state withTimestamp:(ulong)timeStamp
{
    QIOSIntegration *iosIntegration = QIOSIntegration::instance();
    bool supportsPressure = QIOSIntegration::instance()->touchDevice()->capabilities() & QPointingDevice::Capability::Pressure;

#if QT_CONFIG(tabletevent)
    if (m_activePencilTouch && [touches containsObject:m_activePencilTouch]) {
        // Deliver every coalesced sample to preserve the pencil's full path
        NSArray<UITouch *> *cTouches = [event coalescedTouchesForTouch:m_activePencilTouch];
        for (UITouch *cTouch in cTouches) {
            [self handlePencilEventForLocationInView:[cTouch preciseLocationInView:self] withState:state withTimestamp:timeStamp
                  withForce:cTouch.force withMaximumPossibleForce:cTouch.maximumPossibleForce withZOffset:0
                  withAzimuthUnitVector:[cTouch azimuthUnitVectorInView:self]
                  withAltitudeAngleRadian:cTouch.altitudeAngle];
        }
    }
#endif

    // Pencil-only events have no regular touch points to report
    if (m_activeTouches.isEmpty())
        return;
    for (auto it = m_activeTouches.begin(); it != m_activeTouches.end(); ++it) {
        auto hash = it.key();
        QWindowSystemInterface::TouchPoint &touchPoint = it.value();
        UITouch *uiTouch = nil;
        for (UITouch *touch in touches) {
            if (touch.hash == hash) {
                uiTouch = touch;
                break;
            }
        }
        if (!uiTouch) {
            touchPoint.state = QEventPoint::State::Stationary;
        } else {
            touchPoint.state = state;

            // Touch positions are expected to be in QScreen global coordinates, and
            // as we already have the QWindow positioned at the right place, we can
            // just map from the local view position to global coordinates.
            // tvOS: all touches start at the center of the screen and move from there.
            QPoint localViewPosition = QPointF::fromCGPoint([uiTouch locationInView:self]).toPoint();
            QPoint globalScreenPosition = self.platformWindow->mapToGlobal(localViewPosition);

            touchPoint.area = QRectF(globalScreenPosition, QSize(0, 0));

            // FIXME: Do we really need to support QPointingDevice::Capability::NormalizedPosition?
            QSize screenSize = self.platformWindow->screen()->geometry().size();
            touchPoint.normalPosition = QPointF(globalScreenPosition.x() / screenSize.width(),
                globalScreenPosition.y() / screenSize.height());

            if (supportsPressure) {
                // Note: iOS will deliver touchesBegan with a touch force of 0, which
                // we will reflect/propagate as a 0 pressure, but there is no clear
                // alternative, as we don't want to wait for a touchedMoved before
                // sending a touch press event to Qt, just to have a valid pressure.
                touchPoint.pressure = uiTouch.force / uiTouch.maximumPossibleForce;
            } else {
                // We don't claim that our touch device supports QPointingDevice::Capability::Pressure,
                // but fill in a meaningful value in case clients use it anyway.
                touchPoint.pressure = (state == QEventPoint::State::Released) ? 0.0 : 1.0;
            }
        }
    }

    if ([self.window isKindOfClass:[QUIWindow class]] &&
        !static_cast<QUIWindow *>(self.window).sendingEvent) {
        // The event is likely delivered as part of delayed touch delivery, via
        // _UIGestureEnvironmentSortAndSendDelayedTouches, due to one of the two
        // _UISystemGestureGateGestureRecognizer instances on the top level window
        // having its delaysTouchesBegan set to YES. During this delivery, it's not
        // safe to spin up a recursive event loop, as our calling function is not
        // reentrant, so any gestures used by the recursive code, e.g. a native
        // alert dialog, will fail to recognize. To be on the safe side, we deliver
        // the event asynchronously.
        QWindowSystemInterface::handleTouchEvent<QWindowSystemInterface::AsynchronousDelivery>(
            self.platformWindow->window(), timeStamp, iosIntegration->touchDevice(), m_activeTouches.values());
    } else {
        // Send the touch event asynchronously, as the application might spin a recursive
        // event loop in response to the touch event (a dialog e.g.), which will deadlock
        // the UIKit event delivery system (QTBUG-98651).
        QWindowSystemInterface::handleTouchEvent<QWindowSystemInterface::AsynchronousDelivery>(
            self.platformWindow->window(), timeStamp, iosIntegration->touchDevice(), m_activeTouches.values());
    }
}
491
// Registers new touch (or pencil) points, auto-activates the top-level
// window on the first touch when appropriate, and delivers a Pressed event.
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    // UIKit generates [Began -> Moved -> Ended] event sequences for
    // each touch point. Internally we keep a hashmap of active UITouch
    // points to QWindowSystemInterface::TouchPoints, and assigns each TouchPoint
    // an id for use by Qt.
    for (UITouch *touch in touches) {
#if QT_CONFIG(tabletevent)
        if (touch.type == UITouchTypeStylus) {
            if (Q_UNLIKELY(m_activePencilTouch)) {
                qWarning("ignoring additional Pencil while first is still active");
                continue;
            }
            m_activePencilTouch = touch;
        } else
        {
            Q_ASSERT(!m_activeTouches.contains(touch.hash));
#endif
        // Use window-independent touch identifiers, so that
        // multi-touch works across windows.
        static quint16 nextTouchId = 0;
        m_activeTouches[touch.hash].id = nextTouchId++;
#if QT_CONFIG(tabletevent)
        }
#endif
    }

    // Activate the top-level window on the first touch, unless the window
    // opts out of auto-activation (or is already focused).
    if (self.platformWindow->shouldAutoActivateWindow() && m_activeTouches.size() == 1) {
        QPlatformWindow *topLevel = self.platformWindow;
        while (QPlatformWindow *p = topLevel->parent())
            topLevel = p;
        if (topLevel->window() != QGuiApplication::focusWindow())
            topLevel->requestActivateWindow();
    }

    [self handleTouches:touches withEvent:event withState:QEventPoint::State::Pressed withTimestamp:getTimeStamp(event)];
}
529
// Delivers touch movement as an Updated touch event.
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    [self handleTouches:touches withEvent:event withState:QEventPoint::State::Updated withTimestamp:getTimeStamp(event)];
}
534
// Delivers a Released touch event and unregisters the ended touch points.
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    [self handleTouches:touches withEvent:event withState:QEventPoint::State::Released withTimestamp:getTimeStamp(event)];

    // Remove ended touch points from the active set:
#ifndef Q_OS_TVOS
    for (UITouch *touch in touches) {
#if QT_CONFIG(tabletevent)
        if (touch.type == UITouchTypeStylus) {
            m_activePencilTouch = nil;
        } else
#endif
        {
            m_activeTouches.remove(touch.hash);
        }
    }
#else
    // tvOS only supports single touch
    m_activeTouches.clear();
#endif
}
556
// Clears all active touch state and delivers a touch-cancel event to Qt.
// See the inline comments for why we always cancel the full set rather than
// only the touches reported by UIKit.
- (void)touchesCancelled:(NSSet *)touches withEvent:(UIEvent *)event
{
    if (m_activeTouches.isEmpty() && !m_activePencilTouch)
        return;

    // When four-finger swiping, we get a touchesCancelled callback
    // which includes all four touch points. The swipe gesture is
    // then active until all four touches have been released, and
    // we start getting touchesBegan events again.

    // When five-finger pinching, we also get a touchesCancelled
    // callback with all five touch points, but the pinch gesture
    // ends when the second to last finger is released from the
    // screen. The last finger will not emit any more touch
    // events, _but_, will contribute to starting another pinch
    // gesture. That second pinch gesture will _not_ trigger a
    // touchesCancelled event when starting, but as each finger
    // is released, and we may get touchesMoved events for the
    // remaining fingers. [event allTouches] also contains one
    // less touch point than it should, so this behavior is
    // likely a bug in the iOS system gesture recognizer, but we
    // have to take it into account when maintaining the Qt state.
    // We do this by assuming that there are no cases where a
    // sub-set of the active touch events are intentionally cancelled.

    NSInteger count = static_cast<NSInteger>([touches count]);
    if (count != 0 && count != m_activeTouches.count() && !m_activePencilTouch)
        qWarning("Subset of active touches cancelled by UIKit");

    m_activeTouches.clear();
    m_activePencilTouch = nil;

    // UIKit may call this with a nil event during forced cancellation
    ulong timestamp = event ? getTimeStamp(event) : ([[NSProcessInfo processInfo] systemUptime] * 1000);

    QIOSIntegration *iosIntegration = static_cast<QIOSIntegration *>(QGuiApplicationPrivate::platformIntegration());

    // Send the touch event asynchronously, as the application might spin a recursive
    // event loop in response to the touch event (a dialog e.g.), which will deadlock
    // the UIKit event delivery system (QTBUG-98651).
    QWindowSystemInterface::handleTouchCancelEvent<QWindowSystemInterface::AsynchronousDelivery>(
        self.platformWindow->window(), timestamp, iosIntegration->touchDevice());
}
599
// Maps a UIPress to a Qt key code: first via the fixed press types
// (remote/arrow keys), then — on iOS 13.4+ — via the hardware keyboard key,
// its characters, and finally the modifier-aware string mapping, which may
// also fill in the committed text. Returns Qt::Key_unknown when unmapped.
- (int)mapPressTypeToKey:(UIPress*)press withModifiers:(Qt::KeyboardModifiers)qtModifiers text:(QString &)text
{
    switch (press.type) {
    case UIPressTypeUpArrow: return Qt::Key_Up;
    case UIPressTypeDownArrow: return Qt::Key_Down;
    case UIPressTypeLeftArrow: return Qt::Key_Left;
    case UIPressTypeRightArrow: return Qt::Key_Right;
    case UIPressTypeSelect: return Qt::Key_Select;
    case UIPressTypeMenu: return Qt::Key_Menu;
    case UIPressTypePlayPause: return Qt::Key_MediaTogglePlayPause;
    }
    if (@available(ios 13.4, *)) {
        Qt::Key key = QAppleKeyMapper::fromUIKitKey(press.key.keyCode);
        if (key != Qt::Key_unknown)
            return key;
        NSString *charactersIgnoringModifiers = press.key.charactersIgnoringModifiers;
        key = QAppleKeyMapper::fromUIKitKey(charactersIgnoringModifiers);
        if (key != Qt::Key_unknown)
            return key;
        key = QAppleKeyMapper::fromNSString(qtModifiers, press.key.characters,
                                            charactersIgnoringModifiers, text);
        if (key != Qt::Key_unknown)
            return key;
    }
    return Qt::Key_unknown;
}
626
// Returns whether the key is a control key (currently the arrow keys),
// which should be delivered to Qt even while a text input session is active.
- (bool)isControlKey:(Qt::Key)key
{
    return key == Qt::Key_Up
        || key == Qt::Key_Down
        || key == Qt::Key_Left
        || key == Qt::Key_Right;
}
641
// Translates a set of UIPress objects into Qt key events of the given type.
// Returns whether any press was handled by Qt, so callers can decide whether
// to forward the event to UIKit's default handling.
- (bool)handlePresses:(NSSet<UIPress *> *)presses eventType:(QEvent::Type)type
{
    // Presses on Menu button will generate a Menu key event. By default, not handling
    // this event will cause the application to return to Headboard (tvOS launcher).
    // When handling the event (for example, as a back button), both press and
    // release events must be handled accordingly.
    if (!qApp->focusWindow())
        return false;

    bool eventHandled = false;
    const bool imEnabled = QIOSInputContext::instance()->inputMethodAccepted();

    for (UIPress* press in presses) {
        Qt::KeyboardModifiers qtModifiers = Qt::NoModifier;
        if (@available(ios 13.4, *))
            qtModifiers = QAppleKeyMapper::fromUIKitModifiers(press.key.modifierFlags);
        QString text;
        int key = [self mapPressTypeToKey:press withModifiers:qtModifiers text:text];
        if (key == Qt::Key_unknown)
            continue;
        // While text input is active, let the input method see everything
        // except control keys (arrows)
        if (imEnabled && ![self isControlKey:Qt::Key(key)])
            continue;

        bool keyHandled = QWindowSystemInterface::handleKeyEvent(
            self.platformWindow->window(), type, key, qtModifiers, text);
        eventHandled = eventHandled || keyHandled;
    }

    return eventHandled;
}
672
// Maps began presses to Qt key press events; only falls back to UIKit's
// default handling when Qt did not handle the press.
- (void)pressesBegan:(NSSet<UIPress *> *)presses withEvent:(UIPressesEvent *)event
{
    if (![self handlePresses:presses eventType:QEvent::KeyPress])
        [super pressesBegan:presses withEvent:event];
}
678
// Maps changed presses to Qt key press events; only falls back to UIKit's
// default handling when Qt did not handle the press. The previous code
// additionally called [super pressesChanged:...] unconditionally, which
// forwarded every press to UIKit even when Qt had consumed it (and twice
// when it had not) — defeating the guard and diverging from pressesBegan:.
- (void)pressesChanged:(NSSet<UIPress *> *)presses withEvent:(UIPressesEvent *)event
{
    if (![self handlePresses:presses eventType:QEvent::KeyPress])
        [super pressesChanged:presses withEvent:event];
}
685
// Maps ended presses to Qt key release events; only falls back to UIKit's
// default handling when Qt did not handle the release. The previous code
// additionally called [super pressesEnded:...] unconditionally, which
// forwarded every release to UIKit even when Qt had consumed it (and twice
// when it had not) — e.g. re-triggering the tvOS Menu default behavior that
// handlePresses: is documented to suppress.
- (void)pressesEnded:(NSSet<UIPress *> *)presses withEvent:(UIPressesEvent *)event
{
    if (![self handlePresses:presses eventType:QEvent::KeyRelease])
        [super pressesEnded:presses withEvent:event];
}
692
// Edit-menu support: reports whether the Qt menu system can handle the
// given action (always false where QIOSMenu is unavailable).
- (BOOL)canPerformAction:(SEL)action withSender:(id)sender
{
#if !defined(Q_OS_TVOS) && !defined(Q_OS_VISIONOS)
    // Check first if QIOSMenu should handle the action before continuing up the responder chain
    return [QIOSMenu::menuActionTarget() targetForAction:action withSender:sender] != 0;
#else
    Q_UNUSED(action);
    Q_UNUSED(sender);
    return false;
#endif
}

// Forwards unrecognized selectors (edit-menu actions) to the Qt menu
// system's action target.
- (id)forwardingTargetForSelector:(SEL)selector
{
    Q_UNUSED(selector);
#if !defined(Q_OS_TVOS) && !defined(Q_OS_VISIONOS)
    return QIOSMenu::menuActionTarget();
#else
    return nil;
#endif
}
714
// Filter out UIKit's UITextInteraction before it gets installed on the view.
// The class is compared by name so we don't have to reference it directly.
// NOTE(review): presumably Qt provides its own text interaction handling —
// confirm against the text responder implementation.
- (void)addInteraction:(id<UIInteraction>)interaction
{
    NSString *interactionClassName = NSStringFromClass(interaction.class);
    if ([interactionClassName isEqualToString:@"UITextInteraction"])
        return;

    [super addInteraction:interaction];
}
722
- (UIEditingInteractionConfiguration)editingInteractionConfiguration
{
    // We only want the three-finger-tap edit menu to be available when there's
    // actually something to edit. Otherwise the OS will cause a slight delay
    // before delivering the release of three finger touch input. Note that we
    // do not do any hit testing here to check that the focus object is the one
    // being tapped, as the behavior of native iOS apps is to trigger the menu
    // regardless of where the gesture is being made.
    if (QIOSInputContext::instance()->inputMethodAccepted())
        return UIEditingInteractionConfigurationDefault;

    return UIEditingInteractionConfigurationNone;
}
734
#if QT_CONFIG(wheelevent)
// Translates a pan gesture (trackpad/mouse scroll) into Qt wheel events,
// reporting both pixel deltas and synthesized angle deltas.
- (void)handleScroll:(UIPanGestureRecognizer *)recognizer
{
    if (!self.platformWindow->window())
        return;

    // Respect focus policy: don't scroll windows that can't take input
    if (!self.canBecomeFirstResponder)
        return;

    // The recognizer reports accumulated translation, so diff against the
    // value recorded at the previous callback to get this step's delta
    CGPoint translation = [recognizer translationInView:self];
    CGFloat deltaX = translation.x - m_lastScrollDelta.x;
    CGFloat deltaY = translation.y - m_lastScrollDelta.y;

    QPoint angleDelta;
    // From QNSView implementation:
    // "Since deviceDelta is delivered as pixels rather than degrees, we need to
    // convert from pixels to degrees in a sensible manner.
    // It looks like 1/4 degrees per pixel behaves most native.
    // (NB: Qt expects the unit for delta to be 8 per degree):"
    const int pixelsToDegrees = 2; // 8 * 1/4
    angleDelta.setX(deltaX * pixelsToDegrees);
    angleDelta.setY(deltaY * pixelsToDegrees);

    QPoint pixelDelta;
    pixelDelta.setX(deltaX);
    pixelDelta.setY(deltaY);

    NSTimeInterval time_stamp = [[NSProcessInfo processInfo] systemUptime];
    ulong qt_timestamp = time_stamp * 1000;

    Qt::KeyboardModifiers qt_modifierFlags = Qt::NoModifier;
    if (@available(ios 13.4, *))
        qt_modifierFlags = QAppleKeyMapper::fromUIKitModifiers(recognizer.modifierFlags);

    if (recognizer.state == UIGestureRecognizerStateBegan)
        // locationInView: doesn't return the cursor position at the time of the wheel event,
        // but rather gives us the position with the deltas applied, so we need to save the
        // cursor position at the beginning of the gesture
        m_lastScrollCursorPos = [recognizer locationInView:self];

    if (recognizer.state != UIGestureRecognizerStateEnded) {
        m_lastScrollDelta.x = translation.x;
        m_lastScrollDelta.y = translation.y;
    } else {
        m_lastScrollDelta = CGPointZero;
    }

    QPoint qt_local = QPointF::fromCGPoint(m_lastScrollCursorPos).toPoint();
    QPoint qt_global = self.platformWindow->mapToGlobal(qt_local);

    qCInfo(lcQpaInputEvents).nospace() << "wheel event" << " at " << qt_local
        << " pixelDelta=" << pixelDelta << " angleDelta=" << angleDelta;

    QWindowSystemInterface::handleWheelEvent(self.platformWindow->window(), qt_timestamp, qt_local, qt_global, pixelDelta, angleDelta, qt_modifierFlags);
}
#endif // QT_CONFIG(wheelevent)
791
#if QT_CONFIG(tabletevent)
// Translates an Apple Pencil hover gesture into a tablet event with zero
// force, reporting hover distance (zOffset) and tilt where the OS provides
// them.
- (void)handleHover:(UIHoverGestureRecognizer *)recognizer
{
    // The view may outlive the platform window during teardown
    if (!self.platformWindow)
        return;

    ulong timeStamp = [[NSProcessInfo processInfo] systemUptime] * 1000;

    // Hover distance is only available on iOS 16.1+
    CGFloat zOffset = 0;
    if (@available(ios 16.1, *))
        zOffset = [recognizer zOffset];

    // Zero-initialize so we never read uninitialized stack memory on systems
    // older than iOS 16.4, where the availability guard below skips the
    // assignment. A zero azimuth vector also yields zero x/y tilt.
    CGVector azimuth = CGVectorMake(0, 0);
    CGFloat altitudeAngleRadian = 0;
    if (@available(ios 16.4, *)) {
        azimuth = [recognizer azimuthUnitVectorInView:self];
        altitudeAngleRadian = recognizer.altitudeAngle;
    }

    // Report as Released: hovering means no contact, hence no buttons/force
    [self handlePencilEventForLocationInView:[recognizer locationInView:self] withState:QEventPoint::State::Released
          withTimestamp:timeStamp withForce:0 withMaximumPossibleForce:0 withZOffset:zOffset
          withAzimuthUnitVector:azimuth withAltitudeAngleRadian:altitudeAngleRadian];
}
#endif
816
817@end
818
@implementation UIView (QtHelpers)

// Returns the QWindow represented by this view, or nil when the view is not
// a QUIView (or has no platform window attached).
- (QWindow *)qwindow
{
    if (![self isKindOfClass:[QUIView class]])
        return nil;

    QT_PREPEND_NAMESPACE(QIOSWindow) *platformWindow = static_cast<QUIView *>(self).platformWindow;
    return platformWindow ? platformWindow->window() : nil;
}

// Walks up the responder chain and returns the first view controller
// encountered, or nil if there is none.
- (UIViewController *)viewController
{
    for (id responder = [self nextResponder]; responder; responder = [responder nextResponder]) {
        if ([responder isKindOfClass:UIViewController.class])
            return responder;
    }
    return nil;
}

// Returns the owning QIOSViewController, or nil when the nearest view
// controller is not Qt's.
- (QIOSViewController*)qtViewController
{
    UIViewController *viewController = self.viewController;
    if (![viewController isKindOfClass:QIOSViewController.class])
        return nil;

    return static_cast<QIOSViewController *>(viewController);
}

@end
850
#if QT_CONFIG(metal)
@implementation QUIMetalView

// Back this view with a CAMetalLayer so it can serve as a Metal render target.
+ (Class)layerClass
{
    return [CAMetalLayer class];
}

@end
#endif
861
862#if QT_CONFIG(accessibility)
863// Include category as an alternative to using -ObjC (Apple QA1490)
864#include "quiview_accessibility.mm"
865#endif
Q_FORWARD_DECLARE_OBJC_CLASS(NSString)
Q_LOGGING_CATEGORY(lcEventDispatcher, "qt.eventdispatcher")
Q_FORWARD_DECLARE_OBJC_CLASS(UIViewController)
#define qImDebug
Definition qiosglobal.h:21