Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
quiview.mm
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3// Qt-Security score:significant reason:default
4
5#include "quiview.h"
6
7#include "qiosglobal.h"
11#include "qiosscreen.h"
12#include "qioswindow.h"
14#include "quiwindow.h"
15#ifndef Q_OS_TVOS
16#include "qiosmenu.h"
17#endif
18
19#include <QtCore/qmath.h>
20#include <QtGui/qpointingdevice.h>
21#include <QtGui/private/qguiapplication_p.h>
22#include <QtGui/private/qwindow_p.h>
23#include <QtGui/private/qapplekeymapper_p.h>
24#include <QtGui/private/qpointingdevice_p.h>
25#include <qpa/qwindowsysteminterface_p.h>
26
// Logging categories for pointer/tablet input, used throughout the iOS
// platform plugin (enable e.g. via QT_LOGGING_RULES="qt.qpa.input.*=true").
Q_LOGGING_CATEGORY(lcQpaMouse, "qt.qpa.input.mouse")
Q_LOGGING_CATEGORY(lcQpaTablet, "qt.qpa.input.tablet")
Q_LOGGING_CATEGORY(lcQpaInputEvents, "qt.qpa.input.events")
30
31namespace {
// Returns the timestamp for \a event in milliseconds, as Qt expects.
// Falls back to the process uptime when no event is available (or when
// the event's timestamp cannot be trusted, see below).
inline ulong getTimeStamp(UIEvent *event)
{
    bool useEvent = event != nil;

#if TARGET_OS_SIMULATOR == 1
    // We currently build Qt for simulator using X86_64, even on ARM based macs.
    // This results in the simulator running on ARM, while the app is running
    // inside it using Rosetta. And with this combination, the event.timestamp, which is
    // documented to be in seconds, looks to be something else, and is not progressing
    // in sync with a normal clock.
    // Sending out mouse events with a timestamp that doesn't follow normal clock time
    // will cause problems for mouse-, and pointer handlers that uses them to e.g calculate
    // the time between a press and release, and to decide if the user is performing a tap
    // or a drag.
    // For that reason, we choose to ignore UIEvent.timestamp under the mentioned condition, and
    // instead rely on NSProcessInfo. Note that if we force the whole simulator to use Rosetta
    // (and not only the Qt app), the timestamps will progress normally.
#if defined(Q_PROCESSOR_ARM)
    #warning The timestamp work-around for x86_64 can (probably) be removed when building for ARM
#endif
    useEvent = false;
#endif

    // Multiply before truncating to ulong: both timestamps are in (fractional)
    // seconds, so casting first would discard the sub-second part and quantize
    // every timestamp to whole seconds, breaking tap/drag detection and any
    // handler that measures the time between press and release.
    return ulong((useEvent ? event.timestamp : NSProcessInfo.processInfo.systemUptime) * 1000);
}
57}
58
59#if QT_CONFIG(tabletevent)
// Minimal common interface implemented by both UITouch and
// UIGestureRecognizer, so that handleTabletEvent:withSender:andTimestamp:
// can extract Apple Pencil data from either sender through one protocol.
@protocol TabletEventSender
- (CGPoint)locationInView:(UIView *)view;
- (CGVector)azimuthUnitVectorInView:(UIView *)view;
@property (nonatomic, readonly) CGFloat altitudeAngle;
@property (nonatomic, readonly) CGFloat rollAngle;
@end
// The categories add no methods; they merely declare that the existing
// UIKit API of these classes already satisfies the protocol.
@interface UIGestureRecognizer (TabletEventSender) <TabletEventSender> @end
@interface UITouch (TabletEventSender) <TabletEventSender> @end
68#endif
69
@implementation QUIView {
    // Active (non-pencil) touch points, keyed by the UITouch object's hash.
    QHash<NSUInteger, QWindowSystemInterface::TouchPoint> m_activeTouches;
    // The single Apple Pencil touch being tracked, if any (nil otherwise).
    UITouch *m_activePencilTouch;
    // Accessibility elements for this view; owned (released in dealloc).
    NSMutableArray<UIAccessibilityElement *> *m_accessibleElements;
    // State carried between updates of the scroll (pan) gesture handler.
    CGPoint m_lastScrollCursorPos;
    CGPoint m_lastScrollDelta;
}
77
+ (Class)layerClass
{
    // Back the view with an OpenGL ES layer when Qt is configured with
    // OpenGL support; otherwise use the default layer class.
#if QT_CONFIG(opengl)
    return [CAEAGLLayer class];
#else
    return [super layerClass];
#endif
}
85
// Designated Qt initializer: creates the view for the given platform window,
// sets up gesture recognizers for scroll/hover/pencil input, and configures
// the backing layer (Metal color space or EAGL drawable properties).
- (instancetype)initWithQIOSWindow:(QT_PREPEND_NAMESPACE(QIOSWindow) *)window
{
    if (self = [self initWithFrame:window->geometry().toCGRect()]) {
        self.platformWindow = window;

        // In a Qt application windows are shown explicitly via setVisible(),
        // so start out hidden; embedded (non-Qt) use keeps UIKit's default.
        if (isQtApplication())
            self.hidden = YES;

        m_accessibleElements = [[NSMutableArray<UIAccessibilityElement *> alloc] init];

#ifndef Q_OS_TVOS
        self.multipleTouchEnabled = YES;
#endif

        auto scrollGestureRecognizer = [[UIPanGestureRecognizer alloc]
            initWithTarget:self action:@selector(handleScroll:)];
        // The gesture recognizer should only care about scroll gestures (for now)
        // Set allowedTouchTypes to empty array here to not interfere with touch events
        // handled by the UIView. Scroll gestures, even those coming from touch devices,
        // such as trackpads will still be received as they are not touch events
        scrollGestureRecognizer.allowedTouchTypes = @[];
        scrollGestureRecognizer.allowedScrollTypesMask = UIScrollTypeMaskAll;
        scrollGestureRecognizer.maximumNumberOfTouches = 0;
        m_lastScrollDelta = CGPointZero;
        m_lastScrollCursorPos = CGPointZero;
        // MRC: the view retains the recognizer; balance the alloc with autorelease.
        [self addGestureRecognizer:[scrollGestureRecognizer autorelease]];

        // Hover events from an indirect pointer (e.g. trackpad/mouse cursor).
        auto mouseHoverGestureRecognizer = [[UIHoverGestureRecognizer alloc]
            initWithTarget:self action:@selector(handleMouseHover:)];
        mouseHoverGestureRecognizer.allowedTouchTypes = @[ @(UITouchTypeIndirectPointer) ];
        [self addGestureRecognizer:[mouseHoverGestureRecognizer autorelease]];

#if QT_CONFIG(tabletevent)
        // Hover events from an Apple Pencil (proximity/move while not touching).
        auto pencilHoverGestureRecognizer = [[UIHoverGestureRecognizer alloc]
            initWithTarget:self action:@selector(handlePencilHover:)];
        pencilHoverGestureRecognizer.allowedTouchTypes = @[ @(UITouchTypePencil) ];
        [self addGestureRecognizer:[pencilHoverGestureRecognizer autorelease]];
#endif

        // Set up layer
        if ([self.layer isKindOfClass:CAMetalLayer.class]) {
            // Propagate a valid QSurfaceFormat color space to the Metal layer.
            QWindow *window = self.platformWindow->window();
            if (QColorSpace colorSpace = window->format().colorSpace(); colorSpace.isValid()) {
                QCFType<CFDataRef> iccData = colorSpace.iccProfile().toCFData();
                QCFType<CGColorSpaceRef> cgColorSpace = CGColorSpaceCreateWithICCData(iccData);
                CAMetalLayer *metalLayer = static_cast<CAMetalLayer *>(self.layer);
                metalLayer.colorspace = cgColorSpace;
                qCDebug(lcQpaWindow) << "Set" << self << "color space to" << metalLayer.colorspace;
            }
        }
#if QT_CONFIG(opengl)
        else if ([self.layer isKindOfClass:[CAEAGLLayer class]]) {
            CAEAGLLayer *eaglLayer = static_cast<CAEAGLLayer *>(self.layer);
            eaglLayer.opaque = TRUE;
            eaglLayer.drawableProperties = @{
                kEAGLDrawablePropertyRetainedBacking: @(YES),
                kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8
            };
        }
#endif

#if defined(Q_OS_VISIONOS)
        // Although the "Drawing sharp layer-based content in visionOS" docs
        // claim that by default a CALayer rasterizes at a 2x scale this does
        // not seem to be the case in practice. So we explicitly set the view's
        // scale factor based on the screen, where we hard-code it to 2.0.
        self.contentScaleFactor = self.platformWindow->screen()->devicePixelRatio();
#endif
    }

    return self;
}
158
// This file builds without ARC, so ownership is managed manually:
// release what initWithQIOSWindow: allocated, then chain to super.
- (void)dealloc
{
    [m_accessibleElements release];

    [super dealloc];
}
165
// Augments UIView's default description with a description of the
// associated platform window, to aid debugging and log output.
- (NSString *)description
{
    NSMutableString *description = [NSMutableString stringWithString:[super description]];

#ifndef QT_NO_DEBUG_STREAM
    QString platformWindowDescription;
    QDebug debug(&platformWindowDescription);
    debug.nospace() << "; " << self.platformWindow << ">";
    // Replace the trailing '>' of the UIView description (using a composed
    // character range to stay on a character boundary) with "; <window>>".
    NSRange lastCharacter = [description rangeOfComposedCharacterSequenceAtIndex:description.length - 1];
    [description replaceCharactersInRange:lastCharacter withString:platformWindowDescription.toNSString()];
#endif

    return description;
}
180
181#if !defined(Q_OS_VISIONOS)
- (void)willMoveToWindow:(UIWindow *)newWindow
{
    // UIKit will normally set the scale factor of a view to match the
    // corresponding screen scale factor, but views backed by CAEAGLLayers
    // need to do this manually.
    UIScreen *targetScreen = newWindow ? newWindow.screen : nil;
    if (targetScreen)
        self.contentScaleFactor = targetScreen.scale;
    else
        self.contentScaleFactor = [[UIScreen mainScreen] scale];

    // FIXME: Allow the scale factor to be customized through QSurfaceFormat.
}
191#endif
192
- (void)didAddSubview:(UIView *)subview
{
    // Start clipping as soon as we host a child QUIView (a child QWindow),
    // so the child does not paint outside our bounds.
    const BOOL subviewIsQtWindow = [subview isKindOfClass:[QUIView class]];
    if (subviewIsQtWindow)
        self.clipsToBounds = YES;
}
198
- (void)willRemoveSubview:(UIView *)subview
{
    // Stop clipping once the last child QUIView is about to be removed.
    BOOL hasOtherQtSubviews = NO;
    for (UIView *sibling in self.subviews) {
        if (sibling == subview)
            continue;
        if ([sibling isKindOfClass:[QUIView class]]) {
            hasOtherQtSubviews = YES;
            break;
        }
    }

    if (!hasOtherQtSubviews)
        self.clipsToBounds = NO;
}
208
// Invalidates both the view and its backing layer.
- (void)setNeedsDisplay
{
    [super setNeedsDisplay];

    // We didn't implement drawRect: so we have to manually
    // mark the layer as needing display (UIKit only does this
    // automatically for views that draw via drawRect:).
    [self.layer setNeedsDisplay];
}
217
- (void)layoutSubviews
{
    // This method is the de facto way to know that view has been resized,
    // or otherwise needs invalidation of its buffers. Note though that we
    // do not get this callback when the view just changes its position, so
    // the position of our QWindow (and platform window) will only get updated
    // when the size is also changed.

    if (!CGAffineTransformIsIdentity(self.transform))
        qWarning() << self << "has a transform set. This is not supported.";

    // Report the new geometry unconditionally; an expose (repaint) is only
    // triggered when the size actually changed.
    QWindow *window = self.platformWindow->window();
    QRect lastReportedGeometry = qt_window_private(window)->geometry;
    QRect currentGeometry = QRectF::fromCGRect(self.frame).toRect();
    qCDebug(lcQpaWindow) << self.platformWindow << "new geometry is" << currentGeometry;
    QWindowSystemInterface::handleGeometryChange(window, currentGeometry);

    if (currentGeometry.size() != lastReportedGeometry.size()) {
        // Trigger expose event on resize
        [self setNeedsDisplay];

        // A new size means we also need to resize the FBO's corresponding buffers,
        // but we defer that to when the application calls makeCurrent.
    }
}
243
- (void)displayLayer:(CALayer *)layer
{
    // We don't implement drawRect:, so the layer's display callback is our
    // trigger to (re)expose the window's content.
    Q_UNUSED(layer);
    Q_ASSERT(layer == self.layer);

    if (self.platformWindow)
        [self sendUpdatedExposeEvent];
}
254
// Sends an expose event for the window's current exposed region: the full
// layer bounds when exposed, or an empty region (i.e. "obscured") otherwise.
- (void)sendUpdatedExposeEvent
{
    QRegion region;

    if (self.platformWindow->isExposed()) {
        QSize bounds = QRectF::fromCGRect(self.layer.bounds).toRect().size();

        // Sanity checks: platform window geometry and visibility are expected
        // to already be in sync with the UIKit view state at this point.
        Q_ASSERT(self.platformWindow->geometry().size() == bounds);
        Q_ASSERT(self.hidden == !self.platformWindow->window()->isVisible());

        region = QRect(QPoint(), bounds);
    }

    qCDebug(lcQpaWindow) << self.platformWindow << region << "isExposed" << self.platformWindow->isExposed();
    QWindowSystemInterface::handleExposeEvent(self.platformWindow->window(), region);
}
271
// Propagates UIKit safe-area changes so QWindow::safeAreaMargins()
// (via the platform window) stays in sync.
- (void)safeAreaInsetsDidChange
{
    QWindowSystemInterface::handleSafeAreaMarginsChanged(self.platformWindow->window());
}
276
277// -------------------------------------------------------------------------
278
- (BOOL)canBecomeFirstResponder
{
    // Windows that never take focus, or that are transparent for input,
    // must not become first responder (and hence never the active QWindow).
    const Qt::WindowFlags flags = self.platformWindow->window()->flags();
    if (flags & Qt::WindowDoesNotAcceptFocus)
        return NO;
    if (flags & Qt::WindowTransparentForInput)
        return NO;
    return YES;
}
284
// Becomes first responder and, on success, makes the corresponding QWindow
// the focus window (unless it already is).
- (BOOL)becomeFirstResponder
{
    {
        // Scope for the duration of becoming first responder only, as the window
        // activation event may trigger new responders, which we don't want to be
        // blocked by this guard.
        FirstResponderCandidate firstResponderCandidate(self);

        qImDebug() << "self:" << self << "first:" << [UIResponder qt_currentFirstResponder];

        if (![super becomeFirstResponder]) {
            qImDebug() << self << "was not allowed to become first responder";
            return NO;
        }

        qImDebug() << self << "became first responder";
    }

    if (qGuiApp->focusWindow() != self.platformWindow->window())
        QWindowSystemInterface::handleFocusWindowChanged(self.platformWindow->window(), Qt::ActiveWindowFocusReason);
    else
        qImDebug() << self.platformWindow->window() << "already active, not sending window activation";

    return YES;
}
310
// Decides whether resigning first responder in favor of \a responder
// should deactivate the Qt focus window.
- (BOOL)responderShouldTriggerWindowDeactivation:(UIResponder *)responder
{
    // Resigning in favor of another Qt window is a focus transfer,
    // not a deactivation of Qt as a whole.
    if ([responder isKindOfClass:[QUIView class]])
        return NO;

    // Nor should we deactivate when the new responder is a text responder
    // handling input on behalf of a Qt window — in that case a QUIView sits
    // somewhere up its responder chain.
    if ([responder isKindOfClass:[QIOSTextResponder class]]) {
        for (UIResponder *next = [responder nextResponder]; next; next = [next nextResponder]) {
            if ([next isKindOfClass:[QUIView class]])
                return NO;
        }
    }

    return YES;
}
329
// Resigns first responder and clears the Qt focus window, unless the
// responder taking over is itself (on behalf of) a Qt window.
- (BOOL)resignFirstResponder
{
    qImDebug() << "self:" << self << "first:" << [UIResponder qt_currentFirstResponder];

    if (![super resignFirstResponder])
        return NO;

    qImDebug() << self << "resigned first responder";

    // qGuiApp may already be gone during application shutdown.
    if (qGuiApp) {
        UIResponder *newResponder = FirstResponderCandidate::currentCandidate();
        if ([self responderShouldTriggerWindowDeactivation:newResponder])
            QWindowSystemInterface::handleFocusWindowChanged(nullptr, Qt::ActiveWindowFocusReason);
    }

    return YES;
}
347
- (BOOL)isActiveWindow
{
    // Being first responder normally determines the active window, but since
    // text input is handled by a dedicated responder sitting on top of this
    // view, both situations count as this view being the active Qt window.

    if (self.isFirstResponder)
        return YES;

    UIResponder *currentResponder = [UIResponder qt_currentFirstResponder];
    const BOOL textResponderOnOurBehalf =
        [currentResponder isKindOfClass:[QIOSTextInputResponder class]]
            && [currentResponder nextResponder] == self;

    return textResponderOnOurBehalf;
}
364
365// -------------------------------------------------------------------------
366
- (void)traitCollectionDidChange:(UITraitCollection *)previousTraitCollection
{
    [super traitCollectionDidChange:previousTraitCollection];

    // Reflect 3D/Force Touch availability in the touch device's capabilities.
    const bool pressureAvailable =
        self.traitCollection.forceTouchCapability == UIForceTouchCapabilityAvailable;

    QPointingDevice *touchDevice = QIOSIntegration::instance()->touchDevice();
    auto capabilities = touchDevice->capabilities();
    capabilities.setFlag(QPointingDevice::Capability::Pressure, pressureAvailable);
    QPointingDevicePrivate::get(touchDevice)->setCapabilities(capabilities);
}
379
- (BOOL)pointInside:(CGPoint)point withEvent:(UIEvent *)event
{
    // Input-transparent windows never hit-test positive, so events fall
    // through to whatever lies behind them.
    const bool transparentForInput =
        self.platformWindow->window()->flags() & Qt::WindowTransparentForInput;
    return transparentForInput ? NO : [super pointInside:point withEvent:event];
}
386
// Updates the active touch points from \a touches (and routes coalesced
// pencil touches to the tablet-event path), then delivers a touch event
// for ALL active points — untouched points are reported as Stationary.
- (void)handleTouches:(NSSet *)touches withEvent:(UIEvent *)event withState:(QEventPoint::State)state
{
    QIOSIntegration *iosIntegration = QIOSIntegration::instance();
    const ulong timeStamp = getTimeStamp(event);

#if QT_CONFIG(tabletevent)
    if (m_activePencilTouch && [touches containsObject:m_activePencilTouch]) {
        // Deliver every coalesced pencil sample as its own tablet event,
        // mapping the UITouch phase to the corresponding tablet event type.
        NSArray<UITouch *> *cTouches = [event coalescedTouchesForTouch:m_activePencilTouch];
        for (UITouch *cTouch in cTouches) {
            QEvent::Type eventType = [&]{
                switch (cTouch.phase) {
                case UITouchPhaseBegan:
                    return QEvent::TabletPress;
                case UITouchPhaseEnded:
                    return QEvent::TabletRelease;
                case UITouchPhaseMoved:
                case UITouchPhaseStationary:
                    return QEvent::TabletMove;
                default:
                    return QEvent::None;
                }
            }();
            [self handleTabletEvent:eventType withSender:cTouch andTimestamp:timeStamp];
        }
    }
#endif

    if (m_activeTouches.isEmpty())
        return;
    for (auto it = m_activeTouches.begin(); it != m_activeTouches.end(); ++it) {
        auto hash = it.key();
        QWindowSystemInterface::TouchPoint &touchPoint = it.value();
        UITouch *uiTouch = nil;
        for (UITouch *touch in touches) {
            if (touch.hash == hash) {
                uiTouch = touch;
                break;
            }
        }
        if (!uiTouch) {
            // An active point not present in this UIKit delivery is unchanged.
            touchPoint.state = QEventPoint::State::Stationary;
        } else {
            touchPoint.state = state;

            // Touch positions are expected to be in QScreen global coordinates, and
            // as we already have the QWindow positioned at the right place, we can
            // just map from the local view position to global coordinates.
            // tvOS: all touches start at the center of the screen and move from there.
            QPoint localViewPosition = QPointF::fromCGPoint([uiTouch locationInView:self]).toPoint();
            QPoint globalScreenPosition = self.platformWindow->mapToGlobal(localViewPosition);

            touchPoint.area = QRectF(globalScreenPosition, QSize(0, 0));

            // FIXME: Do we really need to support QPointingDevice::Capability::NormalizedPosition?
            // Use floating-point division: QPoint::x() and QSize::width() are both
            // int, so dividing them directly would truncate to 0 (or 1 at the far
            // edge) instead of producing a normalized position in [0, 1].
            QSize screenSize = self.platformWindow->screen()->geometry().size();
            touchPoint.normalPosition = QPointF(qreal(globalScreenPosition.x()) / screenSize.width(),
                                                qreal(globalScreenPosition.y()) / screenSize.height());

            touchPoint.pressure = [self pressureForTouch:uiTouch];
        }
    }

    if ([self.window isKindOfClass:[QUIWindow class]] &&
        !static_cast<QUIWindow *>(self.window).sendingEvent) {
        // The event is likely delivered as part of delayed touch delivery, via
        // _UIGestureEnvironmentSortAndSendDelayedTouches, due to one of the two
        // _UISystemGestureGateGestureRecognizer instances on the top level window
        // having its delaysTouchesBegan set to YES. During this delivery, it's not
        // safe to spin up a recursive event loop, as our calling function is not
        // reentrant, so any gestures used by the recursive code, e.g. a native
        // alert dialog, will fail to recognize. To be on the safe side, we deliver
        // the event asynchronously.
        QWindowSystemInterface::handleTouchEvent<QWindowSystemInterface::AsynchronousDelivery>(
            self.platformWindow->window(), timeStamp, iosIntegration->touchDevice(), m_activeTouches.values());
    } else {
        // Send the touch event asynchronously, as the application might spin a recursive
        // event loop in response to the touch event (a dialog e.g.), which will deadlock
        // the UIKit event delivery system (QTBUG-98651).
        QWindowSystemInterface::handleTouchEvent<QWindowSystemInterface::AsynchronousDelivery>(
            self.platformWindow->window(), timeStamp, iosIntegration->touchDevice(), m_activeTouches.values());
    }
}
469
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    // UIKit generates [Began -> Moved -> Ended] event sequences for
    // each touch point. Internally we keep a hashmap of active UITouch
    // points to QWindowSystemInterface::TouchPoints, and assigns each TouchPoint
    // an id for use by Qt.
    // NOTE: the braces below intentionally span the tabletevent preprocessor
    // conditionals, so the else-branch only exists when pencil support is in.
    for (UITouch *touch in touches) {
#if QT_CONFIG(tabletevent)
        if (touch.type == UITouchTypePencil) {
            // Only a single pencil touch is tracked at a time.
            if (Q_UNLIKELY(m_activePencilTouch)) {
                qWarning("ignoring additional Pencil while first is still active");
                continue;
            }
            m_activePencilTouch = touch;
        } else
        {
            Q_ASSERT(!m_activeTouches.contains(touch.hash));
#endif
            // Use window-independent touch identifiers, so that
            // multi-touch works across windows.
            static quint16 nextTouchId = 0;
            m_activeTouches[touch.hash].id = nextTouchId++;
#if QT_CONFIG(tabletevent)
        }
#endif
    }

    // Activate the top-level window on the first touch, unless the window
    // opts out of auto-activation (or is already the focus window).
    if (self.platformWindow->shouldAutoActivateWindow() && m_activeTouches.size() == 1) {
        QPlatformWindow *topLevel = self.platformWindow;
        while (QPlatformWindow *p = topLevel->parent())
            topLevel = p;
        if (topLevel->window() != QGuiApplication::focusWindow())
            topLevel->requestActivateWindow();
    }

    [self handleTouches:touches withEvent:event withState:QEventPoint::State::Pressed];
}
507
// Forwards move updates of already-tracked touch points.
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    [self handleTouches:touches withEvent:event withState:QEventPoint::State::Updated];
}
512
// Delivers the release, then drops the ended touches from the active set.
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    [self handleTouches:touches withEvent:event withState:QEventPoint::State::Released];

    // Remove ended touch points from the active set:
#ifndef Q_OS_TVOS
    for (UITouch *touch in touches) {
#if QT_CONFIG(tabletevent)
        if (touch.type == UITouchTypePencil) {
            m_activePencilTouch = nil;
        } else
#endif
        {
            m_activeTouches.remove(touch.hash);
        }
    }
#else
    // tvOS only supports single touch
    m_activeTouches.clear();
#endif
}
534
// Clears all active touch state and sends a touch-cancel event to Qt.
- (void)touchesCancelled:(NSSet *)touches withEvent:(UIEvent *)event
{
    // Nothing to cancel if we aren't tracking any touches.
    if (m_activeTouches.isEmpty() && !m_activePencilTouch)
        return;

    // When four-finger swiping, we get a touchesCancelled callback
    // which includes all four touch points. The swipe gesture is
    // then active until all four touches have been released, and
    // we start getting touchesBegan events again.

    // When five-finger pinching, we also get a touchesCancelled
    // callback with all five touch points, but the pinch gesture
    // ends when the second to last finger is released from the
    // screen. The last finger will not emit any more touch
    // events, _but_, will contribute to starting another pinch
    // gesture. That second pinch gesture will _not_ trigger a
    // touchesCancelled event when starting, but as each finger
    // is released, and we may get touchesMoved events for the
    // remaining fingers. [event allTouches] also contains one
    // less touch point than it should, so this behavior is
    // likely a bug in the iOS system gesture recognizer, but we
    // have to take it into account when maintaining the Qt state.
    // We do this by assuming that there are no cases where a
    // sub-set of the active touch events are intentionally cancelled.

    NSInteger count = static_cast<NSInteger>([touches count]);
    if (count != 0 && count != m_activeTouches.count() && !m_activePencilTouch)
        qWarning("Subset of active touches cancelled by UIKit");

    m_activeTouches.clear();
    m_activePencilTouch = nil;

    QIOSIntegration *iosIntegration = static_cast<QIOSIntegration *>(QGuiApplicationPrivate::platformIntegration());

    // Send the touch event asynchronously, as the application might spin a recursive
    // event loop in response to the touch event (a dialog e.g.), which will deadlock
    // the UIKit event delivery system (QTBUG-98651).
    QWindowSystemInterface::handleTouchCancelEvent<QWindowSystemInterface::AsynchronousDelivery>(
        self.platformWindow->window(), getTimeStamp(event), iosIntegration->touchDevice());
}
575
// Translates indirect-pointer hover gestures (trackpad/mouse cursor moving
// over the view) into Qt enter/leave/mouse-move events.
- (void)handleMouseHover:(UIHoverGestureRecognizer *)recognizer
{
    if (!self.platformWindow)
        return;

    auto *window = self.platformWindow->window();
    auto localPosition = QPointF::fromCGPoint([recognizer locationInView:self]);
    auto globalPosition = self.platformWindow->mapToGlobalF(localPosition);

    switch (recognizer.state) {
    case UIGestureRecognizerStateBegan:
        qCDebug(lcQpaMouse) << "🖱️ enter" << window << "local =" << localPosition;
        QWindowSystemInterface::handleEnterEvent(window, localPosition, globalPosition);
        break;
    case UIGestureRecognizerStateEnded:
        qCDebug(lcQpaMouse) << "🖱️ leave" << window;
        QWindowSystemInterface::handleLeaveEvent(window);
        break;
    case UIGestureRecognizerStateChanged:
        // Hover with no button pressed maps to a plain mouse move.
        qCDebug(lcQpaMouse) << "🖱️ move" << "local =" << localPosition << "global =" << globalPosition;
        QWindowSystemInterface::handleMouseEvent(window, localPosition, globalPosition,
            Qt::NoButton, Qt::NoButton, QEvent::MouseMove);
        break;
    default:
        qCWarning(lcQpaMouse) << "Unknown hover state for" << recognizer;
    }
}
603
/*
    Computes the normalized axial pressure (0.0–1.0) for a touch point.

    Note that this is not the same as the perpendicular force,
    which would incorporate the altitudeAngle. Qt does not have
    an API for this pressure.
*/
- (qreal)pressureForTouch:(UITouch*)touch
{
    // Messaging nil would also return 0, but be explicit about it.
    if (!touch)
        return 0;

    if (touch.maximumPossibleForce) {
        // Note: iOS will deliver touchesBegan with a touch force of 0, which
        // we will reflect/propagate as a 0 pressure, but there is no clear
        // alternative, as we don't want to wait for a touchedMoved before
        // sending a touch press event to Qt, just to have a valid pressure.
        return touch.force / touch.maximumPossibleForce;
    } else {
        // We don't claim that our touch device supports QPointingDevice::Capability::Pressure,
        // but fill in a meaningful value in case clients use it anyway. We match the behavior
        // from above, not sending pressure for touchesBegan.
        return (touch.phase == UITouchPhaseMoved || touch.phase == UITouchPhaseStationary) ? 1.0 : 0.0;
    }
}
629
630// -------------------------------------------------------------------------
631
632#if QT_CONFIG(tabletevent)
// Translates Apple Pencil hover gestures into Qt tablet proximity/move
// events, delegating the actual event synthesis to handleTabletEvent.
- (void)handlePencilHover:(UIHoverGestureRecognizer *)recognizer
{
    if (!self.platformWindow)
        return;

    // Just before lifting the pencil we might receive a hover event,
    // but the event's position wrongly reflects the position where the
    // pencil was first pressed instead of the current position, and
    // semantically it doesn't make sense to send hover before release.
    if (m_activePencilTouch)
        return;

    QEvent::Type eventType = [&]{
        switch (recognizer.state) {
        case UIGestureRecognizerStateBegan: return QEvent::TabletEnterProximity;
        case UIGestureRecognizerStateEnded: return QEvent::TabletLeaveProximity;
        case UIGestureRecognizerStateChanged: return QEvent::TabletMove;
        default: return QEvent::None;
        }
    }();

    // QEvent::None is 0, so this catches all unmapped recognizer states.
    if (!eventType) {
        qCWarning(lcQpaTablet) << "Unknown hover state for" << recognizer;
        return;
    }

    // No UIEvent is available here, so derive the timestamp from uptime.
    [self handleTabletEvent:eventType withSender:recognizer andTimestamp:getTimeStamp(nil)];
}
661
// Synthesizes a Qt tablet event of the given \a eventType from \a sender
// (a UITouch or a UIHoverGestureRecognizer — see TabletEventSender),
// converting UIKit's azimuth/altitude representation to Qt's x/y tilt.
- (void)handleTabletEvent:(QEvent::Type)eventType withSender:(id<TabletEventSender>)sender andTimestamp:(ulong)timeStamp
{
    QWindow *window = self.platformWindow->window();

    auto localViewPosition = QPointF::fromCGPoint([sender locationInView:self]);
    auto globalScreenPosition = self.platformWindow->mapToGlobalF(localViewPosition);

    // Azimuth unit vector: +x to the right, +y going downwards
    CGVector azimuth = [sender azimuthUnitVectorInView:self];
    // Altitude angle given in radians. π/2 is with the pen perpendicular
    // to the iPad. Smaller values mean more tilted, but never negative.
    CGFloat altitudeAngleRadians = sender.altitudeAngle;
    // Convert to degrees with zero being perpendicular
    qreal altitudeAngleDegrees = 90 - qRadiansToDegrees(altitudeAngleRadians);
    // Project the tilt onto the x/y axes via the azimuth direction, and
    // clamp to Qt's documented tilt range of [-60, 60] degrees.
    qreal xTilt = qBound(-60.0, altitudeAngleDegrees * azimuth.dx, 60.0);
    qreal yTilt = qBound(-60.0, altitudeAngleDegrees * azimuth.dy, 60.0);

    // zOffset is only meaningful while hovering; messaging a nil result of
    // qt_objc_cast yields 0 when the sender is a UITouch instead.
    auto zOffset = qt_objc_cast<UIHoverGestureRecognizer*>(sender).zOffset;
    auto pressure = [self pressureForTouch:qt_objc_cast<UITouch*>(sender)];

    // Apple Pencil models that don't support barrel-roll angle data will return 0
    auto rotation = -qRadiansToDegrees(sender.rollAngle);

    // Apple Pencils do not have the concept of tangential pressure, which
    // is commonly found on Wacom tablet pens with a dedicated finger wheel.
    static const int tangentialPressure = 0;

    // The semantics of buttons in a QTabletEvent is that the left button is
    // the tip, so we report it when the pencil is touching the screen.
    Qt::MouseButtons buttons = qt_objc_cast<UITouch*>(sender) &&
        eventType != QEvent::TabletRelease ? Qt::LeftButton : Qt::NoButton;
    static const auto modifiers = Qt::NoModifier;

    QPointingDevice *pencilDevice = QIOSIntegration::instance()->pencilDevice();

    qCDebug(lcQpaTablet) << "✏️" << eventType
        << "local =" << localViewPosition << "global =" << globalScreenPosition
        << "z =" << zOffset << "pressure =" << pressure
        << "rotation =" << rotation << "tilt =" << QPointF(xTilt, yTilt);

    switch (eventType) {
    case QEvent::TabletEnterProximity:
    case QEvent::TabletLeaveProximity: {
        const bool inProximity = eventType == QEvent::TabletEnterProximity;
        QWindowSystemInterface::handleTabletEnterLeaveProximityEvent(window, timeStamp,
            pencilDevice, inProximity, localViewPosition, globalScreenPosition,
            buttons, xTilt, yTilt, tangentialPressure, rotation, zOffset, modifiers);
        break;
    }
    case QEvent::TabletPress:
    case QEvent::TabletMove:
    case QEvent::TabletRelease: {
        QWindowSystemInterface::handleTabletEvent(window, timeStamp,
            pencilDevice, localViewPosition, globalScreenPosition,
            buttons, pressure, xTilt, yTilt, tangentialPressure, rotation, zOffset, modifiers);
        break;
    }
    default:
        qCWarning(lcQpaTablet) << "Unknown tablet event type" << eventType << "for" << sender;
    }
}
723#endif // CONFIG(tabletevent)
724
725// -------------------------------------------------------------------------
726
// Maps a UIPress to a Qt key code. Dedicated press types (remote/arrow
// buttons) are handled first; everything else falls through to keyboard
// mapping via key code, then the unmodified characters, then the full
// character string (which may also produce \a text output).
- (int)mapPressTypeToKey:(UIPress*)press withModifiers:(Qt::KeyboardModifiers)qtModifiers text:(QString &)text
{
    // Deliberately no default case: unmatched press types fall through
    // to the keyboard mapping below.
    switch (press.type) {
    case UIPressTypeUpArrow: return Qt::Key_Up;
    case UIPressTypeDownArrow: return Qt::Key_Down;
    case UIPressTypeLeftArrow: return Qt::Key_Left;
    case UIPressTypeRightArrow: return Qt::Key_Right;
    case UIPressTypeSelect: return Qt::Key_Select;
    case UIPressTypeMenu: return Qt::Key_Menu;
    case UIPressTypePlayPause: return Qt::Key_MediaTogglePlayPause;
    }
    Qt::Key key = QAppleKeyMapper::fromUIKitKey(press.key.keyCode);
    if (key != Qt::Key_unknown)
        return key;
    NSString *charactersIgnoringModifiers = press.key.charactersIgnoringModifiers;
    key = QAppleKeyMapper::fromUIKitKey(charactersIgnoringModifiers);
    if (key != Qt::Key_unknown)
        return key;
    key = QAppleKeyMapper::fromNSString(qtModifiers, press.key.characters,
                                        charactersIgnoringModifiers, text);
    if (key != Qt::Key_unknown)
        return key;
    return Qt::Key_unknown;
}
751
// Arrow keys count as "control" keys: they are still delivered as key
// events even while an input-method (virtual keyboard) session is active.
- (bool)isControlKey:(Qt::Key)key
{
    return key == Qt::Key_Up
        || key == Qt::Key_Down
        || key == Qt::Key_Left
        || key == Qt::Key_Right;
}
766
// Maps a set of UIPresses to Qt key events of the given \a type and
// delivers them to the focus window. Returns true if at least one press
// produced a key event that Qt handled.
- (bool)handlePresses:(NSSet<UIPress *> *)presses eventType:(QEvent::Type)type
{
    // Presses on Menu button will generate a Menu key event. By default, not handling
    // this event will cause the application to return to Headboard (tvOS launcher).
    // When handling the event (for example, as a back button), both press and
    // release events must be handled accordingly.
    if (!qApp->focusWindow())
        return false;

    bool eventHandled = false;
    const bool imEnabled = QIOSInputContext::instance()->inputMethodAccepted();

    for (UIPress* press in presses) {
        Qt::KeyboardModifiers qtModifiers = QAppleKeyMapper::fromUIKitModifiers(press.key.modifierFlags);
        QString text;
        int key = [self mapPressTypeToKey:press withModifiers:qtModifiers text:text];
        if (key == Qt::Key_unknown)
            continue;
        // While text input is active, non-control keys are handled by the
        // text responder instead, so skip them here.
        if (imEnabled && ![self isControlKey:Qt::Key(key)])
            continue;

        bool keyHandled = QWindowSystemInterface::handleKeyEvent(
            self.platformWindow->window(), type, key, qtModifiers, text);
        eventHandled = eventHandled || keyHandled;
    }

    return eventHandled;
}
795
- (void)pressesBegan:(NSSet<UIPress *> *)presses withEvent:(UIPressesEvent *)event
{
    // Only propagate to UIKit when Qt did not consume the presses.
    const bool handledByQt = [self handlePresses:presses eventType:QEvent::KeyPress];
    if (!handledByQt)
        [super pressesBegan:presses withEvent:event];
}
801
- (void)pressesChanged:(NSSet<UIPress *> *)presses withEvent:(UIPressesEvent *)event
{
    // Only propagate to UIKit when Qt did not consume the presses.
    // A second, unconditional call to super here was a bug: it forwarded
    // presses to UIKit even when Qt had handled them, and twice when not —
    // compare pressesBegan:, which calls super only for unhandled presses.
    if (![self handlePresses:presses eventType:QEvent::KeyPress])
        [super pressesChanged:presses withEvent:event];
}
808
- (void)pressesEnded:(NSSet<UIPress *> *)presses withEvent:(UIPressesEvent *)event
{
    // Only propagate to UIKit when Qt did not consume the release.
    // A second, unconditional call to super here was a bug: it forwarded
    // presses to UIKit even when Qt had handled them, and twice when not —
    // compare pressesBegan:, which calls super only for unhandled presses.
    if (![self handlePresses:presses eventType:QEvent::KeyRelease])
        [super pressesEnded:presses withEvent:event];
}
815
// Controls which edit-menu (copy/paste/etc.) actions UIKit offers.
- (BOOL)canPerformAction:(SEL)action withSender:(id)sender
{
#if !defined(Q_OS_TVOS) && !defined(Q_OS_VISIONOS)
    // Check first if QIOSMenu should handle the action before continuing up the responder chain
    return [QIOSMenu::menuActionTarget() targetForAction:action withSender:sender] != 0;
#else
    // No QIOSMenu on these platforms; never offer the actions.
    Q_UNUSED(action);
    Q_UNUSED(sender);
    return false;
#endif
}
827
// Forwards otherwise-unhandled messages (e.g. edit-menu actions accepted in
// canPerformAction:) to the Qt menu target, on platforms that have QIOSMenu.
- (id)forwardingTargetForSelector:(SEL)selector
{
    Q_UNUSED(selector);
#if !defined(Q_OS_TVOS) && !defined(Q_OS_VISIONOS)
    return QIOSMenu::menuActionTarget();
#else
    return nil;
#endif
}
837
- (void)addInteraction:(id<UIInteraction>)interaction
{
    // Filter out UIKit's UITextInteraction, presumably so it doesn't compete
    // with Qt's own text input handling — NOTE(review): rationale inferred,
    // confirm. The class is matched by name rather than by [UITextInteraction
    // class], keeping this code independent of the symbol itself.
    const BOOL isTextInteraction =
        [NSStringFromClass(interaction.class) isEqualToString:@"UITextInteraction"];
    if (isTextInteraction)
        return;

    [super addInteraction:interaction];
}
845
- (UIEditingInteractionConfiguration)editingInteractionConfiguration
{
    // We only want the three-finger-tap edit menu to be available when there's
    // actually something to edit. Otherwise the OS will cause a slight delay
    // before delivering the release of three finger touch input. Note that we
    // do not do any hit testing here to check that the focus object is the one
    // being tapped, as the behavior of native iOS apps is to trigger the menu
    // regardless of where the gesture is being made.
    if (QIOSInputContext::instance()->inputMethodAccepted())
        return UIEditingInteractionConfigurationDefault;

    return UIEditingInteractionConfigurationNone;
}
857
858#if QT_CONFIG(wheelevent)
// Synthesizes a QWheelEvent from a pan gesture. The recognizer reports an
// accumulated translation, so per-event deltas are derived by diffing against
// the translation seen at the previous callback (m_lastScrollDelta).
- (void)handleScroll:(UIPanGestureRecognizer *)recognizer
{
    if (!self.platformWindow->window())
        return;

    if (!self.canBecomeFirstResponder)
        return;

    const CGPoint accumulated = [recognizer translationInView:self];
    const CGFloat dx = accumulated.x - m_lastScrollDelta.x;
    const CGFloat dy = accumulated.y - m_lastScrollDelta.y;

    // From QNSView implementation:
    // "Since deviceDelta is delivered as pixels rather than degrees, we need to
    // convert from pixels to degrees in a sensible manner.
    // It looks like 1/4 degrees per pixel behaves most native.
    // (NB: Qt expects the unit for delta to be 8 per degree):"
    const int pixelsToDegrees = 2; // 8 * 1/4

    QPoint angleDelta;
    angleDelta.setX(dx * pixelsToDegrees);
    angleDelta.setY(dy * pixelsToDegrees);

    QPoint pixelDelta;
    pixelDelta.setX(dx);
    pixelDelta.setY(dy);

    const Qt::KeyboardModifiers qtModifiers =
        QAppleKeyMapper::fromUIKitModifiers(recognizer.modifierFlags);

    if (recognizer.state == UIGestureRecognizerStateBegan) {
        // locationInView: doesn't return the cursor position at the time of the wheel event,
        // but rather gives us the position with the deltas applied, so we need to save the
        // cursor position at the beginning of the gesture
        m_lastScrollCursorPos = [recognizer locationInView:self];
    }

    // Remember the accumulated translation for the next callback, and reset
    // it once the gesture finishes so the next gesture starts from zero.
    if (recognizer.state == UIGestureRecognizerStateEnded) {
        m_lastScrollDelta = CGPointZero;
    } else {
        m_lastScrollDelta.x = accumulated.x;
        m_lastScrollDelta.y = accumulated.y;
    }

    const QPoint localPos = QPointF::fromCGPoint(m_lastScrollCursorPos).toPoint();
    const QPoint globalPos = self.platformWindow->mapToGlobal(localPos);

    qCInfo(lcQpaInputEvents).nospace() << "wheel event" << " at " << localPos
        << " pixelDelta=" << pixelDelta << " angleDelta=" << angleDelta;

    QWindowSystemInterface::handleWheelEvent(self.platformWindow->window(),
        getTimeStamp(nil), localPos, globalPos, pixelDelta, angleDelta, qtModifiers);
}
910
911#endif // QT_CONFIG(wheelevent)
912
913@end
914
@implementation UIView (QtHelpers)

// Returns the QWindow backing this view, or nil when the view is not a
// Qt-created QUIView (or has no platform window).
- (QWindow *)qwindow
{
    if (![self isKindOfClass:[QUIView class]])
        return nil;

    QT_PREPEND_NAMESPACE(QIOSWindow) *platformWindow = static_cast<QUIView *>(self).platformWindow;
    return platformWindow ? platformWindow->window() : nil;
}

// Walks up the responder chain and returns the first view controller
// encountered, or nil if the view is not managed by one.
- (UIViewController *)viewController
{
    for (id responder = [self nextResponder]; responder; responder = [responder nextResponder]) {
        if ([responder isKindOfClass:UIViewController.class])
            return responder;
    }
    return nil;
}

// Like -viewController, but typed: returns the controller only when it is a
// QIOSViewController, nil otherwise.
- (QIOSViewController*)qtViewController
{
    UIViewController *viewController = self.viewController;
    if (![viewController isKindOfClass:QIOSViewController.class])
        return nil;

    return static_cast<QIOSViewController *>(viewController);
}

@end
946
947#if QT_CONFIG(metal)
@implementation QUIMetalView

// Back this view with a CAMetalLayer so it can serve as a Metal
// rendering target.
+ (Class)layerClass
{
    return [CAMetalLayer class];
}

@end
956#endif
957
958#if QT_CONFIG(accessibility)
959// Include category as an alternative to using -ObjC (Apple QA1490)
960#include "quiview_accessibility.mm"
961#endif
Q_FORWARD_DECLARE_OBJC_CLASS(NSString)
Q_LOGGING_CATEGORY(lcEventDispatcher, "qt.eventdispatcher")
Q_FORWARD_DECLARE_OBJC_CLASS(UIViewController)
#define qImDebug
Definition qiosglobal.h:21