Qt
Internal/Contributor docs for the Qt SDK. Note: These are NOT official API docs; those are found at https://doc.qt.io/
Loading...
Searching...
No Matches
quiview.mm
Go to the documentation of this file.
1// Copyright (C) 2021 The Qt Company Ltd.
2// SPDX-License-Identifier: LicenseRef-Qt-Commercial OR LGPL-3.0-only OR GPL-2.0-only OR GPL-3.0-only
3// Qt-Security score:significant reason:default
4
5#include "quiview.h"
6
7#include "qiosglobal.h"
11#include "qiosscreen.h"
12#include "qioswindow.h"
14#include "quiwindow.h"
15#ifndef Q_OS_TVOS
16#include "qiosmenu.h"
17#endif
18
19#include <QtCore/qmath.h>
20#include <QtGui/qpointingdevice.h>
21#include <QtGui/private/qguiapplication_p.h>
22#include <QtGui/private/qwindow_p.h>
23#include <QtGui/private/qapplekeymapper_p.h>
24#include <QtGui/private/qpointingdevice_p.h>
25#include <qpa/qwindowsysteminterface_p.h>
26
// Logging categories for Apple Pencil (tablet) and generic input events.
Q_LOGGING_CATEGORY(lcQpaTablet, "qt.qpa.input.tablet")
Q_LOGGING_CATEGORY(lcQpaInputEvents, "qt.qpa.input.events")
29
namespace {
// Returns the event's timestamp converted to milliseconds, the unit
// expected by QWindowSystemInterface.
inline ulong getTimeStamp(UIEvent *event)
{
#if TARGET_OS_SIMULATOR == 1
    // We currently build Qt for simulator using X86_64, even on ARM based macs.
    // This results in the simulator running on ARM, while the app is running
    // inside it using Rosetta. And with this combination, the event.timestamp, which is
    // documented to be in seconds, looks to be something else, and is not progressing
    // in sync with a normal clock.
    // Sending out mouse events with a timestamp that doesn't follow normal clock time
    // will cause problems for mouse-, and pointer handlers that uses them to e.g calculate
    // the time between a press and release, and to decide if the user is performing a tap
    // or a drag.
    // For that reason, we choose to ignore UIEvent.timestamp under the mentioned condition, and
    // instead rely on NSProcessInfo. Note that if we force the whole simulator to use Rosetta
    // (and not only the Qt app), the timestamps will progress normally.
#if defined(Q_PROCESSOR_ARM)
    #warning The timestamp work-around for x86_64 can (probably) be removed when building for ARM
#endif
    return ulong(NSProcessInfo.processInfo.systemUptime * 1000);
#endif

    // UIEvent.timestamp is in seconds since system boot; convert to ms.
    return ulong(event.timestamp * 1000);
}
}
55
@implementation QUIView {
    // Active touch points, keyed by the corresponding UITouch's hash.
    QHash<NSUInteger, QWindowSystemInterface::TouchPoint> m_activeTouches;
    // The single Apple Pencil touch being tracked, if any (at most one).
    UITouch *m_activePencilTouch;
    // Accessibility elements exposed for this view (see quiview_accessibility.mm).
    NSMutableArray<UIAccessibilityElement *> *m_accessibleElements;
    // Recognizes scroll gestures from trackpads/mice (touch types excluded).
    UIPanGestureRecognizer *m_scrollGestureRecognizer;
    // Cursor position captured when a scroll gesture began.
    CGPoint m_lastScrollCursorPos;
    // Accumulated pan translation seen at the previous scroll callback.
    CGPoint m_lastScrollDelta;
#if QT_CONFIG(tabletevent)
    // Tracks Apple Pencil hover; forwarded as tablet events.
    UIHoverGestureRecognizer *m_hoverGestureRecognizer;
#endif
}
67
// Backing layer: CAEAGLLayer when OpenGL is enabled, otherwise UIView's
// default. (Metal rendering uses the QUIMetalView subclass instead.)
+ (Class)layerClass
{
#if QT_CONFIG(opengl)
    return [CAEAGLLayer class];
#endif
    return [super layerClass];
}
75
// Designated Qt initializer: creates the view sized to the platform window's
// geometry, and sets up gesture recognizers and the backing layer.
- (instancetype)initWithQIOSWindow:(QT_PREPEND_NAMESPACE(QIOSWindow) *)window
{
    if (self = [self initWithFrame:window->geometry().toCGRect()]) {
        self.platformWindow = window;

        // In a Qt application visibility is driven by QWindow, so start
        // hidden (sendUpdatedExposeEvent asserts hidden == !isVisible()).
        if (isQtApplication())
            self.hidden = YES;

        m_accessibleElements = [[NSMutableArray<UIAccessibilityElement *> alloc] init];

#ifndef Q_OS_TVOS
        self.multipleTouchEnabled = YES;
#endif

        m_scrollGestureRecognizer = [[UIPanGestureRecognizer alloc]
            initWithTarget:self
            action:@selector(handleScroll:)];
        // The gesture recognizer should only care about scroll gestures (for now)
        // Set allowedTouchTypes to empty array here to not interfere with touch events
        // handled by the UIView. Scroll gestures, even those coming from touch devices,
        // such as trackpads will still be received as they are not touch events
        m_scrollGestureRecognizer.allowedTouchTypes = [NSArray array];
        m_scrollGestureRecognizer.allowedScrollTypesMask = UIScrollTypeMaskAll;
        m_scrollGestureRecognizer.maximumNumberOfTouches = 0;
        m_lastScrollDelta = CGPointZero;
        m_lastScrollCursorPos = CGPointZero;
        [self addGestureRecognizer:m_scrollGestureRecognizer];

#if QT_CONFIG(tabletevent)
        // Track Apple Pencil hovering; forwarded as tablet events (handleHover:).
        m_hoverGestureRecognizer = [[UIHoverGestureRecognizer alloc]
            initWithTarget:self
            action:@selector(handleHover:)];
        [self addGestureRecognizer:m_hoverGestureRecognizer];
#endif

        // Set up layer
        if ([self.layer isKindOfClass:CAMetalLayer.class]) {
            QWindow *window = self.platformWindow->window();
            // Propagate the window's requested color space to the Metal layer.
            if (QColorSpace colorSpace = window->format().colorSpace(); colorSpace.isValid()) {
                QCFType<CFDataRef> iccData = colorSpace.iccProfile().toCFData();
                QCFType<CGColorSpaceRef> cgColorSpace = CGColorSpaceCreateWithICCData(iccData);
                CAMetalLayer *metalLayer = static_cast<CAMetalLayer *>(self.layer);
                metalLayer.colorspace = cgColorSpace;
                qCDebug(lcQpaWindow) << "Set" << self << "color space to" << metalLayer.colorspace;
            }
        }
#if QT_CONFIG(opengl)
        else if ([self.layer isKindOfClass:[CAEAGLLayer class]]) {
            CAEAGLLayer *eaglLayer = static_cast<CAEAGLLayer *>(self.layer);
            eaglLayer.opaque = TRUE;
            eaglLayer.drawableProperties = @{
                kEAGLDrawablePropertyRetainedBacking: @(YES),
                kEAGLDrawablePropertyColorFormat: kEAGLColorFormatRGBA8
            };
        }
#endif

#if defined(Q_OS_VISIONOS)
        // Although the "Drawing sharp layer-based content in visionOS" docs
        // claim that by default a CALayer rasterizes at a 2x scale this does
        // not seem to be the case in practice. So we explicitly set the view's
        // scale factor based on the screen, where we hard-code it to 2.0.
        self.contentScaleFactor = self.platformWindow->screen()->devicePixelRatio();
#endif
    }

    return self;
}
144
// This file is built without ARC (note the explicit [super dealloc]), so
// every object alloc'ed in -initWithQIOSWindow: must be released here.
- (void)dealloc
{
    [m_accessibleElements release];
    [m_scrollGestureRecognizer release];
#if QT_CONFIG(tabletevent)
    // The hover gesture recognizer is alloc'ed under the same condition in
    // init; not releasing it here leaked it on view destruction.
    [m_hoverGestureRecognizer release];
#endif

    [super dealloc];
}
152
// Augments UIView's default description with the platform window's debug
// output, for more informative logging.
- (NSString *)description
{
    NSMutableString *description = [NSMutableString stringWithString:[super description]];

#ifndef QT_NO_DEBUG_STREAM
    QString platformWindowDescription;
    QDebug debug(&platformWindowDescription);
    debug.nospace() << "; " << self.platformWindow << ">";
    // Replace the trailing '>' of the super description with "; <window>>";
    // use the composed-character range to stay on a character boundary.
    NSRange lastCharacter = [description rangeOfComposedCharacterSequenceAtIndex:description.length - 1];
    [description replaceCharactersInRange:lastCharacter withString:platformWindowDescription.toNSString()];
#endif

    return description;
}
167
#if !defined(Q_OS_VISIONOS)
- (void)willMoveToWindow:(UIWindow *)newWindow
{
    // UIKit will normally set the scale factor of a view to match the corresponding
    // screen scale factor, but views backed by CAEAGLLayers need to do this manually.
    self.contentScaleFactor = newWindow && newWindow.screen ?
        newWindow.screen.scale : [[UIScreen mainScreen] scale];

    // FIXME: Allow the scale factor to be customized through QSurfaceFormat.
}
#endif
179
// Once this view hosts a child QUIView, enable clipping so the child
// doesn't draw outside our bounds.
- (void)didAddSubview:(UIView *)subview
{
    BOOL addedQtChild = [subview isKindOfClass:[QUIView class]];
    if (addedQtChild)
        self.clipsToBounds = YES;
}
185
// Disable clipping again once the last child QUIView is removed.
- (void)willRemoveSubview:(UIView *)subview
{
    // If any other QUIView child remains, keep clipping enabled.
    for (UIView *sibling in self.subviews) {
        if (sibling == subview)
            continue;
        if ([sibling isKindOfClass:[QUIView class]])
            return;
    }

    self.clipsToBounds = NO;
}
195
// Propagate invalidation to the backing layer as well.
- (void)setNeedsDisplay
{
    [super setNeedsDisplay];

    // We didn't implement drawRect: so we have to manually
    // mark the layer as needing display.
    [self.layer setNeedsDisplay];
}
204
- (void)layoutSubviews
{
    // This method is the de facto way to know that view has been resized,
    // or otherwise needs invalidation of its buffers. Note though that we
    // do not get this callback when the view just changes its position, so
    // the position of our QWindow (and platform window) will only get updated
    // when the size is also changed.

    if (!CGAffineTransformIsIdentity(self.transform))
        qWarning() << self << "has a transform set. This is not supported.";

    QWindow *window = self.platformWindow->window();
    // Compare against the geometry QWindow currently believes it has, to
    // detect whether this callback represents an actual resize.
    QRect lastReportedGeometry = qt_window_private(window)->geometry;
    QRect currentGeometry = QRectF::fromCGRect(self.frame).toRect();
    qCDebug(lcQpaWindow) << self.platformWindow << "new geometry is" << currentGeometry;
    QWindowSystemInterface::handleGeometryChange(window, currentGeometry);

    if (currentGeometry.size() != lastReportedGeometry.size()) {
        // Trigger expose event on resize
        [self setNeedsDisplay];

        // A new size means we also need to resize the FBO's corresponding buffers,
        // but we defer that to when the application calls makeCurrent.
    }
}
230
// CALayerDelegate callback: the backing layer needs its content redrawn.
- (void)displayLayer:(CALayer *)layer
{
    Q_UNUSED(layer); // only referenced by the assert, which is compiled out in release
    Q_ASSERT(layer == self.layer);

    if (!self.platformWindow)
        return;

    [self sendUpdatedExposeEvent];
}
241
// Reports the window's current expose state to Qt: the full layer bounds
// when exposed, an empty region when not.
- (void)sendUpdatedExposeEvent
{
    QRegion region;

    if (self.platformWindow->isExposed()) {
        QSize bounds = QRectF::fromCGRect(self.layer.bounds).toRect().size();

        // Sanity-check that UIKit and Qt agree on geometry and visibility.
        Q_ASSERT(self.platformWindow->geometry().size() == bounds);
        Q_ASSERT(self.hidden == !self.platformWindow->window()->isVisible());

        region = QRect(QPoint(), bounds);
    }

    qCDebug(lcQpaWindow) << self.platformWindow << region << "isExposed" << self.platformWindow->isExposed();
    QWindowSystemInterface::handleExposeEvent(self.platformWindow->window(), region);
}
258
// Forward safe-area changes so QWindow::safeAreaMargins() stays up to date.
- (void)safeAreaInsetsDidChange
{
    QWindowSystemInterface::handleSafeAreaMarginsChanged(self.platformWindow->window());
}
263
264// -------------------------------------------------------------------------
265
// A window that refuses focus, or is transparent for input, must never
// become first responder (i.e. the active Qt window).
- (BOOL)canBecomeFirstResponder
{
    const Qt::WindowFlags windowFlags = self.platformWindow->window()->flags();
    if (windowFlags.testFlag(Qt::WindowDoesNotAcceptFocus))
        return NO;
    if (windowFlags.testFlag(Qt::WindowTransparentForInput))
        return NO;
    return YES;
}
271
// Becomes first responder and, if needed, reports the window as the
// focus window to Qt.
- (BOOL)becomeFirstResponder
{
    {
        // Scope for the duration of becoming first responder only, as the window
        // activation event may trigger new responders, which we don't want to be
        // blocked by this guard.
        FirstResponderCandidate firstResponderCandidate(self);

        qImDebug() << "self:" << self << "first:" << [UIResponder qt_currentFirstResponder];

        if (![super becomeFirstResponder]) {
            qImDebug() << self << "was not allowed to become first responder";
            return NO;
        }

        qImDebug() << self << "became first responder";
    }

    // Only report focus change if the window isn't already the focus window.
    if (qGuiApp->focusWindow() != self.platformWindow->window())
        QWindowSystemInterface::handleFocusWindowChanged(self.platformWindow->window(), Qt::ActiveWindowFocusReason);
    else
        qImDebug() << self.platformWindow->window() << "already active, not sending window activation";

    return YES;
}
297
// Decides whether resigning first responder in favor of \a responder
// should deactivate the Qt focus window.
- (BOOL)responderShouldTriggerWindowDeactivation:(UIResponder *)responder
{
    // Another Qt window taking over first responder is not a deactivation.
    if ([responder isKindOfClass:[QUIView class]])
        return NO;

    // Neither is a text responder that handles input on behalf of a Qt
    // window; walk its responder chain looking for a QUIView.
    if ([responder isKindOfClass:[QIOSTextResponder class]]) {
        for (UIResponder *next = [responder nextResponder]; next; next = [next nextResponder]) {
            if ([next isKindOfClass:[QUIView class]])
                return NO;
        }
    }

    return YES;
}
316
// Resigns first responder, deactivating the Qt focus window unless the
// new responder still represents a Qt window.
- (BOOL)resignFirstResponder
{
    qImDebug() << "self:" << self << "first:" << [UIResponder qt_currentFirstResponder];

    if (![super resignFirstResponder])
        return NO;

    qImDebug() << self << "resigned first responder";

    // qGuiApp may already be gone during application shutdown.
    if (qGuiApp) {
        UIResponder *newResponder = FirstResponderCandidate::currentCandidate();
        if ([self responderShouldTriggerWindowDeactivation:newResponder])
            QWindowSystemInterface::handleFocusWindowChanged(nullptr, Qt::ActiveWindowFocusReason);
    }

    return YES;
}
334
// Whether this view represents the active Qt window. Normally that is
// determined exclusively by being first responder, but since a separate
// QIOSTextInputResponder handles text input on our behalf, a text input
// responder chained directly in front of us also counts.
- (BOOL)isActiveWindow
{
    if (self.isFirstResponder)
        return YES;

    UIResponder *currentResponder = [UIResponder qt_currentFirstResponder];
    if (![currentResponder isKindOfClass:[QIOSTextInputResponder class]])
        return NO;

    return currentResponder.nextResponder == self ? YES : NO;
}
351
352// -------------------------------------------------------------------------
353
// Keeps the Qt touch device's Pressure capability in sync with the
// force-touch capability of the current trait collection.
- (void)traitCollectionDidChange:(UITraitCollection *)previousTraitCollection
{
    [super traitCollectionDidChange: previousTraitCollection];

    QPointingDevice *touchDevice = QIOSIntegration::instance()->touchDevice();
    auto *devicePriv = QPointingDevicePrivate::get(touchDevice);

    auto capabilities = touchDevice->capabilities();
    capabilities.setFlag(QPointingDevice::Capability::Pressure,
        (self.traitCollection.forceTouchCapability == UIForceTouchCapabilityAvailable));
    devicePriv->setCapabilities(capabilities);
}
366
// Input-transparent windows never hit-test positive, so events fall
// through to whatever lies behind them.
- (BOOL)pointInside:(CGPoint)point withEvent:(UIEvent *)event
{
    const Qt::WindowFlags windowFlags = self.platformWindow->window()->flags();
    if (windowFlags.testFlag(Qt::WindowTransparentForInput))
        return NO;

    return [super pointInside:point withEvent:event];
}
373
#if QT_CONFIG(tabletevent)
// Converts one Apple Pencil sample into a QTabletEvent and delivers it.
// altitudeAngleRadian is pi/2 with the stylus perpendicular to the screen.
- (void)handlePencilEventForLocationInView:(CGPoint)locationInView withState:(QEventPoint::State)state withTimestamp:(ulong)timeStamp
    withForce:(CGFloat)force withMaximumPossibleForce:(CGFloat)maximumPossibleForce withZOffset:(CGFloat)zOffset
    withAzimuthUnitVector:(CGVector)azimuth withAltitudeAngleRadian:(CGFloat)altitudeAngleRadian
{
    QIOSIntegration *iosIntegration = QIOSIntegration::instance();

    // Preserve sub-pixel precision: mapToGlobal() takes integer points, so
    // add the fractional part back after mapping.
    QPointF localViewPosition = QPointF::fromCGPoint(locationInView);
    QPoint localViewPositionI = localViewPosition.toPoint();
    QPointF globalScreenPosition = self.platformWindow->mapToGlobal(localViewPositionI) +
        (localViewPosition - localViewPositionI);
    qreal pressure = 0;
    if (force != 0 && maximumPossibleForce != 0)
        pressure = force / maximumPossibleForce;
    // azimuth unit vector: +x to the right, +y going downwards
    // altitudeAngleRadian given in radians, pi / 2 is with the stylus perpendicular to the iPad, smaller values mean more tilted, but never negative.
    // Convert to degrees with zero being perpendicular.
    qreal altitudeAngle = 90 - qRadiansToDegrees(altitudeAngleRadian);
    // Project the tilt onto the x/y axes, clamped to Qt's +/-60 degree range.
    qreal xTilt = qBound(-60.0, altitudeAngle * azimuth.dx, 60.0);
    qreal yTilt = qBound(-60.0, altitudeAngle * azimuth.dy, 60.0);

    qCDebug(lcQpaTablet) << ":" << timeStamp << localViewPosition << pressure << state << "azimuth" << azimuth.dx << azimuth.dy
        << "altitude" << altitudeAngleRadian << "xTilt" << xTilt << "yTilt" << yTilt;
    QWindowSystemInterface::handleTabletEvent(self.platformWindow->window(), timeStamp,
        // device, local, global
        iosIntegration->pencilDevice(), localViewPosition, globalScreenPosition,
        // buttons
        state == QEventPoint::State::Released ? Qt::NoButton : Qt::LeftButton,
        // pressure, xTilt, yTilt, tangentialPressure, rotation, z, modifiers
        pressure, xTilt, yTilt, 0, 0, zOffset, Qt::NoModifier);
}
#endif
406
// Translates a set of UITouches into Qt touch (and Pencil/tablet) events.
// Called from the touchesBegan/Moved/Ended overrides with the matching
// QEventPoint state and a millisecond timestamp.
- (void)handleTouches:(NSSet *)touches withEvent:(UIEvent *)event withState:(QEventPoint::State)state withTimestamp:(ulong)timeStamp
{
    QIOSIntegration *iosIntegration = QIOSIntegration::instance();
    bool supportsPressure = QIOSIntegration::instance()->touchDevice()->capabilities() & QPointingDevice::Capability::Pressure;

#if QT_CONFIG(tabletevent)
    // An active Apple Pencil touch goes through the tablet event API
    // instead, including any coalesced touches for extra precision.
    if (m_activePencilTouch && [touches containsObject:m_activePencilTouch]) {
        NSArray<UITouch *> *cTouches = [event coalescedTouchesForTouch:m_activePencilTouch];
        for (UITouch *cTouch in cTouches) {
            [self handlePencilEventForLocationInView:[cTouch preciseLocationInView:self] withState:state withTimestamp:timeStamp
                withForce:cTouch.force withMaximumPossibleForce:cTouch.maximumPossibleForce withZOffset:0
                withAzimuthUnitVector:[cTouch azimuthUnitVectorInView:self]
                withAltitudeAngleRadian:cTouch.altitudeAngle];
        }
    }
#endif

    if (m_activeTouches.isEmpty())
        return;
    // Update each tracked touch point; points not present in this callback
    // are reported to Qt as stationary.
    for (auto it = m_activeTouches.begin(); it != m_activeTouches.end(); ++it) {
        auto hash = it.key();
        QWindowSystemInterface::TouchPoint &touchPoint = it.value();
        UITouch *uiTouch = nil;
        for (UITouch *touch in touches) {
            if (touch.hash == hash) {
                uiTouch = touch;
                break;
            }
        }
        if (!uiTouch) {
            touchPoint.state = QEventPoint::State::Stationary;
        } else {
            touchPoint.state = state;

            // Touch positions are expected to be in QScreen global coordinates, and
            // as we already have the QWindow positioned at the right place, we can
            // just map from the local view position to global coordinates.
            // tvOS: all touches start at the center of the screen and move from there.
            QPoint localViewPosition = QPointF::fromCGPoint([uiTouch locationInView:self]).toPoint();
            QPoint globalScreenPosition = self.platformWindow->mapToGlobal(localViewPosition);

            touchPoint.area = QRectF(globalScreenPosition, QSize(0, 0));

            // FIXME: Do we really need to support QPointingDevice::Capability::NormalizedPosition?
            QSize screenSize = self.platformWindow->screen()->geometry().size();
            touchPoint.normalPosition = QPointF(globalScreenPosition.x() / screenSize.width(),
                globalScreenPosition.y() / screenSize.height());

            if (supportsPressure) {
                // Note: iOS will deliver touchesBegan with a touch force of 0, which
                // we will reflect/propagate as a 0 pressure, but there is no clear
                // alternative, as we don't want to wait for a touchedMoved before
                // sending a touch press event to Qt, just to have a valid pressure.
                touchPoint.pressure = uiTouch.force / uiTouch.maximumPossibleForce;
            } else {
                // We don't claim that our touch device supports QPointingDevice::Capability::Pressure,
                // but fill in a meaningful value in case clients use it anyway.
                touchPoint.pressure = (state == QEventPoint::State::Released) ? 0.0 : 1.0;
            }
        }
    }

    if ([self.window isKindOfClass:[QUIWindow class]] &&
        !static_cast<QUIWindow *>(self.window).sendingEvent) {
        // The event is likely delivered as part of delayed touch delivery, via
        // _UIGestureEnvironmentSortAndSendDelayedTouches, due to one of the two
        // _UISystemGestureGateGestureRecognizer instances on the top level window
        // having its delaysTouchesBegan set to YES. During this delivery, it's not
        // safe to spin up a recursive event loop, as our calling function is not
        // reentrant, so any gestures used by the recursive code, e.g. a native
        // alert dialog, will fail to recognize. To be on the safe side, we deliver
        // the event asynchronously.
        QWindowSystemInterface::handleTouchEvent<QWindowSystemInterface::AsynchronousDelivery>(
            self.platformWindow->window(), timeStamp, iosIntegration->touchDevice(), m_activeTouches.values());
    } else {
        // Send the touch event asynchronously, as the application might spin a recursive
        // event loop in response to the touch event (a dialog e.g.), which will deadlock
        // the UIKit event delivery system (QTBUG-98651).
        QWindowSystemInterface::handleTouchEvent<QWindowSystemInterface::AsynchronousDelivery>(
            self.platformWindow->window(), timeStamp, iosIntegration->touchDevice(), m_activeTouches.values());
    }
}
489
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
    // UIKit generates [Began -> Moved -> Ended] event sequences for
    // each touch point. Internally we keep a hashmap of active UITouch
    // points to QWindowSystemInterface::TouchPoints, and assigns each TouchPoint
    // an id for use by Qt.
    for (UITouch *touch in touches) {
#if QT_CONFIG(tabletevent)
        if (touch.type == UITouchTypeStylus) {
            // Only a single Pencil is tracked at a time.
            if (Q_UNLIKELY(m_activePencilTouch)) {
                qWarning("ignoring additional Pencil while first is still active");
                continue;
            }
            m_activePencilTouch = touch;
        } else
        {
            Q_ASSERT(!m_activeTouches.contains(touch.hash));
#endif
        // Use window-independent touch identifiers, so that
        // multi-touch works across windows.
        static quint16 nextTouchId = 0;
        m_activeTouches[touch.hash].id = nextTouchId++;
#if QT_CONFIG(tabletevent)
        }
#endif
    }

    // Activate the top-level window on the first touch, unless it already
    // has focus or opts out of automatic activation.
    if (self.platformWindow->shouldAutoActivateWindow() && m_activeTouches.size() == 1) {
        QPlatformWindow *topLevel = self.platformWindow;
        while (QPlatformWindow *p = topLevel->parent())
            topLevel = p;
        if (topLevel->window() != QGuiApplication::focusWindow())
            topLevel->requestActivateWindow();
    }

    [self handleTouches:touches withEvent:event withState:QEventPoint::State::Pressed withTimestamp:getTimeStamp(event)];
}
527
// Forward movement of already-tracked touch points as updates.
- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
{
    [self handleTouches:touches withEvent:event withState:QEventPoint::State::Updated withTimestamp:getTimeStamp(event)];
}
532
// Deliver the release, then drop the ended touches from the active set.
- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
{
    [self handleTouches:touches withEvent:event withState:QEventPoint::State::Released withTimestamp:getTimeStamp(event)];

    // Remove ended touch points from the active set:
#ifndef Q_OS_TVOS
    for (UITouch *touch in touches) {
#if QT_CONFIG(tabletevent)
        if (touch.type == UITouchTypeStylus) {
            m_activePencilTouch = nil;
        } else
#endif
        {
            m_activeTouches.remove(touch.hash);
        }
    }
#else
    // tvOS only supports single touch
    m_activeTouches.clear();
#endif
}
554
// Cancels all active touches and notifies Qt; see the long comment below
// for why we always clear the complete set.
- (void)touchesCancelled:(NSSet *)touches withEvent:(UIEvent *)event
{
    if (m_activeTouches.isEmpty() && !m_activePencilTouch)
        return;

    // When four-finger swiping, we get a touchesCancelled callback
    // which includes all four touch points. The swipe gesture is
    // then active until all four touches have been released, and
    // we start getting touchesBegan events again.

    // When five-finger pinching, we also get a touchesCancelled
    // callback with all five touch points, but the pinch gesture
    // ends when the second to last finger is released from the
    // screen. The last finger will not emit any more touch
    // events, _but_, will contribute to starting another pinch
    // gesture. That second pinch gesture will _not_ trigger a
    // touchesCancelled event when starting, but as each finger
    // is released, and we may get touchesMoved events for the
    // remaining fingers. [event allTouches] also contains one
    // less touch point than it should, so this behavior is
    // likely a bug in the iOS system gesture recognizer, but we
    // have to take it into account when maintaining the Qt state.
    // We do this by assuming that there are no cases where a
    // sub-set of the active touch events are intentionally cancelled.

    NSInteger count = static_cast<NSInteger>([touches count]);
    if (count != 0 && count != m_activeTouches.count() && !m_activePencilTouch)
        qWarning("Subset of active touches cancelled by UIKit");

    m_activeTouches.clear();
    m_activePencilTouch = nil;

    // The event may be nil here; fall back to the process uptime in ms.
    ulong timestamp = event ? getTimeStamp(event) : ([[NSProcessInfo processInfo] systemUptime] * 1000);

    QIOSIntegration *iosIntegration = static_cast<QIOSIntegration *>(QGuiApplicationPrivate::platformIntegration());

    // Send the touch event asynchronously, as the application might spin a recursive
    // event loop in response to the touch event (a dialog e.g.), which will deadlock
    // the UIKit event delivery system (QTBUG-98651).
    QWindowSystemInterface::handleTouchCancelEvent<QWindowSystemInterface::AsynchronousDelivery>(
        self.platformWindow->window(), timestamp, iosIntegration->touchDevice());
}
597
// Maps a UIPress to a Qt key code. Logical button presses (arrows, tvOS
// remote buttons) map directly; hardware keyboard presses fall through a
// chain of key mappers, with \a text receiving any produced characters.
// Returns Qt::Key_unknown if no mapping exists.
- (int)mapPressTypeToKey:(UIPress*)press withModifiers:(Qt::KeyboardModifiers)qtModifiers text:(QString &)text
{
    switch (press.type) {
    case UIPressTypeUpArrow: return Qt::Key_Up;
    case UIPressTypeDownArrow: return Qt::Key_Down;
    case UIPressTypeLeftArrow: return Qt::Key_Left;
    case UIPressTypeRightArrow: return Qt::Key_Right;
    case UIPressTypeSelect: return Qt::Key_Select;
    case UIPressTypeMenu: return Qt::Key_Menu;
    case UIPressTypePlayPause: return Qt::Key_MediaTogglePlayPause;
    }
    // Try the raw hardware key code first.
    Qt::Key key = QAppleKeyMapper::fromUIKitKey(press.key.keyCode);
    if (key != Qt::Key_unknown)
        return key;
    // Then the characters with modifiers stripped.
    NSString *charactersIgnoringModifiers = press.key.charactersIgnoringModifiers;
    key = QAppleKeyMapper::fromUIKitKey(charactersIgnoringModifiers);
    if (key != Qt::Key_unknown)
        return key;
    // Finally derive the key from the produced characters, which may also
    // fill in the event text. This already yields Qt::Key_unknown when no
    // mapping exists, so return it directly (the original re-tested the
    // value only to return the same thing in both branches).
    return QAppleKeyMapper::fromNSString(qtModifiers, press.key.characters,
        charactersIgnoringModifiers, text);
}
622
// Keys that must always be delivered as key events, even while an input
// method is handling text (currently the four arrow keys).
- (bool)isControlKey:(Qt::Key)key
{
    return key == Qt::Key_Up
        || key == Qt::Key_Down
        || key == Qt::Key_Left
        || key == Qt::Key_Right;
}
637
// Delivers a set of UIPresses to Qt as key events of the given type.
// Returns whether any press was handled by Qt.
- (bool)handlePresses:(NSSet<UIPress *> *)presses eventType:(QEvent::Type)type
{
    // Presses on Menu button will generate a Menu key event. By default, not handling
    // this event will cause the application to return to Headboard (tvOS launcher).
    // When handling the event (for example, as a back button), both press and
    // release events must be handled accordingly.
    if (!qApp->focusWindow())
        return false;

    const bool imEnabled = QIOSInputContext::instance()->inputMethodAccepted();
    bool anyHandled = false;

    for (UIPress *press in presses) {
        const Qt::KeyboardModifiers modifiers =
            QAppleKeyMapper::fromUIKitModifiers(press.key.modifierFlags);
        QString text;
        const int key = [self mapPressTypeToKey:press withModifiers:modifiers text:text];
        if (key == Qt::Key_unknown)
            continue;
        // While an input method is active, only control keys bypass it.
        if (imEnabled && ![self isControlKey:Qt::Key(key)])
            continue;

        if (QWindowSystemInterface::handleKeyEvent(
                self.platformWindow->window(), type, key, modifiers, text))
            anyHandled = true;
    }

    return anyHandled;
}
666
// Offer the presses to Qt first; only unhandled presses go on to UIKit.
- (void)pressesBegan:(NSSet<UIPress *> *)presses withEvent:(UIPressesEvent *)event
{
    if (![self handlePresses:presses eventType:QEvent::KeyPress])
        [super pressesBegan:presses withEvent:event];
}
672
// Offer the presses to Qt first; only unhandled presses go on to UIKit.
// Note: super must only be called in the unhandled case — the previous
// version also called it unconditionally, delivering the event to UIKit
// twice (compare pressesBegan: above).
- (void)pressesChanged:(NSSet<UIPress *> *)presses withEvent:(UIPressesEvent *)event
{
    if (![self handlePresses:presses eventType:QEvent::KeyPress])
        [super pressesChanged:presses withEvent:event];
}
679
// Offer the release to Qt first; only unhandled presses go on to UIKit.
// Note: super must only be called in the unhandled case — the previous
// version also called it unconditionally, delivering the event to UIKit
// twice (compare pressesBegan: above).
- (void)pressesEnded:(NSSet<UIPress *> *)presses withEvent:(UIPressesEvent *)event
{
    if (![self handlePresses:presses eventType:QEvent::KeyRelease])
        [super pressesEnded:presses withEvent:event];
}
686
// Lets QIOSMenu decide whether an edit-menu action is available, instead
// of walking further up the responder chain.
- (BOOL)canPerformAction:(SEL)action withSender:(id)sender
{
#if !defined(Q_OS_TVOS) && !defined(Q_OS_VISIONOS)
    // Check first if QIOSMenu should handle the action before continuing up the responder chain
    return [QIOSMenu::menuActionTarget() targetForAction:action withSender:sender] != 0;
#else
    Q_UNUSED(action);
    Q_UNUSED(sender);
    return false;
#endif
}
698
// Forward otherwise-unhandled selectors to QIOSMenu's action target, on
// platforms where Qt menus exist.
- (id)forwardingTargetForSelector:(SEL)selector
{
    Q_UNUSED(selector);
#if !defined(Q_OS_TVOS) && !defined(Q_OS_VISIONOS)
    return QIOSMenu::menuActionTarget();
#else
    return nil;
#endif
}
708
// Refuse UIKit's UITextInteraction, accepting every other interaction.
// NOTE(review): presumably this keeps the system text interaction from
// interfering with Qt's own text input handling — confirm. The class is
// matched by name since it is not referenced directly here.
- (void)addInteraction:(id<UIInteraction>)interaction
{
    NSString *interactionClassName = NSStringFromClass(interaction.class);
    if ([interactionClassName isEqualToString:@"UITextInteraction"])
        return;

    [super addInteraction:interaction];
}
716
- (UIEditingInteractionConfiguration)editingInteractionConfiguration
{
    // We only want the three-finger-tap edit menu to be available when there's
    // actually something to edit. Otherwise the OS will cause a slight delay
    // before delivering the release of three finger touch input. Note that we
    // do not do any hit testing here to check that the focus object is the one
    // being tapped, as the behavior of native iOS apps is to trigger the menu
    // regardless of where the gesture is being made.
    if (QIOSInputContext::instance()->inputMethodAccepted())
        return UIEditingInteractionConfigurationDefault;

    return UIEditingInteractionConfigurationNone;
}
728
#if QT_CONFIG(wheelevent)
// Pan-gesture handler for trackpad/mouse scrolling: converts the pan
// translation into a Qt wheel event with pixel and angle deltas.
- (void)handleScroll:(UIPanGestureRecognizer *)recognizer
{
    if (!self.platformWindow->window())
        return;

    if (!self.canBecomeFirstResponder)
        return;

    // The recognizer reports an accumulated translation, so derive this
    // event's delta from the value seen at the previous callback.
    CGPoint translation = [recognizer translationInView:self];
    CGFloat deltaX = translation.x - m_lastScrollDelta.x;
    CGFloat deltaY = translation.y - m_lastScrollDelta.y;

    QPoint angleDelta;
    // From QNSView implementation:
    // "Since deviceDelta is delivered as pixels rather than degrees, we need to
    // convert from pixels to degrees in a sensible manner.
    // It looks like 1/4 degrees per pixel behaves most native.
    // (NB: Qt expects the unit for delta to be 8 per degree):"
    const int pixelsToDegrees = 2; // 8 * 1/4
    angleDelta.setX(deltaX * pixelsToDegrees);
    angleDelta.setY(deltaY * pixelsToDegrees);

    QPoint pixelDelta;
    pixelDelta.setX(deltaX);
    pixelDelta.setY(deltaY);

    NSTimeInterval time_stamp = [[NSProcessInfo processInfo] systemUptime];
    ulong qt_timestamp = time_stamp * 1000;

    Qt::KeyboardModifiers qt_modifierFlags = QAppleKeyMapper::fromUIKitModifiers(recognizer.modifierFlags);

    if (recognizer.state == UIGestureRecognizerStateBegan)
        // locationInView: doesn't return the cursor position at the time of the wheel event,
        // but rather gives us the position with the deltas applied, so we need to save the
        // cursor position at the beginning of the gesture
        m_lastScrollCursorPos = [recognizer locationInView:self];

    // Remember the accumulated translation while the gesture is active;
    // reset when it ends so the next gesture starts from zero.
    if (recognizer.state != UIGestureRecognizerStateEnded) {
        m_lastScrollDelta.x = translation.x;
        m_lastScrollDelta.y = translation.y;
    } else {
        m_lastScrollDelta = CGPointZero;
    }

    QPoint qt_local = QPointF::fromCGPoint(m_lastScrollCursorPos).toPoint();
    QPoint qt_global = self.platformWindow->mapToGlobal(qt_local);

    qCInfo(lcQpaInputEvents).nospace() << "wheel event" << " at " << qt_local
        << " pixelDelta=" << pixelDelta << " angleDelta=" << angleDelta;

    QWindowSystemInterface::handleWheelEvent(self.platformWindow->window(), qt_timestamp, qt_local, qt_global, pixelDelta, angleDelta, qt_modifierFlags);
}
#endif // QT_CONFIG(wheelevent)
783
#if QT_CONFIG(tabletevent)
// Apple Pencil hover handler: forwards hover samples as tablet events with
// zero force and a Released state, passing the hover height as zOffset.
- (void)handleHover:(UIHoverGestureRecognizer *)recognizer
{
    if (!self.platformWindow)
        return;

    ulong timeStamp = [[NSProcessInfo processInfo] systemUptime] * 1000;

    CGFloat zOffset = [recognizer zOffset];

    CGVector azimuth = [recognizer azimuthUnitVectorInView:self];
    CGFloat altitudeAngleRadian = recognizer.altitudeAngle;

    [self handlePencilEventForLocationInView:[recognizer locationInView:self] withState:QEventPoint::State::Released
        withTimestamp:timeStamp withForce:0 withMaximumPossibleForce:0 withZOffset:zOffset
        withAzimuthUnitVector:azimuth withAltitudeAngleRadian:altitudeAngleRadian];
}
#endif
802
803@end
804
@implementation UIView (QtHelpers)

// Returns the QWindow this view backs, or nil for non-Qt views.
- (QWindow *)qwindow
{
    if (![self isKindOfClass:[QUIView class]])
        return nil;

    QT_PREPEND_NAMESPACE(QIOSWindow) *platformWindow = static_cast<QUIView *>(self).platformWindow;
    return platformWindow ? platformWindow->window() : nil;
}

// Walks the responder chain to find the view controller managing this view.
- (UIViewController *)viewController
{
    for (UIResponder *responder = self.nextResponder; responder; responder = responder.nextResponder) {
        if ([responder isKindOfClass:UIViewController.class])
            return static_cast<UIViewController *>(responder);
    }
    return nil;
}

// Like -viewController, but only if the controller is Qt's own subclass.
- (QIOSViewController*)qtViewController
{
    UIViewController *controller = self.viewController;
    if ([controller isKindOfClass:QIOSViewController.class])
        return static_cast<QIOSViewController *>(controller);

    return nil;
}

@end
836
#if QT_CONFIG(metal)
@implementation QUIMetalView

// Metal-backed variant of QUIView: backed by a CAMetalLayer so the view
// can serve as a Metal render target.
+ (Class)layerClass
{
    return [CAMetalLayer class];
}

@end
#endif
847
848#if QT_CONFIG(accessibility)
849// Include category as an alternative to using -ObjC (Apple QA1490)
850#include "quiview_accessibility.mm"
851#endif
// Doc-site cross-reference residue (not part of quiview.mm itself):
// Q_FORWARD_DECLARE_OBJC_CLASS(NSString), Q_FORWARD_DECLARE_OBJC_CLASS(UIViewController),
// Q_LOGGING_CATEGORY(lcEventDispatcher, "qt.eventdispatcher"), and the
// qImDebug macro are declared elsewhere — see qiosglobal.h:21.