|
1 /* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ |
|
2 /* This Source Code Form is subject to the terms of the Mozilla Public |
|
3 * License, v. 2.0. If a copy of the MPL was not distributed with this |
|
4 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
|
5 |
|
6 // Moz headers (alphabetical) |
|
7 #include "MetroInput.h" |
|
8 #include "MetroUtils.h" // Logging, POINT_CEIL_*, ActivateGenericInstance, etc |
|
9 #include "MetroWidget.h" // MetroInput::mWidget |
|
10 #include "mozilla/dom/Touch.h" // Touch |
|
11 #include "nsTArray.h" // Touch lists |
|
12 #include "nsIDOMSimpleGestureEvent.h" // Constants for gesture events |
|
13 #include "InputData.h" |
|
14 #include "UIABridgePrivate.h" |
|
15 #include "MetroAppShell.h" |
|
16 #include "mozilla/EventStateManager.h" |
|
17 #include "mozilla/EventStates.h" |
|
18 #include "mozilla/MouseEvents.h" |
|
19 #include "mozilla/TouchEvents.h" |
|
20 #include "mozilla/Preferences.h" // for Preferences |
|
21 #include "WinUtils.h" |
|
22 #include "nsIPresShell.h" |
|
23 |
|
24 // System headers (alphabetical) |
|
25 #include <windows.ui.core.h> // ABI::Windows::UI::Core namespace

26 #include <windows.ui.input.h> // ABI::Windows::UI::Input namespace
|
27 |
|
28 //#define DEBUG_INPUT |
|
29 |
|
30 // Using declarations |
|
31 using namespace ABI::Windows; // UI, System, Foundation namespaces |
|
32 using namespace Microsoft; // WRL namespace (ComPtr, possibly others) |
|
33 using namespace mozilla; |
|
34 using namespace mozilla::widget; |
|
35 using namespace mozilla::widget::winrt; |
|
36 using namespace mozilla::dom; |
|
37 |
|
38 // File-scoped statics (unnamed namespace) |
|
39 namespace { |
|
40 // XXX: Set these min values appropriately |
|
41 const double SWIPE_MIN_DISTANCE = 5.0; |
|
42 const double SWIPE_MIN_VELOCITY = 5.0; |
|
43 |
|
44 // Convenience typedefs for event handler types |
|
45 typedef Foundation::__FITypedEventHandler_2_Windows__CUI__CInput__CEdgeGesture_Windows__CUI__CInput__CEdgeGestureEventArgs_t EdgeGestureHandler; |
|
46 typedef Foundation::__FITypedEventHandler_2_Windows__CUI__CCore__CCoreDispatcher_Windows__CUI__CCore__CAcceleratorKeyEventArgs_t AcceleratorKeyActivatedHandler; |
|
47 typedef Foundation::__FITypedEventHandler_2_Windows__CUI__CCore__CCoreWindow_Windows__CUI__CCore__CPointerEventArgs_t PointerEventHandler; |
|
48 typedef Foundation::__FITypedEventHandler_2_Windows__CUI__CInput__CGestureRecognizer_Windows__CUI__CInput__CTappedEventArgs_t TappedEventHandler; |
|
49 typedef Foundation::__FITypedEventHandler_2_Windows__CUI__CInput__CGestureRecognizer_Windows__CUI__CInput__CRightTappedEventArgs_t RightTappedEventHandler; |
|
50 typedef Foundation::__FITypedEventHandler_2_Windows__CUI__CInput__CGestureRecognizer_Windows__CUI__CInput__CManipulationStartedEventArgs_t ManipulationStartedEventHandler; |
|
51 typedef Foundation::__FITypedEventHandler_2_Windows__CUI__CInput__CGestureRecognizer_Windows__CUI__CInput__CManipulationUpdatedEventArgs_t ManipulationUpdatedEventHandler; |
|
52 typedef Foundation::__FITypedEventHandler_2_Windows__CUI__CInput__CGestureRecognizer_Windows__CUI__CInput__CManipulationCompletedEventArgs_t ManipulationCompletedEventHandler; |
|
53 |
|
54 // Other convenience typedefs |
|
55 typedef ABI::Windows::UI::Core::ICoreAcceleratorKeys ICoreAcceleratorKeys; |
|
56 |
|
57 /** |
|
58 * Specifies whether touch-action property is in force. |
|
59 */ |
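// Backed by the "layout.css.touch_action.enabled" pref; the MetroInput

// constructor keeps it in sync via Preferences::AddBoolVarCache.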
|
60 static bool gTouchActionPropertyEnabled = false; |
|
61 |
|
62 /** |
|
63 * Creates and returns a new {@link Touch} from the given |
|
64 * ABI::Windows::UI::Input::IPointerPoint. Note that the caller is |
|
65 * responsible for freeing the memory for the Touch returned from |
|
66 * this function. |
|
67 * |
|
68 * @param aPoint the ABI::Windows::UI::Input::IPointerPoint containing the |
|
69 * metadata from which to create our new {@link Touch} |
|
70 * @return a new {@link Touch} representing the touch point. The caller |
|
71 * is responsible for freeing the memory for this touch point. |
|
72 */ |
|
73 Touch* |
|
74 CreateDOMTouch(UI::Input::IPointerPoint* aPoint) { |
|
75 WRL::ComPtr<UI::Input::IPointerPointProperties> props; |
|
76 Foundation::Point position; |
|
77 uint32_t pointerId; |
|
78 Foundation::Rect contactRect; |
|
79 float pressure; |
|
80 float tiltX; |
|
81 float tiltY; |
|
82 |
|
83 aPoint->get_Properties(props.GetAddressOf()); |
|
84 aPoint->get_Position(&position); |
|
85 aPoint->get_PointerId(&pointerId); |
|
86 props->get_ContactRect(&contactRect); |
|
87 props->get_Pressure(&pressure); |
|
88 props->get_XTilt(&tiltX); |
|
89 props->get_YTilt(&tiltY); |
|
90 |
|
91 nsIntPoint touchPoint = MetroUtils::LogToPhys(position); |
|
92 nsIntPoint touchRadius; |
|
93 touchRadius.x = WinUtils::LogToPhys(contactRect.Width) / 2; |
|
94 touchRadius.y = WinUtils::LogToPhys(contactRect.Height) / 2; |
|
95 Touch* touch = |
|
96 new Touch(pointerId, |
|
97 touchPoint, |
|
98 // Rotation radius and angle. |
|
99 // W3C touch events v1 do not use these. |
|
100 // The draft for W3C touch events v2 explains that |
|
101 // radius and angle should describe the ellipse that |
|
102 // most closely circumscribes the touching area. Since |
|
103 // Windows gives us a bounding rectangle rather than an |
|
104 // ellipse, we provide the ellipse that is most closely |
|
105 // circumscribed by the bounding rectangle that Windows |
|
106 // gave us. |
|
107 touchRadius, |
|
108 0.0f, |
|
109 // Pressure |
|
110 // W3C touch events v1 do not use this. |
|
111 // The current draft for W3C touch events v2 says that |
|
112 // this should be a value between 0.0 and 1.0, which is |
|
113 // consistent with what Windows provides us here. |
|
114 // XXX: Windows defaults to 0.5, but the current W3C |
|
115 // draft says that the value should be 0.0 if no value |
|
116 // known. |
|
117 pressure); |
|
118 touch->tiltX = tiltX; |
|
119 touch->tiltY = tiltY; |
|
120 return touch; |
|
121 } |
|
122 |
|
123 /** |
|
124 * Test if a touchpoint position has moved. See Touch.Equals for |
|
125 * criteria. |
|
126 * |
|
127 * @param aTouch previous touch point |
|
128 * @param aPoint new winrt touch point |
|
129 * @return true if the point has moved |
|
130 */ |
|
131 bool |
|
132 HasPointMoved(Touch* aTouch, UI::Input::IPointerPoint* aPoint) { |
|
133 WRL::ComPtr<UI::Input::IPointerPointProperties> props; |
|
134 Foundation::Point position; |
|
135 Foundation::Rect contactRect; |
|
136 float pressure; |
|
137 |
|
138 aPoint->get_Properties(props.GetAddressOf()); |
|
139 aPoint->get_Position(&position); |
|
140 props->get_ContactRect(&contactRect); |
|
141 props->get_Pressure(&pressure); |
|
142 nsIntPoint touchPoint = MetroUtils::LogToPhys(position); |
|
143 nsIntPoint touchRadius; |
|
144 touchRadius.x = WinUtils::LogToPhys(contactRect.Width) / 2; |
|
145 touchRadius.y = WinUtils::LogToPhys(contactRect.Height) / 2; |
|
146 |
|
147 // from Touch.Equals |
|
148 return touchPoint != aTouch->mRefPoint || |
|
149 pressure != aTouch->Force() || |
|
150 /* mRotationAngle != aTouch->RotationAngle() || */
|
151 touchRadius.x != aTouch->RadiusX() || |
|
152 touchRadius.y != aTouch->RadiusY(); |
|
153 } |
|
154 |
|
155 /** |
|
156 * Converts from the Devices::Input::PointerDeviceType enumeration |
|
157 * to a nsIDOMMouseEvent::MOZ_SOURCE_* value. |
|
158 * |
|
159 * @param aDeviceType the value to convert |
|
160 * @param aMozInputSource the converted value |
|
161 */ |
|
162 void |
|
163 MozInputSourceFromDeviceType( |
|
164 Devices::Input::PointerDeviceType const& aDeviceType, |
|
165 unsigned short& aMozInputSource) { |
|
166 if (Devices::Input::PointerDeviceType::PointerDeviceType_Mouse |
|
167 == aDeviceType) { |
|
168 aMozInputSource = nsIDOMMouseEvent::MOZ_SOURCE_MOUSE; |
|
169 } else if (Devices::Input::PointerDeviceType::PointerDeviceType_Touch |
|
170 == aDeviceType) { |
|
171 aMozInputSource = nsIDOMMouseEvent::MOZ_SOURCE_TOUCH; |
|
172 } else if (Devices::Input::PointerDeviceType::PointerDeviceType_Pen |
|
173 == aDeviceType) { |
|
174 aMozInputSource = nsIDOMMouseEvent::MOZ_SOURCE_PEN; |
|
175 } |
|
176 } |
|
177 |
|
178 int16_t |
|
179 ButtonsForPointerPoint(UI::Input::IPointerPoint* aPoint) { |
|
180 WRL::ComPtr<UI::Input::IPointerPointProperties> props; |
|
181 aPoint->get_Properties(props.GetAddressOf()); |
|
182 |
|
183 int16_t buttons = 0; |
|
184 boolean buttonPressed; |
|
185 |
|
186 props->get_IsLeftButtonPressed(&buttonPressed); |
|
187 if (buttonPressed) { |
|
188 buttons |= WidgetMouseEvent::eLeftButtonFlag; |
|
189 } |
|
190 props->get_IsMiddleButtonPressed(&buttonPressed); |
|
191 if (buttonPressed) { |
|
192 buttons |= WidgetMouseEvent::eMiddleButtonFlag; |
|
193 } |
|
194 props->get_IsRightButtonPressed(&buttonPressed); |
|
195 if (buttonPressed) { |
|
196 buttons |= WidgetMouseEvent::eRightButtonFlag; |
|
197 } |
|
198 props->get_IsXButton1Pressed(&buttonPressed); |
|
199 if (buttonPressed) { |
|
200 buttons |= WidgetMouseEvent::e4thButtonFlag; |
|
201 } |
|
202 props->get_IsXButton2Pressed(&buttonPressed); |
|
203 if (buttonPressed) { |
|
204 buttons |= WidgetMouseEvent::e5thButtonFlag; |
|
205 } |
|
206 return buttons; |
|
207 } |
|
208 |
|
209 /** |
|
210 * This function is for use with mTouches.Enumerate. It will |
|
211 * append each element it encounters to the {@link nsTArray} |
|
212 * of {@link mozilla::dom::Touch}es passed in through the third (void*) |
|
213 * parameter. |
|
214 * |
|
215 * NOTE: This function will set the `mChanged` member of each |
|
216 * element it encounters to `false`, since this function is only |
|
217 * used to populate a touchlist that is about to be dispatched |
|
218 * in a gecko touch event. |
|
219 * |
|
220 * @param aKey the key of the current element being enumerated |
|
221 * @param aData the value of the current element being enumerated |
|
222 * @param aTouchList the {@link nsTArray} to append to |
|
223 */ |
|
224 PLDHashOperator |
|
225 AppendToTouchList(const unsigned int& aKey, |
|
226 nsRefPtr<Touch>& aData, |
|
227 void *aTouchList) |
|
228 { |
|
229 nsTArray<nsRefPtr<Touch> > *touches = |
|
230 static_cast<nsTArray<nsRefPtr<Touch> > *>(aTouchList); |
|
231 nsRefPtr<Touch> copy = new Touch(aData->mIdentifier, |
|
232 aData->mRefPoint, |
|
233 aData->mRadius, |
|
234 aData->mRotationAngle, |
|
235 aData->mForce); |
|
236 copy->tiltX = aData->tiltX; |
|
237 copy->tiltY = aData->tiltY; |
|
238 touches->AppendElement(copy); |
|
239 aData->mChanged = false; |
|
240 return PL_DHASH_NEXT; |
|
241 } |
|
242 |
|
243 // Helper for making sure event ptrs get freed. |
|
244 class AutoDeleteEvent |
|
245 { |
|
246 public: |
|
247 AutoDeleteEvent(WidgetGUIEvent* aPtr) : |
|
248 mPtr(aPtr) {} |
|
249 ~AutoDeleteEvent() { |
|
250 if (mPtr) { |
|
251 delete mPtr; |
|
252 } |
|
253 } |
|
254 WidgetGUIEvent* mPtr; |
|
255 }; |
|
256 } // anonymous namespace
|
257 |
|
258 namespace mozilla { |
|
259 namespace widget { |
|
260 namespace winrt { |
|
261 |
|
262 MetroInput::InputPrecisionLevel MetroInput::sCurrentInputLevel = |
|
263 MetroInput::InputPrecisionLevel::LEVEL_IMPRECISE; |
|
264 |
|
265 MetroInput::MetroInput(MetroWidget* aWidget, |
|
266 UI::Core::ICoreWindow* aWindow) |
|
267 : mWidget(aWidget), |
|
268 mNonApzTargetForTouch(false), |
|
269 mWindow(aWindow) |
|
270 { |
|
271 LogFunction(); |
|
272 NS_ASSERTION(aWidget, "Attempted to create MetroInput for null widget!"); |
|
273 NS_ASSERTION(aWindow, "Attempted to create MetroInput for null window!"); |
|
274 |
|
275 Preferences::AddBoolVarCache(&gTouchActionPropertyEnabled, "layout.css.touch_action.enabled", gTouchActionPropertyEnabled); |
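  // Zero the event registration tokens; RegisterInputEvents() assigns them

  // and UnregisterInputEvents() hands them back to the remove_* calls.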
|
276 mTokenPointerPressed.value = 0; |
|
277 mTokenPointerReleased.value = 0; |
|
278 mTokenPointerMoved.value = 0; |
|
279 mTokenPointerEntered.value = 0; |
|
280 mTokenPointerExited.value = 0; |
|
281 mTokenEdgeStarted.value = 0; |
|
282 mTokenEdgeCanceled.value = 0; |
|
283 mTokenEdgeCompleted.value = 0; |
|
284 mTokenManipulationCompleted.value = 0; |
|
285 mTokenTapped.value = 0; |
|
286 mTokenRightTapped.value = 0; |
|
287 |
|
288 // Create our Gesture Recognizer |
|
289 ActivateGenericInstance(RuntimeClass_Windows_UI_Input_GestureRecognizer, |
|
290 mGestureRecognizer); |
|
291 NS_ASSERTION(mGestureRecognizer, "Failed to create GestureRecognizer!"); |
|
292 |
|
293 RegisterInputEvents(); |
|
294 } |
|
295 |
|
296 MetroInput::~MetroInput() |
|
297 { |
|
298 LogFunction(); |
|
299 UnregisterInputEvents(); |
|
300 } |
|
301 |
|
302 /* static */ |
|
303 bool MetroInput::IsInputModeImprecise() |
|
304 { |
|
305 return sCurrentInputLevel == LEVEL_IMPRECISE; |
|
306 } |
|
307 |
|
308 /** |
|
309 * Tracks the current input level (precise/imprecise) and fires an observer |
|
310 * when the mode changes. |
|
311 */ |
|
312 void |
|
313 MetroInput::UpdateInputLevel(InputPrecisionLevel aInputLevel) |
|
314 { |
|
315 // ignore mouse input if we have active touch input. |
|
316 if (aInputLevel == LEVEL_PRECISE && mTouches.Count() > 0) { |
|
317 return; |
|
318 } |
|
319 if (sCurrentInputLevel != aInputLevel) { |
|
320 sCurrentInputLevel = aInputLevel; |
|
321 MetroUtils::FireObserver(sCurrentInputLevel == LEVEL_PRECISE ? |
|
322 "metro_precise_input" : "metro_imprecise_input"); |
|
323 } |
|
324 } |
|
325 |
|
326 /** |
|
327 * Processes an IEdgeGestureEventArgs and returns the input source type |
|
328 * for the event. Also updates input level via UpdateInputLevel. |
|
329 */ |
|
330 uint16_t |
|
331 MetroInput::ProcessInputTypeForGesture(UI::Input::IEdgeGestureEventArgs* aArgs) |
|
332 { |
|
333 MOZ_ASSERT(aArgs); |
|
334 UI::Input::EdgeGestureKind kind; |
|
335 aArgs->get_Kind(&kind); |
|
336 switch(kind) { |
|
337 case UI::Input::EdgeGestureKind::EdgeGestureKind_Touch: |
|
338 UpdateInputLevel(LEVEL_IMPRECISE); |
|
339 return nsIDOMMouseEvent::MOZ_SOURCE_TOUCH; |
|
340 break; |
|
341 case UI::Input::EdgeGestureKind::EdgeGestureKind_Keyboard: |
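      // A keyboard-invoked edge gesture tells us nothing about pointer

      // precision, so we leave the input level unchanged here.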
|
342 return nsIDOMMouseEvent::MOZ_SOURCE_KEYBOARD; |
|
343 break; |
|
344 case UI::Input::EdgeGestureKind::EdgeGestureKind_Mouse: |
|
345 UpdateInputLevel(LEVEL_PRECISE); |
|
346 return nsIDOMMouseEvent::MOZ_SOURCE_MOUSE; |
|
347 break; |
|
348 } |
|
349 return nsIDOMMouseEvent::MOZ_SOURCE_UNKNOWN; |
|
350 } |
|
351 |
|
352 /** |
|
353 * When the user swipes her/his finger in from the top of the screen, |
|
354 * we receive this event. |
|
355 * |
|
356  * @param sender the IEdgeGesture that fired this event
|
357 * @param aArgs the event-specific args we use when processing this event |
|
358 * @returns S_OK |
|
359 */ |
|
360 HRESULT |
|
361 MetroInput::OnEdgeGestureStarted(UI::Input::IEdgeGesture* sender, |
|
362 UI::Input::IEdgeGestureEventArgs* aArgs) |
|
363 { |
|
364 #ifdef DEBUG_INPUT |
|
365 LogFunction(); |
|
366 #endif |
|
367 WidgetSimpleGestureEvent geckoEvent(true, |
|
368 NS_SIMPLE_GESTURE_EDGE_STARTED, |
|
369 mWidget.Get()); |
|
370 mModifierKeyState.Update(); |
|
371 mModifierKeyState.InitInputEvent(geckoEvent); |
|
372 geckoEvent.time = ::GetMessageTime(); |
|
373 geckoEvent.inputSource = ProcessInputTypeForGesture(aArgs); |
|
374 |
|
375 // Safe |
|
376 DispatchEventIgnoreStatus(&geckoEvent); |
|
377 return S_OK; |
|
378 } |
|
379 |
|
380 /** |
|
381 * This event can be received if the user swipes her/his finger back to |
|
382 * the top of the screen, or continues moving her/his finger such that |
|
383 * the movement is interpreted as a "grab this window" gesture |
|
384 * |
|
385  * @param sender the IEdgeGesture that fired this event
|
386 * @param aArgs the event-specific args we use when processing this event |
|
387 * @returns S_OK |
|
388 */ |
|
389 HRESULT |
|
390 MetroInput::OnEdgeGestureCanceled(UI::Input::IEdgeGesture* sender, |
|
391 UI::Input::IEdgeGestureEventArgs* aArgs) |
|
392 { |
|
393 #ifdef DEBUG_INPUT |
|
394 LogFunction(); |
|
395 #endif |
|
396 WidgetSimpleGestureEvent geckoEvent(true, |
|
397 NS_SIMPLE_GESTURE_EDGE_CANCELED, |
|
398 mWidget.Get()); |
|
399 mModifierKeyState.Update(); |
|
400 mModifierKeyState.InitInputEvent(geckoEvent); |
|
401 geckoEvent.time = ::GetMessageTime(); |
|
402 geckoEvent.inputSource = ProcessInputTypeForGesture(aArgs); |
|
403 |
|
404 // Safe |
|
405 DispatchEventIgnoreStatus(&geckoEvent); |
|
406 return S_OK; |
|
407 } |
|
408 |
|
409 /** |
|
410  * This event is received if the user presses win+z or lifts her/his
|
411 * finger after causing an EdgeGestureStarting event to fire. |
|
412 * |
|
413  * @param sender the IEdgeGesture that fired this event
|
414 * @param aArgs the event-specific args we use when processing this event |
|
415 * @returns S_OK |
|
416 */ |
|
417 HRESULT |
|
418 MetroInput::OnEdgeGestureCompleted(UI::Input::IEdgeGesture* sender, |
|
419 UI::Input::IEdgeGestureEventArgs* aArgs) |
|
420 { |
|
421 #ifdef DEBUG_INPUT |
|
422 LogFunction(); |
|
423 #endif |
|
424 WidgetSimpleGestureEvent geckoEvent(true, |
|
425 NS_SIMPLE_GESTURE_EDGE_COMPLETED, |
|
426 mWidget.Get()); |
|
427 mModifierKeyState.Update(); |
|
428 mModifierKeyState.InitInputEvent(geckoEvent); |
|
429 geckoEvent.time = ::GetMessageTime(); |
|
430 geckoEvent.inputSource = ProcessInputTypeForGesture(aArgs); |
|
431 |
|
432 // Safe |
|
433 DispatchEventIgnoreStatus(&geckoEvent); |
|
434 return S_OK; |
|
435 } |
|
436 |
|
437 /** |
|
438 * This helper function is used by our processing of PointerPressed, |
|
439 * PointerReleased, and PointerMoved events. |
|
440 * It dispatches a gecko event in response to the input received. This |
|
441 * function should only be called for non-touch (i.e. pen or mouse) input |
|
442 * events. |
|
443 * |
|
444 * @param aPoint the PointerPoint for the input event |
|
445 */ |
|
446 void |
|
447 MetroInput::OnPointerNonTouch(UI::Input::IPointerPoint* aPoint) { |
|
448 WRL::ComPtr<UI::Input::IPointerPointProperties> props; |
|
449 UI::Input::PointerUpdateKind pointerUpdateKind; |
|
450 |
|
451 aPoint->get_Properties(props.GetAddressOf()); |
|
452 props->get_PointerUpdateKind(&pointerUpdateKind); |
|
453 |
|
454 uint32_t message = NS_MOUSE_MOVE; |
|
455 int16_t button = 0; |
|
456 |
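  // Default to a plain mouse-move; the switch below upgrades this to a

  // button down/up when the update kind indicates a pressed/released change.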
|
457 switch (pointerUpdateKind) { |
|
458 case UI::Input::PointerUpdateKind::PointerUpdateKind_LeftButtonPressed: |
|
459 button = WidgetMouseEvent::buttonType::eLeftButton; |
|
460 message = NS_MOUSE_BUTTON_DOWN; |
|
461 break; |
|
462 case UI::Input::PointerUpdateKind::PointerUpdateKind_MiddleButtonPressed: |
|
463 button = WidgetMouseEvent::buttonType::eMiddleButton; |
|
464 message = NS_MOUSE_BUTTON_DOWN; |
|
465 break; |
|
466 case UI::Input::PointerUpdateKind::PointerUpdateKind_RightButtonPressed: |
|
467 button = WidgetMouseEvent::buttonType::eRightButton; |
|
468 message = NS_MOUSE_BUTTON_DOWN; |
|
469 break; |
|
470 case UI::Input::PointerUpdateKind::PointerUpdateKind_LeftButtonReleased: |
|
471 button = WidgetMouseEvent::buttonType::eLeftButton; |
|
472 message = NS_MOUSE_BUTTON_UP; |
|
473 break; |
|
474 case UI::Input::PointerUpdateKind::PointerUpdateKind_MiddleButtonReleased: |
|
475 button = WidgetMouseEvent::buttonType::eMiddleButton; |
|
476 message = NS_MOUSE_BUTTON_UP; |
|
477 break; |
|
478 case UI::Input::PointerUpdateKind::PointerUpdateKind_RightButtonReleased: |
|
479 button = WidgetMouseEvent::buttonType::eRightButton; |
|
480 message = NS_MOUSE_BUTTON_UP; |
|
481 break; |
|
482 } |
|
483 |
|
484 UpdateInputLevel(LEVEL_PRECISE); |
|
485 |
|
486 WidgetMouseEvent* event = |
|
487 new WidgetMouseEvent(true, message, mWidget.Get(), |
|
488 WidgetMouseEvent::eReal, |
|
489 WidgetMouseEvent::eNormal); |
|
490 event->button = button; |
|
491 aPoint->get_PointerId(&event->pointerId); |
|
492 InitGeckoMouseEventFromPointerPoint(event, aPoint); |
|
493 DispatchAsyncEventIgnoreStatus(event); |
|
494 } |
|
495 |
|
496 void |
|
497 MetroInput::InitTouchEventTouchList(WidgetTouchEvent* aEvent) |
|
498 { |
|
499 MOZ_ASSERT(aEvent); |
|
500 mTouches.Enumerate(&AppendToTouchList, |
|
501 static_cast<void*>(&aEvent->touches)); |
|
502 } |
|
503 |
|
504 bool |
|
505 MetroInput::ShouldDeliverInputToRecognizer() |
|
506 { |
|
507 return mRecognizerWantsEvents; |
|
508 } |
|
509 |
|
510 void |
|
511 MetroInput::GetAllowedTouchBehavior(WidgetTouchEvent* aTransformedEvent, nsTArray<TouchBehaviorFlags>& aOutBehaviors) |
|
512 { |
|
513 mWidget->ApzcGetAllowedTouchBehavior(aTransformedEvent, aOutBehaviors); |
|
514 |
|
515 for (uint32_t i = 0; i < aOutBehaviors.Length(); i++) { |
|
516 if (aOutBehaviors[i] & AllowedTouchBehavior::UNKNOWN) { |
|
517 // Hit-testing fallback: ask content to perform the hit test itself

518 // (even though this operation has high latency).
|
519 aOutBehaviors[i] = mWidget->ContentGetAllowedTouchBehavior(aTransformedEvent->touches[i]->mRefPoint); |
|
520 } |
|
521 } |
|
522 } |
|
523 |
|
524 bool |
|
525 MetroInput::IsTouchBehaviorForbidden(const nsTArray<TouchBehaviorFlags>& aTouchBehaviors) |
|
526 { |
|
527 for (size_t i = 0; i < aTouchBehaviors.Length(); i++) { |
|
528 if (aTouchBehaviors[i] == AllowedTouchBehavior::NONE) |
|
529 return true; |
|
530 } |
|
531 |
|
532 return false; |
|
533 } |
|
534 |
|
535 // This event is raised when the user pushes the left mouse button, presses a |
|
536 // pen to the surface, or presses a touch screen. |
|
537 HRESULT |
|
538 MetroInput::OnPointerPressed(UI::Core::ICoreWindow* aSender, |
|
539 UI::Core::IPointerEventArgs* aArgs) |
|
540 { |
|
541 #ifdef DEBUG_INPUT |
|
542 LogFunction(); |
|
543 #endif |
|
544 |
|
545 WRL::ComPtr<UI::Input::IPointerPoint> currentPoint; |
|
546 WRL::ComPtr<Devices::Input::IPointerDevice> device; |
|
547 Devices::Input::PointerDeviceType deviceType; |
|
548 |
|
549 aArgs->get_CurrentPoint(currentPoint.GetAddressOf()); |
|
550 currentPoint->get_PointerDevice(device.GetAddressOf()); |
|
551 device->get_PointerDeviceType(&deviceType); |
|
552 |
|
553 // For mouse and pen input, simply call our helper function |
|
554 if (deviceType != |
|
555 Devices::Input::PointerDeviceType::PointerDeviceType_Touch) { |
|
556 OnPointerNonTouch(currentPoint.Get()); |
|
557 mGestureRecognizer->ProcessDownEvent(currentPoint.Get()); |
|
558 return S_OK; |
|
559 } |
|
560 |
|
561 // This is touch input. |
|
562 UpdateInputLevel(LEVEL_IMPRECISE); |
|
563 |
|
564 // Create the new touch point and add it to our event. |
|
565 uint32_t pointerId; |
|
566 currentPoint->get_PointerId(&pointerId); |
|
567 nsRefPtr<Touch> touch = CreateDOMTouch(currentPoint.Get()); |
|
568 touch->mChanged = true; |
|
569 mTouches.Put(pointerId, touch); |
|
570 |
|
571 WidgetTouchEvent* touchEvent = |
|
572 new WidgetTouchEvent(true, NS_TOUCH_START, mWidget.Get()); |
|
573 |
|
574 if (mTouches.Count() == 1) { |
|
575 // If this is the first touchstart of a touch session reset some |
|
576 // tracking flags. |
|
577 mContentConsumingTouch = false; |
|
578 mApzConsumingTouch = false; |
|
579 mRecognizerWantsEvents = true; |
|
580 mCancelable = true; |
|
581 mCanceledIds.Clear(); |
|
582 } else { |
|
583 mCancelable = false; |
|
584 } |
|
585 |
|
586 InitTouchEventTouchList(touchEvent); |
|
587 DispatchAsyncTouchEvent(touchEvent); |
|
588 |
|
589 if (ShouldDeliverInputToRecognizer()) { |
|
590 mGestureRecognizer->ProcessDownEvent(currentPoint.Get()); |
|
591 } |
|
592 return S_OK; |
|
593 } |
|
594 |
|
595 void |
|
596 MetroInput::AddPointerMoveDataToRecognizer(UI::Core::IPointerEventArgs* aArgs) |
|
597 { |
|
598 if (ShouldDeliverInputToRecognizer()) { |
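    // GetIntermediatePoints returns the coalesced pointer updates for this

    // event, so the recognizer sees the full movement history rather than

    // just the latest point.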
|
599 WRL::ComPtr<Foundation::Collections::IVector<UI::Input::PointerPoint*>> |
|
600 pointerPoints; |
|
601 aArgs->GetIntermediatePoints(pointerPoints.GetAddressOf()); |
|
602 mGestureRecognizer->ProcessMoveEvents(pointerPoints.Get()); |
|
603 } |
|
604 } |
|
605 |
|
606 // This event is raised when the user moves the mouse, moves a pen that is |
|
607 // in contact with the surface, or moves a finger that is in contact with |
|
608 // a touch screen. |
|
609 HRESULT |
|
610 MetroInput::OnPointerMoved(UI::Core::ICoreWindow* aSender, |
|
611 UI::Core::IPointerEventArgs* aArgs) |
|
612 { |
|
613 #ifdef DEBUG_INPUT |
|
614 LogFunction(); |
|
615 #endif |
|
616 |
|
617 WRL::ComPtr<UI::Input::IPointerPoint> currentPoint; |
|
618 WRL::ComPtr<Devices::Input::IPointerDevice> device; |
|
619 Devices::Input::PointerDeviceType deviceType; |
|
620 |
|
621 aArgs->get_CurrentPoint(currentPoint.GetAddressOf()); |
|
622 currentPoint->get_PointerDevice(device.GetAddressOf()); |
|
623 device->get_PointerDeviceType(&deviceType); |
|
624 |
|
625 // For mouse and pen input, simply call our helper function |
|
626 if (deviceType != |
|
627 Devices::Input::PointerDeviceType::PointerDeviceType_Touch) { |
|
628 OnPointerNonTouch(currentPoint.Get()); |
|
629 AddPointerMoveDataToRecognizer(aArgs); |
|
630 return S_OK; |
|
631 } |
|
632 |
|
633 // This is touch input. |
|
634 UpdateInputLevel(LEVEL_IMPRECISE); |
|
635 |
|
636 // Get the touch associated with this touch point. |
|
637 uint32_t pointerId; |
|
638 currentPoint->get_PointerId(&pointerId); |
|
639 nsRefPtr<Touch> touch = mTouches.Get(pointerId); |
|
640 |
|
641 // Some old drivers cause us to receive a PointerMoved event for a touchId |
|
642 // after we've already received a PointerReleased event for that touchId. |
|
643 // To work around those busted drivers, we simply ignore TouchMoved events |
|
644 // for touchIds that we are not currently tracking. See bug 819223. |
|
645 if (!touch) { |
|
646 return S_OK; |
|
647 } |
|
648 |
|
649 AddPointerMoveDataToRecognizer(aArgs); |
|
650 |
|
651 // If the point hasn't moved, filter it out per the spec. Pres shell does |
|
652 // this as well, but we need to know when our first touchmove is going to |
|
653 // get delivered so we can check the result. |
|
654 if (!HasPointMoved(touch, currentPoint.Get())) { |
|
655 return S_OK; |
|
656 } |
|
657 |
|
658 touch = CreateDOMTouch(currentPoint.Get()); |
|
659 touch->mChanged = true; |
|
660 // replacing old touch point in mTouches map |
|
661 mTouches.Put(pointerId, touch); |
|
662 |
|
663 WidgetTouchEvent* touchEvent = |
|
664 new WidgetTouchEvent(true, NS_TOUCH_MOVE, mWidget.Get()); |
|
665 InitTouchEventTouchList(touchEvent); |
|
666 DispatchAsyncTouchEvent(touchEvent); |
|
667 |
|
668 return S_OK; |
|
669 } |
|
670 |
|
671 // This event is raised when the user lifts the left mouse button, lifts a |
|
672 // pen from the surface, or lifts her/his finger from a touch screen. |
|
673 HRESULT |
|
674 MetroInput::OnPointerReleased(UI::Core::ICoreWindow* aSender, |
|
675 UI::Core::IPointerEventArgs* aArgs) |
|
676 { |
|
677 #ifdef DEBUG_INPUT |
|
678 LogFunction(); |
|
679 #endif |
|
680 |
|
681 WRL::ComPtr<UI::Input::IPointerPoint> currentPoint; |
|
682 WRL::ComPtr<Devices::Input::IPointerDevice> device; |
|
683 Devices::Input::PointerDeviceType deviceType; |
|
684 |
|
685 aArgs->get_CurrentPoint(currentPoint.GetAddressOf()); |
|
686 currentPoint->get_PointerDevice(device.GetAddressOf()); |
|
687 device->get_PointerDeviceType(&deviceType); |
|
688 |
|
689 // For mouse and pen input, simply call our helper function |
|
690 if (deviceType != |
|
691 Devices::Input::PointerDeviceType::PointerDeviceType_Touch) { |
|
692 OnPointerNonTouch(currentPoint.Get()); |
|
693 mGestureRecognizer->ProcessUpEvent(currentPoint.Get()); |
|
694 return S_OK; |
|
695 } |
|
696 |
|
697 // This is touch input. |
|
698 UpdateInputLevel(LEVEL_IMPRECISE); |
|
699 |
|
700 // Get the touch associated with this touch point. |
|
701 uint32_t pointerId; |
|
702 currentPoint->get_PointerId(&pointerId); |
|
703 nsRefPtr<Touch> touch = mTouches.Get(pointerId); |
|
704 |
|
705 // Purge any pending moves for this pointer |
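  // (i.e. if the stored touch has an undelivered change, send one final

  // touchmove so the DOM sees the last position before the touchend).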
|
706 if (touch->mChanged) { |
|
707 WidgetTouchEvent* touchEvent = |
|
708 new WidgetTouchEvent(true, NS_TOUCH_MOVE, mWidget.Get()); |
|
709 InitTouchEventTouchList(touchEvent); |
|
710 DispatchAsyncTouchEvent(touchEvent); |
|
711 } |
|
712 |
|
713 // Remove this touch point from our map. Eventually all touch points are |
|
714 // removed for this session since we receive released events for every |
|
715 // point. |
|
716 mTouches.Remove(pointerId); |
|
717 |
|
718 // touchend events only have a single touch; the touch that has been removed |
|
719 WidgetTouchEvent* touchEvent = |
|
720 new WidgetTouchEvent(true, NS_TOUCH_END, mWidget.Get()); |
|
721 touchEvent->touches.AppendElement(CreateDOMTouch(currentPoint.Get())); |
|
722 DispatchAsyncTouchEvent(touchEvent); |
|
723 |
|
724 if (ShouldDeliverInputToRecognizer()) { |
|
725 mGestureRecognizer->ProcessUpEvent(currentPoint.Get()); |
|
726 } |
|
727 |
|
728 return S_OK; |
|
729 } |
|
730 |
|
731 // Tests for chrome vs. content target so we know whether input coordinates need |
|
732 // to be transformed through the apz. Eventually this hit testing should move |
|
733 // into the apz (bug 918288). |
|
734 bool |
|
735 MetroInput::HitTestChrome(const LayoutDeviceIntPoint& pt) |
|
736 { |
|
737 // Confirm this event targets content. We pick this up in browser's input.js. |
|
738 WidgetMouseEvent hittest(true, NS_MOUSE_MOZHITTEST, mWidget.Get(), |
|
739 WidgetMouseEvent::eReal, WidgetMouseEvent::eNormal); |
|
740 hittest.refPoint = pt; |
|
741 nsEventStatus status; |
|
742 mWidget->DispatchEvent(&hittest, status); |
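  // If browser chrome consumed the hittest event, the point is over chrome

  // rather than apz-managed content.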
|
743 return (status == nsEventStatus_eConsumeNoDefault); |
|
744 } |
|
745 |
|
746 /** |
|
747 * Returns true if the position is in chrome, false otherwise. |
|
748 */ |
|
749 bool |
|
750 MetroInput::TransformRefPoint(const Foundation::Point& aPosition, LayoutDeviceIntPoint& aRefPointOut) |
|
751 { |
|
752 // If this event is destined for content we need to transform our ref point through |
|
753 // the apz so that zoom can be accounted for. |
|
754 aRefPointOut = LayoutDeviceIntPoint::FromUntyped(MetroUtils::LogToPhys(aPosition)); |
|
755 ScreenIntPoint spt; |
|
756 spt.x = aRefPointOut.x; |
|
757 spt.y = aRefPointOut.y; |
|
758 // This is currently a general contained-rect hit test; it may produce a false positive for

759 // overlay chrome elements.
|
760 bool apzIntersect = mWidget->ApzHitTest(spt); |
|
761 if (!apzIntersect) { |
|
762 return true; |
|
763 } |
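  // Chrome targets also bypass the apz transform.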
|
764 if (HitTestChrome(aRefPointOut)) { |
|
765 return true; |
|
766 } |
|
767 mWidget->ApzTransformGeckoCoordinate(spt, &aRefPointOut); |
|
768 return false; |
|
769 } |
|
770 |
|
771 void |
|
772 MetroInput::TransformTouchEvent(WidgetTouchEvent* aEvent) |
|
773 { |
|
774 nsTArray< nsRefPtr<dom::Touch> >& touches = aEvent->touches; |
|
775 for (uint32_t i = 0; i < touches.Length(); ++i) { |
|
776 dom::Touch* touch = touches[i]; |
|
777 if (touch) { |
|
778 LayoutDeviceIntPoint lpt; |
|
779 ScreenIntPoint spt; |
|
780 spt.x = touch->mRefPoint.x; |
|
781 spt.y = touch->mRefPoint.y; |
|
782 mWidget->ApzTransformGeckoCoordinate(spt, &lpt); |
|
783 touch->mRefPoint.x = lpt.x; |
|
784 touch->mRefPoint.y = lpt.y; |
|
785 } |
|
786 } |
|
787 } |
|
788 |
|
789 void |
|
790 MetroInput::InitGeckoMouseEventFromPointerPoint( |
|
791 WidgetMouseEvent* aEvent, |
|
792 UI::Input::IPointerPoint* aPointerPoint) |
|
793 { |
|
794 NS_ASSERTION(aPointerPoint, "InitGeckoMouseEventFromPointerPoint " |
|
795 "called with null PointerPoint!"); |
|
796 |
|
797 WRL::ComPtr<UI::Input::IPointerPointProperties> props; |
|
798 WRL::ComPtr<Devices::Input::IPointerDevice> device; |
|
799 Devices::Input::PointerDeviceType deviceType; |
|
800 Foundation::Point position; |
|
801 uint64_t timestamp; |
|
802 float pressure; |
|
803 boolean canBeDoubleTap; |
|
804 float tiltX; |
|
805 float tiltY; |
|
806 |
|
807 aPointerPoint->get_Position(&position); |
|
808 aPointerPoint->get_Timestamp(&timestamp);
|
809 aPointerPoint->get_PointerDevice(device.GetAddressOf()); |
|
810 device->get_PointerDeviceType(&deviceType); |
|
811 aPointerPoint->get_Properties(props.GetAddressOf()); |
|
812 aPointerPoint->get_PointerId(&aEvent->pointerId); |
|
813 props->get_Pressure(&pressure); |
|
814 props->get_XTilt(&tiltX); |
|
815 props->get_YTilt(&tiltY); |
|
816 |
|
817 mGestureRecognizer->CanBeDoubleTap(aPointerPoint, &canBeDoubleTap); |
|
818 |
|
819 TransformRefPoint(position, aEvent->refPoint); |
|
820 |
|
821 if (!canBeDoubleTap) { |
|
822 aEvent->clickCount = 1; |
|
823 } else { |
|
824 aEvent->clickCount = 2; |
|
825 } |
|
826 aEvent->pressure = pressure; |
|
827 aEvent->tiltX = tiltX; |
|
828 aEvent->tiltY = tiltY; |
|
829 aEvent->buttons = ButtonsForPointerPoint(aPointerPoint); |
|
830 |
|
831 MozInputSourceFromDeviceType(deviceType, aEvent->inputSource); |
|
832 } |
|
833 |
|
834 // This event is raised when a precise pointer moves into the bounding box of |
|
835 // our window. For touch input, this will be raised before the PointerPressed |
|
836 // event. |
|
837 HRESULT |
|
838 MetroInput::OnPointerEntered(UI::Core::ICoreWindow* aSender, |
|
839 UI::Core::IPointerEventArgs* aArgs) |
|
840 { |
|
841 #ifdef DEBUG_INPUT |
|
842 LogFunction(); |
|
843 #endif |
|
844 |
|
845 WRL::ComPtr<UI::Input::IPointerPoint> currentPoint; |
|
846 WRL::ComPtr<Devices::Input::IPointerDevice> device; |
|
847 Devices::Input::PointerDeviceType deviceType; |
|
848 |
|
849 aArgs->get_CurrentPoint(currentPoint.GetAddressOf()); |
|
850 currentPoint->get_PointerDevice(device.GetAddressOf()); |
|
851 device->get_PointerDeviceType(&deviceType); |
|
852 |
|
853 // We only dispatch mouseenter and mouseexit events for mouse and pen input. |
|
854 if (deviceType != |
|
855 Devices::Input::PointerDeviceType::PointerDeviceType_Touch) { |
|
856 WidgetMouseEvent* event = |
|
857 new WidgetMouseEvent(true, NS_MOUSE_ENTER, mWidget.Get(), |
|
858 WidgetMouseEvent::eReal, WidgetMouseEvent::eNormal); |
|
859 UpdateInputLevel(LEVEL_PRECISE); |
|
860 InitGeckoMouseEventFromPointerPoint(event, currentPoint.Get()); |
|
861 DispatchAsyncEventIgnoreStatus(event); |
|
862 return S_OK; |
|
863 } |
|
864 UpdateInputLevel(LEVEL_IMPRECISE); |
|
865 return S_OK; |
|
866 } |
|
867 |
|
868 // This event is raised when a precise pointer leaves the bounding box of |
|
869 // our window. For touch input, this will be raised before the |
|
870 // PointerReleased event. |
|
871 HRESULT |
|
872 MetroInput::OnPointerExited(UI::Core::ICoreWindow* aSender, |
|
873 UI::Core::IPointerEventArgs* aArgs) |
|
874 { |
|
875 #ifdef DEBUG_INPUT |
|
876 LogFunction(); |
|
877 #endif |
|
878 |
|
879 WRL::ComPtr<UI::Input::IPointerPoint> currentPoint; |
|
880 WRL::ComPtr<Devices::Input::IPointerDevice> device; |
|
881 Devices::Input::PointerDeviceType deviceType; |
|
882 |
|
883 aArgs->get_CurrentPoint(currentPoint.GetAddressOf()); |
|
884 currentPoint->get_PointerDevice(device.GetAddressOf()); |
|
885 device->get_PointerDeviceType(&deviceType); |
|
886 |
|
887 // We only dispatch mouseenter and mouseexit events for mouse and pen input. |
|
888 if (deviceType != |
|
889 Devices::Input::PointerDeviceType::PointerDeviceType_Touch) { |
|
890 WidgetMouseEvent* event = |
|
891 new WidgetMouseEvent(true, NS_MOUSE_EXIT, mWidget.Get(), |
|
892 WidgetMouseEvent::eReal, WidgetMouseEvent::eNormal); |
|
893 UpdateInputLevel(LEVEL_PRECISE); |
|
894 InitGeckoMouseEventFromPointerPoint(event, currentPoint.Get()); |
|
895 DispatchAsyncEventIgnoreStatus(event); |
|
896 return S_OK; |
|
897 } |
|
898 UpdateInputLevel(LEVEL_IMPRECISE); |
|
899 return S_OK; |
|
900 } |
|
901 |
|
902 // Gecko expects a "finished" event to be sent that has the cumulative |
|
903 // changes since the gesture began. The idea is that consumers could hook |
|
904 // only this last event and still effectively support magnification and |
|
905 // rotation. We accomplish sending this "finished" event by calling our |
|
906 // helper function with a cumulative "delta" value. |
|
907 // |
|
908 // After sending the "finished" event, this function detects and sends |
|
909 // swipe gestures. |
|
910 HRESULT |
|
911 MetroInput::OnManipulationCompleted( |
|
912 UI::Input::IGestureRecognizer* aSender, |
|
913 UI::Input::IManipulationCompletedEventArgs* aArgs) |
|
914 { |
|
915 #ifdef DEBUG_INPUT |
|
916 LogFunction(); |
|
917 #endif |
|
918 |
|
919 Devices::Input::PointerDeviceType deviceType; |
|
920 aArgs->get_PointerDeviceType(&deviceType); |
|
921 if (deviceType == |
|
922 Devices::Input::PointerDeviceType::PointerDeviceType_Mouse) { |
|
923 return S_OK; |
|
924 } |
|
925 |
|
926 UI::Input::ManipulationDelta delta; |
|
927 Foundation::Point position; |
|
928 |
|
929 aArgs->get_Position(&position); |
|
930 aArgs->get_Cumulative(&delta); |
|
931 |
|
932 // We check that the distance the user's finger traveled and the |
|
933 // velocity with which it traveled exceed our thresholds for |
|
934 // classifying the movement as a swipe. |
|
935 UI::Input::ManipulationVelocities velocities; |
|
936 aArgs->get_Velocities(&velocities); |
|
937 |
|
938 bool isHorizontalSwipe = |
|
939 abs(velocities.Linear.X) >= SWIPE_MIN_VELOCITY |
|
940 && abs(delta.Translation.X) >= SWIPE_MIN_DISTANCE; |
|
941 bool isVerticalSwipe = |
|
942 abs(velocities.Linear.Y) >= SWIPE_MIN_VELOCITY |
|
943 && abs(delta.Translation.Y) >= SWIPE_MIN_DISTANCE; |
|
944 |
|
945 // If our thresholds were exceeded for both a vertical and a horizontal |
|
946 // swipe, it means the user is flinging her/his finger around and we |
|
947 // should just ignore the input. |
|
948 if (isHorizontalSwipe && isVerticalSwipe) { |
|
949 return S_OK; |
|
950 } |
|
951 |
|
952 if (isHorizontalSwipe) { |
|
953 WidgetSimpleGestureEvent* swipeEvent = |
|
954 new WidgetSimpleGestureEvent(true, NS_SIMPLE_GESTURE_SWIPE, |
|
955 mWidget.Get()); |
|
956 swipeEvent->direction = delta.Translation.X > 0 |
|
957 ? nsIDOMSimpleGestureEvent::DIRECTION_RIGHT |
|
958 : nsIDOMSimpleGestureEvent::DIRECTION_LEFT; |
|
959 swipeEvent->delta = delta.Translation.X; |
|
960 swipeEvent->inputSource = nsIDOMMouseEvent::MOZ_SOURCE_TOUCH; |
|
961 swipeEvent->refPoint = LayoutDeviceIntPoint::FromUntyped(MetroUtils::LogToPhys(position)); |
|
962 DispatchAsyncEventIgnoreStatus(swipeEvent); |
|
963 } |
|
964 |
|
965 if (isVerticalSwipe) { |
|
966 WidgetSimpleGestureEvent* swipeEvent = |
|
967 new WidgetSimpleGestureEvent(true, NS_SIMPLE_GESTURE_SWIPE, |
|
968 mWidget.Get()); |
|
969 swipeEvent->direction = delta.Translation.Y > 0 |
|
970 ? nsIDOMSimpleGestureEvent::DIRECTION_DOWN |
|
971 : nsIDOMSimpleGestureEvent::DIRECTION_UP; |
|
972 swipeEvent->delta = delta.Translation.Y; |
|
973 swipeEvent->inputSource = nsIDOMMouseEvent::MOZ_SOURCE_TOUCH; |
|
974 swipeEvent->refPoint = LayoutDeviceIntPoint::FromUntyped(MetroUtils::LogToPhys(position)); |
|
975 DispatchAsyncEventIgnoreStatus(swipeEvent); |
|
976 } |
|
977 |
|
978 return S_OK; |
|
979 } |
|
980 |
|
981 // This event is raised when a sequence of pointer events has been |
|
982 // interpreted by the GestureRecognizer as a tap (this could be a mouse |
|
983 // click, a pen tap, or a tap on a touch surface). |
|
984 HRESULT |
|
985 MetroInput::OnTapped(UI::Input::IGestureRecognizer* aSender, |
|
986 UI::Input::ITappedEventArgs* aArgs) |
|
987 { |
|
988 #ifdef DEBUG_INPUT |
|
989 LogFunction(); |
|
990 #endif |
|
991 |
|
992 Devices::Input::PointerDeviceType deviceType; |
|
993 aArgs->get_PointerDeviceType(&deviceType); |
|
994 |
|
995 unsigned int tapCount; |
|
996 aArgs->get_TapCount(&tapCount); |
|
997 |
|
998 // For mouse and pen input, we send mousedown/mouseup/mousemove |
|
999 // events as soon as we detect the input event. For touch input, a set of |
|
1000 // mousedown/mouseup events will be sent only once a tap has been detected. |
|
1001 if (deviceType != Devices::Input::PointerDeviceType::PointerDeviceType_Touch) { |
|
1002 return S_OK; |
|
1003 } |
|
1004 |
|
1005 Foundation::Point position; |
|
1006 aArgs->get_Position(&position); |
|
1007 HandleTap(position, tapCount); |
|
1008 return S_OK; |
|
1009 } |
|
1010 |
|
1011 // This event is raised when a sequence of pointer events has been |
|
1012 // interpreted by the GestureRecognizer as a right tap. |
|
1013 // This could be a mouse right-click, a right-click on a pen, or |
|
1014 // a tap-and-hold on a touch surface. |
|
1015 HRESULT |
|
1016 MetroInput::OnRightTapped(UI::Input::IGestureRecognizer* aSender, |
|
1017 UI::Input::IRightTappedEventArgs* aArgs) |
|
1018 { |
|
1019 #ifdef DEBUG_INPUT |
|
1020 LogFunction(); |
|
1021 #endif |
|
1022 |
|
1023 Devices::Input::PointerDeviceType deviceType; |
|
1024 aArgs->get_PointerDeviceType(&deviceType); |
|
1025 |
|
1026 Foundation::Point position; |
|
1027 aArgs->get_Position(&position); |
|
1028 HandleLongTap(position); |
|
1029 |
|
1030 return S_OK; |
|
1031 } |
|
1032 |
|
1033 void |
|
1034 MetroInput::HandleTap(const Foundation::Point& aPoint, unsigned int aTapCount) |
|
1035 { |
|
1036 #ifdef DEBUG_INPUT |
|
1037 LogFunction(); |
|
1038 #endif |
|
1039 |
|
1040 LayoutDeviceIntPoint refPoint; |
|
1041 TransformRefPoint(aPoint, refPoint); |
|
1042 |
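  // Synthesize the mouse event sequence for a tap: a mousemove to position

  // the cursor, then a left-button mousedown/mouseup pair, all marked as

  // coming from a touch source.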
|
1043 WidgetMouseEvent* mouseEvent = |
|
1044 new WidgetMouseEvent(true, NS_MOUSE_MOVE, mWidget.Get(), |
|
1045 WidgetMouseEvent::eReal, WidgetMouseEvent::eNormal); |
|
1046 mouseEvent->refPoint = refPoint; |
|
1047 mouseEvent->clickCount = aTapCount; |
|
1048 mouseEvent->inputSource = nsIDOMMouseEvent::MOZ_SOURCE_TOUCH; |
|
1049 DispatchAsyncEventIgnoreStatus(mouseEvent); |
|
1050 |
|
1051 mouseEvent = |
|
1052 new WidgetMouseEvent(true, NS_MOUSE_BUTTON_DOWN, mWidget.Get(), |
|
1053 WidgetMouseEvent::eReal, WidgetMouseEvent::eNormal); |
|
1054 mouseEvent->refPoint = refPoint; |
|
1055 mouseEvent->clickCount = aTapCount; |
|
1056 mouseEvent->inputSource = nsIDOMMouseEvent::MOZ_SOURCE_TOUCH; |
|
1057 mouseEvent->button = WidgetMouseEvent::buttonType::eLeftButton; |
|
1058 DispatchAsyncEventIgnoreStatus(mouseEvent); |
|
1059 |
|
1060 mouseEvent = |
|
1061 new WidgetMouseEvent(true, NS_MOUSE_BUTTON_UP, mWidget.Get(), |
|
1062 WidgetMouseEvent::eReal, WidgetMouseEvent::eNormal); |
|
1063 mouseEvent->refPoint = refPoint; |
|
1064 mouseEvent->clickCount = aTapCount; |
|
1065 mouseEvent->inputSource = nsIDOMMouseEvent::MOZ_SOURCE_TOUCH; |
|
1066 mouseEvent->button = WidgetMouseEvent::buttonType::eLeftButton; |
|
1067 DispatchAsyncEventIgnoreStatus(mouseEvent); |
|
1068 |
|
1069 // Make sure all gecko events are dispatched and the dom is up to date |
|
1070 // so that when ui automation comes in looking for focus info it gets |
|
1071 // the right information. |
|
1072 MetroAppShell::MarkEventQueueForPurge(); |
|
1073 } |
|
1074 |
|
1075 void |
|
1076 MetroInput::HandleLongTap(const Foundation::Point& aPoint) |
|
1077 { |
|
1078 #ifdef DEBUG_INPUT |
|
1079 LogFunction(); |
|
1080 #endif |
|
1081 LayoutDeviceIntPoint refPoint; |
|
1082 TransformRefPoint(aPoint, refPoint); |
|
1083 |
|
1084 WidgetMouseEvent* contextEvent = |
|
1085 new WidgetMouseEvent(true, NS_CONTEXTMENU, mWidget.Get(), |
|
1086 WidgetMouseEvent::eReal, WidgetMouseEvent::eNormal); |
|
1087 contextEvent->refPoint = refPoint; |
|
1088 contextEvent->inputSource = nsIDOMMouseEvent::MOZ_SOURCE_TOUCH; |
|
1089 DispatchAsyncEventIgnoreStatus(contextEvent); |
|
1090 } |
|
1091 |
|
1092 /** |
|
1093 * Implementation Details |
|
1094 */ |
|
1095 nsEventStatus MetroInput::sThrowawayStatus; |
|
1096 |
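// Queues the heap-allocated event and schedules a runnable on the current

// thread to deliver it, rather than dispatching synchronously from within

// the WinRT input callback.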
|
1097 void |
|
1098 MetroInput::DispatchAsyncEventIgnoreStatus(WidgetInputEvent* aEvent) |
|
1099 { |
|
1100 aEvent->time = ::GetMessageTime(); |
|
1101 mModifierKeyState.Update(); |
|
1102 mModifierKeyState.InitInputEvent(*aEvent); |
|
1103 mInputEventQueue.Push(aEvent); |
|
1104 nsCOMPtr<nsIRunnable> runnable = |
|
1105 NS_NewRunnableMethod(this, &MetroInput::DeliverNextQueuedEventIgnoreStatus); |
|
1106 NS_DispatchToCurrentThread(runnable); |
|
1107 } |
|
1108 |
|
1109 void |
|
1110 MetroInput::DeliverNextQueuedEventIgnoreStatus() |
|
1111 { |
|
1112 nsAutoPtr<WidgetGUIEvent> event = |
|
1113 static_cast<WidgetGUIEvent*>(mInputEventQueue.PopFront()); |
|
1114 MOZ_ASSERT(event.get()); |
|
1115 DispatchEventIgnoreStatus(event.get()); |
|
1116 |
|
1117 // Let app shell know we've delivered that last input we wanted purged |
|
1118 // via a call to MarkEventQueueForPurge(). |
|
1119 if (event->message == NS_MOUSE_BUTTON_UP) { |
|
1120 MetroAppShell::InputEventsDispatched(); |
|
1121 } |
|
1122 |
|
1123 // Clear :hover/:active states for mouse events generated by HandleTap |
|
1124 WidgetMouseEvent* mouseEvent = event.get()->AsMouseEvent(); |
|
1125 if (!mouseEvent) { |
|
1126 return; |
|
1127 } |
|
1128 if (mouseEvent->message != NS_MOUSE_BUTTON_UP || |
|
1129 mouseEvent->inputSource != nsIDOMMouseEvent::MOZ_SOURCE_TOUCH) { |
|
1130 return; |
|
1131 } |
|
1132 nsCOMPtr<nsIPresShell> presShell = mWidget->GetPresShell(); |
|
1133 if (presShell) { |
|
1134 EventStateManager* esm = presShell->GetPresContext()->EventStateManager(); |
|
1135 if (esm) { |
|
1136 esm->SetContentState(nullptr, NS_EVENT_STATE_HOVER); |
|
1137 } |
|
1138 } |
|
1139 } |
|
1140 |
|
1141 void |
|
1142 MetroInput::DispatchAsyncTouchEvent(WidgetTouchEvent* aEvent) |
|
1143 { |
|
1144 aEvent->time = ::GetMessageTime(); |
|
1145 mModifierKeyState.Update(); |
|
1146 mModifierKeyState.InitInputEvent(*aEvent); |
|
1147 mInputEventQueue.Push(aEvent); |
|
1148 nsCOMPtr<nsIRunnable> runnable = |
|
1149 NS_NewRunnableMethod(this, &MetroInput::DeliverNextQueuedTouchEvent); |
|
1150 NS_DispatchToCurrentThread(runnable); |
|
1151 } |
|
1152 |
|
1153 static void DumpTouchIds(const char* aTarget, WidgetTouchEvent* aEvent) |
|
1154 { |
|
1155 // Comment this check out to also dump touch-move events.
|
1156 if (aEvent->message == NS_TOUCH_MOVE) { |
|
1157 return; |
|
1158 } |
|
1159 switch(aEvent->message) { |
|
1160 case NS_TOUCH_START: |
|
1161 WinUtils::Log("DumpTouchIds: NS_TOUCH_START block"); |
|
1162 break; |
|
1163 case NS_TOUCH_MOVE: |
|
1164 WinUtils::Log("DumpTouchIds: NS_TOUCH_MOVE block"); |
|
1165 break; |
|
1166 case NS_TOUCH_END: |
|
1167 WinUtils::Log("DumpTouchIds: NS_TOUCH_END block"); |
|
1168 break; |
|
1169 case NS_TOUCH_CANCEL: |
|
1170 WinUtils::Log("DumpTouchIds: NS_TOUCH_CANCEL block"); |
|
1171 break; |
|
1172 } |
|
1173 nsTArray< nsRefPtr<dom::Touch> >& touches = aEvent->touches; |
|
1174 for (uint32_t i = 0; i < touches.Length(); ++i) { |
|
1175 dom::Touch* touch = touches[i]; |
|
1176 if (!touch) { |
|
1177 continue; |
|
1178 } |
|
1179 int32_t id = touch->Identifier(); |
|
1180 WinUtils::Log(" id=%d target=%s", id, aTarget); |
|
1181 } |
|
1182 } |
|
1183 |
|
1184 static void DumpTouchBehavior(nsTArray<uint32_t>& aBehavior) |
|
1185 { |
|
1186 WinUtils::Log("DumpTouchBehavior: Touch behavior flags set for current touch session:"); |
|
1187 for (uint32_t i = 0; i < aBehavior.Length(); i++) { |
|
1188 if (mozilla::layers::AllowedTouchBehavior::VERTICAL_PAN & aBehavior[i]) { |
|
1189 WinUtils::Log("VERTICAL_PAN"); |
|
1190 } |
|
1191 |
|
1192 if (mozilla::layers::AllowedTouchBehavior::HORIZONTAL_PAN & aBehavior[i]) { |
|
1193 WinUtils::Log("HORIZONTAL_PAN"); |
|
1194 } |
|
1195 |
|
1196 if (mozilla::layers::AllowedTouchBehavior::UNKNOWN & aBehavior[i]) { |
|
1197 WinUtils::Log("UNKNOWN"); |
|
1198 } |
|
1199 |
|
1200 if (mozilla::layers::AllowedTouchBehavior::NONE == aBehavior[i]) {
|
1201 WinUtils::Log("NONE"); |
|
1202 } |
|
1203 } |
|
1204 } |
|
1205 |
|
1206 /* |
|
1207  * nsPresShell's processing of WidgetTouchEvent events:
|
1208 * |
|
1209 * NS_TOUCH_START: |
|
1210 * Interprets a single touch point as the first touch point of a block and will reset its |
|
1211 * queue when it receives this. For multiple touch points it sets all points in its queue |
|
1212 * and marks new points as changed. |
|
1213 * NS_TOUCH_MOVE: |
|
1214 * Uses the equality tests in dom::Touch to test if a touch point has changed (moved). |
|
1215 * If a point has moved, keeps this touch point in the event, otherwise it removes |
|
1216 * the touch point. Note if no points have changed, it exits without sending a dom event. |
|
1217 * NS_TOUCH_CANCEL/NS_TOUCH_END |
|
1218 * Assumes any point in touchEvent->touches has been removed or canceled. |
|
1219 */ |
|
1220 |
|
1221 //#define DUMP_TOUCH_IDS(aTarget, aEvent) DumpTouchIds(aTarget, aEvent) |
|
1222 #define DUMP_TOUCH_IDS(...) |
|
1223 |
|
1224 //#define DUMP_ALLOWED_TOUCH_BEHAVIOR(aBehavior) DumpTouchBehavior(aBehavior) |
|
1225 #define DUMP_ALLOWED_TOUCH_BEHAVIOR(...) |
|
1226 |
|
1227 void |
|
1228 MetroInput::HandleFirstTouchStartEvent(WidgetTouchEvent* aEvent) |
|
1229 { |
|
1230 nsEventStatus contentStatus = nsEventStatus_eIgnore; |
|
1231 |
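  // Work on a copy so the apz can transform its coordinates while the

  // original, untransformed event stays available for DispatchTouchCancel.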
|
1232 WidgetTouchEvent transformedEvent(*aEvent); |
|
1233 DUMP_TOUCH_IDS("APZC(1)", aEvent); |
|
1234 mWidget->ApzReceiveInputEvent(&transformedEvent, &mTargetAPZCGuid); |
|
1235 |
|
1236 if (gTouchActionPropertyEnabled) { |
|
1237 nsTArray<TouchBehaviorFlags> touchBehaviors; |
|
1238 // Retrieve the touch behaviors from the apzctm and from content (if needed),

1239 // then hand them back to the apzc. The apzc we retrieve the touch behaviors

1240 // from and the one we set them on may differ when there are multiple touches

1241 // (in that case the apzctm needs to take their common ancestor).

1242 GetAllowedTouchBehavior(&transformedEvent, touchBehaviors);

1243 // Set the touch behaviors on the apzc that will be responsible for

1244 // interpreting them. It may not be the same apzc we retrieved the touch-

1245 // action values from; e.g. for zooming we take the parent apzc of the ones

1246 // that were touched, while the touch behaviors are taken from the children.
|
1247 DUMP_ALLOWED_TOUCH_BEHAVIOR(touchBehaviors); |
|
1248 mWidget->ApzcSetAllowedTouchBehavior(mTargetAPZCGuid, touchBehaviors); |
|
1249 if (IsTouchBehaviorForbidden(touchBehaviors)) { |
|
1250 mContentConsumingTouch = true; |
|
1251 } |
|
1252 } |
|
1253 |
|
1254 DUMP_TOUCH_IDS("DOM(2)", aEvent); |
|
1255 mWidget->DispatchEvent(&transformedEvent, contentStatus); |
|
1256 if (nsEventStatus_eConsumeNoDefault == contentStatus) { |
|
1257 mContentConsumingTouch = true; |
|
1258 } |
|
1259 |
|
1260 if (mContentConsumingTouch) { |
|
1261 mCancelable = false; |
|
1262 mWidget->ApzContentConsumingTouch(mTargetAPZCGuid); |
|
1263 DispatchTouchCancel(aEvent); |
|
1264 } |
|
1265 |
|
1266 // Disable gesture based events (taps, swipes, rotation) if |
|
1267 // preventDefault is called on touchstart. |
|
1268 mRecognizerWantsEvents = !(nsEventStatus_eConsumeNoDefault == contentStatus); |
|
1269 |
|
1270 // If content is consuming touch don't generate any gesture based |
|
1271 // input - clear the recognizer state without sending any events. |
|
1272 if (!ShouldDeliverInputToRecognizer()) { |
|
1273 mGestureRecognizer->CompleteGesture(); |
|
1274 } |
|
1275 } |
|
1276 |
|
1277 void |
|
1278 MetroInput::HandleFirstTouchMoveEvent(WidgetTouchEvent* aEvent) |
|
1279 { |
|
1280 mCancelable = false; |
|
1281 |
|
1282 nsEventStatus contentStatus = nsEventStatus_eIgnore; |
|
1283 nsEventStatus apzcStatus = nsEventStatus_eIgnore; |
|
1284 |
|
1285 WidgetTouchEvent transformedEvent(*aEvent); |
|
1286 DUMP_TOUCH_IDS("APZC(2)", aEvent); |
|
1287 apzcStatus = mWidget->ApzReceiveInputEvent(&transformedEvent, &mTargetAPZCGuid); |
|
1288 |
|
1289 // We need to dispatch only a touch event here, not a pointer event.

1290 // According to the spec, pointer events don't require a pointermove event

1291 // between pointerdown and pointercancel (if the default touch behavior is triggered).

1292 // At the same time we still need to dispatch at least a touchmove event so that

1293 // content gets a chance to consume it (or not).

1294 // TODO: determine how to dispatch only one kind of event; currently there are two options:

1295 // 1) Create two separate instances, a WidgetTouchEvent and a WidgetPointerEvent, and

1296 // dispatch them separately.

1297 // 2) Add a boolean flag to WidgetTouchEvent that states whether this event should produce

1298 // both touch and pointer events or only a touch event.

1299 // Either way, this is only worth adding once the patches from bug 822898 (Pointer events)

1300 // are fully committed.
|
1301 DUMP_TOUCH_IDS("DOM(3)", aEvent); |
|
1302 mWidget->DispatchEvent(&transformedEvent, contentStatus); |
|
1303 |
|
1304 // Checking content result first since content can override apzc wish and disallow apzc touch |
|
1305 // behavior (via preventDefault). |
|
1306 if (nsEventStatus_eConsumeNoDefault == contentStatus) { |
|
1307 // Touchmove handler consumed touch. |
|
1308 mContentConsumingTouch = true; |
|
1309 } else if (nsEventStatus_eConsumeNoDefault == apzcStatus) { |
|
1310 // Apzc triggered default behavior. |
|
1311 mApzConsumingTouch = true; |
|
1312 } |
|
1313 |
|
1314 // Let the apz know if content wants to consume touch events, or cancel |
|
1315 // the touch block for content. |
|
1316 if (mContentConsumingTouch) { |
|
1317 mWidget->ApzContentConsumingTouch(mTargetAPZCGuid); |
|
1318 DispatchTouchCancel(aEvent); |
|
1319 } else { |
|
1320 mWidget->ApzContentIgnoringTouch(mTargetAPZCGuid); |
|
1321 } |
|
1322 |
|
1323 if (mApzConsumingTouch) { |
|
1324 // Dispatching cancel to the content. |
|
1325 DispatchTouchCancel(&transformedEvent); |
|
1326 } |
|
1327 } |
|
1328 |
|
1329 void |
|
1330 MetroInput::DeliverNextQueuedTouchEvent() |
|
1331 { |
|
1332 /* |
|
1333 * We go through states here and make different decisions in each: |
|
1334 * |
|
1335 * 1) Hit test for apz on first touchstart |
|
1336 * If non-apzc content/chrome is the target simplify event delivery from |
|
1337 * that point on by directing all input to chrome, bypassing the apz. |
|
1338 * 2) Process first touchstart and touchmove events |
|
1339 * If touch behavior value associated with the TouchStart's touches doesn't |
|
1340 * allow zooming or panning we explicitly set mContentConsumingTouch to true. |
|
1341 * Otherwise check the result and set mContentConsumingTouch appropriately. |
|
1342 * Deliver touch events to the apz (ignoring return result) and to content. |
|
1343 * 3) If mContentConsumingTouch is true: deliver touch to content after |
|
1344 * transforming through the apz. Also let the apz know content is |
|
1345 * consuming touch and deliver cancel event to apz. |
|
1346 * 4) If mContentConsumingTouch is false: check the result from the apz and |
|
1347 * set mApzConsumingTouch appropriately. |
|
1348 * 5) If mApzConsumingTouch is true: send a touchcancel to content |
|
1349 * and deliver all events to the apz. If the apz is doing something with |
|
1350 * the events we can save ourselves the overhead of delivering dom events. |
|
1351 * |
|
1352 * Notes: |
|
1353 * - never rely on the contents of mTouches here, since this is a delayed |
|
1354 * callback. mTouches will likely have been modified. |
|
1355 */ |
|
1356 nsEventStatus status = nsEventStatus_eIgnore; |
|
1357 |
|
1358 WidgetTouchEvent* event = |
|
1359 static_cast<WidgetTouchEvent*>(mInputEventQueue.PopFront()); |
|
1360 MOZ_ASSERT(event); |
|
1361 |
|
1362 AutoDeleteEvent wrap(event); |
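  // The queued event was heap-allocated by DispatchAsyncTouchEvent; free it

  // when this method returns.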
|
1363 |
|
1364 // Test for non-apz vs. apz target. To do this we only use the first touch |
|
1365 // point since that will be the input batch target. Cache this for touch events |
|
1366 // since HitTestChrome has to send a dom event. |
|
1367 if (mCancelable && event->message == NS_TOUCH_START) { |
|
1368 nsRefPtr<Touch> touch = event->touches[0]; |
|
1369 LayoutDeviceIntPoint pt = LayoutDeviceIntPoint::FromUntyped(touch->mRefPoint); |
|
1370 // This is currently a general contained-rect hit test; it may produce a false

1371 // positive for overlay chrome elements. Also, some content pages won't support

1372 // apzc, so this may be false for content as well.
|
1373 bool apzIntersect = mWidget->ApzHitTest(mozilla::ScreenIntPoint(pt.x, pt.y)); |
|
1374 mNonApzTargetForTouch = (!apzIntersect || HitTestChrome(pt)); |
|
1375 } |
|
1376 |
|
1377 // If this event is destined for dom, deliver it directly there bypassing |
|
1378 // the apz. |
|
1379 if (mNonApzTargetForTouch) { |
|
1380 DUMP_TOUCH_IDS("DOM(1)", event); |
|
1381 mWidget->DispatchEvent(event, status); |
|
1382 if (mCancelable) { |
|
1383 // Disable gesture based events (taps, swipes, rotation) if |
|
1384 // preventDefault is called on touchstart. |
|
1385 if (nsEventStatus_eConsumeNoDefault == status) { |
|
1386 mRecognizerWantsEvents = false; |
|
1387 mGestureRecognizer->CompleteGesture(); |
|
1388 } |
|
1389 if (event->message == NS_TOUCH_MOVE) { |
|
1390 mCancelable = false; |
|
1391 } |
|
1392 } |
|
1393 return; |
|
1394 } |
|
1395 |
|
1396 if (mCancelable && event->message == NS_TOUCH_START) { |
|
1397 HandleFirstTouchStartEvent(event); |
|
1398 return; |
|
1399 } else if (mCancelable && event->message == NS_TOUCH_MOVE) { |
|
1400 HandleFirstTouchMoveEvent(event); |
|
1401 return; |
|
1402 } |
|
1403 // Let TouchEnd events go through even if mCancelable is true since we |
|
1404 // don't need to check whether it is prevented by content or consumed |
|
1405 // by apzc. |
|
1406 |
|
  // If content is consuming touch, we may need to transform event coords
  // through the apzc before sending to the dom. Otherwise send the event
  // to apzc.
  if (mContentConsumingTouch) {
    // Only translate if we're dealing with web content that's transformed
    // by the apzc.
    TransformTouchEvent(event);
    DUMP_TOUCH_IDS("DOM(4)", event);
    mWidget->DispatchEvent(event, status);
    return;
  }

  DUMP_TOUCH_IDS("APZC(3)", event);
  status = mWidget->ApzReceiveInputEvent(event, nullptr);

  // If we're getting a new touch (touch start) after some touch start/move
  // events, we need to reset the allowed touch behavior for the touches.
  if (gTouchActionPropertyEnabled && event->message == NS_TOUCH_START) {
    nsTArray<TouchBehaviorFlags> touchBehaviors;
    GetAllowedTouchBehavior(event, touchBehaviors);
    DUMP_ALLOWED_TOUCH_BEHAVIOR(touchBehaviors);
    mWidget->ApzcSetAllowedTouchBehavior(mTargetAPZCGuid, touchBehaviors);
  }
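  // Assumption: mTargetAPZCGuid identifies the APZC targeted by the current
  // touch block and is maintained elsewhere in this class.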

  // Send the event to content unless APZC is consuming it.
  if (!mApzConsumingTouch) {
    if (status == nsEventStatus_eConsumeNoDefault) {
      mApzConsumingTouch = true;
      DispatchTouchCancel(event);
      return;
    }
    TransformTouchEvent(event);
    DUMP_TOUCH_IDS("DOM(5)", event);
    mWidget->DispatchEvent(event, status);
  }
}

void
MetroInput::DispatchTouchCancel(WidgetTouchEvent* aEvent)
{
  MOZ_ASSERT(aEvent);
  // Send a touchcancel for each pointer id we have a corresponding start
  // for. Note we can't rely on mTouches here since touchends remove points
  // from it.
  WidgetTouchEvent touchEvent(true, NS_TOUCH_CANCEL, mWidget.Get());
  nsTArray< nsRefPtr<dom::Touch> >& touches = aEvent->touches;
  for (uint32_t i = 0; i < touches.Length(); ++i) {
    dom::Touch* touch = touches[i];
    if (!touch) {
      continue;
    }
    int32_t id = touch->Identifier();
    if (mCanceledIds.Contains(id)) {
      continue;
    }
    mCanceledIds.AppendElement(id);
    touchEvent.touches.AppendElement(touch);
  }
  if (!touchEvent.touches.Length()) {
    return;
  }
  if (mContentConsumingTouch) {
    DUMP_TOUCH_IDS("APZC(4)", &touchEvent);
    mWidget->ApzReceiveInputEvent(&touchEvent, nullptr);
  } else {
    DUMP_TOUCH_IDS("DOM(6)", &touchEvent);
    mWidget->DispatchEvent(&touchEvent, sThrowawayStatus);
  }
}
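// Design note for DispatchTouchCancel above: the cancel is routed to whichever
// side is not going to keep receiving the block (to the apz when content is
// consuming the touch, otherwise to the dom), and mCanceledIds ensures each
// pointer id is canceled at most once.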

void
MetroInput::DispatchEventIgnoreStatus(WidgetGUIEvent *aEvent)
{
  mWidget->DispatchEvent(aEvent, sThrowawayStatus);
}

void
MetroInput::UnregisterInputEvents() {
  // Unregister ourselves for the edge swipe event
  WRL::ComPtr<UI::Input::IEdgeGestureStatics> edgeStatics;
  if (SUCCEEDED(Foundation::GetActivationFactory(
        WRL::Wrappers::HStringReference(
          RuntimeClass_Windows_UI_Input_EdgeGesture).Get(),
        edgeStatics.GetAddressOf()))) {
    WRL::ComPtr<UI::Input::IEdgeGesture> edge;
    if (SUCCEEDED(edgeStatics->GetForCurrentView(edge.GetAddressOf()))) {
      edge->remove_Starting(mTokenEdgeStarted);
      edge->remove_Canceled(mTokenEdgeCanceled);
      edge->remove_Completed(mTokenEdgeCompleted);
    }
  }
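  // Note: unlike RegisterInputEvents below, the factory and view lookups here
  // are guarded with SUCCEEDED; if either fails during teardown, the edge
  // gesture unregistration is simply skipped.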
  // Unregister ourselves from the window events. This is extremely important;
  // once this object is destroyed we don't want Windows to try to send events
  // to it.
  mWindow->remove_PointerPressed(mTokenPointerPressed);
  mWindow->remove_PointerReleased(mTokenPointerReleased);
  mWindow->remove_PointerMoved(mTokenPointerMoved);
  mWindow->remove_PointerEntered(mTokenPointerEntered);
  mWindow->remove_PointerExited(mTokenPointerExited);

  // Unregistering from the gesture recognizer events probably isn't as
  // necessary since we're about to destroy the gesture recognizer, but
  // it can't hurt.
  mGestureRecognizer->remove_ManipulationCompleted(
    mTokenManipulationCompleted);
  mGestureRecognizer->remove_Tapped(mTokenTapped);
  mGestureRecognizer->remove_RightTapped(mTokenRightTapped);
}
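// (The mToken* members removed above are assumed to be the
// EventRegistrationToken values filled in by the matching add_* calls in
// RegisterInputEvents below.)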

void
MetroInput::RegisterInputEvents()
{
  NS_ASSERTION(mWindow, "Must have a window to register for input events!");
  NS_ASSERTION(mGestureRecognizer,
               "Must have a GestureRecognizer for input events!");
  // Register for edge swipe
  WRL::ComPtr<UI::Input::IEdgeGestureStatics> edgeStatics;
  Foundation::GetActivationFactory(
    WRL::Wrappers::HStringReference(
      RuntimeClass_Windows_UI_Input_EdgeGesture).Get(),
    edgeStatics.GetAddressOf());
  WRL::ComPtr<UI::Input::IEdgeGesture> edge;
  edgeStatics->GetForCurrentView(edge.GetAddressOf());

  edge->add_Starting(
    WRL::Callback<EdgeGestureHandler>(
      this,
      &MetroInput::OnEdgeGestureStarted).Get(),
    &mTokenEdgeStarted);

  edge->add_Canceled(
    WRL::Callback<EdgeGestureHandler>(
      this,
      &MetroInput::OnEdgeGestureCanceled).Get(),
    &mTokenEdgeCanceled);

  edge->add_Completed(
    WRL::Callback<EdgeGestureHandler>(
      this,
      &MetroInput::OnEdgeGestureCompleted).Get(),
    &mTokenEdgeCompleted);

  // Set up our Gesture Recognizer to raise events for the gestures we
  // care about
  mGestureRecognizer->put_GestureSettings(
    UI::Input::GestureSettings::GestureSettings_Tap
    | UI::Input::GestureSettings::GestureSettings_DoubleTap
    | UI::Input::GestureSettings::GestureSettings_RightTap
    | UI::Input::GestureSettings::GestureSettings_Hold
    | UI::Input::GestureSettings::GestureSettings_ManipulationTranslateX
    | UI::Input::GestureSettings::GestureSettings_ManipulationTranslateY);
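  // Note: only tap, double-tap, right-tap, hold, and X/Y translation gestures
  // are requested from the recognizer; no scale or rotation manipulation
  // settings are enabled here.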

  // Register for the pointer events on our Window
  mWindow->add_PointerPressed(
    WRL::Callback<PointerEventHandler>(
      this,
      &MetroInput::OnPointerPressed).Get(),
    &mTokenPointerPressed);

  mWindow->add_PointerReleased(
    WRL::Callback<PointerEventHandler>(
      this,
      &MetroInput::OnPointerReleased).Get(),
    &mTokenPointerReleased);

  mWindow->add_PointerMoved(
    WRL::Callback<PointerEventHandler>(
      this,
      &MetroInput::OnPointerMoved).Get(),
    &mTokenPointerMoved);

  mWindow->add_PointerEntered(
    WRL::Callback<PointerEventHandler>(
      this,
      &MetroInput::OnPointerEntered).Get(),
    &mTokenPointerEntered);

  mWindow->add_PointerExited(
    WRL::Callback<PointerEventHandler>(
      this,
      &MetroInput::OnPointerExited).Get(),
    &mTokenPointerExited);

  // Register for the events raised by our Gesture Recognizer
  mGestureRecognizer->add_Tapped(
    WRL::Callback<TappedEventHandler>(
      this,
      &MetroInput::OnTapped).Get(),
    &mTokenTapped);

  mGestureRecognizer->add_RightTapped(
    WRL::Callback<RightTappedEventHandler>(
      this,
      &MetroInput::OnRightTapped).Get(),
    &mTokenRightTapped);

  mGestureRecognizer->add_ManipulationCompleted(
    WRL::Callback<ManipulationCompletedEventHandler>(
      this,
      &MetroInput::OnManipulationCompleted).Get(),
    &mTokenManipulationCompleted);
}

} } }