Ok gotcha. If your ultimate goal is just to get the same behavior as ArcRotateCamera
in Babylon React Native, then the best solution would be to get ArcRotateCamera
functioning properly in Babylon React Native. For that, @PolygonalSun should have the latest info.
If you have a broader goal of recognizing touch gestures (pan, pinch, twist, etc.) and doing some arbitrary behavior (such as manipulating a model), then I think something like what you have done above will be required. For that I would recommend using an existing JavaScript gesture recognition library. Ideally such a library would exist that is not tied to DOM APIs (where you could just call functions to communicate pointer state changes), but unfortunately I’m not aware of any existing libraries that work this way. The next best bet would be to convert DeviceSourceManager
events into DOM events and feed those into an existing JavaScript gesture recognition library. This is the route that I have taken for one of the projects I work on, and I’m happy to share my code as an example:
import {
DeviceSourceManager,
DeviceSource,
DeviceType,
PointerInput
} from '@babylonjs/core';
import {
useState,
useRef
} from 'react';
import {
useEffectAsync
} from '../../utility/AsyncEffect';
import {
AsyncLock
} from '../../utility/AsyncLock';
import {
CancellationToken
} from "../../utility/CancellationToken";
declare const global: any;
// Stubs out the minimal surface area of the global document object needed to make Hammer.js
// functional at import time. Hammer.js appears to only need createElement and, on the resulting
// element, a writable style.touchAction — so that is the entire shape we fake here.
const documentStub: Partial<Document> = {
    createElement: (): HTMLElement => {
        const fakeStyle = { touchAction: "" } as CSSStyleDeclaration;
        return { style: fakeStyle } as HTMLElement;
    }
};
// This class fulfills the contract of EventTarget, which is the part of HTMLElement that
// Hammer.js actually cares about. It lets us route our own input through this contract so
// Hammer.js can receive raw input and translate it to gestures.
class SyntheticEventTarget implements EventTarget {
    // Listener registry: event type -> set of listeners. Set gives DOM-like deduplication
    // of repeated addEventListener calls with the same listener.
    private readonly handlerMap = new Map<string, Set<EventListenerOrEventListenerObject>>();

    // Hammer.js uses this to allow movement outside of an HTML element to continue contributing
    // to a gesture. For our purpose, we can simply treat this EventTarget as its own parent window.
    public get parentWindow(): any {
        return this;
    }

    /**
     * Synchronously invokes the listeners registered for event.type.
     *
     * Iterates over a snapshot of the listener set so that a listener which registers a new
     * listener during dispatch does not cause that new listener to run in the same dispatch,
     * and removals made during dispatch are honored — matching DOM dispatch semantics and
     * avoiding mutation of the Set while it is being iterated.
     *
     * @returns always true — event cancelation (preventDefault) is not modeled.
     */
    public dispatchEvent(event: Event): boolean {
        const handlers = this.handlerMap.get(event.type);
        if (handlers) {
            // Snapshot first; skip any listener that an earlier listener removed.
            for (const handler of Array.from(handlers)) {
                if (!handlers.has(handler)) {
                    continue;
                }
                if ('handleEvent' in handler) {
                    handler.handleEvent(event);
                } else {
                    handler(event);
                }
            }
        }
        return true;
    }

    // Registers a listener for the given event type; duplicate registrations are no-ops.
    public addEventListener(eventType: string, listener: EventListenerOrEventListenerObject): void {
        let handlers = this.handlerMap.get(eventType);
        if (!handlers) {
            handlers = new Set<EventListenerOrEventListenerObject>();
            this.handlerMap.set(eventType, handlers);
        }
        handlers.add(listener);
    }

    // Unregisters a listener; drops the per-type set once empty so the map does not
    // accumulate empty entries for event types that no longer have listeners.
    public removeEventListener(eventType: string, listener: EventListenerOrEventListenerObject): void {
        const handlers = this.handlerMap.get(eventType);
        if (handlers?.delete(listener) && handlers.size === 0) {
            this.handlerMap.delete(eventType);
        }
    }
}
// Identifies which gesture a GestureEvent describes.
export enum Gesture {
tap,
press,
pan,
twist,
pinch,
}
// Identifies the phase of a multi-stage gesture. Tap has no stages; press reports only
// start and end (see GestureEvent below).
export enum GestureStage {
start,
continue,
end,
}
// Discriminated union (on the 'gesture' field) describing one recognized gesture event:
// - tap:   position only (no stage).
// - press: start/end stage plus position.
// - pan:   stage, absolute position, and position delta since the previous pan event.
// - twist: stage, rotation relative to gesture start, and delta since the previous event.
// - pinch: stage, absolute scale factor, and scale ratio relative to the previous event.
export type GestureEvent<T extends Gesture> =
T extends Gesture.tap ? { gesture: T, x: number, y: number } :
T extends Gesture.press ? { gesture: T, stage: GestureStage.start | GestureStage.end, x: number, y: number } :
T extends Gesture.pan ? { gesture: T, stage: GestureStage, x: number, y: number, xDelta: number, yDelta: number } :
T extends Gesture.twist ? { gesture: T, stage: GestureStage, rotation: number, rotationDelta: number } :
T extends Gesture.pinch ? { gesture: T, stage: GestureStage, scale: number, scaleDelta: number } :
never;
// Callback signature for consumers of recognized gestures.
export type GestureEventHandler = (gestureEvent: GestureEvent<Gesture>) => void;
/**
 * React hook that performs touch gesture recognition (tap, press, pan, twist, pinch) on input
 * from a Babylon DeviceSourceManager by converting Babylon touch events into synthetic DOM
 * TouchEvents and feeding them to Hammer.js.
 *
 * @param deviceSourceManager Source of raw touch input; the hook is inactive while undefined.
 * @param onGestureRecognized Invoked once per recognized gesture event; inactive while undefined.
 * @returns true while recognition is wired up (Hammer.js loaded and listening), false otherwise.
 */
export function useGestureRecognition(
deviceSourceManager: DeviceSourceManager | undefined,
onGestureRecognized: GestureEventHandler | undefined,
): boolean {
// Stable per-component lock so overlapping runs of the async effect are serialized.
const asyncLock = useRef(new AsyncLock()).current;
const [isEnabled, setIsEnabled] = useState(false);
useEffectAsync(async (cancellationToken: CancellationToken) => {
if (deviceSourceManager && onGestureRecognized) {
// Hammer.js executes code at import time that expects to find the global document object.
// To deal with this, define a stubbed out global document, then dynamically import Hammer.js,
// then remove the stubbed out global document (as it would otherwise affect other libs, like Babylon).
global.document = documentStub;
const { TouchInput, Tap, Press, Pan, Pinch, Rotate } = await import('hammerjs');
global.document = undefined;
// The import is asynchronous; skip wiring if this effect was cancelled (e.g. unmount) meanwhile.
if (!cancellationToken.cancelled) {
// Hammer.js will treat this synthetic target as the "element" it observes for touch events.
const eventTarget: EventTarget = new SyntheticEventTarget();
// Configure the gestures we want to recognize.
const recognizers: RecognizerTuple[] = [
[Tap, { time: 100 }], // If the time between touch down and touch up is less than 100ms, consider it a tap.
[Press, { time: 100 }], // If a touch is down for more than 100ms, consider it a press.
[Pan],
[Rotate, {}, ['pan']], // Rotate is allowed to happen at the same time as pan.
[Pinch, {}, ['rotate', 'pan']], // Pinch is allowed to happen at the same time as rotate and pan.
];
// Instantiate Hammer, passing it the "synthetic" EventTarget as an HTMLElement, and configure it for touch input and with the recognizers defined above.
// NOTE(review): `Hammer` here is the ambient global from the hammerjs typings rather than a
// binding from the dynamic import above — this relies on hammerjs registering a global at
// import time; confirm that holds in the target runtime.
const hammer = new Hammer(eventTarget as unknown as HTMLElement, { inputClass: TouchInput, recognizers: recognizers });
// Tap handler
{
hammer.on("tap", (gestureEvent: HammerInput) => {
onGestureRecognized({ gesture: Gesture.tap, x: gestureEvent.center.x, y: gestureEvent.center.y });
});
}
// Press handler: "press" marks the start, "pressup" the release; press has no 'continue' stage.
{
hammer.on("press pressup", (gestureEvent: HammerInput) => {
const stage = gestureEvent.type === "press" ? GestureStage.start : GestureStage.end;
onGestureRecognized({ gesture: Gesture.press, stage: stage, x: gestureEvent.center.x, y: gestureEvent.center.y });
});
}
// Pan handler: reports absolute position plus the delta since the previous pan event
// (deltas are 0 on the start event because lastPan is reset there).
{
let lastPan = { x: 0, y: 0 };
hammer.on("panstart panmove panend", (gestureEvent: HammerInput) => {
const stage =
gestureEvent.type === "panstart" ? GestureStage.start :
gestureEvent.type === "panend" ? GestureStage.end :
GestureStage.continue;
if (stage === GestureStage.start) {
lastPan = gestureEvent.center;
}
onGestureRecognized({ gesture: Gesture.pan, stage: stage, x: gestureEvent.center.x, y: gestureEvent.center.y, xDelta: gestureEvent.center.x - lastPan.x, yDelta: gestureEvent.center.y - lastPan.y });
lastPan = gestureEvent.center;
});
}
// Rotate handler: surfaced to callers as the 'twist' gesture. 'rotation' is reported relative
// to the rotation at gesture start; 'rotationDelta' is relative to the previous event.
{
let firstRotate = 0;
let lastRotate = 0;
hammer.on("rotatestart rotatemove rotateend", (gestureEvent: HammerInput) => {
const stage =
gestureEvent.type === "rotatestart" ? GestureStage.start :
gestureEvent.type === "rotateend" ? GestureStage.end :
GestureStage.continue;
if (stage === GestureStage.start) {
firstRotate = lastRotate = gestureEvent.rotation;
}
onGestureRecognized({ gesture: Gesture.twist, stage: stage, rotation: gestureEvent.rotation - firstRotate, rotationDelta: gestureEvent.rotation - lastRotate });
lastRotate = gestureEvent.rotation;
});
}
// Pinch handler: 'scale' is Hammer's absolute scale factor; 'scaleDelta' is the ratio to the
// previous event's scale (1 on the start event).
// NOTE(review): if a pinchmove ever arrived without a preceding pinchstart, lastScale would
// still be 0 and scaleDelta would be Infinity — presumed impossible given Hammer's event
// ordering; confirm.
{
let lastScale = 0;
hammer.on("pinchstart pinchmove pinchend", (gestureEvent: HammerInput) => {
const stage =
gestureEvent.type === "pinchstart" ? GestureStage.start :
gestureEvent.type === "pinchend" ? GestureStage.end :
GestureStage.continue;
if (stage === GestureStage.start) {
lastScale = gestureEvent.scale;
}
onGestureRecognized({ gesture: Gesture.pinch, stage: stage, scale: gestureEvent.scale, scaleDelta: gestureEvent.scale / lastScale });
lastScale = gestureEvent.scale;
});
}
// Bridge Babylon touch input to DOM-style TouchEvents dispatched on the synthetic target.
const afterDeviceConnectedObserver = deviceSourceManager.onDeviceConnectedObservable.add(deviceEventData => {
if (deviceEventData !== undefined && deviceEventData.deviceType === DeviceType.Touch) {
const changedTouchDeviceSource: DeviceSource<DeviceType.Touch> = deviceSourceManager.getDeviceSource(deviceEventData.deviceType, deviceEventData.deviceSlot)!;
// NOTE(review): this per-device observer is never explicitly removed; presumably it is
// released along with the device source itself — confirm.
changedTouchDeviceSource.onInputChangedObservable.add(inputEventData => {
// 'Touch' is the contract for DOM touches.
const touches: Array<Partial<Touch>> = [];
const changedTouches: Array<Partial<Touch>> = [];
// We need to report all active touches, so enumerate all touch device sources.
for (const touchDeviceSource of deviceSourceManager.getDeviceSources(DeviceType.Touch)) {
const touch: Partial<Touch> = {
identifier: touchDeviceSource.deviceSlot,
clientX: touchDeviceSource.getInput(PointerInput.Horizontal),
clientY: touchDeviceSource.getInput(PointerInput.Vertical),
target: eventTarget,
};
// Changed touches should only include the source of the current touch event.
if (touchDeviceSource === changedTouchDeviceSource) {
changedTouches.push(touch);
}
// Touches should include all touches, including the source of the current touch event.
touches.push(touch);
}
// A LeftClick state change represents touch down (non-zero) / up (zero); any other
// input index change is treated as movement.
const event: Partial<TouchEvent> = {
type: inputEventData.inputIndex === PointerInput.LeftClick ? (inputEventData.currentState === 0 ? "touchend" : "touchstart") : "touchmove",
touches: touches as unknown as TouchList,
changedTouches: changedTouches as unknown as TouchList,
};
eventTarget.dispatchEvent(event as Event);
});
}
});
setIsEnabled(true);
// Cleanup for this effect run; isMounted tells us whether state updates are still safe.
return (isMounted: boolean) => {
hammer.destroy();
deviceSourceManager.onDeviceConnectedObservable.remove(afterDeviceConnectedObserver);
if (isMounted) {
setIsEnabled(false);
}
};
}
}
// Inactive (missing inputs or cancelled before wiring): nothing to clean up.
return undefined;
}, asyncLock, [deviceSourceManager, onGestureRecognized]);
return isEnabled;
}
Some notes on this:
- It’s a custom React hook, and you just pass in a
DeviceSourceManager
and a gesture handler callback. - It uses some of our other constructs for dealing with asynchrony, but you could simplify it by replacing the
await import('hammerjs')
with a require('hammerjs')
and just using a regular synchronous useEffect
.