It is not possible by default. There is a check in alloy_browser_host_impl.cc:
Code:
void AlloyBrowserHostImpl::SendTouchEvent(const CefTouchEvent& event) {
  if (!IsWindowless()) {
    DCHECK(false) << "Window rendering is not disabled";
    return;
  }
  ...
Removing this check means that the empty CefBrowserPlatformDelegateNativeAura::SendTouchEvent(const CefTouchEvent& event) {} is called instead.
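For reference, a sketch of the relaxed method after dropping the DCHECK (my assumption here is that it should simply fall through to the platform delegate, like the other Send*Event methods in that file):

Code:
void AlloyBrowserHostImpl::SendTouchEvent(const CefTouchEvent& event) {
  // Windowless (OSR) check removed so windowed browsers also reach
  // the platform delegate.
  if (!CEF_CURRENTLY_ON_UIT()) {
    CEF_POST_TASK(CEF_UIT,
                  base::BindOnce(&AlloyBrowserHostImpl::SendTouchEvent,
                                 this, event));
    return;
  }
  if (platform_delegate_) {
    platform_delegate_->SendTouchEvent(event);
  }
}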
I tried to implement this function to support touches and gestures as shown below, but it still does not behave like real touch input.
Code:
#include "ui/aura/env.h"
#include "ui/aura/window.h"
#include "ui/events/event.h"
#include "ui/events/gestures/gesture_recognizer.h"

void CefBrowserPlatformDelegateNativeAura::SendTouchEvent(
    const CefTouchEvent& event) {
  auto view = GetHostView();
  gfx::Point location(event.x, event.y);
  base::TimeTicks time_stamp = GetEventTimeStamp();
  ui::PointerDetails pointer_details{
      ui::EventPointerType::kTouch,
      event.id};  //, event.radius_x, event.radius_y, event.pressure };

  // Map the CEF touch event type onto the corresponding ui::EventType.
  ui::EventType event_type;
  if (event.type == CEF_TET_PRESSED)
    event_type = ui::EventType::kTouchPressed;
  else if (event.type == CEF_TET_MOVED)
    event_type = ui::EventType::kTouchMoved;
  else
    event_type = ui::EventType::kTouchReleased;

  ui::TouchEvent touch_event{event_type, location, time_stamp,
                             pointer_details};

  // Drive the gesture recognizer by hand so taps/scrolls/pinches are
  // synthesized from the raw touch stream.
  ui::GestureRecognizer* gesture_recognizer =
      aura::Env::GetInstance()->gesture_recognizer();
  if (!gesture_recognizer)
    return;
  if (!window_widget_)
    return;
  aura::Window* window = window_widget_->GetNativeWindow();
  if (!window)
    return;
  if (!gesture_recognizer->ProcessTouchEventPreDispatch(&touch_event, window))
    return;

  // Ack the touch as unhandled so the recognizer emits gesture events,
  // then forward the gestures followed by the raw touch to the view.
  auto gestures = gesture_recognizer->AckTouchEvent(
      touch_event.unique_event_id(), ui::ER_UNHANDLED, false, window);
  for (auto& gesture : gestures) {
    view->OnGestureEvent(gesture.get());
  }
  view->OnTouchEvent(&touch_event);
}
Is there a better way to forward touch events?
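One idea I have not verified yet: instead of driving ui::GestureRecognizer by hand, inject the ui::TouchEvent at the top of the aura event pipeline and let the WindowEventDispatcher do the pre-dispatch, targeting, gesture recognition and ack itself, the same way real hardware touches are handled. A minimal sketch, assuming touch_event and window_widget_ from the snippet above and that the event location is in host-window coordinates:

Code:
  // Sketch: hand the touch to the aura dispatcher instead of calling
  // the gesture recognizer directly (replaces the gesture block above).
  aura::WindowTreeHost* host = window_widget_->GetNativeWindow()->GetHost();
  if (!host)
    return;
  ui::EventDispatchDetails details =
      host->dispatcher()->OnEventFromSource(&touch_event);
  if (details.dispatcher_destroyed)
    return;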
BTW
In DevTools there is a device emulation mode where the mouse can simulate touch, and it works great. I wish my implementation worked as well as that.
