Bevy version: 0.16
Content
My game didn't receive any touch input on Android, even though it was working just fine on desktop and iOS. However, I was able to write a plugin that detects touch input in GameActivity and forwards it to Bevy.
Using in Bevy
Add plugin:
...
app.add_plugins(bevy_android_input::AndroidInputPlugin);
...
It also requires adding an Android-only dependency and an entry binding that will be used by GameActivity.
Android only dependency:
[target.'cfg(target_os = "android")'.dependencies]
jni = "0.21"
Function that will be used by the Java GameActivity:
/// JNI entry point called by the Java `MainActivity.setTouchState` native method.
///
/// The symbol name must follow the JNI naming convention
/// (`Java_<package with underscores>_<class>_<method>`) so the JVM can
/// resolve it at runtime.
#[cfg(target_os = "android")]
#[no_mangle]
pub extern "system" fn Java_org_mycompany_idlerpg_MainActivity_setTouchState(
    _env: jni::sys::JNIEnv,
    _class: jni::objects::JClass,
    id: jni::sys::jint,
    x: jni::sys::jfloat,
    y: jni::sys::jfloat,
    state: jni::sys::jint,
) {
    // `jint` and `jfloat` are type aliases for `i32` and `f32`,
    // so the `as` casts of the original were no-ops and are dropped.
    bevy_android_input::queue_input_update(id, x, y, state);
}
Notice the name of the function: it must follow the pattern Java_ + application ID with underscores instead of dots + _ + activity class + _ + method name. So, since I have the org.mycompany.idlerpg application ID with the MainActivity class and the setTouchState method, it must be named Java_org_mycompany_idlerpg_MainActivity_setTouchState.
In the java MainActivity
class, you need to add the following code:
public class MainActivity extends GameActivity {
...
// Implemented in Rust as Java_org_mycompany_idlerpg_MainActivity_setTouchState.
public native void setTouchState(int id, float x, float y, int state);
@Override
public boolean onTouchEvent(MotionEvent event) {
int action = event.getActionMasked();
int pointerIndex = event.getActionIndex();
int pointerId = event.getPointerId(pointerIndex);
// Read the coordinates of the pointer that triggered this event.
// getX()/getY() without an index always return pointer 0, which is
// wrong for secondary pointers in ACTION_POINTER_DOWN/ACTION_POINTER_UP.
float x = event.getX(pointerIndex);
float y = event.getY(pointerIndex);
switch (action) {
case MotionEvent.ACTION_DOWN:
case MotionEvent.ACTION_POINTER_DOWN:
setTouchState(pointerId, x, y, 0); // 0 = Started
break;
case MotionEvent.ACTION_MOVE:
// ACTION_MOVE batches all active pointers into a single event,
// so report every pointer individually.
for (int i = 0; i < event.getPointerCount(); i++) {
setTouchState(event.getPointerId(i), event.getX(i), event.getY(i), 1); // 1 = Moved
}
break;
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_POINTER_UP:
setTouchState(pointerId, x, y, 2); // 2 = Ended
break;
case MotionEvent.ACTION_CANCEL:
// Cancel every active pointer; the Rust side maps any state
// other than 0/1/2 to TouchPhase::Canceled.
for (int i = 0; i < event.getPointerCount(); i++) {
setTouchState(event.getPointerId(i), event.getX(i), event.getY(i), 3); // 3 = Canceled
}
break;
}
return super.onTouchEvent(event);
}
...
}
Code
In this example there are external dependencies besides Bevy; here is the Cargo.toml file of the plugin:
[package]
name = "bevy_android_input"
version = "0.1.0"
edition = "2021"
authors = ["Piotr Siuszko <siuszko@zoho.com>"]
license = "MIT OR Apache-2.0"
keywords = ["bevy", "android", "plugin"]
[dependencies]
once_cell = "1"
crossbeam = "0.8"
bevy = {version = "0.16", default-features = false, features = [
"bevy_ui","bevy_picking","bevy_window"]}
bevy_reflect = "0.16"
Plugin src/lib.rs
//! Android input handling plugin for Bevy applications.
//!
//! This module provides functionality to bridge Android touch input events
//! with Bevy's input system, handling coordinate transformations and camera
//! viewport adjustments.
use bevy::{
app::{First, Plugin, Startup, Update},
ecs::{
entity::{ContainsEntity, Entity},
event::EventWriter,
query::{Changed, With},
resource::Resource,
schedule::IntoScheduleConfigs,
system::{Query, Res, ResMut, Single},
},
input::touch::{TouchInput, TouchPhase},
math::Vec2,
prelude::ReflectResource,
reflect::Reflect,
render::camera::{Camera, NormalizedRenderTarget},
ui::IsDefaultUiCamera,
window::PrimaryWindow,
};
use crossbeam::queue::SegQueue;
use once_cell::sync::Lazy;
/// Thread-safe, lock-free queue for storing Android touch input events.
/// The Android native layer pushes events here (via [`queue_input_update`])
/// and the Bevy systems drain them each frame.
// `SegQueue::new` is passed directly instead of the redundant
// closure `|| SegQueue::new()` (clippy: redundant_closure).
static INPUT_QUEUE: Lazy<SegQueue<AndroidTouchData>> = Lazy::new(SegQueue::new);
/// Bevy plugin that integrates Android touch input handling.
///
/// This plugin sets up the necessary systems to:
/// - Track camera information for coordinate transformations
/// - Process queued Android touch events
/// - Convert them to Bevy input events
pub struct AndroidInputPlugin;

impl Plugin for AndroidInputPlugin {
    fn build(&self, app: &mut bevy::app::App) {
        app.init_resource::<AndroidCameraInformation>()
            .register_type::<AndroidTouchData>()
            .register_type::<AndroidCameraInformation>()
            // Capture camera info once at startup...
            .add_systems(Startup, AndroidCameraInformation::read_info)
            // ...and refresh it whenever the default UI camera changes.
            .add_systems(
                Update,
                AndroidCameraInformation::read_info
                    .run_if(AndroidCameraInformation::should_rebuild),
            )
            // Drain queued Android events during `First`, inside the event
            // update set, so they are visible to systems this frame.
            // (The redundant parentheses around the single system are gone —
            // they trigger the `unused_parens` lint.)
            .add_systems(
                First,
                AndroidCameraInformation::pass_android_input_events
                    .in_set(bevy::ecs::event::EventUpdates),
            );
    }
}
/// Raw touch data received from the Android platform, prior to its
/// conversion into Bevy's [`TouchInput`] format.
#[derive(Debug, Copy, Clone, Reflect)]
pub struct AndroidTouchData {
    /// Unique identifier of the touch point.
    id: i32,
    /// Phase of the touch (started, moved, ended, canceled).
    phase: TouchPhase,
    /// X coordinate as reported by Android.
    x: f32,
    /// Y coordinate as reported by Android.
    y: f32,
}
impl Default for AndroidTouchData {
    /// A canceled touch with id 0 at the origin.
    fn default() -> Self {
        Self {
            phase: TouchPhase::Canceled,
            id: 0,
            x: 0.0,
            y: 0.0,
        }
    }
}
/// Resource holding the camera data required to transform raw Android
/// screen coordinates into Bevy input coordinates.
#[derive(Debug, Clone, Copy, Resource, Reflect)]
#[reflect(Resource)]
pub(crate) struct AndroidCameraInformation {
    /// Offset taken from the camera's physical viewport rectangle.
    pub offset: Vec2,
    /// Divisor applied to incoming coordinates, taken from the camera's
    /// target scaling factor.
    pub input_multiplier: f32,
    /// Entity of the window that touch events are attributed to.
    pub window: Entity,
}
impl AndroidCameraInformation {
/// Determines if camera information should be rebuilt.
pub fn should_rebuild(camera: Query<(), (Changed<Camera>, With<IsDefaultUiCamera>)>) -> bool {
camera.iter().next().is_some()
}
/// Reads and updates camera information from the current camera and window state.
pub fn read_info(
mut res: ResMut<Self>,
camera: Option<Single<&Camera, With<IsDefaultUiCamera>>>,
window: Option<Single<Entity, With<PrimaryWindow>>>,
) {
let Some(camera) = camera else {
return;
};
let Some(window) = window else {
return;
};
let Some(NormalizedRenderTarget::Window(window_ref)) =
camera.target.normalize(Some(*window))
else {
res.offset = Vec2::ZERO;
res.input_multiplier = 1.0;
res.window = Entity::PLACEHOLDER;
return;
};
res.window = window_ref.entity();
let offset = camera.physical_viewport_rect().unwrap_or_default().min;
res.offset = Vec2::new(offset.x as f32, offset.y as f32);
res.input_multiplier = camera.target_scaling_factor().unwrap_or(1.0);
}
/// System that processes queued Android input events and converts them to Bevy events.
///
/// This system runs during the First schedule to ensure input events are available
/// for other systems in the same frame. It pops events from the INPUT_QUEUE and
/// sends them as both TouchInput and WindowEvent.
pub fn pass_android_input_events(
info: Res<Self>,
mut window_writer: EventWriter<bevy::window::WindowEvent>,
mut touch_writer: EventWriter<TouchInput>,
) {
while let Some(event) = INPUT_QUEUE.pop() {
let touch_input = info.to_bevy_input(event);
touch_writer.write(touch_input);
window_writer.write(touch_input.into());
}
}
/// Converts raw Android touch data into a Bevy TouchInput event.
pub(crate) fn to_bevy_input(&self, touch: AndroidTouchData) -> TouchInput {
TouchInput {
window: self.window.clone(),
phase: touch.phase,
position: self.adjust_pos(touch.x, touch.y),
id: touch.id as u64,
force: None,
}
}
/// Adjusts raw screen coordinates to account for camera viewport and scaling.
#[inline]
pub fn adjust_pos(&self, x: f32, y: f32) -> Vec2 {
Vec2::new(x, y) / self.input_multiplier + self.offset
}
}
impl Default for AndroidCameraInformation {
    /// Identity transform: no offset, unit scale, placeholder window.
    fn default() -> Self {
        Self {
            window: Entity::PLACEHOLDER,
            input_multiplier: 1.0,
            offset: Vec2::ZERO,
        }
    }
}
/// Entry point for Android native code to send touch input events.
///
/// Called from the JNI layer to enqueue a touch event that the Bevy input
/// systems will pick up on the next frame.
///
/// # Arguments
/// * `id` - Unique identifier for the touch point
/// * `x` - X coordinate in screen space
/// * `y` - Y coordinate in screen space
/// * `state` - Touch state (0=Started, 1=Moved, 2=Ended, other=Canceled)
pub fn queue_input_update(id: i32, x: f32, y: f32, state: i32) {
    INPUT_QUEUE.push(AndroidTouchData {
        id,
        x,
        y,
        phase: match state {
            0 => TouchPhase::Started,
            1 => TouchPhase::Moved,
            2 => TouchPhase::Ended,
            _ => TouchPhase::Canceled,
        },
    });
}