Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
bevyengine
GitHub Repository: bevyengine/bevy
Path: blob/main/crates/bevy_ui/src/picking_backend.rs
6598 views
1
//! A picking backend for UI nodes.
2
//!
3
//! # Usage
4
//!
5
//! This backend does not require markers on cameras or entities to function. It will look for any
6
//! pointers using the same render target as the UI camera, and run hit tests on the UI node tree.
7
//!
8
//! ## Important Note
9
//!
10
//! This backend completely ignores [`FocusPolicy`](crate::FocusPolicy). The design of `bevy_ui`'s
11
//! focus systems and the picking plugin are not compatible. Instead, use the optional [`Pickable`] component
12
//! to override how an entity responds to picking focus. Nodes without the [`Pickable`] component
13
//! will still trigger events and block items below them from being hovered.
14
//!
15
//! ## Implementation Notes
16
//!
17
//! - `bevy_ui` can render on any camera with a flag; it is special, and is not tied to a particular
//! camera.
19
//! - To correctly sort picks, the order of `bevy_ui` is set to be the camera order plus 0.5.
20
//! - The `position` reported in `HitData` is normalized relative to the node, with
21
//! `(-0.5, -0.5, 0.)` at the top left and `(0.5, 0.5, 0.)` in the bottom right. Coordinates are
22
//! relative to the entire node, not just the visible region. This backend does not provide a `normal`.
23
24
#![deny(missing_docs)]
25
26
use crate::{clip_check_recursive, prelude::*, ui_transform::UiGlobalTransform, UiStack};
27
use bevy_app::prelude::*;
28
use bevy_camera::{visibility::InheritedVisibility, Camera};
29
use bevy_ecs::{prelude::*, query::QueryData};
30
use bevy_math::Vec2;
31
use bevy_platform::collections::HashMap;
32
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
33
use bevy_window::PrimaryWindow;
34
35
use bevy_picking::backend::prelude::*;
36
37
/// An optional component that marks cameras that should be used in the [`UiPickingPlugin`].
38
///
39
/// Only needed if [`UiPickingSettings::require_markers`] is set to `true`, and ignored
40
/// otherwise.
41
#[derive(Debug, Clone, Default, Component, Reflect)]
42
#[reflect(Debug, Default, Component)]
43
pub struct UiPickingCamera;
44
45
/// Runtime settings for the [`UiPickingPlugin`].
46
#[derive(Resource, Reflect)]
47
#[reflect(Resource, Default)]
48
pub struct UiPickingSettings {
49
/// When set to `true` UI picking will only consider cameras marked with
50
/// [`UiPickingCamera`] and entities marked with [`Pickable`]. `false` by default.
51
///
52
/// This setting is provided to give you fine-grained control over which cameras and entities
53
/// should be used by the UI picking backend at runtime.
54
pub require_markers: bool,
55
}
56
57
#[expect(
58
clippy::allow_attributes,
59
reason = "clippy::derivable_impls is not always linted"
60
)]
61
#[allow(
62
clippy::derivable_impls,
63
reason = "Known false positive with clippy: <https://github.com/rust-lang/rust-clippy/issues/13160>"
64
)]
65
impl Default for UiPickingSettings {
66
fn default() -> Self {
67
Self {
68
require_markers: false,
69
}
70
}
71
}
72
73
/// A plugin that adds picking support for UI nodes.
74
///
75
/// This is included by default in [`UiPlugin`](crate::UiPlugin).
76
#[derive(Clone)]
77
pub struct UiPickingPlugin;
78
impl Plugin for UiPickingPlugin {
79
fn build(&self, app: &mut App) {
80
app.init_resource::<UiPickingSettings>()
81
.add_systems(PreUpdate, ui_picking.in_set(PickingSystems::Backend));
82
}
83
}
84
85
/// Main query from bevy's `ui_focus_system`
#[derive(QueryData)]
#[query_data(mutable)]
pub struct NodeQuery {
    // The UI node entity itself.
    entity: Entity,
    // Layout-computed geometry of the node (size, containment tests).
    node: &'static ComputedNode,
    // The node's resolved global transform in UI space.
    transform: &'static UiGlobalTransform,
    // Optional per-entity override for picking behavior (blocking, hoverability).
    pickable: Option<&'static Pickable>,
    // Visibility inherited from ancestors; nodes where this is absent or `false`
    // are skipped by `ui_picking` (treated as not rendered).
    inherited_visibility: Option<&'static InheritedVisibility>,
    // The camera this node is rendered to, as resolved by the UI systems.
    target_camera: &'static ComputedUiTargetCamera,
}
96
97
/// Computes the UI node entities under each pointer.
///
/// Bevy's [`UiStack`] orders all nodes in the order they will be rendered, which is the same order
/// we need for determining picking.
pub fn ui_picking(
    pointers: Query<(&PointerId, &PointerLocation)>,
    camera_query: Query<(Entity, &Camera, Has<UiPickingCamera>)>,
    primary_window: Query<Entity, With<PrimaryWindow>>,
    settings: Res<UiPickingSettings>,
    ui_stack: Res<UiStack>,
    node_query: Query<NodeQuery>,
    mut output: EventWriter<PointerHits>,
    clipping_query: Query<(&ComputedNode, &UiGlobalTransform, &Node)>,
    child_of_query: Query<&ChildOf, Without<OverrideClip>>,
) {
    // Map from each camera to its active pointers and their positions in viewport space
    let mut pointer_pos_by_camera = HashMap::<Entity, HashMap<PointerId, Vec2>>::default();

    // Only pointers that currently report a location participate.
    for (pointer_id, pointer_location) in
        pointers.iter().filter_map(|(pointer, pointer_location)| {
            Some(*pointer).zip(pointer_location.location().cloned())
        })
    {
        // This pointer is associated with a render target, which could be used by multiple
        // cameras. We want to ensure we return all cameras with a matching target.
        for camera in camera_query
            .iter()
            // Honor `require_markers`: when enabled, only cameras with `UiPickingCamera` pass.
            .filter(|(_, _, cam_can_pick)| !settings.require_markers || *cam_can_pick)
            .map(|(entity, camera, _)| {
                (
                    entity,
                    camera.target.normalize(primary_window.single().ok()),
                )
            })
            // Drop cameras whose render target could not be normalized.
            .filter_map(|(entity, target)| Some(entity).zip(target))
            .filter(|(_entity, target)| target == &pointer_location.target)
            .map(|(cam_entity, _target)| cam_entity)
        {
            let Ok((_, camera_data, _)) = camera_query.get(camera) else {
                continue;
            };
            // Scale the pointer position by the camera's target scaling factor
            // (falls back to 1.0 if the camera cannot report one).
            let mut pointer_pos =
                pointer_location.position * camera_data.target_scaling_factor().unwrap_or(1.);
            if let Some(viewport) = camera_data.physical_viewport_rect() {
                if !viewport.as_rect().contains(pointer_pos) {
                    // The pointer is outside the viewport, skip it
                    continue;
                }
                // Make the position relative to the viewport's top-left corner.
                pointer_pos -= viewport.min.as_vec2();
            }
            pointer_pos_by_camera
                .entry(camera)
                .or_default()
                .insert(pointer_id, pointer_pos);
        }
    }

    // The list of node entities hovered for each (camera, pointer) combo
    let mut hit_nodes = HashMap::<(Entity, PointerId), Vec<(Entity, Vec2)>>::default();

    // prepare an iterator that contains all the nodes that have the cursor in their rect,
    // from the top node to the bottom one. this will also reset the interaction to `None`
    // for all nodes encountered that are no longer hovered.
    for node_entity in ui_stack
        .uinodes
        .iter()
        // reverse the iterator to traverse the tree from closest nodes to furthest
        .rev()
    {
        let Ok(node) = node_query.get(*node_entity) else {
            // Entity in the stack may have despawned or lost its UI components.
            continue;
        };

        // When markers are required, only entities carrying `Pickable` are considered.
        if settings.require_markers && node.pickable.is_none() {
            continue;
        }

        // Nodes that are not rendered should not be interactable
        if node
            .inherited_visibility
            .map(|inherited_visibility| inherited_visibility.get())
            != Some(true)
        {
            continue;
        }
        let Some(camera_entity) = node.target_camera.get() else {
            continue;
        };

        // Nodes with Display::None have a (0., 0.) logical rect and can be ignored
        if node.node.size() == Vec2::ZERO {
            continue;
        }

        let pointers_on_this_cam = pointer_pos_by_camera.get(&camera_entity);

        // Find the normalized cursor position relative to the node.
        // (±0., 0.) is the center with the corners at points (±0.5, ±0.5).
        // Coordinates are relative to the entire node, not just the visible region.
        for (pointer_id, cursor_position) in pointers_on_this_cam.iter().flat_map(|h| h.iter()) {
            // A hit requires the cursor to be inside the node's rect AND not clipped
            // away by any ancestor's clipping settings.
            if node.node.contains_point(*node.transform, *cursor_position)
                && clip_check_recursive(
                    *cursor_position,
                    *node_entity,
                    &clipping_query,
                    &child_of_query,
                )
            {
                hit_nodes
                    .entry((camera_entity, *pointer_id))
                    .or_default()
                    .push((
                        *node_entity,
                        // Map into the node's local space, then normalize by its size.
                        node.transform.inverse().transform_point2(*cursor_position)
                            / node.node.size(),
                    ));
            }
        }
    }

    for ((camera, pointer), hovered) in hit_nodes.iter() {
        // As soon as a node that blocks lower entities (`Pickable::should_block_lower`, which
        // is also the default when `Pickable` is absent) is detected, the iteration stops on it
        // because it "captures" the interaction.
        let mut picks = Vec::new();
        let mut depth = 0.0;

        for (hovered_node, position) in hovered {
            // Safe: `hit_nodes` only contains entities that matched `node_query` above.
            let node = node_query.get(*hovered_node).unwrap();

            let Some(camera_entity) = node.target_camera.get() else {
                continue;
            };

            picks.push((
                node.entity,
                // No surface normal is reported by this backend.
                HitData::new(camera_entity, depth, Some(position.extend(0.0)), None),
            ));

            if let Some(pickable) = node.pickable {
                // If an entity has a `Pickable` component, we will use that as the source of truth.
                if pickable.should_block_lower {
                    break;
                }
            } else {
                // If the `Pickable` component doesn't exist, default behavior is to block.
                break;
            }

            depth += 0.00001; // keep depth near 0 for precision
        }

        let order = camera_query
            .get(*camera)
            .map(|(_, cam, _)| cam.order)
            .unwrap_or_default() as f32
            + 0.5; // bevy ui can run on any camera, it's a special case

        output.write(PointerHits::new(*pointer, picks, order));
    }
}
257
258