Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
bevyengine
GitHub Repository: bevyengine/bevy
Path: blob/main/crates/bevy_ui/src/picking_backend.rs
9366 views
1
//! A picking backend for UI nodes.
2
//!
3
//! # Usage
4
//!
5
//! This backend does not require markers on cameras or entities to function. It will look for any
6
//! pointers using the same render target as the UI camera, and run hit tests on the UI node tree.
7
//!
8
//! ## Important Note
9
//!
10
//! This backend completely ignores [`FocusPolicy`](crate::FocusPolicy). The design of `bevy_ui`'s
11
//! focus systems and the picking plugin are not compatible. Instead, use the optional [`Pickable`] component
12
//! to override how an entity responds to picking focus. Nodes without the [`Pickable`] component
13
//! will still trigger events and block items below them from being hovered.
14
//!
15
//! ## Implementation Notes
16
//!
17
//! - `bevy_ui` can render on any camera with a flag; it is special and is not tied to a particular
//! camera.
19
//! - To correctly sort picks, the order of `bevy_ui` is set to be the camera order plus 0.5.
20
//! - The `position` reported in `HitData` is normalized relative to the node, with
21
//! `(-0.5, -0.5, 0.)` at the top left and `(0.5, 0.5, 0.)` in the bottom right. Coordinates are
22
//! relative to the entire node, not just the visible region. This backend does not provide a `normal`.
23
24
use crate::{clip_check_recursive, prelude::*, ui_transform::UiGlobalTransform, UiStack};
25
use bevy_app::prelude::*;
26
use bevy_camera::{visibility::InheritedVisibility, Camera, RenderTarget};
27
use bevy_ecs::{prelude::*, query::QueryData};
28
use bevy_math::Vec2;
29
use bevy_platform::collections::HashMap;
30
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
31
use bevy_text::{ComputedTextBlock, TextLayoutInfo};
32
use bevy_window::PrimaryWindow;
33
34
use bevy_picking::backend::prelude::*;
35
36
/// An optional component that marks cameras that should be used in the [`UiPickingPlugin`].
///
/// Only needed if [`UiPickingSettings::require_markers`] is set to `true`, and ignored
/// otherwise.
// Zero-sized marker component: its mere presence on a camera entity opts that
// camera into UI picking when markers are required.
#[derive(Debug, Clone, Default, Component, Reflect)]
#[reflect(Debug, Default, Component)]
pub struct UiPickingCamera;
43
44
/// Runtime settings for the [`UiPickingPlugin`].
// Stored as an ECS resource so it can be toggled at runtime; the `ui_picking`
// system reads it every frame.
#[derive(Resource, Reflect)]
#[reflect(Resource, Default)]
pub struct UiPickingSettings {
    /// When set to `true` UI picking will only consider cameras marked with
    /// [`UiPickingCamera`] and entities marked with [`Pickable`]. `false` by default.
    ///
    /// This setting is provided to give you fine-grained control over which cameras and entities
    /// should be used by the UI picking backend at runtime.
    pub require_markers: bool,
}
55
56
#[expect(
57
clippy::allow_attributes,
58
reason = "clippy::derivable_impls is not always linted"
59
)]
60
#[allow(
61
clippy::derivable_impls,
62
reason = "Known false positive with clippy: <https://github.com/rust-lang/rust-clippy/issues/13160>"
63
)]
64
impl Default for UiPickingSettings {
65
fn default() -> Self {
66
Self {
67
require_markers: false,
68
}
69
}
70
}
71
72
/// A plugin that adds picking support for UI nodes.
73
///
74
/// This is included by default in [`UiPlugin`](crate::UiPlugin).
75
#[derive(Clone)]
76
pub struct UiPickingPlugin;
77
impl Plugin for UiPickingPlugin {
78
fn build(&self, app: &mut App) {
79
app.init_resource::<UiPickingSettings>()
80
.add_systems(PreUpdate, ui_picking.in_set(PickingSystems::Backend));
81
}
82
}
83
84
/// Main query from bevy's `ui_focus_system`
#[derive(QueryData)]
#[query_data(mutable)]
pub struct NodeQuery {
    /// The UI node entity being hit-tested.
    entity: Entity,
    /// Layout-computed node data (size, content, etc.).
    node: &'static ComputedNode,
    /// The node's global UI transform, used to map cursor positions into node space.
    transform: &'static UiGlobalTransform,
    /// Optional picking override; when absent, the default blocking behavior applies.
    pickable: Option<&'static Pickable>,
    /// Optional inherited visibility; nodes not visibly rendered are skipped.
    inherited_visibility: Option<&'static InheritedVisibility>,
    /// The camera this node is rendered to, resolved by the UI systems.
    target_camera: &'static ComputedUiTargetCamera,
    /// Present only for text nodes: layout geometry plus the computed text block,
    /// used to pick individual text spans.
    text_node: Option<(&'static TextLayoutInfo, &'static ComputedTextBlock)>,
}
96
97
/// Computes the UI node entities under each pointer.
///
/// Bevy's [`UiStack`] orders all nodes in the order they will be rendered, which is the same order
/// we need for determining picking.
///
/// Emits one [`PointerHits`] message per `(camera, pointer)` pair that hovers at
/// least one node, with picks sorted front-to-back and truncated at the first
/// node that blocks lower nodes.
pub fn ui_picking(
    pointers: Query<(&PointerId, &PointerLocation)>,
    camera_query: Query<(Entity, &Camera, &RenderTarget, Has<UiPickingCamera>)>,
    primary_window: Query<Entity, With<PrimaryWindow>>,
    settings: Res<UiPickingSettings>,
    ui_stack: Res<UiStack>,
    node_query: Query<NodeQuery>,
    mut output: MessageWriter<PointerHits>,
    clipping_query: Query<(&ComputedNode, &UiGlobalTransform, &Node)>,
    child_of_query: Query<&ChildOf, Without<OverrideClip>>,
    pickable_query: Query<&Pickable>,
) {
    // Map from each camera to its active pointers and their positions in viewport space
    let mut pointer_pos_by_camera = HashMap::<Entity, HashMap<PointerId, Vec2>>::default();

    // Pair each pointer id with its current location, silently skipping pointers
    // that currently have no location.
    for (pointer_id, pointer_location) in
        pointers.iter().filter_map(|(pointer, pointer_location)| {
            Some(*pointer).zip(pointer_location.location().cloned())
        })
    {
        // This pointer is associated with a render target, which could be used by multiple
        // cameras. We want to ensure we return all cameras with a matching target.
        for (entity, camera, _, _) in
            camera_query
                .iter()
                .filter(|(_, _, render_target, cam_can_pick)| {
                    (!settings.require_markers || *cam_can_pick)
                        && render_target
                            .normalize(primary_window.single().ok())
                            .is_some_and(|target| target == pointer_location.target)
                })
        {
            // Scale the pointer position to physical pixels for this target;
            // fall back to a factor of 1.0 when no scaling factor is available.
            let mut pointer_pos =
                pointer_location.position * camera.target_scaling_factor().unwrap_or(1.);
            if let Some(viewport) = camera.physical_viewport_rect() {
                if !viewport.as_rect().contains(pointer_pos) {
                    // The pointer is outside the viewport, skip it
                    continue;
                }
                // Make the position relative to the viewport's top-left corner.
                pointer_pos -= viewport.min.as_vec2();
            }
            pointer_pos_by_camera
                .entry(entity)
                .or_default()
                .insert(pointer_id, pointer_pos);
        }
    }

    // The list of node entities hovered for each (camera, pointer) combo.
    // Each hit records (hit entity, camera, optional Pickable snapshot,
    // normalized cursor position within the node).
    let mut hit_nodes =
        HashMap::<(Entity, PointerId), Vec<(Entity, Entity, Option<Pickable>, Vec2)>>::default();

    // Walk the UI stack one slice at a time, reversed so we visit the topmost
    // (closest to the viewer) slice first. All nodes in a slice share a camera.
    for uinodes in ui_stack
        .partition
        .iter()
        .rev()
        .map(|range| &ui_stack.uinodes[range.clone()])
    {
        // Retrieve the first node and resolve its camera target.
        // Only need to do this once per slice, as all the nodes in the same slice share the same camera.
        let Ok(uinode) = node_query.get(uinodes[0]) else {
            continue;
        };

        let Some(camera_entity) = uinode.target_camera.get() else {
            continue;
        };

        // No pointers on this camera means nothing in this slice can be hovered.
        let Some(pointers_on_this_cam) = pointer_pos_by_camera.get(&camera_entity) else {
            continue;
        };

        // Reverse the iterator to traverse the tree from closest nodes to furthest
        for node_entity in uinodes.iter().rev().cloned() {
            let Ok(node) = node_query.get(node_entity) else {
                continue;
            };

            // Nodes with Display::None have a (0., 0.) logical rect and can be ignored
            if node.node.size() == Vec2::ZERO {
                continue;
            }

            // Nodes that are not rendered should not be interactable
            if node
                .inherited_visibility
                .map(|inherited_visibility| inherited_visibility.get())
                != Some(true)
            {
                continue;
            }

            // When markers are required, skip unmarked non-text nodes here.
            // If this is a text node, need to do this check per section.
            if node.text_node.is_none() && settings.require_markers && node.pickable.is_none() {
                continue;
            }

            // Find the normalized cursor position relative to the node.
            // (±0., 0.) is the center with the corners at points (±0.5, ±0.5).
            // Coordinates are relative to the entire node, not just the visible region.
            for (pointer_id, cursor_position) in pointers_on_this_cam.iter() {
                if let Some((text_layout_info, text_block)) = node.text_node {
                    // Text nodes are hit-tested per text section so each span's
                    // entity can be reported individually.
                    if let Some(text_entity) = pick_ui_text_section(
                        node.node,
                        node.transform,
                        *cursor_position,
                        text_layout_info,
                        text_block,
                    ) && clip_check_recursive(
                        *cursor_position,
                        node_entity,
                        &clipping_query,
                        &child_of_query,
                    ) {
                        // Per-section marker check (deferred from the loop above).
                        if settings.require_markers && !pickable_query.contains(text_entity) {
                            continue;
                        }

                        hit_nodes
                            .entry((camera_entity, *pointer_id))
                            .or_default()
                            .push((
                                text_entity,
                                camera_entity,
                                node.pickable.cloned(),
                                node.transform.inverse().transform_point2(*cursor_position)
                                    / node.node.size(),
                            ));
                    }
                } else if node.node.contains_point(*node.transform, *cursor_position)
                    && clip_check_recursive(
                        *cursor_position,
                        node_entity,
                        &clipping_query,
                        &child_of_query,
                    )
                {
                    hit_nodes
                        .entry((camera_entity, *pointer_id))
                        .or_default()
                        .push((
                            node_entity,
                            camera_entity,
                            node.pickable.cloned(),
                            node.transform.inverse().transform_point2(*cursor_position)
                                / node.node.size(),
                        ));
                }
            }
        }
    }

    for ((camera, pointer), hovered) in hit_nodes.iter() {
        // Walk the hits front-to-back; stop at the first node that blocks lower
        // nodes (`Pickable::should_block_lower`, or any node with no `Pickable`
        // component, which blocks by default).
        let mut picks = Vec::new();
        let mut depth = 0.0;

        for (hovered_node, camera_entity, pickable, position) in hovered {
            picks.push((
                *hovered_node,
                HitData::new(*camera_entity, depth, Some(position.extend(0.0)), None),
            ));

            if let Some(pickable) = pickable {
                // If an entity has a `Pickable` component, we will use that as the source of truth.
                if pickable.should_block_lower {
                    break;
                }
            } else {
                // If the `Pickable` component doesn't exist, default behavior is to block.
                break;
            }

            depth += 0.00001; // keep depth near 0 for precision
        }

        let order = camera_query
            .get(*camera)
            .map(|(_, cam, _, _)| cam.order)
            .unwrap_or_default() as f32
            + 0.5; // bevy ui can run on any camera, it's a special case

        output.write(PointerHits::new(*pointer, picks, order));
    }
}
291
292
fn pick_ui_text_section(
293
uinode: &ComputedNode,
294
global_transform: &UiGlobalTransform,
295
point: Vec2,
296
text_layout_info: &TextLayoutInfo,
297
text_block: &ComputedTextBlock,
298
) -> Option<Entity> {
299
let local_point = global_transform
300
.try_inverse()
301
.map(|transform| transform.transform_point2(point) + 0.5 * uinode.size())?;
302
303
for run in text_layout_info.run_geometry.iter() {
304
if run.bounds.contains(local_point) {
305
return text_block.entities().get(run.span_index).map(|e| e.entity);
306
}
307
}
308
None
309
}
310
311