Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
bevyengine
GitHub Repository: bevyengine/bevy
Path: blob/main/crates/bevy_ui_render/src/lib.rs
9331 views
1
#![expect(missing_docs, reason = "Not all docs are written yet, see #3492.")]
2
#![cfg_attr(docsrs, feature(doc_cfg))]
3
#![doc(
4
html_logo_url = "https://bevyengine.org/assets/icon.png",
5
html_favicon_url = "https://bevyengine.org/assets/icon.png"
6
)]
7
8
//! Provides rendering functionality for `bevy_ui`.
9
10
pub mod box_shadow;
11
mod color_space;
12
mod gradient;
13
mod pipeline;
14
mod render_pass;
15
pub mod ui_material;
16
mod ui_material_pipeline;
17
pub mod ui_texture_slice_pipeline;
18
19
#[cfg(feature = "bevy_ui_debug")]
20
mod debug_overlay;
21
22
use bevy_camera::visibility::InheritedVisibility;
23
use bevy_camera::{Camera, Camera2d, Camera3d, Hdr, RenderTarget};
24
use bevy_reflect::prelude::ReflectDefault;
25
use bevy_reflect::Reflect;
26
use bevy_shader::load_shader_library;
27
use bevy_sprite_render::SpriteAssetEvents;
28
use bevy_ui::widget::{ImageNode, TextShadow, ViewportNode};
29
use bevy_ui::{
30
BackgroundColor, BorderColor, CalculatedClip, ComputedNode, ComputedUiTargetCamera, Display,
31
Node, Outline, ResolvedBorderRadius, UiGlobalTransform,
32
};
33
34
use bevy_app::prelude::*;
35
use bevy_asset::{AssetEvent, AssetId, Assets};
36
use bevy_color::{Alpha, ColorToComponents, LinearRgba};
37
use bevy_core_pipeline::schedule::{Core2d, Core2dSystems, Core3d, Core3dSystems};
38
use bevy_core_pipeline::upscaling::upscaling;
39
use bevy_ecs::prelude::*;
40
use bevy_ecs::schedule::IntoScheduleConfigs;
41
use bevy_ecs::system::SystemParam;
42
use bevy_image::{prelude::*, TRANSPARENT_IMAGE_HANDLE};
43
use bevy_math::{Affine2, FloatOrd, Mat4, Rect, UVec4, Vec2};
44
use bevy_render::{
45
render_asset::RenderAssets,
46
render_phase::{
47
sort_phase_system, AddRenderCommand, DrawFunctions, PhaseItem, PhaseItemExtraIndex,
48
ViewSortedRenderPhases,
49
},
50
render_resource::*,
51
renderer::{RenderDevice, RenderQueue},
52
sync_world::{MainEntity, RenderEntity, TemporaryRenderEntity},
53
texture::GpuImage,
54
view::{ExtractedView, RetainedViewEntity, ViewUniforms},
55
Extract, ExtractSchedule, Render, RenderApp, RenderStartup, RenderSystems,
56
};
57
use bevy_sprite::BorderRect;
58
#[cfg(feature = "bevy_ui_debug")]
59
pub use debug_overlay::UiDebugOptions;
60
61
use color_space::ColorSpacePlugin;
62
use gradient::GradientPlugin;
63
64
use bevy_platform::collections::{HashMap, HashSet};
65
use bevy_text::{
66
ComputedTextBlock, PositionedGlyph, Strikethrough, StrikethroughColor, TextBackgroundColor,
67
TextColor, TextLayoutInfo, Underline, UnderlineColor,
68
};
69
use bevy_transform::components::GlobalTransform;
70
use box_shadow::BoxShadowPlugin;
71
use bytemuck::{Pod, Zeroable};
72
use core::ops::Range;
73
74
pub use pipeline::*;
75
pub use render_pass::*;
76
pub use ui_material_pipeline::*;
77
use ui_texture_slice_pipeline::UiTextureSlicerPlugin;
78
79
/// The UI-render prelude.
///
/// Re-exports the most commonly used public items of this crate.
pub mod prelude {
    #[cfg(feature = "bevy_ui_debug")]
    pub use crate::debug_overlay::UiDebugOptions;

    pub use crate::{
        ui_material::*, ui_material_pipeline::UiMaterialPlugin, BoxShadowSamples, UiAntiAlias,
    };
}
87
88
/// Local Z offsets of "extracted nodes" for a given entity. These exist to allow rendering multiple "extracted nodes"
/// for a given source entity (ex: render both a background color _and_ a custom material for a given node).
///
/// When possible these offsets should be defined in _this_ module to ensure z-index coordination across contexts.
/// When this is _not_ possible, pick a suitably unique index unlikely to clash with other things (ex: `0.1826823` not `0.1`).
///
/// Offsets should be unique for a given node entity to avoid z fighting.
/// These should pretty much _always_ be larger than -0.5 and smaller than 0.5 to avoid clipping into nodes
/// above / below the current node in the stack.
///
/// A z-index of 0.0 is the baseline, which is used as the primary "background color" of the node.
///
/// Note that nodes "stack" on each other, so a negative offset on the node above could clip _into_
/// a positive offset on a node below.
pub mod stack_z_offsets {
    /// Box shadows render behind everything else belonging to the node.
    pub const BOX_SHADOW: f32 = -0.1;
    /// The baseline: the node's primary background color.
    pub const BACKGROUND_COLOR: f32 = 0.0;
    /// Borders render just above the background.
    pub const BORDER: f32 = 0.01;
    /// Background gradients.
    pub const GRADIENT: f32 = 0.02;
    /// Border gradients render above plain background gradients.
    pub const BORDER_GRADIENT: f32 = 0.03;
    /// Image content (also used for viewport nodes).
    pub const IMAGE: f32 = 0.04;
    /// Custom UI materials.
    pub const MATERIAL: f32 = 0.05;
    /// Text glyphs render above all other per-node content.
    pub const TEXT: f32 = 0.06;
    /// Strikethrough decorations render above the glyphs they cross out.
    pub const TEXT_STRIKETHROUGH: f32 = 0.07;
}
113
114
/// System sets for the UI extraction systems that run in [`ExtractSchedule`].
///
/// The sets from `ExtractCameraViews` through `ExtractDebug` are chained in
/// order by [`UiRenderPlugin`].
#[derive(Debug, Hash, PartialEq, Eq, Clone, SystemSet)]
pub enum RenderUiSystems {
    /// Extracts the per-camera UI views.
    ExtractCameraViews,
    /// Extracts box shadows.
    ExtractBoxShadows,
    /// Extracts node background colors.
    ExtractBackgrounds,
    /// Extracts image nodes.
    ExtractImages,
    /// Extracts sliced-texture nodes.
    ExtractTextureSlice,
    /// Extracts borders and outlines.
    ExtractBorders,
    /// Extracts viewport nodes.
    ExtractViewportNodes,
    /// Extracts text backgrounds and decorations.
    ExtractTextBackgrounds,
    /// Extracts text shadows.
    ExtractTextShadows,
    /// Extracts text glyphs.
    ExtractText,
    /// Extracts the debug overlay (requires the `bevy_ui_debug` feature).
    ExtractDebug,
    /// Extracts gradients.
    ExtractGradient,
}
129
130
/// Marker for controlling whether UI is rendered with or without anti-aliasing
/// in a camera. By default, UI is always anti-aliased.
///
/// This component is copied onto the camera's render-world entity each frame by
/// `extract_ui_camera_view`.
///
/// **Note:** This does not affect text anti-aliasing. For that, use the `font_smoothing` property of the [`TextFont`](bevy_text::TextFont) component.
///
/// ```
/// use bevy_camera::prelude::*;
/// use bevy_ecs::prelude::*;
/// use bevy_ui::prelude::*;
/// use bevy_ui_render::prelude::*;
///
/// fn spawn_camera(mut commands: Commands) {
///     commands.spawn((
///         Camera2d,
///         // This will cause all UI in this camera to be rendered without
///         // anti-aliasing
///         UiAntiAlias::Off,
///     ));
/// }
/// ```
#[derive(Component, Clone, Copy, Default, Debug, Reflect, Eq, PartialEq)]
#[reflect(Component, Default, PartialEq, Clone)]
pub enum UiAntiAlias {
    /// UI will render with anti-aliasing
    #[default]
    On,
    /// UI will render without anti-aliasing
    Off,
}
159
160
/// Number of shadow samples.
/// A larger value will result in higher quality shadows.
/// Default is 4, values higher than ~10 offer diminishing returns.
///
/// This component is copied onto the camera's render-world entity each frame by
/// `extract_ui_camera_view`.
///
/// ```
/// use bevy_camera::prelude::*;
/// use bevy_ecs::prelude::*;
/// use bevy_ui::prelude::*;
/// use bevy_ui_render::prelude::*;
///
/// fn spawn_camera(mut commands: Commands) {
///     commands.spawn((
///         Camera2d,
///         BoxShadowSamples(6),
///     ));
/// }
/// ```
#[derive(Component, Clone, Copy, Debug, Reflect, Eq, PartialEq)]
#[reflect(Component, Default, PartialEq, Clone)]
pub struct BoxShadowSamples(pub u32);
180
181
impl Default for BoxShadowSamples {
182
fn default() -> Self {
183
Self(4)
184
}
185
}
186
187
/// Plugin that adds UI rendering to the [`RenderApp`]: render resources, the
/// `TransparentUi` phase, the extraction/queue/prepare systems, and the
/// texture-slice, color-space, gradient and box-shadow sub-plugins.
#[derive(Default)]
pub struct UiRenderPlugin;
189
190
impl Plugin for UiRenderPlugin {
    fn build(&self, app: &mut App) {
        // Shared WGSL used by the UI pipelines.
        load_shader_library!(app, "ui.wgsl");

        #[cfg(feature = "bevy_ui_debug")]
        app.init_resource::<UiDebugOptions>();

        // Without the render sub-app (e.g. headless) there is nothing to set up.
        let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
            return;
        };

        render_app
            .init_resource::<SpecializedRenderPipelines<UiPipeline>>()
            .init_resource::<ImageNodeBindGroups>()
            .init_resource::<UiMeta>()
            .init_resource::<ExtractedUiNodes>()
            .allow_ambiguous_resource::<ExtractedUiNodes>()
            .init_resource::<DrawFunctions<TransparentUi>>()
            .init_resource::<ViewSortedRenderPhases<TransparentUi>>()
            .add_render_command::<TransparentUi, DrawUi>()
            .configure_sets(
                ExtractSchedule,
                // Run the extraction sets strictly in this order.
                // NOTE(review): `ExtractViewportNodes` and `ExtractGradient` are
                // not part of this chain even though systems are assigned to
                // them below / by `GradientPlugin` — confirm this is intentional.
                (
                    RenderUiSystems::ExtractCameraViews,
                    RenderUiSystems::ExtractBoxShadows,
                    RenderUiSystems::ExtractBackgrounds,
                    RenderUiSystems::ExtractImages,
                    RenderUiSystems::ExtractTextureSlice,
                    RenderUiSystems::ExtractBorders,
                    RenderUiSystems::ExtractTextBackgrounds,
                    RenderUiSystems::ExtractTextShadows,
                    RenderUiSystems::ExtractText,
                    RenderUiSystems::ExtractDebug,
                )
                    .chain(),
            )
            .add_systems(RenderStartup, init_ui_pipeline)
            .add_systems(
                ExtractSchedule,
                (
                    extract_ui_camera_view.in_set(RenderUiSystems::ExtractCameraViews),
                    extract_uinode_background_colors.in_set(RenderUiSystems::ExtractBackgrounds),
                    extract_uinode_images.in_set(RenderUiSystems::ExtractImages),
                    extract_uinode_borders.in_set(RenderUiSystems::ExtractBorders),
                    extract_viewport_nodes.in_set(RenderUiSystems::ExtractViewportNodes),
                    extract_text_decorations.in_set(RenderUiSystems::ExtractTextBackgrounds),
                    extract_text_shadows.in_set(RenderUiSystems::ExtractTextShadows),
                    extract_text_sections.in_set(RenderUiSystems::ExtractText),
                    #[cfg(feature = "bevy_ui_debug")]
                    debug_overlay::extract_debug_overlay.in_set(RenderUiSystems::ExtractDebug),
                ),
            )
            .add_systems(
                Render,
                (
                    queue_uinodes.in_set(RenderSystems::Queue),
                    sort_phase_system::<TransparentUi>.in_set(RenderSystems::PhaseSort),
                    prepare_uinodes.in_set(RenderSystems::PrepareBindGroups),
                ),
            )
            // The UI pass runs after post-processing but before upscaling in
            // both the 2D and 3D core graphs.
            .add_systems(
                Core2d,
                ui_pass.after(Core2dSystems::PostProcess).before(upscaling),
            )
            .add_systems(
                Core3d,
                ui_pass.after(Core3dSystems::PostProcess).before(upscaling),
            );

        app.add_plugins(UiTextureSlicerPlugin);
        app.add_plugins(ColorSpacePlugin);
        app.add_plugins(GradientPlugin);
        app.add_plugins(BoxShadowPlugin);
    }
}
265
266
/// System parameter for resolving main-world camera entities to their
/// render-world counterparts.
#[derive(SystemParam)]
pub struct UiCameraMap<'w, 's> {
    // Query over synced render entities; see `get_mapper` for cached lookups.
    mapping: Query<'w, 's, RenderEntity>,
}
270
271
impl<'w, 's> UiCameraMap<'w, 's> {
    /// Creates a [`UiCameraMapper`] for performing repeated camera-to-render-entity lookups.
    ///
    /// The last successful mapping is cached to avoid redundant queries.
    pub fn get_mapper(&'w self) -> UiCameraMapper<'w, 's> {
        UiCameraMapper {
            mapping: &self.mapping,
            // `PLACEHOLDER` guarantees the first `map` call misses the cache
            // and performs a real lookup.
            camera_entity: Entity::PLACEHOLDER,
            render_entity: Entity::PLACEHOLDER,
        }
    }
}
283
284
/// Helper for mapping UI target camera entities to their corresponding render entities,
/// with caching to avoid repeated lookups for the same camera.
pub struct UiCameraMapper<'w, 's> {
    mapping: &'w Query<'w, 's, RenderEntity>,
    /// Cached camera entity from the last successful `map` call.
    camera_entity: Entity,
    /// Cached render entity from the last successful `map` call.
    render_entity: Entity,
}
293
294
impl<'w, 's> UiCameraMapper<'w, 's> {
295
/// Returns the render entity corresponding to the given [`ComputedUiTargetCamera`]'s camera, or none if no corresponding entity was found.
296
pub fn map(&mut self, computed_target: &ComputedUiTargetCamera) -> Option<Entity> {
297
let camera_entity = computed_target.get()?;
298
if self.camera_entity != camera_entity {
299
let new_render_camera_entity = self.mapping.get(camera_entity).ok()?;
300
self.render_entity = new_render_camera_entity;
301
self.camera_entity = camera_entity;
302
}
303
304
Some(self.render_entity)
305
}
306
307
/// Returns the cached camera entity from the last successful `map` call.
308
pub fn current_camera(&self) -> Entity {
309
self.camera_entity
310
}
311
}
312
313
/// A UI element extracted from the main world, ready to be queued and prepared
/// for rendering.
pub struct ExtractedUiNode {
    /// Sort key within the UI stack: `stack_index` plus a [`stack_z_offsets`] offset.
    pub z_order: f32,
    /// Texture to draw with; `AssetId::default()` for untextured (plain color) nodes.
    pub image: AssetId<Image>,
    /// Clipping rectangle inherited from `CalculatedClip`, if any.
    pub clip: Option<Rect>,
    /// Render world entity of the extracted camera corresponding to this node's target camera.
    pub extracted_camera_entity: Entity,
    /// Payload: either a single node quad or a run of glyphs.
    pub item: ExtractedUiItem,
    /// The main-world entity this node was extracted from.
    pub main_entity: MainEntity,
    /// Temporary render-world entity spawned for this extracted node.
    pub render_entity: Entity,
    /// World transform of the node (2D affine).
    pub transform: Affine2,
}
324
325
/// The type of UI node.
/// This is used to determine how to render the UI node.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum NodeType {
    /// A filled rectangle (background, image, decoration).
    Rect,
    /// A border; the `u32` carries `shader_flags::BORDER_*` bits selecting
    /// which edges to draw.
    Border(u32), // shader flags
}
332
333
/// The renderable payload of an [`ExtractedUiNode`].
pub enum ExtractedUiItem {
    /// A single quad: background, border, image or decoration.
    Node {
        /// Color of the quad (premultiplied into the vertex data later).
        color: LinearRgba,
        /// The region of the node (or of its texture) to draw.
        rect: Rect,
        /// Scaling applied when drawing from a texture atlas or sub-rect.
        atlas_scaling: Option<Vec2>,
        /// Mirror the texture horizontally.
        flip_x: bool,
        /// Mirror the texture vertically.
        flip_y: bool,
        /// Border radius of the UI node.
        /// Ordering: top left, top right, bottom right, bottom left.
        border_radius: ResolvedBorderRadius,
        /// Border thickness of the UI node.
        /// Ordering: left, top, right, bottom.
        border: BorderRect,
        node_type: NodeType,
    },
    /// A contiguous sequence of text glyphs from the same section
    Glyphs {
        /// Indices into [`ExtractedUiNodes::glyphs`]
        range: Range<usize>,
    },
}
354
355
/// A single extracted text glyph, referenced by [`ExtractedUiItem::Glyphs`] ranges.
pub struct ExtractedGlyph {
    /// Color of the glyph.
    pub color: LinearRgba,
    /// Position of the glyph relative to the owning node's transform.
    pub translation: Vec2,
    /// The glyph's region within its atlas texture.
    pub rect: Rect,
}
360
361
/// Render-world resource collecting everything the UI extraction systems
/// produce each frame.
#[derive(Resource, Default)]
pub struct ExtractedUiNodes {
    /// Extracted UI elements, in extraction order (sorted later by `z_order`).
    pub uinodes: Vec<ExtractedUiNode>,
    /// Glyph pool indexed by [`ExtractedUiItem::Glyphs`] ranges.
    pub glyphs: Vec<ExtractedGlyph>,
}
366
367
impl ExtractedUiNodes {
368
pub fn clear(&mut self) {
369
self.uinodes.clear();
370
self.glyphs.clear();
371
}
372
}
373
374
/// Extracts the background colors of visible UI nodes into [`ExtractedUiNodes`].
pub fn extract_uinode_background_colors(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            &BackgroundColor,
        )>,
    >,
    camera_map: Extract<UiCameraMap>,
) {
    let mut camera_mapper = camera_map.get_mapper();

    for (entity, uinode, transform, inherited_visibility, clip, camera, background_color) in
        &uinode_query
    {
        // Skip invisible backgrounds
        if !inherited_visibility.get()
            || background_color.0.is_fully_transparent()
            || uinode.is_empty()
        {
            continue;
        }

        // Skip nodes whose target camera has no render-world counterpart.
        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        extracted_uinodes.uinodes.push(ExtractedUiNode {
            render_entity: commands.spawn(TemporaryRenderEntity).id(),
            // Background is the baseline layer of the node's stack entry.
            z_order: uinode.stack_index as f32 + stack_z_offsets::BACKGROUND_COLOR,
            clip: clip.map(|clip| clip.clip),
            // Untextured quad.
            image: AssetId::default(),
            extracted_camera_entity,
            transform: transform.into(),
            item: ExtractedUiItem::Node {
                color: background_color.0.into(),
                rect: Rect {
                    min: Vec2::ZERO,
                    max: uinode.size,
                },
                atlas_scaling: None,
                flip_x: false,
                flip_y: false,
                border: uinode.border(),
                border_radius: uinode.border_radius(),
                node_type: NodeType::Rect,
            },
            main_entity: entity.into(),
        });
    }
}
431
432
/// Extracts visible [`ImageNode`]s into [`ExtractedUiNodes`].
///
/// Sliced images are skipped here; they are handled by the texture-slice pipeline.
pub fn extract_uinode_images(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            &ImageNode,
        )>,
    >,
    camera_map: Extract<UiCameraMap>,
) {
    let mut camera_mapper = camera_map.get_mapper();
    for (entity, uinode, transform, inherited_visibility, clip, camera, image) in &uinode_query {
        // Skip invisible images
        if !inherited_visibility.get()
            || image.color.is_fully_transparent()
            || image.image.id() == TRANSPARENT_IMAGE_HANDLE.id()
            || image.image_mode.uses_slices()
            || uinode.is_empty()
        {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        // Sub-rectangle of the atlas this image refers to, if it uses one.
        let atlas_rect = image
            .texture_atlas
            .as_ref()
            .and_then(|s| s.texture_rect(&texture_atlases))
            .map(|r| r.as_rect());

        // Combine the optional atlas rect with the node's own optional
        // sub-rect; when both exist, `image.rect` is relative to the atlas rect.
        let mut rect = match (atlas_rect, image.rect) {
            (None, None) => Rect {
                min: Vec2::ZERO,
                max: uinode.size,
            },
            (None, Some(image_rect)) => image_rect,
            (Some(atlas_rect), None) => atlas_rect,
            (Some(atlas_rect), Some(mut image_rect)) => {
                image_rect.min += atlas_rect.min;
                image_rect.max += atlas_rect.min;
                image_rect
            }
        };

        // If the draw uses only part of the texture, scale the rect from
        // texture space into node space.
        let atlas_scaling = if atlas_rect.is_some() || image.rect.is_some() {
            let atlas_scaling = uinode.size() / rect.size();
            rect.min *= atlas_scaling;
            rect.max *= atlas_scaling;
            Some(atlas_scaling)
        } else {
            None
        };

        extracted_uinodes.uinodes.push(ExtractedUiNode {
            z_order: uinode.stack_index as f32 + stack_z_offsets::IMAGE,
            render_entity: commands.spawn(TemporaryRenderEntity).id(),
            clip: clip.map(|clip| clip.clip),
            image: image.image.id(),
            extracted_camera_entity,
            transform: transform.into(),
            item: ExtractedUiItem::Node {
                color: image.color.into(),
                rect,
                atlas_scaling,
                flip_x: image.flip_x,
                flip_y: image.flip_y,
                // NOTE(review): field accesses here (`uinode.border`,
                // `uinode.border_radius`) vs the `border()` / `border_radius()`
                // method calls used by the sibling extractors — confirm these
                // are equivalent.
                border: uinode.border,
                border_radius: uinode.border_radius,
                node_type: NodeType::Rect,
            },
            main_entity: entity.into(),
        });
    }
}
515
516
/// Extracts borders and outlines of visible UI nodes into [`ExtractedUiNodes`].
///
/// Edges sharing the same color are batched into a single draw via
/// `shader_flags::BORDER_*` bit flags.
pub fn extract_uinode_borders(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    uinode_query: Extract<
        Query<(
            Entity,
            &Node,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            AnyOf<(&BorderColor, &Outline)>,
        )>,
    >,
    camera_map: Extract<UiCameraMap>,
) {
    // All borders are untextured.
    let image = AssetId::<Image>::default();
    let mut camera_mapper = camera_map.get_mapper();

    for (
        entity,
        node,
        computed_node,
        transform,
        inherited_visibility,
        maybe_clip,
        camera,
        (maybe_border_color, maybe_outline),
    ) in &uinode_query
    {
        // Skip invisible borders and removed nodes
        if !inherited_visibility.get() || node.display == Display::None {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        // Don't extract borders with zero width along all edges
        if computed_node.border() != BorderRect::ZERO
            && let Some(border_color) = maybe_border_color
        {
            // Per-edge colors in the flag ordering below: left, top, right, bottom.
            let border_colors = [
                border_color.left.to_linear(),
                border_color.top.to_linear(),
                border_color.right.to_linear(),
                border_color.bottom.to_linear(),
            ];

            const BORDER_FLAGS: [u32; 4] = [
                shader_flags::BORDER_LEFT,
                shader_flags::BORDER_TOP,
                shader_flags::BORDER_RIGHT,
                shader_flags::BORDER_BOTTOM,
            ];
            // Bitmask of edges already emitted (possibly merged into an
            // earlier same-colored draw).
            let mut completed_flags = 0;

            for (i, &color) in border_colors.iter().enumerate() {
                if color.is_fully_transparent() {
                    continue;
                }

                let mut border_flags = BORDER_FLAGS[i];

                // Edge already covered by a previous draw.
                if completed_flags & border_flags != 0 {
                    continue;
                }

                // Merge any later edges with the same color into this draw.
                for j in i + 1..4 {
                    if color == border_colors[j] {
                        border_flags |= BORDER_FLAGS[j];
                    }
                }
                completed_flags |= border_flags;

                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    z_order: computed_node.stack_index as f32 + stack_z_offsets::BORDER,
                    image,
                    clip: maybe_clip.map(|clip| clip.clip),
                    extracted_camera_entity,
                    transform: transform.into(),
                    item: ExtractedUiItem::Node {
                        color,
                        rect: Rect {
                            max: computed_node.size(),
                            ..Default::default()
                        },
                        atlas_scaling: None,
                        flip_x: false,
                        flip_y: false,
                        border: computed_node.border(),
                        border_radius: computed_node.border_radius(),
                        node_type: NodeType::Border(border_flags),
                    },
                    main_entity: entity.into(),
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                });
            }
        }

        // Outlines only apply when the node has a positive outline width.
        if computed_node.outline_width() <= 0. {
            continue;
        }

        if let Some(outline) = maybe_outline.filter(|outline| !outline.color.is_fully_transparent())
        {
            let outline_size = computed_node.outlined_node_size();
            extracted_uinodes.uinodes.push(ExtractedUiNode {
                z_order: computed_node.stack_index as f32 + stack_z_offsets::BORDER,
                render_entity: commands.spawn(TemporaryRenderEntity).id(),
                image,
                clip: maybe_clip.map(|clip| clip.clip),
                extracted_camera_entity,
                transform: transform.into(),
                item: ExtractedUiItem::Node {
                    color: outline.color.into(),
                    rect: Rect {
                        max: outline_size,
                        ..Default::default()
                    },
                    atlas_scaling: None,
                    flip_x: false,
                    flip_y: false,
                    // Outlines draw as a uniform-width border on all edges.
                    border: BorderRect::all(computed_node.outline_width()),
                    border_radius: computed_node.outline_radius(),
                    node_type: NodeType::Border(shader_flags::BORDER_ALL),
                },
                main_entity: entity.into(),
            });
        }
    }
}
650
651
/// The UI camera is "moved back" by this many units (plus the [`UI_CAMERA_TRANSFORM_OFFSET`]) and also has a view
/// distance of this many units. This ensures that with a left-handed projection,
/// as ui elements are "stacked on top of each other", they are within the camera's view
/// and have room to grow.
///
/// Used by `extract_ui_camera_view` for both the orthographic far plane and
/// the view translation.
// TODO: Consider computing this value at runtime based on the maximum z-value.
const UI_CAMERA_FAR: f32 = 1000.0;

// This value is subtracted from the far distance for the camera's z-position to ensure nodes at z == 0.0 are rendered
// TODO: Evaluate if we still need this.
const UI_CAMERA_TRANSFORM_OFFSET: f32 = -0.1;

/// The ID of the subview associated with a camera on which UI is to be drawn.
///
/// When UI is present, cameras extract to two views: the main 2D/3D one and a
/// UI one. The main 2D or 3D camera gets subview 0, and the corresponding UI
/// camera gets this subview, 1.
const UI_CAMERA_SUBVIEW: u32 = 1;
668
669
/// A render-world component that lives on the main render target view and
/// specifies the corresponding UI view.
///
/// For example, if UI is being rendered to a 3D camera, this component lives on
/// the 3D camera and contains the entity corresponding to the UI view.
///
/// The contained entity is the id of the temporary render entity with the
/// corresponding extracted UI view.
#[derive(Component)]
pub struct UiCameraView(pub Entity);
677
678
/// A render-world component that lives on the UI view and specifies the
/// corresponding main render target view.
///
/// For example, if the UI is being rendered to a 3D camera, this component
/// lives on the UI view and contains the entity corresponding to the 3D camera.
///
/// This is the inverse of [`UiCameraView`].
#[derive(Component)]
pub struct UiViewTarget(pub Entity);
687
688
/// Extracts all UI elements associated with a camera into the render world.
pub fn extract_ui_camera_view(
    mut commands: Commands,
    mut transparent_render_phases: ResMut<ViewSortedRenderPhases<TransparentUi>>,
    query: Extract<
        Query<
            (
                Entity,
                RenderEntity,
                &Camera,
                Has<Hdr>,
                Option<&UiAntiAlias>,
                Option<&BoxShadowSamples>,
            ),
            Or<(With<Camera2d>, With<Camera3d>)>,
        >,
    >,
    // Tracks which retained views are still alive this frame so stale phases
    // can be dropped at the end.
    mut live_entities: Local<HashSet<RetainedViewEntity>>,
) {
    live_entities.clear();

    for (main_entity, render_entity, camera, hdr, ui_anti_alias, shadow_samples) in &query {
        // ignore inactive cameras
        if !camera.is_active {
            // Clean up UI components left over from when the camera was active.
            commands
                .get_entity(render_entity)
                .expect("Camera entity wasn't synced.")
                .remove::<(UiCameraView, UiAntiAlias, BoxShadowSamples)>();
            continue;
        }

        if let Some(physical_viewport_rect) = camera.physical_viewport_rect() {
            // use a projection matrix with the origin in the top left instead of the bottom left that comes with OrthographicProjection
            let projection_matrix = Mat4::orthographic_rh(
                0.0,
                physical_viewport_rect.width() as f32,
                physical_viewport_rect.height() as f32,
                0.0,
                0.0,
                UI_CAMERA_FAR,
            );
            // We use `UI_CAMERA_SUBVIEW` here so as not to conflict with the
            // main 3D or 2D camera, which will have subview index 0.
            let retained_view_entity =
                RetainedViewEntity::new(main_entity.into(), None, UI_CAMERA_SUBVIEW);
            // Creates the UI view.
            let ui_camera_view = commands
                .spawn((
                    ExtractedView {
                        retained_view_entity,
                        clip_from_view: projection_matrix,
                        // Push the view back so nodes stacked at z >= 0 remain in frustum.
                        world_from_view: GlobalTransform::from_xyz(
                            0.0,
                            0.0,
                            UI_CAMERA_FAR + UI_CAMERA_TRANSFORM_OFFSET,
                        ),
                        clip_from_world: None,
                        hdr,
                        viewport: UVec4::from((
                            physical_viewport_rect.min,
                            physical_viewport_rect.size(),
                        )),
                        color_grading: Default::default(),
                        invert_culling: false,
                    },
                    // Link to the main camera view.
                    UiViewTarget(render_entity),
                    TemporaryRenderEntity,
                ))
                .id();

            let mut entity_commands = commands
                .get_entity(render_entity)
                .expect("Camera entity wasn't synced.");
            // Link from the main 2D/3D camera view to the UI view.
            entity_commands.insert(UiCameraView(ui_camera_view));
            // Mirror the optional per-camera UI settings onto the render entity.
            if let Some(ui_anti_alias) = ui_anti_alias {
                entity_commands.insert(*ui_anti_alias);
            }
            if let Some(shadow_samples) = shadow_samples {
                entity_commands.insert(*shadow_samples);
            }
            transparent_render_phases.insert_or_clear(retained_view_entity);

            live_entities.insert(retained_view_entity);
        }
    }

    // Drop phases for views that no longer exist or became inactive.
    transparent_render_phases.retain(|entity, _| live_entities.contains(entity));
}
778
779
/// Extracts visible [`ViewportNode`]s as textured quads showing their target
/// camera's render-target image.
pub fn extract_viewport_nodes(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    camera_query: Extract<Query<(&Camera, &RenderTarget)>>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            &ViewportNode,
        )>,
    >,
    camera_map: Extract<UiCameraMap>,
) {
    let mut camera_mapper = camera_map.get_mapper();
    for (entity, uinode, transform, inherited_visibility, clip, camera, viewport_node) in
        &uinode_query
    {
        // Skip invisible images
        if !inherited_visibility.get() || uinode.is_empty() {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        // The viewport's source camera must render to an image target.
        let Some(image) = camera_query
            .get(viewport_node.camera)
            .ok()
            .and_then(|(_, render_target)| render_target.as_image())
        else {
            continue;
        };

        extracted_uinodes.uinodes.push(ExtractedUiNode {
            // Viewport nodes share the image layer's z offset.
            z_order: uinode.stack_index as f32 + stack_z_offsets::IMAGE,
            render_entity: commands.spawn(TemporaryRenderEntity).id(),
            clip: clip.map(|clip| clip.clip),
            image: image.id(),
            extracted_camera_entity,
            transform: transform.into(),
            item: ExtractedUiItem::Node {
                color: LinearRgba::WHITE,
                rect: Rect {
                    min: Vec2::ZERO,
                    max: uinode.size,
                },
                atlas_scaling: None,
                flip_x: false,
                flip_y: false,
                border: uinode.border(),
                border_radius: uinode.border_radius(),
                node_type: NodeType::Rect,
            },
            main_entity: entity.into(),
        });
    }
}
841
842
/// Extracts the glyphs of visible text nodes into [`ExtractedUiNodes`].
///
/// Consecutive glyphs sharing the same atlas texture are batched into a single
/// [`ExtractedUiItem::Glyphs`] range over [`ExtractedUiNodes::glyphs`].
pub fn extract_text_sections(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            &ComputedTextBlock,
            &TextColor,
            &TextLayoutInfo,
        )>,
    >,
    text_styles: Extract<Query<&TextColor>>,
    camera_map: Extract<UiCameraMap>,
) {
    // `start..end` is the glyph range of the batch currently being built;
    // `end` always points one past the glyph about to be pushed.
    let mut start = extracted_uinodes.glyphs.len();
    let mut end = start + 1;

    let mut camera_mapper = camera_map.get_mapper();
    for (
        entity,
        uinode,
        transform,
        inherited_visibility,
        clip,
        camera,
        computed_block,
        text_color,
        text_layout_info,
    ) in &uinode_query
    {
        // Skip if not visible or if size is set to zero (e.g. when a parent is set to `Display::None`)
        if !inherited_visibility.get() || uinode.is_empty() {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        // Glyph positions are relative to the node's center; shift to top-left.
        let transform = Affine2::from(*transform) * Affine2::from_translation(-0.5 * uinode.size());

        // Start with the root `TextColor`; updated when the span changes.
        let mut color = text_color.0.to_linear();
        let mut current_span_index = 0;

        for (
            i,
            PositionedGlyph {
                position,
                atlas_info,
                span_index,
                ..
            },
        ) in text_layout_info.glyphs.iter().enumerate()
        {
            // Entering a new span: look up that span's color.
            if current_span_index != *span_index
                && let Some(span_entity) =
                    computed_block.entities().get(*span_index).map(|t| t.entity)
            {
                color = text_styles
                    .get(span_entity)
                    .map(|text_color| LinearRgba::from(text_color.0))
                    .unwrap_or_default();
                current_span_index = *span_index;
            }

            let rect = texture_atlases
                .get(atlas_info.texture_atlas)
                .unwrap()
                .textures[atlas_info.location.glyph_index]
                .as_rect();
            extracted_uinodes.glyphs.push(ExtractedGlyph {
                color,
                translation: *position,
                rect,
            });

            // Flush the batch when the next glyph uses a different atlas
            // texture (or this is the last glyph).
            if text_layout_info
                .glyphs
                .get(i + 1)
                .is_none_or(|info| info.atlas_info.texture != atlas_info.texture)
            {
                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    image: atlas_info.texture,
                    clip: clip.map(|clip| clip.clip),
                    extracted_camera_entity,
                    item: ExtractedUiItem::Glyphs { range: start..end },
                    main_entity: entity.into(),
                    transform,
                });
                start = end;
            }

            end += 1;
        }
    }
}
947
948
/// Extracts shadows for visible text nodes: one shadow copy of every glyph,
/// plus shadow copies of strikethrough/underline decorations.
pub fn extract_text_shadows(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &ComputedUiTargetCamera,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &TextLayoutInfo,
            &TextShadow,
            &ComputedTextBlock,
        )>,
    >,
    text_decoration_query: Extract<Query<(Has<Strikethrough>, Has<Underline>)>>,
    camera_map: Extract<UiCameraMap>,
) {
    // `start..end` is the glyph range of the batch currently being built;
    // `end` always points one past the glyph about to be pushed.
    let mut start = extracted_uinodes.glyphs.len();
    let mut end = start + 1;

    let mut camera_mapper = camera_map.get_mapper();
    for (
        entity,
        uinode,
        transform,
        target,
        inherited_visibility,
        clip,
        text_layout_info,
        shadow,
        computed_block,
    ) in &uinode_query
    {
        // Skip if not visible or if size is set to zero (e.g. when a parent is set to `Display::None`)
        if !inherited_visibility.get() || uinode.is_empty() {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(target) else {
            continue;
        };

        // Center-to-top-left shift plus the shadow offset (scaled back to
        // physical coordinates via the inverse scale factor).
        let node_transform = Affine2::from(*transform)
            * Affine2::from_translation(
                -0.5 * uinode.size() + shadow.offset / uinode.inverse_scale_factor(),
            );

        for (
            i,
            PositionedGlyph {
                position,
                atlas_info,
                span_index,
                ..
            },
        ) in text_layout_info.glyphs.iter().enumerate()
        {
            let rect = texture_atlases
                .get(atlas_info.texture_atlas)
                .unwrap()
                .textures[atlas_info.location.glyph_index]
                .as_rect();
            extracted_uinodes.glyphs.push(ExtractedGlyph {
                // All shadow glyphs use the single shadow color.
                color: shadow.color.into(),
                translation: *position,
                rect,
            });

            // Flush the batch when the next glyph belongs to a different span
            // or uses a different atlas texture (or this is the last glyph).
            if text_layout_info.glyphs.get(i + 1).is_none_or(|info| {
                info.span_index != *span_index || info.atlas_info.texture != atlas_info.texture
            }) {
                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    transform: node_transform,
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    image: atlas_info.texture,
                    clip: clip.map(|clip| clip.clip),
                    extracted_camera_entity,
                    item: ExtractedUiItem::Glyphs { range: start..end },
                    main_entity: entity.into(),
                });
                start = end;
            }

            end += 1;
        }

        // Shadow copies of the text decorations (strikethrough / underline),
        // one untextured quad per run that has the decoration.
        for run in text_layout_info.run_geometry.iter() {
            let section_entity = computed_block.entities()[run.span_index].entity;
            let Ok((has_strikethrough, has_underline)) = text_decoration_query.get(section_entity)
            else {
                continue;
            };

            if has_strikethrough {
                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    clip: clip.map(|clip| clip.clip),
                    image: AssetId::default(),
                    extracted_camera_entity,
                    transform: node_transform
                        * Affine2::from_translation(run.strikethrough_position()),
                    item: ExtractedUiItem::Node {
                        color: shadow.color.into(),
                        rect: Rect {
                            min: Vec2::ZERO,
                            max: run.strikethrough_size(),
                        },
                        atlas_scaling: None,
                        flip_x: false,
                        flip_y: false,
                        border: BorderRect::ZERO,
                        border_radius: ResolvedBorderRadius::ZERO,
                        node_type: NodeType::Rect,
                    },
                    main_entity: entity.into(),
                });
            }

            if has_underline {
                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    clip: clip.map(|clip| clip.clip),
                    image: AssetId::default(),
                    extracted_camera_entity,
                    transform: node_transform * Affine2::from_translation(run.underline_position()),
                    item: ExtractedUiItem::Node {
                        color: shadow.color.into(),
                        rect: Rect {
                            min: Vec2::ZERO,
                            max: run.underline_size(),
                        },
                        atlas_scaling: None,
                        flip_x: false,
                        flip_y: false,
                        border: BorderRect::ZERO,
                        border_radius: ResolvedBorderRadius::ZERO,
                        node_type: NodeType::Rect,
                    },
                    main_entity: entity.into(),
                });
            }
        }
    }
}
1098
1099
/// Extracts per-span text decorations — background boxes, strikethrough and
/// underline rectangles — from laid-out UI text into [`ExtractedUiNodes`].
///
/// For each visible text node, walks the layout's `run_geometry` and looks up the
/// span entity's decoration components. Decoration colors fall back to the span's
/// [`TextColor`] when no dedicated `StrikethroughColor` / `UnderlineColor` is present.
pub fn extract_text_decorations(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &ComputedTextBlock,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            &TextLayoutInfo,
        )>,
    >,
    // Span-entity query: `AnyOf` matches spans that have at least one decoration
    // (background, strikethrough, or underline).
    text_background_colors_query: Extract<
        Query<(
            AnyOf<(&TextBackgroundColor, &Strikethrough, &Underline)>,
            &TextColor,
            Option<&StrikethroughColor>,
            Option<&UnderlineColor>,
        )>,
    >,
    camera_map: Extract<UiCameraMap>,
) {
    let mut camera_mapper = camera_map.get_mapper();
    for (
        entity,
        uinode,
        computed_block,
        global_transform,
        inherited_visibility,
        clip,
        camera,
        text_layout_info,
    ) in &uinode_query
    {
        // Skip if not visible or if size is set to zero (e.g. when a parent is set to `Display::None`)
        if !inherited_visibility.get() || uinode.is_empty() {
            continue;
        }

        // Skip nodes whose target camera cannot be resolved to a render-world entity.
        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        // Shift the origin to the node's top-left corner; runs are positioned
        // relative to that corner.
        let transform =
            Affine2::from(global_transform) * Affine2::from_translation(-0.5 * uinode.size());

        for run in text_layout_info.run_geometry.iter() {
            let section_entity = computed_block.entities()[run.span_index].entity;
            let Ok((
                (text_background_color, maybe_strikethrough, maybe_underline),
                text_color,
                maybe_strikethrough_color,
                maybe_underline_color,
            )) = text_background_colors_query.get(section_entity)
            else {
                continue;
            };

            if let Some(text_background_color) = text_background_color {
                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    clip: clip.map(|clip| clip.clip),
                    // Default image id marks the node as untextured.
                    image: AssetId::default(),
                    extracted_camera_entity,
                    transform: transform * Affine2::from_translation(run.bounds.center()),
                    item: ExtractedUiItem::Node {
                        color: text_background_color.0.to_linear(),
                        rect: Rect {
                            min: Vec2::ZERO,
                            max: run.bounds.size(),
                        },
                        atlas_scaling: None,
                        flip_x: false,
                        flip_y: false,
                        // Background boxes inherit the node's border/radius so they
                        // follow the node's rounded corners.
                        border: uinode.border(),
                        border_radius: uinode.border_radius(),
                        node_type: NodeType::Rect,
                    },
                    main_entity: entity.into(),
                });
            }

            if maybe_strikethrough.is_some() {
                // Fall back to the span's text color when no explicit color is set.
                let color = maybe_strikethrough_color
                    .map(|sc| sc.0)
                    .unwrap_or(text_color.0)
                    .to_linear();

                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT_STRIKETHROUGH,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    clip: clip.map(|clip| clip.clip),
                    image: AssetId::default(),
                    extracted_camera_entity,
                    transform: transform * Affine2::from_translation(run.strikethrough_position()),
                    item: ExtractedUiItem::Node {
                        color,
                        rect: Rect {
                            min: Vec2::ZERO,
                            max: run.strikethrough_size(),
                        },
                        atlas_scaling: None,
                        flip_x: false,
                        flip_y: false,
                        border: BorderRect::ZERO,
                        border_radius: ResolvedBorderRadius::ZERO,
                        node_type: NodeType::Rect,
                    },
                    main_entity: entity.into(),
                });
            }

            if maybe_underline.is_some() {
                let color = maybe_underline_color
                    .map(|uc| uc.0)
                    .unwrap_or(text_color.0)
                    .to_linear();

                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    // NOTE(review): underline reuses the strikethrough z offset —
                    // confirm no dedicated `TEXT_UNDERLINE` offset is intended.
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT_STRIKETHROUGH,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    clip: clip.map(|clip| clip.clip),
                    image: AssetId::default(),
                    extracted_camera_entity,
                    transform: transform * Affine2::from_translation(run.underline_position()),
                    item: ExtractedUiItem::Node {
                        color,
                        rect: Rect {
                            min: Vec2::ZERO,
                            max: run.underline_size(),
                        },
                        atlas_scaling: None,
                        flip_x: false,
                        flip_y: false,
                        border: BorderRect::ZERO,
                        border_radius: ResolvedBorderRadius::ZERO,
                        node_type: NodeType::Rect,
                    },
                    main_entity: entity.into(),
                });
            }
        }
    }
}
/// Per-vertex data pushed into [`UiMeta`]'s vertex buffer.
/// `#[repr(C)]` + `Pod` allow the struct to be uploaded to the GPU verbatim; the
/// field layout is expected to match the vertex input in `ui.wgsl` — confirm
/// against the shader when changing it.
#[repr(C)]
#[derive(Copy, Clone, Pod, Zeroable)]
struct UiVertex {
    /// Vertex position; the z component is set to 0.0 in `prepare_uinodes`.
    pub position: [f32; 3],
    /// Texture coordinates for this vertex.
    pub uv: [f32; 2],
    /// Linear RGBA color (from `LinearRgba::to_f32_array`).
    pub color: [f32; 4],
    /// Shader flags to determine how to render the UI node.
    /// See [`shader_flags`] for possible values.
    pub flags: u32,
    /// Border radius of the UI node.
    /// Ordering: top left, top right, bottom right, bottom left.
    pub radius: [f32; 4],
    /// Border thickness of the UI node.
    /// Ordering: left, top, right, bottom.
    pub border: [f32; 4],
    /// Size of the UI node.
    pub size: [f32; 2],
    /// Position relative to the center of the UI node.
    pub point: [f32; 2],
}
/// Render-world resource holding the CPU-side vertex/index buffers and the view
/// bind group used to draw UI nodes. All three are rebuilt each frame by
/// `prepare_uinodes`.
#[derive(Resource)]
pub struct UiMeta {
    // Vertex data for all queued UI quads; written to the GPU in `prepare_uinodes`.
    vertices: RawBufferVec<UiVertex>,
    // Index data for the queued quads (6 indices per quad).
    indices: RawBufferVec<u32>,
    // Bind group for the view uniforms; `None` until `prepare_uinodes` creates it.
    view_bind_group: Option<BindGroup>,
}
impl Default for UiMeta {
1277
fn default() -> Self {
1278
Self {
1279
vertices: RawBufferVec::new(BufferUsages::VERTEX),
1280
indices: RawBufferVec::new(BufferUsages::INDEX),
1281
view_bind_group: None,
1282
}
1283
}
1284
}
1285
1286
/// Corner offsets of a unit quad centered at the origin, in the order
/// (-0.5,-0.5), (0.5,-0.5), (0.5,0.5), (-0.5,0.5). `prepare_uinodes` scales these
/// by the node/glyph size and transforms them to produce the four vertices.
pub(crate) const QUAD_VERTEX_POSITIONS: [Vec2; 4] = [
    Vec2::new(-0.5, -0.5),
    Vec2::new(0.5, -0.5),
    Vec2::new(0.5, 0.5),
    Vec2::new(-0.5, 0.5),
];

/// Indices forming the quad's two triangles (0-2-3 and 0-1-2) over
/// [`QUAD_VERTEX_POSITIONS`].
pub(crate) const QUAD_INDICES: [usize; 6] = [0, 2, 3, 0, 1, 2];
/// A run of consecutive UI phase items that share a single image and can be drawn
/// together. Built by `prepare_uinodes`.
#[derive(Component)]
pub struct UiBatch {
    /// Draw range for this batch; `prepare_uinodes` advances it by 6 (one quad's
    /// worth of indices) per emitted quad.
    pub range: Range<u32>,
    /// Image bound for this batch; `AssetId::default()` denotes an untextured batch.
    pub image: AssetId<Image>,
}
/// The values here should match the values for the constants in `ui.wgsl`
pub mod shader_flags {
    /// Texture should be ignored
    pub const UNTEXTURED: u32 = 0;
    /// Textured
    pub const TEXTURED: u32 = 1;
    /// Per-corner flag bits mixed into each vertex's flags.
    /// Ordering: top left, top right, bottom right, bottom left.
    pub const CORNERS: [u32; 4] = [0, 2, 2 | 4, 4];
    pub const RADIAL: u32 = 16;
    pub const FILL_START: u32 = 32;
    pub const FILL_END: u32 = 64;
    pub const CONIC: u32 = 128;
    pub const BORDER_LEFT: u32 = 256;
    pub const BORDER_TOP: u32 = 512;
    pub const BORDER_RIGHT: u32 = 1024;
    pub const BORDER_BOTTOM: u32 = 2048;
    /// All four border sides. Composed with bitwise OR (idiomatic for flag bits;
    /// `+` would silently double-count if a bit were ever repeated). The value is
    /// identical to the previous additive form: 3840.
    pub const BORDER_ALL: u32 = BORDER_LEFT | BORDER_TOP | BORDER_RIGHT | BORDER_BOTTOM;
}
/// Queues every extracted UI node into its camera's transparent UI render phase,
/// specializing the UI pipeline per view (HDR, anti-aliasing).
///
/// Extracted nodes from the same camera tend to be contiguous, so the resolved
/// `(view, anti-alias, phase)` triple is cached and only re-looked-up when the
/// camera entity changes between consecutive nodes.
pub fn queue_uinodes(
    extracted_uinodes: Res<ExtractedUiNodes>,
    ui_pipeline: Res<UiPipeline>,
    mut pipelines: ResMut<SpecializedRenderPipelines<UiPipeline>>,
    mut transparent_render_phases: ResMut<ViewSortedRenderPhases<TransparentUi>>,
    render_views: Query<(&UiCameraView, Option<&UiAntiAlias>), With<ExtractedView>>,
    camera_views: Query<&ExtractedView>,
    pipeline_cache: Res<PipelineCache>,
    draw_functions: Res<DrawFunctions<TransparentUi>>,
) {
    let draw_function = draw_functions.read().id::<DrawUi>();
    // Cache of the last camera's resolved phase, keyed by `current_camera_entity`.
    let mut current_camera_entity = Entity::PLACEHOLDER;
    let mut current_phase = None;

    for (index, extracted_uinode) in extracted_uinodes.uinodes.iter().enumerate() {
        if current_camera_entity != extracted_uinode.extracted_camera_entity {
            // Resolve camera entity -> UI camera view -> extracted view -> its
            // transparent phase; any failure leaves `current_phase` as `None`.
            current_phase = render_views
                .get(extracted_uinode.extracted_camera_entity)
                .ok()
                .and_then(|(default_camera_view, ui_anti_alias)| {
                    camera_views
                        .get(default_camera_view.0)
                        .ok()
                        .and_then(|view| {
                            transparent_render_phases
                                .get_mut(&view.retained_view_entity)
                                .map(|transparent_phase| (view, ui_anti_alias, transparent_phase))
                        })
                });
            current_camera_entity = extracted_uinode.extracted_camera_entity;
        }

        // Nodes whose camera/phase could not be resolved are skipped.
        let Some((view, ui_anti_alias, transparent_phase)) = current_phase.as_mut() else {
            continue;
        };

        // Anti-aliasing defaults to on when no `UiAntiAlias` component is present.
        let pipeline = pipelines.specialize(
            &pipeline_cache,
            &ui_pipeline,
            UiPipelineKey {
                hdr: view.hdr,
                anti_alias: matches!(ui_anti_alias, None | Some(UiAntiAlias::On)),
            },
        );

        transparent_phase.add(TransparentUi {
            draw_function,
            pipeline,
            entity: (extracted_uinode.render_entity, extracted_uinode.main_entity),
            // Items are sorted by z_order so UI stacking order is preserved.
            sort_key: FloatOrd(extracted_uinode.z_order),
            index,
            // batch_range will be calculated in prepare_uinodes
            batch_range: 0..0,
            extra_index: PhaseItemExtraIndex::None,
            indexed: true,
        });
    }
}
/// Cache of per-image bind groups used when drawing textured UI batches.
/// Entries are created lazily in `prepare_uinodes` and evicted there when the
/// underlying image asset is modified or removed.
#[derive(Resource, Default)]
pub struct ImageNodeBindGroups {
    pub values: HashMap<AssetId<Image>, BindGroup>,
}
/// Fills [`UiMeta`]'s vertex/index buffers from the extracted UI nodes, groups
/// consecutive phase items that share an image into [`UiBatch`]es, and uploads
/// the buffers to the GPU.
///
/// Batching rules (see the conditions below):
/// - a new batch starts when there is no current batch, the image cache was
///   invalidated, or both the current batch and the node are textured with
///   *different* images;
/// - an untextured batch (image == `AssetId::default()`) is "upgraded" in place
///   when a textured node arrives, since untextured quads ignore the texture.
///
/// Naming caveat: `vertices_index` advances by 6 per quad (one quad's worth of
/// *indices*) and feeds the batch draw `range`, while `indices_index` advances by
/// 4 per quad (one quad's worth of *vertices*) and offsets the pushed index
/// values. The names are swapped relative to what they count — intentional as
/// written, do not "fix" one without the other.
pub fn prepare_uinodes(
    mut commands: Commands,
    render_device: Res<RenderDevice>,
    render_queue: Res<RenderQueue>,
    pipeline_cache: Res<PipelineCache>,
    mut ui_meta: ResMut<UiMeta>,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    view_uniforms: Res<ViewUniforms>,
    ui_pipeline: Res<UiPipeline>,
    mut image_bind_groups: ResMut<ImageNodeBindGroups>,
    gpu_images: Res<RenderAssets<GpuImage>>,
    mut phases: ResMut<ViewSortedRenderPhases<TransparentUi>>,
    events: Res<SpriteAssetEvents>,
    mut previous_len: Local<usize>,
) {
    // If an image has changed, the GpuImage has (probably) changed
    for event in &events.images {
        match event {
            AssetEvent::Added { .. } |
            AssetEvent::Unused { .. } |
            // Images don't have dependencies
            AssetEvent::LoadedWithDependencies { .. } => {}
            AssetEvent::Modified { id } | AssetEvent::Removed { id } => {
                // Drop the cached bind group so it is rebuilt from the new GpuImage.
                image_bind_groups.values.remove(id);
            }
        };
    }

    if let Some(view_binding) = view_uniforms.uniforms.binding() {
        // `previous_len` remembers last frame's batch count to size the Vec up front.
        let mut batches: Vec<(Entity, UiBatch)> = Vec::with_capacity(*previous_len);

        ui_meta.vertices.clear();
        ui_meta.indices.clear();
        ui_meta.view_bind_group = Some(render_device.create_bind_group(
            "ui_view_bind_group",
            &pipeline_cache.get_bind_group_layout(&ui_pipeline.view_layout),
            &BindGroupEntries::single(view_binding),
        ));

        // Buffer indexes (see the naming caveat in the function docs).
        let mut vertices_index = 0;
        let mut indices_index = 0;

        for ui_phase in phases.values_mut() {
            let mut batch_item_index = 0;
            let mut batch_image_handle = AssetId::invalid();

            for item_index in 0..ui_phase.items.len() {
                let item = &mut ui_phase.items[item_index];
                // A phase item whose render entity no longer matches its extracted
                // node is stale; break the current batch and skip it.
                let Some(extracted_uinode) = extracted_uinodes
                    .uinodes
                    .get(item.index)
                    .filter(|n| item.entity() == n.render_entity)
                else {
                    batch_image_handle = AssetId::invalid();
                    continue;
                };

                let mut existing_batch = batches.last_mut();

                // Start a new batch: no batch yet / batch invalidated, or both the
                // batch and the node are textured with different images.
                if batch_image_handle == AssetId::invalid()
                    || existing_batch.is_none()
                    || (batch_image_handle != AssetId::default()
                        && extracted_uinode.image != AssetId::default()
                        && batch_image_handle != extracted_uinode.image)
                {
                    if let Some(gpu_image) = gpu_images.get(extracted_uinode.image) {
                        batch_item_index = item_index;
                        batch_image_handle = extracted_uinode.image;

                        let new_batch = UiBatch {
                            range: vertices_index..vertices_index,
                            image: extracted_uinode.image,
                        };

                        batches.push((item.entity(), new_batch));

                        image_bind_groups
                            .values
                            .entry(batch_image_handle)
                            .or_insert_with(|| {
                                render_device.create_bind_group(
                                    "ui_material_bind_group",
                                    &pipeline_cache
                                        .get_bind_group_layout(&ui_pipeline.image_layout),
                                    &BindGroupEntries::sequential((
                                        &gpu_image.texture_view,
                                        &gpu_image.sampler,
                                    )),
                                )
                            });

                        existing_batch = batches.last_mut();
                    } else {
                        // GPU image not ready yet; skip the node this frame.
                        continue;
                    }
                } else if batch_image_handle == AssetId::default()
                    && extracted_uinode.image != AssetId::default()
                {
                    // Upgrade an untextured batch in place to this node's image:
                    // untextured quads ignore the bound texture, so they can share it.
                    if let Some(ref mut existing_batch) = existing_batch
                        && let Some(gpu_image) = gpu_images.get(extracted_uinode.image)
                    {
                        batch_image_handle = extracted_uinode.image;
                        existing_batch.1.image = extracted_uinode.image;

                        image_bind_groups
                            .values
                            .entry(batch_image_handle)
                            .or_insert_with(|| {
                                render_device.create_bind_group(
                                    "ui_material_bind_group",
                                    &pipeline_cache
                                        .get_bind_group_layout(&ui_pipeline.image_layout),
                                    &BindGroupEntries::sequential((
                                        &gpu_image.texture_view,
                                        &gpu_image.sampler,
                                    )),
                                )
                            });
                    } else {
                        continue;
                    }
                }
                match &extracted_uinode.item {
                    ExtractedUiItem::Node {
                        atlas_scaling,
                        flip_x,
                        flip_y,
                        border_radius,
                        border,
                        node_type,
                        rect,
                        color,
                    } => {
                        let mut flags = if extracted_uinode.image != AssetId::default() {
                            shader_flags::TEXTURED
                        } else {
                            shader_flags::UNTEXTURED
                        };

                        let mut uinode_rect = *rect;

                        let rect_size = uinode_rect.size();

                        let transform = extracted_uinode.transform;

                        // Specify the corners of the node
                        let positions = QUAD_VERTEX_POSITIONS
                            .map(|pos| transform.transform_point2(pos * rect_size).extend(0.));
                        // Node-local corner points (fed to the shader as `point`).
                        let points = QUAD_VERTEX_POSITIONS.map(|pos| pos * rect_size);

                        // Calculate the effect of clipping
                        // Note: this won't work with rotation/scaling, but that's much more complex (may need more that 2 quads)
                        // Each entry is how far a corner must move inward to satisfy
                        // the clip rect (max-clamped for min edges, min-clamped for max edges).
                        let mut positions_diff = if let Some(clip) = extracted_uinode.clip {
                            [
                                Vec2::new(
                                    f32::max(clip.min.x - positions[0].x, 0.),
                                    f32::max(clip.min.y - positions[0].y, 0.),
                                ),
                                Vec2::new(
                                    f32::min(clip.max.x - positions[1].x, 0.),
                                    f32::max(clip.min.y - positions[1].y, 0.),
                                ),
                                Vec2::new(
                                    f32::min(clip.max.x - positions[2].x, 0.),
                                    f32::min(clip.max.y - positions[2].y, 0.),
                                ),
                                Vec2::new(
                                    f32::max(clip.min.x - positions[3].x, 0.),
                                    f32::min(clip.max.y - positions[3].y, 0.),
                                ),
                            ]
                        } else {
                            [Vec2::ZERO; 4]
                        };

                        let positions_clipped = [
                            positions[0] + positions_diff[0].extend(0.),
                            positions[1] + positions_diff[1].extend(0.),
                            positions[2] + positions_diff[2].extend(0.),
                            positions[3] + positions_diff[3].extend(0.),
                        ];

                        // Shadowing: clip-adjusted node-local points.
                        let points = [
                            points[0] + positions_diff[0],
                            points[1] + positions_diff[1],
                            points[2] + positions_diff[2],
                            points[3] + positions_diff[3],
                        ];

                        let transformed_rect_size = transform.transform_vector2(rect_size);

                        // Don't try to cull nodes that have a rotation
                        // In a rotation around the Z-axis, this value is 0.0 for an angle of 0.0 or π
                        // In those two cases, the culling check can proceed normally as corners will be on
                        // horizontal / vertical lines
                        // For all other angles, bypass the culling check
                        // This does not properly handles all rotations on all axis
                        if transform.x_axis[1] == 0.0 {
                            // Cull nodes that are completely clipped
                            if positions_diff[0].x - positions_diff[1].x >= transformed_rect_size.x
                                || positions_diff[1].y - positions_diff[2].y
                                    >= transformed_rect_size.y
                            {
                                continue;
                            }
                        }
                        let uvs = if flags == shader_flags::UNTEXTURED {
                            [Vec2::ZERO, Vec2::X, Vec2::ONE, Vec2::Y]
                        } else {
                            let image = gpu_images
                                .get(extracted_uinode.image)
                                .expect("Image was checked during batching and should still exist");
                            // Rescale atlases. This is done here because we need texture data that might not be available in Extract.
                            let atlas_extent = atlas_scaling
                                .map(|scaling| image.size_2d().as_vec2() * scaling)
                                .unwrap_or(uinode_rect.max);
                            // Flips swap the rect edges and mirror the clip offsets
                            // so UVs stay consistent with the clipped positions.
                            if *flip_x {
                                core::mem::swap(&mut uinode_rect.max.x, &mut uinode_rect.min.x);
                                positions_diff[0].x *= -1.;
                                positions_diff[1].x *= -1.;
                                positions_diff[2].x *= -1.;
                                positions_diff[3].x *= -1.;
                            }
                            if *flip_y {
                                core::mem::swap(&mut uinode_rect.max.y, &mut uinode_rect.min.y);
                                positions_diff[0].y *= -1.;
                                positions_diff[1].y *= -1.;
                                positions_diff[2].y *= -1.;
                                positions_diff[3].y *= -1.;
                            }
                            [
                                Vec2::new(
                                    uinode_rect.min.x + positions_diff[0].x,
                                    uinode_rect.min.y + positions_diff[0].y,
                                ),
                                Vec2::new(
                                    uinode_rect.max.x + positions_diff[1].x,
                                    uinode_rect.min.y + positions_diff[1].y,
                                ),
                                Vec2::new(
                                    uinode_rect.max.x + positions_diff[2].x,
                                    uinode_rect.max.y + positions_diff[2].y,
                                ),
                                Vec2::new(
                                    uinode_rect.min.x + positions_diff[3].x,
                                    uinode_rect.max.y + positions_diff[3].y,
                                ),
                            ]
                            .map(|pos| pos / atlas_extent)
                        };

                        let color = color.to_f32_array();
                        if let NodeType::Border(border_flags) = *node_type {
                            flags |= border_flags;
                        }

                        for i in 0..4 {
                            ui_meta.vertices.push(UiVertex {
                                position: positions_clipped[i].into(),
                                uv: uvs[i].into(),
                                color,
                                flags: flags | shader_flags::CORNERS[i],
                                radius: (*border_radius).into(),
                                border: [
                                    border.min_inset.x,
                                    border.min_inset.y,
                                    border.max_inset.x,
                                    border.max_inset.y,
                                ],
                                size: rect_size.into(),
                                point: points[i].into(),
                            });
                        }

                        // `indices_index` is the base vertex of this quad.
                        for &i in &QUAD_INDICES {
                            ui_meta.indices.push(indices_index + i as u32);
                        }

                        // See the naming caveat in the function docs: 6 indices, 4 vertices.
                        vertices_index += 6;
                        indices_index += 4;
                    }
                    ExtractedUiItem::Glyphs { range } => {
                        let image = gpu_images
                            .get(extracted_uinode.image)
                            .expect("Image was checked during batching and should still exist");

                        let atlas_extent = image.size_2d().as_vec2();

                        // One quad per glyph; all glyphs share the node's transform,
                        // clip, and atlas texture.
                        for glyph in &extracted_uinodes.glyphs[range.clone()] {
                            let color = glyph.color.to_f32_array();
                            let glyph_rect = glyph.rect;
                            let rect_size = glyph_rect.size();

                            // Specify the corners of the glyph
                            let positions = QUAD_VERTEX_POSITIONS.map(|pos| {
                                extracted_uinode
                                    .transform
                                    .transform_point2(glyph.translation + pos * glyph_rect.size())
                                    .extend(0.)
                            });

                            // Same per-corner clip adjustment as the Node arm above.
                            let positions_diff = if let Some(clip) = extracted_uinode.clip {
                                [
                                    Vec2::new(
                                        f32::max(clip.min.x - positions[0].x, 0.),
                                        f32::max(clip.min.y - positions[0].y, 0.),
                                    ),
                                    Vec2::new(
                                        f32::min(clip.max.x - positions[1].x, 0.),
                                        f32::max(clip.min.y - positions[1].y, 0.),
                                    ),
                                    Vec2::new(
                                        f32::min(clip.max.x - positions[2].x, 0.),
                                        f32::min(clip.max.y - positions[2].y, 0.),
                                    ),
                                    Vec2::new(
                                        f32::max(clip.min.x - positions[3].x, 0.),
                                        f32::min(clip.max.y - positions[3].y, 0.),
                                    ),
                                ]
                            } else {
                                [Vec2::ZERO; 4]
                            };

                            let positions_clipped = [
                                positions[0] + positions_diff[0].extend(0.),
                                positions[1] + positions_diff[1].extend(0.),
                                positions[2] + positions_diff[2].extend(0.),
                                positions[3] + positions_diff[3].extend(0.),
                            ];

                            // cull nodes that are completely clipped
                            let transformed_rect_size =
                                extracted_uinode.transform.transform_vector2(rect_size);
                            if positions_diff[0].x - positions_diff[1].x
                                >= transformed_rect_size.x.abs()
                                || positions_diff[1].y - positions_diff[2].y
                                    >= transformed_rect_size.y.abs()
                            {
                                continue;
                            }

                            let uvs = [
                                Vec2::new(
                                    glyph.rect.min.x + positions_diff[0].x,
                                    glyph.rect.min.y + positions_diff[0].y,
                                ),
                                Vec2::new(
                                    glyph.rect.max.x + positions_diff[1].x,
                                    glyph.rect.min.y + positions_diff[1].y,
                                ),
                                Vec2::new(
                                    glyph.rect.max.x + positions_diff[2].x,
                                    glyph.rect.max.y + positions_diff[2].y,
                                ),
                                Vec2::new(
                                    glyph.rect.min.x + positions_diff[3].x,
                                    glyph.rect.max.y + positions_diff[3].y,
                                ),
                            ]
                            .map(|pos| pos / atlas_extent);

                            for i in 0..4 {
                                ui_meta.vertices.push(UiVertex {
                                    position: positions_clipped[i].into(),
                                    uv: uvs[i].into(),
                                    color,
                                    flags: shader_flags::TEXTURED | shader_flags::CORNERS[i],
                                    radius: [0.0; 4],
                                    border: [0.0; 4],
                                    size: rect_size.into(),
                                    point: [0.0; 2],
                                });
                            }

                            for &i in &QUAD_INDICES {
                                ui_meta.indices.push(indices_index + i as u32);
                            }

                            // See the naming caveat in the function docs: 6 indices, 4 vertices.
                            vertices_index += 6;
                            indices_index += 4;
                        }
                    }
                }
                // Extend the current batch's draw range over the quad(s) just pushed
                // and grow the owning phase item's batch range.
                existing_batch.unwrap().1.range.end = vertices_index;
                ui_phase.items[batch_item_index].batch_range_mut().end += 1;
            }
        }

        ui_meta.vertices.write_buffer(&render_device, &render_queue);
        ui_meta.indices.write_buffer(&render_device, &render_queue);
        *previous_len = batches.len();
        commands.try_insert_batch(batches);
    }
    // Extracted data is consumed each frame.
    extracted_uinodes.clear();
}