Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
bevyengine
GitHub Repository: bevyengine/bevy
Path: blob/main/crates/bevy_ui_render/src/lib.rs
6596 views
1
#![expect(missing_docs, reason = "Not all docs are written yet, see #3492.")]
2
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
3
#![doc(
4
html_logo_url = "https://bevyengine.org/assets/icon.png",
5
html_favicon_url = "https://bevyengine.org/assets/icon.png"
6
)]
7
8
//! Provides rendering functionality for `bevy_ui`.
9
10
pub mod box_shadow;
11
mod gradient;
12
mod pipeline;
13
mod render_pass;
14
pub mod ui_material;
15
mod ui_material_pipeline;
16
pub mod ui_texture_slice_pipeline;
17
18
#[cfg(feature = "bevy_ui_debug")]
19
mod debug_overlay;
20
21
use bevy_camera::visibility::InheritedVisibility;
22
use bevy_camera::{Camera, Camera2d, Camera3d};
23
use bevy_reflect::prelude::ReflectDefault;
24
use bevy_reflect::Reflect;
25
use bevy_shader::load_shader_library;
26
use bevy_sprite_render::SpriteAssetEvents;
27
use bevy_ui::widget::{ImageNode, TextShadow, ViewportNode};
28
use bevy_ui::{
29
BackgroundColor, BorderColor, CalculatedClip, ComputedNode, ComputedUiTargetCamera, Display,
30
Node, Outline, ResolvedBorderRadius, UiGlobalTransform,
31
};
32
33
use bevy_app::prelude::*;
34
use bevy_asset::{AssetEvent, AssetId, Assets};
35
use bevy_color::{Alpha, ColorToComponents, LinearRgba};
36
use bevy_core_pipeline::core_2d::graph::{Core2d, Node2d};
37
use bevy_core_pipeline::core_3d::graph::{Core3d, Node3d};
38
use bevy_ecs::prelude::*;
39
use bevy_ecs::system::SystemParam;
40
use bevy_image::{prelude::*, TRANSPARENT_IMAGE_HANDLE};
41
use bevy_math::{Affine2, FloatOrd, Mat4, Rect, UVec4, Vec2};
42
use bevy_render::{
43
render_asset::RenderAssets,
44
render_graph::{Node as RenderGraphNode, NodeRunError, RenderGraph, RenderGraphContext},
45
render_phase::{
46
sort_phase_system, AddRenderCommand, DrawFunctions, PhaseItem, PhaseItemExtraIndex,
47
ViewSortedRenderPhases,
48
},
49
render_resource::*,
50
renderer::{RenderContext, RenderDevice, RenderQueue},
51
sync_world::{MainEntity, RenderEntity, TemporaryRenderEntity},
52
texture::GpuImage,
53
view::{ExtractedView, Hdr, RetainedViewEntity, ViewUniforms},
54
Extract, ExtractSchedule, Render, RenderApp, RenderStartup, RenderSystems,
55
};
56
use bevy_sprite::BorderRect;
57
#[cfg(feature = "bevy_ui_debug")]
58
pub use debug_overlay::UiDebugOptions;
59
use gradient::GradientPlugin;
60
61
use bevy_platform::collections::{HashMap, HashSet};
62
use bevy_text::{
63
ComputedTextBlock, PositionedGlyph, TextBackgroundColor, TextColor, TextLayoutInfo,
64
};
65
use bevy_transform::components::GlobalTransform;
66
use box_shadow::BoxShadowPlugin;
67
use bytemuck::{Pod, Zeroable};
68
use core::ops::Range;
69
70
use graph::{NodeUi, SubGraphUi};
71
pub use pipeline::*;
72
pub use render_pass::*;
73
pub use ui_material_pipeline::*;
74
use ui_texture_slice_pipeline::UiTextureSlicerPlugin;
75
76
pub mod graph {
    use bevy_render::render_graph::{RenderLabel, RenderSubGraph};

    /// Label for the UI render sub graph that is attached to the 2D and 3D
    /// core render graphs.
    #[derive(Debug, Hash, PartialEq, Eq, Clone, RenderSubGraph)]
    pub struct SubGraphUi;

    /// Labels for the nodes of the UI render sub graph.
    #[derive(Debug, Hash, PartialEq, Eq, Clone, RenderLabel)]
    pub enum NodeUi {
        /// The node that runs the UI render pass.
        UiPass,
    }
}
87
88
/// The UI renderer prelude, re-exporting the most commonly used items.
pub mod prelude {
    #[cfg(feature = "bevy_ui_debug")]
    pub use crate::debug_overlay::UiDebugOptions;

    pub use crate::{
        ui_material::*, ui_material_pipeline::UiMaterialPlugin, BoxShadowSamples, UiAntiAlias,
    };
}
96
97
/// Local Z offsets of "extracted nodes" for a given entity. These exist to allow rendering multiple "extracted nodes"
/// for a given source entity (ex: render both a background color _and_ a custom material for a given node).
///
/// When possible these offsets should be defined in _this_ module to ensure z-index coordination across contexts.
/// When this is _not_ possible, pick a suitably unique index unlikely to clash with other things (ex: `0.1826823` not `0.1`).
///
/// Offsets should be unique for a given node entity to avoid z fighting.
/// These should pretty much _always_ be larger than -0.5 and smaller than 0.5 to avoid clipping into nodes
/// above / below the current node in the stack.
///
/// A z-index of 0.0 is the baseline, which is used as the primary "background color" of the node.
///
/// Note that nodes "stack" on each other, so a negative offset on the node above could clip _into_
/// a positive offset on a node below.
pub mod stack_z_offsets {
    /// Box shadows render behind the node's own background.
    pub const BOX_SHADOW: f32 = -0.1;
    /// Baseline: the node's primary background color.
    pub const BACKGROUND_COLOR: f32 = 0.0;
    /// Borders render just above the background.
    pub const BORDER: f32 = 0.01;
    /// Background gradients render above borders.
    pub const GRADIENT: f32 = 0.02;
    /// Border gradients render above background gradients.
    pub const BORDER_GRADIENT: f32 = 0.03;
    /// Images render above gradients.
    pub const IMAGE: f32 = 0.04;
    /// Custom UI materials render above images.
    pub const MATERIAL: f32 = 0.05;
    /// Text renders topmost within a node's local stack.
    pub const TEXT: f32 = 0.06;
}
121
122
/// System sets for the UI extraction systems that run in the render world's
/// [`ExtractSchedule`]. Most of these are chained in order by `UiRenderPlugin`.
#[derive(Debug, Hash, PartialEq, Eq, Clone, SystemSet)]
pub enum RenderUiSystems {
    ExtractCameraViews,
    ExtractBoxShadows,
    ExtractBackgrounds,
    ExtractImages,
    ExtractTextureSlice,
    ExtractBorders,
    ExtractViewportNodes,
    ExtractTextBackgrounds,
    ExtractTextShadows,
    ExtractText,
    ExtractDebug,
    ExtractGradient,
}
137
138
/// Marker for controlling whether UI is rendered with or without anti-aliasing
/// in a camera. By default, UI is always anti-aliased.
///
/// **Note:** This does not affect text anti-aliasing. For that, use the `font_smoothing` property of the [`TextFont`](bevy_text::TextFont) component.
///
/// ```
/// use bevy_camera::prelude::*;
/// use bevy_ecs::prelude::*;
/// use bevy_ui::prelude::*;
/// use bevy_ui_render::prelude::*;
///
/// fn spawn_camera(mut commands: Commands) {
///     commands.spawn((
///         Camera2d,
///         // This will cause all UI in this camera to be rendered without
///         // anti-aliasing
///         UiAntiAlias::Off,
///     ));
/// }
/// ```
#[derive(Component, Clone, Copy, Default, Debug, Reflect, Eq, PartialEq)]
#[reflect(Component, Default, PartialEq, Clone)]
pub enum UiAntiAlias {
    /// UI will render with anti-aliasing
    #[default]
    On,
    /// UI will render without anti-aliasing
    Off,
}
167
168
/// Number of shadow samples.
/// A larger value will result in higher quality shadows.
/// Default is 4, values higher than ~10 offer diminishing returns.
///
/// ```
/// use bevy_camera::prelude::*;
/// use bevy_ecs::prelude::*;
/// use bevy_ui::prelude::*;
/// use bevy_ui_render::prelude::*;
///
/// fn spawn_camera(mut commands: Commands) {
///     commands.spawn((
///         Camera2d,
///         BoxShadowSamples(6),
///     ));
/// }
/// ```
// `Default` is implemented manually below (returns 4), hence it is listed in
// `#[reflect(...)]` but not derived here.
#[derive(Component, Clone, Copy, Debug, Reflect, Eq, PartialEq)]
#[reflect(Component, Default, PartialEq, Clone)]
pub struct BoxShadowSamples(pub u32);
188
189
impl Default for BoxShadowSamples {
190
fn default() -> Self {
191
Self(4)
192
}
193
}
194
195
/// Deprecated alias for [`RenderUiSystems`].
#[deprecated(since = "0.17.0", note = "Renamed to `RenderUiSystems`.")]
pub type RenderUiSystem = RenderUiSystems;
198
199
/// Plugin that adds UI rendering: it registers the render-world resources,
/// extraction/queue/prepare systems, and wires the UI pass into the 2D and 3D
/// render graphs.
#[derive(Default)]
pub struct UiRenderPlugin;
201
202
impl Plugin for UiRenderPlugin {
    fn build(&self, app: &mut App) {
        load_shader_library!(app, "ui.wgsl");

        #[cfg(feature = "bevy_ui_debug")]
        app.init_resource::<UiDebugOptions>();

        // UI rendering only applies when a render app exists (e.g. not in headless mode).
        let Some(render_app) = app.get_sub_app_mut(RenderApp) else {
            return;
        };

        render_app
            .init_resource::<SpecializedRenderPipelines<UiPipeline>>()
            .init_resource::<ImageNodeBindGroups>()
            .init_resource::<UiMeta>()
            .init_resource::<ExtractedUiNodes>()
            .allow_ambiguous_resource::<ExtractedUiNodes>()
            .init_resource::<DrawFunctions<TransparentUi>>()
            .init_resource::<ViewSortedRenderPhases<TransparentUi>>()
            .add_render_command::<TransparentUi, DrawUi>()
            // Chain the extraction sets so extracted nodes are pushed in a
            // deterministic order. NOTE(review): `ExtractViewportNodes` and
            // `ExtractGradient` are not part of this chain.
            .configure_sets(
                ExtractSchedule,
                (
                    RenderUiSystems::ExtractCameraViews,
                    RenderUiSystems::ExtractBoxShadows,
                    RenderUiSystems::ExtractBackgrounds,
                    RenderUiSystems::ExtractImages,
                    RenderUiSystems::ExtractTextureSlice,
                    RenderUiSystems::ExtractBorders,
                    RenderUiSystems::ExtractTextBackgrounds,
                    RenderUiSystems::ExtractTextShadows,
                    RenderUiSystems::ExtractText,
                    RenderUiSystems::ExtractDebug,
                )
                    .chain(),
            )
            .add_systems(RenderStartup, init_ui_pipeline)
            .add_systems(
                ExtractSchedule,
                (
                    extract_ui_camera_view.in_set(RenderUiSystems::ExtractCameraViews),
                    extract_uinode_background_colors.in_set(RenderUiSystems::ExtractBackgrounds),
                    extract_uinode_images.in_set(RenderUiSystems::ExtractImages),
                    extract_uinode_borders.in_set(RenderUiSystems::ExtractBorders),
                    extract_viewport_nodes.in_set(RenderUiSystems::ExtractViewportNodes),
                    extract_text_background_colors.in_set(RenderUiSystems::ExtractTextBackgrounds),
                    extract_text_shadows.in_set(RenderUiSystems::ExtractTextShadows),
                    extract_text_sections.in_set(RenderUiSystems::ExtractText),
                    #[cfg(feature = "bevy_ui_debug")]
                    debug_overlay::extract_debug_overlay.in_set(RenderUiSystems::ExtractDebug),
                ),
            )
            .add_systems(
                Render,
                (
                    queue_uinodes.in_set(RenderSystems::Queue),
                    sort_phase_system::<TransparentUi>.in_set(RenderSystems::PhaseSort),
                    prepare_uinodes.in_set(RenderSystems::PrepareBindGroups),
                ),
            );

        // Render graph: attach the UI sub graph and UI pass node to both the
        // 2D and 3D core graphs, running after the main pass (and post
        // processing) but before upscaling.
        render_app
            .world_mut()
            .resource_scope(|world, mut graph: Mut<RenderGraph>| {
                if let Some(graph_2d) = graph.get_sub_graph_mut(Core2d) {
                    let ui_graph_2d = new_ui_graph(world);
                    graph_2d.add_sub_graph(SubGraphUi, ui_graph_2d);
                    graph_2d.add_node(NodeUi::UiPass, RunUiSubgraphOnUiViewNode);
                    graph_2d.add_node_edge(Node2d::EndMainPass, NodeUi::UiPass);
                    graph_2d.add_node_edge(Node2d::EndMainPassPostProcessing, NodeUi::UiPass);
                    graph_2d.add_node_edge(NodeUi::UiPass, Node2d::Upscaling);
                }

                if let Some(graph_3d) = graph.get_sub_graph_mut(Core3d) {
                    let ui_graph_3d = new_ui_graph(world);
                    graph_3d.add_sub_graph(SubGraphUi, ui_graph_3d);
                    graph_3d.add_node(NodeUi::UiPass, RunUiSubgraphOnUiViewNode);
                    graph_3d.add_node_edge(Node3d::EndMainPass, NodeUi::UiPass);
                    graph_3d.add_node_edge(Node3d::EndMainPassPostProcessing, NodeUi::UiPass);
                    graph_3d.add_node_edge(NodeUi::UiPass, Node3d::Upscaling);
                }
            });

        app.add_plugins(UiTextureSlicerPlugin);
        app.add_plugins(GradientPlugin);
        app.add_plugins(BoxShadowPlugin);
    }
}
291
292
/// Builds the UI render sub graph, which contains a single [`UiPassNode`].
fn new_ui_graph(world: &mut World) -> RenderGraph {
    let mut graph = RenderGraph::default();
    graph.add_node(NodeUi::UiPass, UiPassNode::new(world));
    graph
}
298
299
/// System parameter wrapping the query used to map main-world camera entities
/// to their render-world counterparts during UI extraction.
#[derive(SystemParam)]
pub struct UiCameraMap<'w, 's> {
    mapping: Query<'w, 's, RenderEntity>,
}
303
304
impl<'w, 's> UiCameraMap<'w, 's> {
    /// Creates a [`UiCameraMapper`] for performing repeated camera-to-render-entity lookups.
    ///
    /// The last successful mapping is cached to avoid redundant queries.
    pub fn get_mapper(&'w self) -> UiCameraMapper<'w, 's> {
        UiCameraMapper {
            mapping: &self.mapping,
            // `PLACEHOLDER` marks the cache as empty; it never equals a real
            // camera entity, so the first `map` call always performs a lookup.
            camera_entity: Entity::PLACEHOLDER,
            render_entity: Entity::PLACEHOLDER,
        }
    }
}
316
317
/// Helper for mapping UI target camera entities to their corresponding render entities,
/// with caching to avoid repeated lookups for the same camera.
pub struct UiCameraMapper<'w, 's> {
    mapping: &'w Query<'w, 's, RenderEntity>,
    /// Cached camera entity from the last successful `map` call.
    camera_entity: Entity,
    /// Cached render entity corresponding to `camera_entity` from the last
    /// successful `map` call.
    render_entity: Entity,
}
326
327
impl<'w, 's> UiCameraMapper<'w, 's> {
328
/// Returns the render entity corresponding to the given [`ComputedUiTargetCamera`]'s camera, or none if no corresponding entity was found.
329
pub fn map(&mut self, computed_target: &ComputedUiTargetCamera) -> Option<Entity> {
330
let camera_entity = computed_target.get()?;
331
if self.camera_entity != camera_entity {
332
let new_render_camera_entity = self.mapping.get(camera_entity).ok()?;
333
self.render_entity = new_render_camera_entity;
334
self.camera_entity = camera_entity;
335
}
336
337
Some(self.render_entity)
338
}
339
340
/// Returns the cached camera entity from the last successful `map` call.
341
pub fn current_camera(&self) -> Entity {
342
self.camera_entity
343
}
344
}
345
346
/// A UI element extracted from the main world for rendering.
pub struct ExtractedUiNode {
    /// Z position used to sort this node within the transparent UI phase
    /// (stack index plus a [`stack_z_offsets`] offset).
    pub z_order: f32,
    /// Texture to sample; `AssetId::default()` when the node is untextured.
    pub image: AssetId<Image>,
    /// Optional clipping rectangle inherited from `CalculatedClip`.
    pub clip: Option<Rect>,
    /// Render world entity of the extracted camera corresponding to this node's target camera.
    pub extracted_camera_entity: Entity,
    /// The node payload: either a single rect/border or a run of glyphs.
    pub item: ExtractedUiItem,
    /// The main-world entity this node was extracted from.
    pub main_entity: MainEntity,
    /// Temporary render-world entity spawned for this extracted node.
    pub render_entity: Entity,
    /// World transform of the node.
    pub transform: Affine2,
}
357
358
/// The type of UI node.
/// This is used to determine how to render the UI node.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum NodeType {
    /// A filled rectangle (background, image, etc.).
    Rect,
    /// A border; the payload is a bitmask of `shader_flags::BORDER_*` values
    /// selecting which edges to draw.
    Border(u32), // shader flags
}
365
366
/// Payload of an [`ExtractedUiNode`].
pub enum ExtractedUiItem {
    /// A single rectangular element (background, image, border or outline).
    Node {
        color: LinearRgba,
        rect: Rect,
        /// Scale factor applied when sampling from a texture atlas region.
        atlas_scaling: Option<Vec2>,
        flip_x: bool,
        flip_y: bool,
        /// Border radius of the UI node.
        /// Ordering: top left, top right, bottom right, bottom left.
        border_radius: ResolvedBorderRadius,
        /// Border thickness of the UI node.
        /// Ordering: left, top, right, bottom.
        border: BorderRect,
        node_type: NodeType,
    },
    /// A contiguous sequence of text glyphs from the same section
    Glyphs {
        /// Indices into [`ExtractedUiNodes::glyphs`]
        range: Range<usize>,
    },
}
387
388
/// A single text glyph extracted for rendering, referenced by
/// [`ExtractedUiItem::Glyphs`] ranges.
pub struct ExtractedGlyph {
    /// Color the glyph is tinted with.
    pub color: LinearRgba,
    /// Glyph position relative to the owning node's transform.
    pub translation: Vec2,
    /// The glyph's rectangle in its atlas texture.
    pub rect: Rect,
}
393
394
/// Render-world resource collecting all UI nodes and glyphs extracted this frame.
#[derive(Resource, Default)]
pub struct ExtractedUiNodes {
    pub uinodes: Vec<ExtractedUiNode>,
    pub glyphs: Vec<ExtractedGlyph>,
}
399
400
impl ExtractedUiNodes {
    /// Removes all extracted nodes and glyphs, keeping the allocated capacity
    /// for reuse in the next frame.
    pub fn clear(&mut self) {
        self.uinodes.clear();
        self.glyphs.clear();
    }
}
406
407
/// A [`RenderGraphNode`] that executes the UI rendering subgraph on the UI
/// view.
struct RunUiSubgraphOnUiViewNode;
410
411
impl RenderGraphNode for RunUiSubgraphOnUiViewNode {
412
fn run<'w>(
413
&self,
414
graph: &mut RenderGraphContext,
415
_: &mut RenderContext<'w>,
416
world: &'w World,
417
) -> Result<(), NodeRunError> {
418
// Fetch the UI view.
419
let Some(mut render_views) = world.try_query::<&UiCameraView>() else {
420
return Ok(());
421
};
422
let Ok(ui_camera_view) = render_views.get(world, graph.view_entity()) else {
423
return Ok(());
424
};
425
426
// Run the subgraph on the UI view.
427
graph.run_sub_graph(SubGraphUi, vec![], Some(ui_camera_view.0))?;
428
Ok(())
429
}
430
}
431
432
/// Extracts the background colors of visible UI nodes into [`ExtractedUiNodes`].
pub fn extract_uinode_background_colors(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            &BackgroundColor,
        )>,
    >,
    camera_map: Extract<UiCameraMap>,
) {
    let mut camera_mapper = camera_map.get_mapper();

    for (entity, uinode, transform, inherited_visibility, clip, camera, background_color) in
        &uinode_query
    {
        // Skip invisible backgrounds
        if !inherited_visibility.get()
            || background_color.0.is_fully_transparent()
            || uinode.is_empty()
        {
            continue;
        }

        // Skip nodes whose target camera has no render-world counterpart.
        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        extracted_uinodes.uinodes.push(ExtractedUiNode {
            render_entity: commands.spawn(TemporaryRenderEntity).id(),
            // Background color is the z baseline for a node's local stack.
            z_order: uinode.stack_index as f32 + stack_z_offsets::BACKGROUND_COLOR,
            clip: clip.map(|clip| clip.clip),
            // Untextured: default image id samples no texture.
            image: AssetId::default(),
            extracted_camera_entity,
            transform: transform.into(),
            item: ExtractedUiItem::Node {
                color: background_color.0.into(),
                rect: Rect {
                    min: Vec2::ZERO,
                    max: uinode.size,
                },
                atlas_scaling: None,
                flip_x: false,
                flip_y: false,
                border: uinode.border(),
                border_radius: uinode.border_radius(),
                node_type: NodeType::Rect,
            },
            main_entity: entity.into(),
        });
    }
}
489
490
/// Extracts visible [`ImageNode`]s into [`ExtractedUiNodes`].
///
/// Sliced images are skipped here; they are handled by the texture-slice pipeline.
pub fn extract_uinode_images(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            &ImageNode,
        )>,
    >,
    camera_map: Extract<UiCameraMap>,
) {
    let mut camera_mapper = camera_map.get_mapper();
    for (entity, uinode, transform, inherited_visibility, clip, camera, image) in &uinode_query {
        // Skip invisible images
        if !inherited_visibility.get()
            || image.color.is_fully_transparent()
            || image.image.id() == TRANSPARENT_IMAGE_HANDLE.id()
            || image.image_mode.uses_slices()
            || uinode.is_empty()
        {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        // Region of the atlas texture this image occupies, if any.
        let atlas_rect = image
            .texture_atlas
            .as_ref()
            .and_then(|s| s.texture_rect(&texture_atlases))
            .map(|r| r.as_rect());

        // Combine the atlas region with an optional user-specified sub-rect.
        let mut rect = match (atlas_rect, image.rect) {
            (None, None) => Rect {
                min: Vec2::ZERO,
                max: uinode.size,
            },
            (None, Some(image_rect)) => image_rect,
            (Some(atlas_rect), None) => atlas_rect,
            (Some(atlas_rect), Some(mut image_rect)) => {
                // The user rect is relative to the atlas region.
                image_rect.min += atlas_rect.min;
                image_rect.max += atlas_rect.min;
                image_rect
            }
        };

        // When sampling a sub-region, scale the rect so the region fills the node.
        let atlas_scaling = if atlas_rect.is_some() || image.rect.is_some() {
            let atlas_scaling = uinode.size() / rect.size();
            rect.min *= atlas_scaling;
            rect.max *= atlas_scaling;
            Some(atlas_scaling)
        } else {
            None
        };

        extracted_uinodes.uinodes.push(ExtractedUiNode {
            z_order: uinode.stack_index as f32 + stack_z_offsets::IMAGE,
            render_entity: commands.spawn(TemporaryRenderEntity).id(),
            clip: clip.map(|clip| clip.clip),
            image: image.image.id(),
            extracted_camera_entity,
            transform: transform.into(),
            item: ExtractedUiItem::Node {
                color: image.color.into(),
                rect,
                atlas_scaling,
                flip_x: image.flip_x,
                flip_y: image.flip_y,
                border: uinode.border,
                border_radius: uinode.border_radius,
                node_type: NodeType::Rect,
            },
            main_entity: entity.into(),
        });
    }
}
573
574
/// Extracts borders and outlines of visible UI nodes into [`ExtractedUiNodes`].
///
/// Edges that share the same color are merged into a single extracted node via
/// shader flags, so at most four border nodes are emitted per entity.
pub fn extract_uinode_borders(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    uinode_query: Extract<
        Query<(
            Entity,
            &Node,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            AnyOf<(&BorderColor, &Outline)>,
        )>,
    >,
    camera_map: Extract<UiCameraMap>,
) {
    // Borders and outlines are untextured.
    let image = AssetId::<Image>::default();
    let mut camera_mapper = camera_map.get_mapper();

    for (
        entity,
        node,
        computed_node,
        transform,
        inherited_visibility,
        maybe_clip,
        camera,
        (maybe_border_color, maybe_outline),
    ) in &uinode_query
    {
        // Skip invisible borders and removed nodes
        if !inherited_visibility.get() || node.display == Display::None {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        // Don't extract borders with zero width along all edges
        if computed_node.border() != BorderRect::ZERO
            && let Some(border_color) = maybe_border_color
        {
            // Per-edge colors in left, top, right, bottom order, matching BORDER_FLAGS.
            let border_colors = [
                border_color.left.to_linear(),
                border_color.top.to_linear(),
                border_color.right.to_linear(),
                border_color.bottom.to_linear(),
            ];

            const BORDER_FLAGS: [u32; 4] = [
                shader_flags::BORDER_LEFT,
                shader_flags::BORDER_TOP,
                shader_flags::BORDER_RIGHT,
                shader_flags::BORDER_BOTTOM,
            ];
            // Bitmask of edges already emitted (as part of a same-color group).
            let mut completed_flags = 0;

            for (i, &color) in border_colors.iter().enumerate() {
                if color.is_fully_transparent() {
                    continue;
                }

                let mut border_flags = BORDER_FLAGS[i];

                // Edge already covered by an earlier same-color group.
                if completed_flags & border_flags != 0 {
                    continue;
                }

                // Merge all later edges with an identical color into this node.
                for j in i + 1..4 {
                    if color == border_colors[j] {
                        border_flags |= BORDER_FLAGS[j];
                    }
                }
                completed_flags |= border_flags;

                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    z_order: computed_node.stack_index as f32 + stack_z_offsets::BORDER,
                    image,
                    clip: maybe_clip.map(|clip| clip.clip),
                    extracted_camera_entity,
                    transform: transform.into(),
                    item: ExtractedUiItem::Node {
                        color,
                        rect: Rect {
                            max: computed_node.size(),
                            ..Default::default()
                        },
                        atlas_scaling: None,
                        flip_x: false,
                        flip_y: false,
                        border: computed_node.border(),
                        border_radius: computed_node.border_radius(),
                        node_type: NodeType::Border(border_flags),
                    },
                    main_entity: entity.into(),
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                });
            }
        }

        // Outlines with zero width are not drawn.
        if computed_node.outline_width() <= 0. {
            continue;
        }

        if let Some(outline) = maybe_outline.filter(|outline| !outline.color.is_fully_transparent())
        {
            // Outlines are drawn as a border around the outlined node size.
            let outline_size = computed_node.outlined_node_size();
            extracted_uinodes.uinodes.push(ExtractedUiNode {
                z_order: computed_node.stack_index as f32 + stack_z_offsets::BORDER,
                render_entity: commands.spawn(TemporaryRenderEntity).id(),
                image,
                clip: maybe_clip.map(|clip| clip.clip),
                extracted_camera_entity,
                transform: transform.into(),
                item: ExtractedUiItem::Node {
                    color: outline.color.into(),
                    rect: Rect {
                        max: outline_size,
                        ..Default::default()
                    },
                    atlas_scaling: None,
                    flip_x: false,
                    flip_y: false,
                    border: BorderRect::all(computed_node.outline_width()),
                    border_radius: computed_node.outline_radius(),
                    node_type: NodeType::Border(shader_flags::BORDER_ALL),
                },
                main_entity: entity.into(),
            });
        }
    }
}
708
709
/// The UI camera is "moved back" by this many units (plus the [`UI_CAMERA_TRANSFORM_OFFSET`]) and also has a view
/// distance of this many units. This ensures that with a left-handed projection,
/// as ui elements are "stacked on top of each other", they are within the camera's view
/// and have room to grow.
// TODO: Consider computing this value at runtime based on the maximum z-value.
const UI_CAMERA_FAR: f32 = 1000.0;

// This value is subtracted from the far distance for the camera's z-position to ensure nodes at z == 0.0 are rendered
// TODO: Evaluate if we still need this.
const UI_CAMERA_TRANSFORM_OFFSET: f32 = -0.1;

/// The ID of the subview associated with a camera on which UI is to be drawn.
///
/// When UI is present, cameras extract to two views: the main 2D/3D one and a
/// UI one. The main 2D or 3D camera gets subview 0, and the corresponding UI
/// camera gets this subview, 1.
const UI_CAMERA_SUBVIEW: u32 = 1;
726
727
/// A render-world component that lives on the main render target view and
/// specifies the corresponding UI view.
///
/// For example, if UI is being rendered to a 3D camera, this component lives on
/// the 3D camera and contains the entity corresponding to the UI view.
///
/// The contained entity is the temporary render entity with the corresponding
/// extracted UI view.
#[derive(Component)]
pub struct UiCameraView(pub Entity);
735
736
/// A render-world component that lives on the UI view and specifies the
/// corresponding main render target view.
///
/// For example, if the UI is being rendered to a 3D camera, this component
/// lives on the UI view and contains the entity corresponding to the 3D camera.
///
/// This is the inverse of [`UiCameraView`].
#[derive(Component)]
pub struct UiViewTarget(pub Entity);
745
746
/// Extracts all UI elements associated with a camera into the render world.
pub fn extract_ui_camera_view(
    mut commands: Commands,
    mut transparent_render_phases: ResMut<ViewSortedRenderPhases<TransparentUi>>,
    query: Extract<
        Query<
            (
                Entity,
                RenderEntity,
                &Camera,
                Has<Hdr>,
                Option<&UiAntiAlias>,
                Option<&BoxShadowSamples>,
            ),
            Or<(With<Camera2d>, With<Camera3d>)>,
        >,
    >,
    // Retained across frames so stale render phases can be pruned below.
    mut live_entities: Local<HashSet<RetainedViewEntity>>,
) {
    live_entities.clear();

    for (main_entity, render_entity, camera, hdr, ui_anti_alias, shadow_samples) in &query {
        // ignore inactive cameras
        if !camera.is_active {
            // Also remove any stale UI components left from when it was active.
            commands
                .get_entity(render_entity)
                .expect("Camera entity wasn't synced.")
                .remove::<(UiCameraView, UiAntiAlias, BoxShadowSamples)>();
            continue;
        }

        if let Some(physical_viewport_rect) = camera.physical_viewport_rect() {
            // use a projection matrix with the origin in the top left instead of the bottom left that comes with OrthographicProjection
            let projection_matrix = Mat4::orthographic_rh(
                0.0,
                physical_viewport_rect.width() as f32,
                physical_viewport_rect.height() as f32,
                0.0,
                0.0,
                UI_CAMERA_FAR,
            );
            // We use `UI_CAMERA_SUBVIEW` here so as not to conflict with the
            // main 3D or 2D camera, which will have subview index 0.
            let retained_view_entity =
                RetainedViewEntity::new(main_entity.into(), None, UI_CAMERA_SUBVIEW);
            // Creates the UI view.
            let ui_camera_view = commands
                .spawn((
                    ExtractedView {
                        retained_view_entity,
                        clip_from_view: projection_matrix,
                        // Position the camera at the far plane (minus the
                        // offset) looking back so z == 0.0 nodes are visible.
                        world_from_view: GlobalTransform::from_xyz(
                            0.0,
                            0.0,
                            UI_CAMERA_FAR + UI_CAMERA_TRANSFORM_OFFSET,
                        ),
                        clip_from_world: None,
                        hdr,
                        viewport: UVec4::from((
                            physical_viewport_rect.min,
                            physical_viewport_rect.size(),
                        )),
                        color_grading: Default::default(),
                    },
                    // Link to the main camera view.
                    UiViewTarget(render_entity),
                    TemporaryRenderEntity,
                ))
                .id();

            let mut entity_commands = commands
                .get_entity(render_entity)
                .expect("Camera entity wasn't synced.");
            // Link from the main 2D/3D camera view to the UI view.
            entity_commands.insert(UiCameraView(ui_camera_view));
            if let Some(ui_anti_alias) = ui_anti_alias {
                entity_commands.insert(*ui_anti_alias);
            }
            if let Some(shadow_samples) = shadow_samples {
                entity_commands.insert(*shadow_samples);
            }
            transparent_render_phases.insert_or_clear(retained_view_entity);

            live_entities.insert(retained_view_entity);
        }
    }

    // Drop render phases for views that no longer exist this frame.
    transparent_render_phases.retain(|entity, _| live_entities.contains(entity));
}
835
836
/// Extracts [`ViewportNode`]s into [`ExtractedUiNodes`], rendering each as a
/// full-node image sampled from the target camera's render texture.
pub fn extract_viewport_nodes(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    camera_query: Extract<Query<&Camera>>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            &ViewportNode,
        )>,
    >,
    camera_map: Extract<UiCameraMap>,
) {
    let mut camera_mapper = camera_map.get_mapper();
    for (entity, uinode, transform, inherited_visibility, clip, camera, viewport_node) in
        &uinode_query
    {
        // Skip invisible viewport nodes
        if !inherited_visibility.get() || uinode.is_empty() {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        // The viewport samples the image render target of the referenced
        // camera; skip if that camera is missing or doesn't target an image.
        let Some(image) = camera_query
            .get(viewport_node.camera)
            .ok()
            .and_then(|camera| camera.target.as_image())
        else {
            continue;
        };

        extracted_uinodes.uinodes.push(ExtractedUiNode {
            z_order: uinode.stack_index as f32 + stack_z_offsets::IMAGE,
            render_entity: commands.spawn(TemporaryRenderEntity).id(),
            clip: clip.map(|clip| clip.clip),
            image: image.id(),
            extracted_camera_entity,
            transform: transform.into(),
            item: ExtractedUiItem::Node {
                color: LinearRgba::WHITE,
                rect: Rect {
                    min: Vec2::ZERO,
                    max: uinode.size,
                },
                atlas_scaling: None,
                flip_x: false,
                flip_y: false,
                border: uinode.border(),
                border_radius: uinode.border_radius(),
                node_type: NodeType::Rect,
            },
            main_entity: entity.into(),
        });
    }
}
898
899
/// Extracts text glyphs of visible UI text nodes into [`ExtractedUiNodes`].
///
/// Consecutive glyphs that share the same atlas texture are batched into a
/// single [`ExtractedUiItem::Glyphs`] range.
pub fn extract_text_sections(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &ComputedUiTargetCamera,
            &ComputedTextBlock,
            &TextColor,
            &TextLayoutInfo,
        )>,
    >,
    text_styles: Extract<Query<&TextColor>>,
    camera_map: Extract<UiCameraMap>,
) {
    // `start..end` tracks the current glyph batch as a range into
    // `extracted_uinodes.glyphs`; `end` is one past the glyph being processed.
    let mut start = extracted_uinodes.glyphs.len();
    let mut end = start + 1;

    let mut camera_mapper = camera_map.get_mapper();
    for (
        entity,
        uinode,
        transform,
        inherited_visibility,
        clip,
        camera,
        computed_block,
        text_color,
        text_layout_info,
    ) in &uinode_query
    {
        // Skip if not visible or if size is set to zero (e.g. when a parent is set to `Display::None`)
        if !inherited_visibility.get() || uinode.is_empty() {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
            continue;
        };

        // Glyph positions are relative to the node's center; shift so they
        // are measured from the node's top-left corner.
        let transform = Affine2::from(*transform) * Affine2::from_translation(-0.5 * uinode.size());

        let mut color = text_color.0.to_linear();

        let mut current_span_index = 0;

        for (
            i,
            PositionedGlyph {
                position,
                atlas_info,
                span_index,
                ..
            },
        ) in text_layout_info.glyphs.iter().enumerate()
        {
            // When entering a new span, refresh the color from that span's
            // `TextColor` (falling back to the default on a failed lookup).
            if current_span_index != *span_index
                && let Some(span_entity) =
                    computed_block.entities().get(*span_index).map(|t| t.entity)
            {
                color = text_styles
                    .get(span_entity)
                    .map(|text_color| LinearRgba::from(text_color.0))
                    .unwrap_or_default();
                current_span_index = *span_index;
            }

            // NOTE(review): `unwrap` assumes the glyph's atlas layout asset
            // always exists while its glyphs are laid out.
            let rect = texture_atlases
                .get(atlas_info.texture_atlas)
                .unwrap()
                .textures[atlas_info.location.glyph_index]
                .as_rect();
            extracted_uinodes.glyphs.push(ExtractedGlyph {
                color,
                translation: *position,
                rect,
            });

            // Flush the batch when the next glyph uses a different atlas
            // texture (or this is the last glyph).
            if text_layout_info
                .glyphs
                .get(i + 1)
                .is_none_or(|info| info.atlas_info.texture != atlas_info.texture)
            {
                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    image: atlas_info.texture,
                    clip: clip.map(|clip| clip.clip),
                    extracted_camera_entity,
                    item: ExtractedUiItem::Glyphs { range: start..end },
                    main_entity: entity.into(),
                    transform,
                });
                start = end;
            }

            end += 1;
        }
    }
}
1004
1005
/// Extracts shadow glyphs for visible UI text nodes with a [`TextShadow`]
/// component into [`ExtractedUiNodes`].
///
/// Glyphs are batched per span and atlas texture, tinted with the shadow color
/// and offset by the shadow's offset.
pub fn extract_text_shadows(
    mut commands: Commands,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    texture_atlases: Extract<Res<Assets<TextureAtlasLayout>>>,
    uinode_query: Extract<
        Query<(
            Entity,
            &ComputedNode,
            &UiGlobalTransform,
            &ComputedUiTargetCamera,
            &InheritedVisibility,
            Option<&CalculatedClip>,
            &TextLayoutInfo,
            &TextShadow,
        )>,
    >,
    camera_map: Extract<UiCameraMap>,
) {
    // `start..end` tracks the current glyph batch as a range into
    // `extracted_uinodes.glyphs`; `end` is one past the glyph being processed.
    let mut start = extracted_uinodes.glyphs.len();
    let mut end = start + 1;

    let mut camera_mapper = camera_map.get_mapper();
    for (entity, uinode, transform, target, inherited_visibility, clip, text_layout_info, shadow) in
        &uinode_query
    {
        // Skip if not visible or if size is set to zero (e.g. when a parent is set to `Display::None`)
        if !inherited_visibility.get() || uinode.is_empty() {
            continue;
        }

        let Some(extracted_camera_entity) = camera_mapper.map(target) else {
            continue;
        };

        // Shift glyphs from node-center coordinates to top-left coordinates
        // and apply the shadow offset (scaled to physical pixels).
        let node_transform = Affine2::from(*transform)
            * Affine2::from_translation(
                -0.5 * uinode.size() + shadow.offset / uinode.inverse_scale_factor(),
            );

        for (
            i,
            PositionedGlyph {
                position,
                atlas_info,
                span_index,
                ..
            },
        ) in text_layout_info.glyphs.iter().enumerate()
        {
            // NOTE(review): `unwrap` assumes the glyph's atlas layout asset
            // always exists while its glyphs are laid out.
            let rect = texture_atlases
                .get(atlas_info.texture_atlas)
                .unwrap()
                .textures[atlas_info.location.glyph_index]
                .as_rect();
            extracted_uinodes.glyphs.push(ExtractedGlyph {
                color: shadow.color.into(),
                translation: *position,
                rect,
            });

            // Flush the batch when the next glyph belongs to a different span
            // or uses a different atlas texture (or this is the last glyph).
            if text_layout_info.glyphs.get(i + 1).is_none_or(|info| {
                info.span_index != *span_index || info.atlas_info.texture != atlas_info.texture
            }) {
                extracted_uinodes.uinodes.push(ExtractedUiNode {
                    transform: node_transform,
                    z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
                    render_entity: commands.spawn(TemporaryRenderEntity).id(),
                    image: atlas_info.texture,
                    clip: clip.map(|clip| clip.clip),
                    extracted_camera_entity,
                    item: ExtractedUiItem::Glyphs { range: start..end },
                    main_entity: entity.into(),
                });
                start = end;
            }

            end += 1;
        }
    }
}
1085
1086
pub fn extract_text_background_colors(
1087
mut commands: Commands,
1088
mut extracted_uinodes: ResMut<ExtractedUiNodes>,
1089
uinode_query: Extract<
1090
Query<(
1091
Entity,
1092
&ComputedNode,
1093
&UiGlobalTransform,
1094
&InheritedVisibility,
1095
Option<&CalculatedClip>,
1096
&ComputedUiTargetCamera,
1097
&TextLayoutInfo,
1098
)>,
1099
>,
1100
text_background_colors_query: Extract<Query<&TextBackgroundColor>>,
1101
camera_map: Extract<UiCameraMap>,
1102
) {
1103
let mut camera_mapper = camera_map.get_mapper();
1104
for (entity, uinode, global_transform, inherited_visibility, clip, camera, text_layout_info) in
1105
&uinode_query
1106
{
1107
// Skip if not visible or if size is set to zero (e.g. when a parent is set to `Display::None`)
1108
if !inherited_visibility.get() || uinode.is_empty() {
1109
continue;
1110
}
1111
1112
let Some(extracted_camera_entity) = camera_mapper.map(camera) else {
1113
continue;
1114
};
1115
1116
let transform =
1117
Affine2::from(global_transform) * Affine2::from_translation(-0.5 * uinode.size());
1118
1119
for &(section_entity, rect) in text_layout_info.section_rects.iter() {
1120
let Ok(text_background_color) = text_background_colors_query.get(section_entity) else {
1121
continue;
1122
};
1123
1124
extracted_uinodes.uinodes.push(ExtractedUiNode {
1125
z_order: uinode.stack_index as f32 + stack_z_offsets::TEXT,
1126
render_entity: commands.spawn(TemporaryRenderEntity).id(),
1127
clip: clip.map(|clip| clip.clip),
1128
image: AssetId::default(),
1129
extracted_camera_entity,
1130
transform: transform * Affine2::from_translation(rect.center()),
1131
item: ExtractedUiItem::Node {
1132
color: text_background_color.0.to_linear(),
1133
rect: Rect {
1134
min: Vec2::ZERO,
1135
max: rect.size(),
1136
},
1137
atlas_scaling: None,
1138
flip_x: false,
1139
flip_y: false,
1140
border: uinode.border(),
1141
border_radius: uinode.border_radius(),
1142
node_type: NodeType::Rect,
1143
},
1144
main_entity: entity.into(),
1145
});
1146
}
1147
}
1148
}
1149
1150
/// A single vertex of a UI quad, as uploaded to the GPU.
///
/// `#[repr(C)]` plus `Pod`/`Zeroable` allow the struct to be written directly into a
/// [`RawBufferVec`]; the field layout must match the vertex attributes expected by the
/// UI shader (`ui.wgsl`).
#[repr(C)]
#[derive(Copy, Clone, Pod, Zeroable)]
struct UiVertex {
    /// Vertex position (the `z` component is currently always written as 0).
    pub position: [f32; 3],
    /// Texture coordinates (a placeholder unit quad is written for untextured nodes).
    pub uv: [f32; 2],
    /// Linear RGBA color.
    pub color: [f32; 4],
    /// Shader flags to determine how to render the UI node.
    /// See [`shader_flags`] for possible values.
    pub flags: u32,
    /// Border radius of the UI node.
    /// Ordering: top left, top right, bottom right, bottom left.
    pub radius: [f32; 4],
    /// Border thickness of the UI node.
    /// Ordering: left, top, right, bottom.
    pub border: [f32; 4],
    /// Size of the UI node.
    pub size: [f32; 2],
    /// Position relative to the center of the UI node.
    pub point: [f32; 2],
}
1170
1171
/// Per-frame CPU-side buffers and bind group used to draw UI nodes.
#[derive(Resource)]
pub struct UiMeta {
    /// Vertex data for every queued UI quad; rebuilt and uploaded each frame in `prepare_uinodes`.
    vertices: RawBufferVec<UiVertex>,
    /// Index data referencing `vertices`; rebuilt alongside it.
    indices: RawBufferVec<u32>,
    /// Bind group for the view uniforms; recreated each frame in `prepare_uinodes`.
    view_bind_group: Option<BindGroup>,
}
1177
1178
impl Default for UiMeta {
1179
fn default() -> Self {
1180
Self {
1181
vertices: RawBufferVec::new(BufferUsages::VERTEX),
1182
indices: RawBufferVec::new(BufferUsages::INDEX),
1183
view_bind_group: None,
1184
}
1185
}
1186
}
1187
1188
/// Corner offsets of a unit quad centered on the origin; scaled by a node's rect
/// size to produce its four corners. Ordering matches [`shader_flags::CORNERS`].
pub(crate) const QUAD_VERTEX_POSITIONS: [Vec2; 4] = [
    Vec2::new(-0.5, -0.5),
    Vec2::new(0.5, -0.5),
    Vec2::new(0.5, 0.5),
    Vec2::new(-0.5, 0.5),
];

/// Indices of the two triangles forming a quad, into [`QUAD_VERTEX_POSITIONS`].
pub(crate) const QUAD_INDICES: [usize; 6] = [0, 2, 3, 0, 1, 2];
1196
1197
/// A run of consecutive UI phase items that share a single image and can be
/// drawn together. Built by `prepare_uinodes`.
#[derive(Component)]
pub struct UiBatch {
    /// Range into the UI index buffer (`UiMeta::indices`) covered by this batch.
    pub range: Range<u32>,
    /// Image sampled by this batch; `AssetId::default()` when untextured.
    pub image: AssetId<Image>,
}
1202
1203
/// The values here should match the values for the constants in `ui.wgsl`
pub mod shader_flags {
    /// Texture should be ignored
    pub const UNTEXTURED: u32 = 0;
    /// Textured
    pub const TEXTURED: u32 = 1;
    /// Per-corner flag bits mixed into each vertex's flags so the shader can
    /// identify which corner of the quad it is shading.
    /// Ordering: top left, top right, bottom right, bottom left.
    pub const CORNERS: [u32; 4] = [0, 2, 2 | 4, 4];
    /// Radial gradient fill flag; semantics are defined by the shader.
    pub const RADIAL: u32 = 16;
    /// Gradient fill-start flag; semantics are defined by the shader.
    pub const FILL_START: u32 = 32;
    /// Gradient fill-end flag; semantics are defined by the shader.
    pub const FILL_END: u32 = 64;
    /// Conic gradient fill flag; semantics are defined by the shader.
    pub const CONIC: u32 = 128;
    /// Left border-edge bit (interpreted by `ui.wgsl`).
    pub const BORDER_LEFT: u32 = 256;
    /// Top border-edge bit (interpreted by `ui.wgsl`).
    pub const BORDER_TOP: u32 = 512;
    /// Right border-edge bit (interpreted by `ui.wgsl`).
    pub const BORDER_RIGHT: u32 = 1024;
    /// Bottom border-edge bit (interpreted by `ui.wgsl`).
    pub const BORDER_BOTTOM: u32 = 2048;
    /// All four border-edge bits combined.
    // Combined with `|` rather than `+` for consistency with `CORNERS` above and
    // to make the bit-flag intent explicit (value is unchanged: the bits are disjoint).
    pub const BORDER_ALL: u32 = BORDER_LEFT | BORDER_TOP | BORDER_RIGHT | BORDER_BOTTOM;
}
1221
1222
/// Queues every extracted UI node into the `TransparentUi` render phase of its
/// target camera, specializing the UI pipeline per view (HDR / anti-alias).
pub fn queue_uinodes(
    extracted_uinodes: Res<ExtractedUiNodes>,
    ui_pipeline: Res<UiPipeline>,
    mut pipelines: ResMut<SpecializedRenderPipelines<UiPipeline>>,
    mut transparent_render_phases: ResMut<ViewSortedRenderPhases<TransparentUi>>,
    render_views: Query<(&UiCameraView, Option<&UiAntiAlias>), With<ExtractedView>>,
    camera_views: Query<&ExtractedView>,
    pipeline_cache: Res<PipelineCache>,
    draw_functions: Res<DrawFunctions<TransparentUi>>,
) {
    let draw_function = draw_functions.read().id::<DrawUi>();
    // Cache the (view, anti-alias, phase) lookup across consecutive nodes that
    // target the same camera, re-resolving only when the camera changes.
    let mut current_camera_entity = Entity::PLACEHOLDER;
    let mut current_phase = None;

    for (index, extracted_uinode) in extracted_uinodes.uinodes.iter().enumerate() {
        if current_camera_entity != extracted_uinode.extracted_camera_entity {
            // Resolve camera entity -> UI camera view -> retained view -> transparent phase.
            current_phase = render_views
                .get(extracted_uinode.extracted_camera_entity)
                .ok()
                .and_then(|(default_camera_view, ui_anti_alias)| {
                    camera_views
                        .get(default_camera_view.0)
                        .ok()
                        .and_then(|view| {
                            transparent_render_phases
                                .get_mut(&view.retained_view_entity)
                                .map(|transparent_phase| (view, ui_anti_alias, transparent_phase))
                        })
                });
            current_camera_entity = extracted_uinode.extracted_camera_entity;
        }

        // Nodes whose camera has no transparent phase are skipped.
        let Some((view, ui_anti_alias, transparent_phase)) = current_phase.as_mut() else {
            continue;
        };

        let pipeline = pipelines.specialize(
            &pipeline_cache,
            &ui_pipeline,
            UiPipelineKey {
                hdr: view.hdr,
                // Anti-aliasing defaults to on when no `UiAntiAlias` component is present.
                anti_alias: matches!(ui_anti_alias, None | Some(UiAntiAlias::On)),
            },
        );

        transparent_phase.add(TransparentUi {
            draw_function,
            pipeline,
            entity: (extracted_uinode.render_entity, extracted_uinode.main_entity),
            // Phase items are sorted back-to-front by the node's z-order.
            sort_key: FloatOrd(extracted_uinode.z_order),
            index,
            // batch_range will be calculated in prepare_uinodes
            batch_range: 0..0,
            extra_index: PhaseItemExtraIndex::None,
            indexed: true,
        });
    }
}
1280
1281
/// Cache of per-image bind groups used by UI nodes, keyed by the image's asset id.
/// Entries are created lazily and invalidated in `prepare_uinodes` when the
/// underlying image asset is modified or removed.
#[derive(Resource, Default)]
pub struct ImageNodeBindGroups {
    pub values: HashMap<AssetId<Image>, BindGroup>,
}
1285
1286
/// Converts the frame's [`ExtractedUiNodes`] into GPU vertex/index buffers and [`UiBatch`]es.
///
/// Walks every sorted `TransparentUi` phase item, emits one quad (4 vertices, 6 indices)
/// per node or glyph, groups consecutive items that share an image into batches,
/// creates/caches per-image bind groups, uploads the buffers, and finally clears the
/// extracted nodes for the next frame.
pub fn prepare_uinodes(
    mut commands: Commands,
    render_device: Res<RenderDevice>,
    render_queue: Res<RenderQueue>,
    mut ui_meta: ResMut<UiMeta>,
    mut extracted_uinodes: ResMut<ExtractedUiNodes>,
    view_uniforms: Res<ViewUniforms>,
    ui_pipeline: Res<UiPipeline>,
    mut image_bind_groups: ResMut<ImageNodeBindGroups>,
    gpu_images: Res<RenderAssets<GpuImage>>,
    mut phases: ResMut<ViewSortedRenderPhases<TransparentUi>>,
    events: Res<SpriteAssetEvents>,
    mut previous_len: Local<usize>,
) {
    // If an image has changed, the GpuImage has (probably) changed
    for event in &events.images {
        match event {
            AssetEvent::Added { .. } |
            AssetEvent::Unused { .. } |
            // Images don't have dependencies
            AssetEvent::LoadedWithDependencies { .. } => {}
            AssetEvent::Modified { id } | AssetEvent::Removed { id } => {
                // Drop the cached bind group so it is rebuilt from the new GpuImage.
                image_bind_groups.values.remove(id);
            }
        };
    }

    if let Some(view_binding) = view_uniforms.uniforms.binding() {
        // Pre-size from last frame's batch count to avoid reallocation.
        let mut batches: Vec<(Entity, UiBatch)> = Vec::with_capacity(*previous_len);

        ui_meta.vertices.clear();
        ui_meta.indices.clear();
        ui_meta.view_bind_group = Some(render_device.create_bind_group(
            "ui_view_bind_group",
            &ui_pipeline.view_layout,
            &BindGroupEntries::single(view_binding),
        ));

        // Buffer indexes
        // NOTE(review): the names are swapped relative to what they count:
        // `vertices_index` advances by 6 per quad (the number of *indices* pushed) and
        // is what `UiBatch::range` spans, while `indices_index` advances by 4 per quad
        // (the number of *vertices* pushed) and is the base vertex offset added to
        // `QUAD_INDICES` below.
        let mut vertices_index = 0;
        let mut indices_index = 0;

        for ui_phase in phases.values_mut() {
            let mut batch_item_index = 0;
            let mut batch_image_handle = AssetId::invalid();

            for item_index in 0..ui_phase.items.len() {
                let item = &mut ui_phase.items[item_index];
                // Phase items that no longer correspond to an extracted node (entity
                // mismatch) break the current batch and are skipped.
                let Some(extracted_uinode) = extracted_uinodes
                    .uinodes
                    .get(item.index)
                    .filter(|n| item.entity() == n.render_entity)
                else {
                    batch_image_handle = AssetId::invalid();
                    continue;
                };

                let mut existing_batch = batches.last_mut();

                // Start a new batch when there is no current batch, or when both the
                // current batch and this node are textured with *different* images.
                // (`AssetId::default()` marks an untextured node, which can merge into
                // any batch.)
                if batch_image_handle == AssetId::invalid()
                    || existing_batch.is_none()
                    || (batch_image_handle != AssetId::default()
                        && extracted_uinode.image != AssetId::default()
                        && batch_image_handle != extracted_uinode.image)
                {
                    if let Some(gpu_image) = gpu_images.get(extracted_uinode.image) {
                        batch_item_index = item_index;
                        batch_image_handle = extracted_uinode.image;

                        // New batch begins empty at the current index-buffer position;
                        // its end is extended after each quad is written.
                        let new_batch = UiBatch {
                            range: vertices_index..vertices_index,
                            image: extracted_uinode.image,
                        };

                        batches.push((item.entity(), new_batch));

                        image_bind_groups
                            .values
                            .entry(batch_image_handle)
                            .or_insert_with(|| {
                                render_device.create_bind_group(
                                    "ui_material_bind_group",
                                    &ui_pipeline.image_layout,
                                    &BindGroupEntries::sequential((
                                        &gpu_image.texture_view,
                                        &gpu_image.sampler,
                                    )),
                                )
                            });

                        existing_batch = batches.last_mut();
                    } else {
                        // The GPU image isn't ready yet; skip this node.
                        continue;
                    }
                } else if batch_image_handle == AssetId::default()
                    && extracted_uinode.image != AssetId::default()
                {
                    // The current batch is untextured but this node has an image:
                    // adopt the image for the existing batch instead of starting a new one.
                    if let Some(ref mut existing_batch) = existing_batch
                        && let Some(gpu_image) = gpu_images.get(extracted_uinode.image)
                    {
                        batch_image_handle = extracted_uinode.image;
                        existing_batch.1.image = extracted_uinode.image;

                        image_bind_groups
                            .values
                            .entry(batch_image_handle)
                            .or_insert_with(|| {
                                render_device.create_bind_group(
                                    "ui_material_bind_group",
                                    &ui_pipeline.image_layout,
                                    &BindGroupEntries::sequential((
                                        &gpu_image.texture_view,
                                        &gpu_image.sampler,
                                    )),
                                )
                            });
                    } else {
                        continue;
                    }
                }
                match &extracted_uinode.item {
                    ExtractedUiItem::Node {
                        atlas_scaling,
                        flip_x,
                        flip_y,
                        border_radius,
                        border,
                        node_type,
                        rect,
                        color,
                    } => {
                        let mut flags = if extracted_uinode.image != AssetId::default() {
                            shader_flags::TEXTURED
                        } else {
                            shader_flags::UNTEXTURED
                        };

                        let mut uinode_rect = *rect;

                        let rect_size = uinode_rect.size();

                        let transform = extracted_uinode.transform;

                        // Specify the corners of the node
                        let positions = QUAD_VERTEX_POSITIONS
                            .map(|pos| transform.transform_point2(pos * rect_size).extend(0.));
                        let points = QUAD_VERTEX_POSITIONS.map(|pos| pos * rect_size);

                        // Calculate the effect of clipping
                        // Note: this won't work with rotation/scaling, but that's much more complex (may need more that 2 quads)
                        let mut positions_diff = if let Some(clip) = extracted_uinode.clip {
                            [
                                Vec2::new(
                                    f32::max(clip.min.x - positions[0].x, 0.),
                                    f32::max(clip.min.y - positions[0].y, 0.),
                                ),
                                Vec2::new(
                                    f32::min(clip.max.x - positions[1].x, 0.),
                                    f32::max(clip.min.y - positions[1].y, 0.),
                                ),
                                Vec2::new(
                                    f32::min(clip.max.x - positions[2].x, 0.),
                                    f32::min(clip.max.y - positions[2].y, 0.),
                                ),
                                Vec2::new(
                                    f32::max(clip.min.x - positions[3].x, 0.),
                                    f32::min(clip.max.y - positions[3].y, 0.),
                                ),
                            ]
                        } else {
                            [Vec2::ZERO; 4]
                        };

                        let positions_clipped = [
                            positions[0] + positions_diff[0].extend(0.),
                            positions[1] + positions_diff[1].extend(0.),
                            positions[2] + positions_diff[2].extend(0.),
                            positions[3] + positions_diff[3].extend(0.),
                        ];

                        let points = [
                            points[0] + positions_diff[0],
                            points[1] + positions_diff[1],
                            points[2] + positions_diff[2],
                            points[3] + positions_diff[3],
                        ];

                        let transformed_rect_size = transform.transform_vector2(rect_size);

                        // Don't try to cull nodes that have a rotation
                        // In a rotation around the Z-axis, this value is 0.0 for an angle of 0.0 or π
                        // In those two cases, the culling check can proceed normally as corners will be on
                        // horizontal / vertical lines
                        // For all other angles, bypass the culling check
                        // This does not properly handles all rotations on all axis
                        if transform.x_axis[1] == 0.0 {
                            // Cull nodes that are completely clipped
                            if positions_diff[0].x - positions_diff[1].x >= transformed_rect_size.x
                                || positions_diff[1].y - positions_diff[2].y
                                    >= transformed_rect_size.y
                            {
                                continue;
                            }
                        }
                        let uvs = if flags == shader_flags::UNTEXTURED {
                            // Placeholder unit-quad UVs; the shader ignores them when untextured.
                            [Vec2::ZERO, Vec2::X, Vec2::ONE, Vec2::Y]
                        } else {
                            let image = gpu_images
                                .get(extracted_uinode.image)
                                .expect("Image was checked during batching and should still exist");
                            // Rescale atlases. This is done here because we need texture data that might not be available in Extract.
                            let atlas_extent = atlas_scaling
                                .map(|scaling| image.size_2d().as_vec2() * scaling)
                                .unwrap_or(uinode_rect.max);
                            // Flipping swaps the rect's min/max on the given axis and mirrors
                            // the clipping adjustments to match.
                            if *flip_x {
                                core::mem::swap(&mut uinode_rect.max.x, &mut uinode_rect.min.x);
                                positions_diff[0].x *= -1.;
                                positions_diff[1].x *= -1.;
                                positions_diff[2].x *= -1.;
                                positions_diff[3].x *= -1.;
                            }
                            if *flip_y {
                                core::mem::swap(&mut uinode_rect.max.y, &mut uinode_rect.min.y);
                                positions_diff[0].y *= -1.;
                                positions_diff[1].y *= -1.;
                                positions_diff[2].y *= -1.;
                                positions_diff[3].y *= -1.;
                            }
                            [
                                Vec2::new(
                                    uinode_rect.min.x + positions_diff[0].x,
                                    uinode_rect.min.y + positions_diff[0].y,
                                ),
                                Vec2::new(
                                    uinode_rect.max.x + positions_diff[1].x,
                                    uinode_rect.min.y + positions_diff[1].y,
                                ),
                                Vec2::new(
                                    uinode_rect.max.x + positions_diff[2].x,
                                    uinode_rect.max.y + positions_diff[2].y,
                                ),
                                Vec2::new(
                                    uinode_rect.min.x + positions_diff[3].x,
                                    uinode_rect.max.y + positions_diff[3].y,
                                ),
                            ]
                            .map(|pos| pos / atlas_extent)
                        };

                        let color = color.to_f32_array();
                        if let NodeType::Border(border_flags) = *node_type {
                            flags |= border_flags;
                        }

                        for i in 0..4 {
                            ui_meta.vertices.push(UiVertex {
                                position: positions_clipped[i].into(),
                                uv: uvs[i].into(),
                                color,
                                flags: flags | shader_flags::CORNERS[i],
                                radius: (*border_radius).into(),
                                border: [border.left, border.top, border.right, border.bottom],
                                size: rect_size.into(),
                                point: points[i].into(),
                            });
                        }

                        for &i in &QUAD_INDICES {
                            ui_meta.indices.push(indices_index + i as u32);
                        }

                        // See the NOTE(review) above: 6 indices and 4 vertices were pushed.
                        vertices_index += 6;
                        indices_index += 4;
                    }
                    ExtractedUiItem::Glyphs { range } => {
                        let image = gpu_images
                            .get(extracted_uinode.image)
                            .expect("Image was checked during batching and should still exist");

                        let atlas_extent = image.size_2d().as_vec2();

                        for glyph in &extracted_uinodes.glyphs[range.clone()] {
                            let color = glyph.color.to_f32_array();
                            let glyph_rect = glyph.rect;
                            let rect_size = glyph_rect.size();

                            // Specify the corners of the glyph
                            let positions = QUAD_VERTEX_POSITIONS.map(|pos| {
                                extracted_uinode
                                    .transform
                                    .transform_point2(glyph.translation + pos * glyph_rect.size())
                                    .extend(0.)
                            });

                            // Same clipping adjustment as for nodes above.
                            let positions_diff = if let Some(clip) = extracted_uinode.clip {
                                [
                                    Vec2::new(
                                        f32::max(clip.min.x - positions[0].x, 0.),
                                        f32::max(clip.min.y - positions[0].y, 0.),
                                    ),
                                    Vec2::new(
                                        f32::min(clip.max.x - positions[1].x, 0.),
                                        f32::max(clip.min.y - positions[1].y, 0.),
                                    ),
                                    Vec2::new(
                                        f32::min(clip.max.x - positions[2].x, 0.),
                                        f32::min(clip.max.y - positions[2].y, 0.),
                                    ),
                                    Vec2::new(
                                        f32::max(clip.min.x - positions[3].x, 0.),
                                        f32::min(clip.max.y - positions[3].y, 0.),
                                    ),
                                ]
                            } else {
                                [Vec2::ZERO; 4]
                            };

                            let positions_clipped = [
                                positions[0] + positions_diff[0].extend(0.),
                                positions[1] + positions_diff[1].extend(0.),
                                positions[2] + positions_diff[2].extend(0.),
                                positions[3] + positions_diff[3].extend(0.),
                            ];

                            // cull nodes that are completely clipped
                            let transformed_rect_size =
                                extracted_uinode.transform.transform_vector2(rect_size);
                            if positions_diff[0].x - positions_diff[1].x
                                >= transformed_rect_size.x.abs()
                                || positions_diff[1].y - positions_diff[2].y
                                    >= transformed_rect_size.y.abs()
                            {
                                continue;
                            }

                            let uvs = [
                                Vec2::new(
                                    glyph.rect.min.x + positions_diff[0].x,
                                    glyph.rect.min.y + positions_diff[0].y,
                                ),
                                Vec2::new(
                                    glyph.rect.max.x + positions_diff[1].x,
                                    glyph.rect.min.y + positions_diff[1].y,
                                ),
                                Vec2::new(
                                    glyph.rect.max.x + positions_diff[2].x,
                                    glyph.rect.max.y + positions_diff[2].y,
                                ),
                                Vec2::new(
                                    glyph.rect.min.x + positions_diff[3].x,
                                    glyph.rect.max.y + positions_diff[3].y,
                                ),
                            ]
                            .map(|pos| pos / atlas_extent);

                            for i in 0..4 {
                                ui_meta.vertices.push(UiVertex {
                                    position: positions_clipped[i].into(),
                                    uv: uvs[i].into(),
                                    color,
                                    flags: shader_flags::TEXTURED | shader_flags::CORNERS[i],
                                    radius: [0.0; 4],
                                    border: [0.0; 4],
                                    size: rect_size.into(),
                                    point: [0.0; 2],
                                });
                            }

                            for &i in &QUAD_INDICES {
                                ui_meta.indices.push(indices_index + i as u32);
                            }

                            vertices_index += 6;
                            indices_index += 4;
                        }
                    }
                }
                // Extend the current batch over the quad(s) just written, and count
                // this phase item into the batch's item range.
                existing_batch.unwrap().1.range.end = vertices_index;
                ui_phase.items[batch_item_index].batch_range_mut().end += 1;
            }
        }

        ui_meta.vertices.write_buffer(&render_device, &render_queue);
        ui_meta.indices.write_buffer(&render_device, &render_queue);
        *previous_len = batches.len();
        commands.try_insert_batch(batches);
    }
    // Extracted data is consumed each frame regardless of whether anything was prepared.
    extracted_uinodes.clear();
}
1675
1676