// bevyengine/bevy: crates/bevy_render/src/view/mod.rs
pub mod visibility;
pub mod window;

use bevy_camera::{
    primitives::Frustum, CameraMainTextureUsages, ClearColor, ClearColorConfig, Exposure,
    MainPassResolutionOverride, NormalizedRenderTarget,
};
use bevy_diagnostic::FrameCount;
pub use visibility::*;
pub use window::*;

use crate::{
    camera::{ExtractedCamera, MipBias, NormalizedRenderTargetExt as _, TemporalJitter},
    extract_component::ExtractComponentPlugin,
    occlusion_culling::OcclusionCulling,
    render_asset::RenderAssets,
    render_phase::ViewRangefinder3d,
    render_resource::{DynamicUniformBuffer, ShaderType, Texture, TextureView},
    renderer::{RenderDevice, RenderQueue},
    sync_world::MainEntity,
    texture::{
        CachedTexture, ColorAttachment, DepthAttachment, GpuImage, ManualTextureViews,
        OutputColorAttachment, TextureCache,
    },
    Render, RenderApp, RenderSystems,
};
use alloc::sync::Arc;
use bevy_app::{App, Plugin};
use bevy_color::LinearRgba;
use bevy_derive::{Deref, DerefMut};
use bevy_ecs::prelude::*;
use bevy_image::{BevyDefault as _, ToExtents};
use bevy_math::{mat3, vec2, vec3, Mat3, Mat4, UVec4, Vec2, Vec3, Vec4, Vec4Swizzles};
use bevy_platform::collections::{hash_map::Entry, HashMap};
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
use bevy_render_macros::ExtractComponent;
use bevy_shader::load_shader_library;
use bevy_transform::components::GlobalTransform;
use core::{
    ops::Range,
    sync::atomic::{AtomicUsize, Ordering},
};
use wgpu::{
    BufferUsages, RenderPassColorAttachment, RenderPassDepthStencilAttachment, StoreOp,
    TextureDescriptor, TextureDimension, TextureFormat, TextureUsages,
};

/// The matrix that converts from the RGB to the LMS color space.
///
/// To derive this, first we convert from RGB to [CIE 1931 XYZ]:
///
/// ```text
/// ⎡ X ⎤   ⎡ 0.490  0.310  0.200 ⎤ ⎡ R ⎤
/// ⎢ Y ⎥ = ⎢ 0.177  0.812  0.011 ⎥ ⎢ G ⎥
/// ⎣ Z ⎦   ⎣ 0.000  0.010  0.990 ⎦ ⎣ B ⎦
/// ```
///
/// Then we convert to LMS according to the [CAM16 standard matrix]:
///
/// ```text
/// ⎡ L ⎤   ⎡  0.401  0.650  -0.051 ⎤ ⎡ X ⎤
/// ⎢ M ⎥ = ⎢ -0.250  1.204   0.046 ⎥ ⎢ Y ⎥
/// ⎣ S ⎦   ⎣ -0.002  0.049   0.953 ⎦ ⎣ Z ⎦
/// ```
///
/// The resulting matrix is just the concatenation of these two matrices, to do
/// the conversion in one step.
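///
/// A quick sketch of that concatenation with `glam` (the two intermediate
/// matrix names here are illustrative, not part of this module):
///
/// ```ignore
/// let xyz_from_rgb = mat3(
///     vec3(0.490, 0.177, 0.000),
///     vec3(0.310, 0.812, 0.010),
///     vec3(0.200, 0.011, 0.990),
/// );
/// let lms_from_xyz = mat3(
///     vec3(0.401, -0.250, -0.002),
///     vec3(0.650, 1.204, 0.049),
///     vec3(-0.051, 0.046, 0.953),
/// );
/// // Matches RGB_TO_LMS up to the rounding of the entries quoted above.
/// assert!((lms_from_xyz * xyz_from_rgb).abs_diff_eq(RGB_TO_LMS, 1e-3));
/// ```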
///
/// [CIE 1931 XYZ]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [CAM16 standard matrix]: https://en.wikipedia.org/wiki/LMS_color_space
static RGB_TO_LMS: Mat3 = mat3(
    vec3(0.311692, 0.0905138, 0.00764433),
    vec3(0.652085, 0.901341, 0.0486554),
    vec3(0.0362225, 0.00814478, 0.943700),
);

/// The inverse of the [`RGB_TO_LMS`] matrix, converting from the LMS color
/// space back to RGB.
static LMS_TO_RGB: Mat3 = mat3(
    vec3(4.06305, -0.40791, -0.0118812),
    vec3(-2.93241, 1.40437, -0.0486532),
    vec3(-0.130646, 0.00353630, 1.0605344),
);

/// The [CIE 1931] *xy* chromaticity coordinates of the [D65 white point].
///
/// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_XY: Vec2 = vec2(0.31272, 0.32903);

/// The [D65 white point] in [LMS color space].
///
/// [LMS color space]: https://en.wikipedia.org/wiki/LMS_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_LMS: Vec3 = vec3(0.975538, 1.01648, 1.08475);

pub struct ViewPlugin;

impl Plugin for ViewPlugin {
    fn build(&self, app: &mut App) {
        load_shader_library!(app, "view.wgsl");

        app
            // NOTE: windows.is_changed() handles cases where a window was resized
            .add_plugins((
                ExtractComponentPlugin::<Msaa>::default(),
                ExtractComponentPlugin::<OcclusionCulling>::default(),
                RenderVisibilityRangePlugin,
            ));

        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app.add_systems(
                Render,
                (
                    // `TextureView`s need to be dropped before reconfiguring window surfaces.
                    clear_view_attachments
                        .in_set(RenderSystems::ManageViews)
                        .before(create_surfaces),
                    cleanup_view_targets_for_resize
                        .in_set(RenderSystems::ManageViews)
                        .before(create_surfaces),
                    prepare_view_attachments
                        .in_set(RenderSystems::ManageViews)
                        .before(prepare_view_targets)
                        .after(prepare_windows),
                    prepare_view_targets
                        .in_set(RenderSystems::ManageViews)
                        .after(prepare_windows)
                        .after(crate::render_asset::prepare_assets::<GpuImage>)
                        .ambiguous_with(crate::camera::sort_cameras), // doesn't use `sorted_camera_index_for_target`
                    prepare_view_uniforms.in_set(RenderSystems::PrepareResources),
                ),
            );
        }
    }

    fn finish(&self, app: &mut App) {
        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .init_resource::<ViewUniforms>()
                .init_resource::<ViewTargetAttachments>();
        }
    }
}

/// Component for configuring the number of samples for [Multi-Sample Anti-Aliasing](https://en.wikipedia.org/wiki/Multisample_anti-aliasing)
/// for a [`Camera`](bevy_camera::Camera).
///
/// Defaults to 4 samples. A higher number of samples results in smoother edges.
///
/// Some advanced rendering features may require that MSAA is disabled.
///
/// Note that the web currently only supports 1 or 4 samples.
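///
/// A minimal usage sketch (assumes a typical app setup; `Camera3d` lives in
/// `bevy_core_pipeline`, not in this crate):
///
/// ```ignore
/// // Opt a camera out of MSAA by inserting the component at spawn time.
/// commands.spawn((Camera3d::default(), Msaa::Off));
/// ```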
#[derive(
    Component,
    Default,
    Clone,
    Copy,
    ExtractComponent,
    Reflect,
    PartialEq,
    PartialOrd,
    Eq,
    Hash,
    Debug,
)]
#[reflect(Component, Default, PartialEq, Hash, Debug)]
pub enum Msaa {
    Off = 1,
    Sample2 = 2,
    #[default]
    Sample4 = 4,
    Sample8 = 8,
}

impl Msaa {
    #[inline]
    pub fn samples(&self) -> u32 {
        *self as u32
    }

    /// Creates an [`Msaa`] from the given sample count.
    ///
    /// Panics if `samples` is not 1, 2, 4, or 8.
    pub fn from_samples(samples: u32) -> Self {
        match samples {
            1 => Msaa::Off,
            2 => Msaa::Sample2,
            4 => Msaa::Sample4,
            8 => Msaa::Sample8,
            _ => panic!("Unsupported MSAA sample count: {samples}"),
        }
    }
}

/// An identifier for a view that is stable across frames.
///
/// We can't use [`Entity`] for this because render world entities aren't
/// stable, and we can't use just [`MainEntity`] because some main world views
/// extract to multiple render world views. For example, a directional light
/// extracts to one render world view per cascade, and a point light extracts to
/// one render world view per cubemap face. So we pair the main entity with an
/// *auxiliary entity* and a *subview index*, which *together* uniquely identify
/// a view in the render world in a way that's stable from frame to frame.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct RetainedViewEntity {
    /// The main entity that this view corresponds to.
    pub main_entity: MainEntity,

    /// Another entity associated with the view entity.
    ///
    /// This is currently used for shadow cascades. If there are multiple
    /// cameras, each camera needs to have its own set of shadow cascades. Thus
    /// the light and subview index aren't themselves enough to uniquely
    /// identify a shadow cascade: we need the camera that the cascade is
    /// associated with as well. This entity stores that camera.
    ///
    /// If not present, this will be `MainEntity(Entity::PLACEHOLDER)`.
    pub auxiliary_entity: MainEntity,

    /// The index of the view corresponding to the entity.
    ///
    /// For example, for point lights that cast shadows, this is the index of
    /// the cubemap face (0 through 5 inclusive). For directional lights, this
    /// is the index of the cascade.
    pub subview_index: u32,
}

impl RetainedViewEntity {
    /// Creates a new [`RetainedViewEntity`] from the given main world entity,
    /// auxiliary main world entity, and subview index.
    ///
    /// See [`RetainedViewEntity::subview_index`] for an explanation of what
    /// `auxiliary_entity` and `subview_index` are.
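    ///
    /// A hedged illustration (the entity variables are hypothetical):
    ///
    /// ```ignore
    /// // Cubemap face 3 of a point light; no auxiliary camera entity.
    /// let face = RetainedViewEntity::new(light_main_entity, None, 3);
    /// // Shadow cascade 1 of a directional light, keyed by its camera.
    /// let cascade =
    ///     RetainedViewEntity::new(light_main_entity, Some(camera_main_entity), 1);
    /// ```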
    pub fn new(
        main_entity: MainEntity,
        auxiliary_entity: Option<MainEntity>,
        subview_index: u32,
    ) -> Self {
        Self {
            main_entity,
            auxiliary_entity: auxiliary_entity.unwrap_or(Entity::PLACEHOLDER.into()),
            subview_index,
        }
    }
}

/// Describes a camera in the render world.
///
/// Each entity in the main world can potentially extract to multiple subviews,
/// each of which has a [`RetainedViewEntity::subview_index`]. For instance, 3D
/// cameras extract to both a 3D camera subview with index 0 and a special UI
/// subview with index 1. Likewise, point lights with shadows extract to 6
/// subviews, one for each side of the shadow cubemap.
#[derive(Component)]
pub struct ExtractedView {
    /// The entity in the main world corresponding to this render world view.
    pub retained_view_entity: RetainedViewEntity,
    /// Typically a column-major right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0     0 ⎤
    /// ⎢          0  f   0     0 ⎥
    /// ⎢          0  0   0  near ⎥
    /// ⎣          0  0  -1     0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = far - near
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡ 2 / w      0      0   cw / w ⎤
    /// ⎢     0  2 / h      0   ch / h ⎥
    /// ⎢     0      0  1 / d  far / d ⎥
    /// ⎣     0      0      0        1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check whether a
    /// projection is orthographic.
    ///
    /// Glam matrices are column-major, so, for example, the near plane of a
    /// perspective projection is `clip_from_view[3][2]`.
    ///
    /// However, custom projections are also possible.
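    ///
    /// A short sketch of those probes, using `glam`'s column-major field
    /// names (`w_axis` is column 3):
    ///
    /// ```ignore
    /// let is_orthographic = clip_from_view.w_axis.w == 1.0;
    /// let near = clip_from_view.w_axis.z; // infinite reverse-z perspective
    /// ```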
    pub clip_from_view: Mat4,
    pub world_from_view: GlobalTransform,
    /// The view-projection matrix. When provided it is used instead of deriving
    /// it from the `clip_from_view` and `world_from_view` fields, which can be
    /// helpful in cases where numerical stability matters and there is a more
    /// direct way to derive the view-projection matrix.
    pub clip_from_world: Option<Mat4>,
    pub hdr: bool,
    /// uvec4(origin.x, origin.y, width, height)
    pub viewport: UVec4,
    pub color_grading: ColorGrading,

    /// Whether to switch culling mode so that materials that request backface
    /// culling cull front faces, and vice versa.
    ///
    /// This is typically used for cameras that mirror the world that they
    /// render across a plane, because doing that flips the winding of each
    /// polygon.
    ///
    /// This setting doesn't affect materials that disable backface culling.
    pub invert_culling: bool,
}

impl ExtractedView {
    /// Creates a 3D rangefinder for a view.
    pub fn rangefinder3d(&self) -> ViewRangefinder3d {
        ViewRangefinder3d::from_world_from_view(&self.world_from_view.affine())
    }
}

/// Configures filmic color grading parameters to adjust the image appearance.
///
/// Color grading is applied just before tonemapping for a given
/// [`Camera`](bevy_camera::Camera) entity, with the sole exception of the
/// `post_saturation` value in [`ColorGradingGlobal`], which is applied after
/// tonemapping.
#[derive(Component, Reflect, Debug, Default, Clone)]
#[reflect(Component, Default, Debug, Clone)]
pub struct ColorGrading {
    /// Filmic color grading values applied to the image as a whole (as opposed
    /// to individual sections, like shadows and highlights).
    pub global: ColorGradingGlobal,

    /// Color grading values that are applied to the darker parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub shadows: ColorGradingSection,

    /// Color grading values that are applied to the parts of the image with
    /// intermediate brightness.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub midtones: ColorGradingSection,

    /// Color grading values that are applied to the lighter parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub highlights: ColorGradingSection,
}

/// Filmic color grading values applied to the image as a whole (as opposed to
/// individual sections, like shadows and highlights).
#[derive(Clone, Debug, Reflect)]
#[reflect(Default, Clone)]
pub struct ColorGradingGlobal {
    /// Exposure value (EV) offset, measured in stops.
    pub exposure: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *x* value.
    ///
    /// Positive values make the colors redder. Negative values make the colors
    /// bluer. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub temperature: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *y* value.
    ///
    /// Positive values make the colors more magenta. Negative values make the
    /// colors greener. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub tint: f32,

    /// An adjustment to the [hue], in radians.
    ///
    /// Adjusting this value changes the perceived colors in the image: red to
    /// yellow to green to blue, etc. It has no effect on the saturation or
    /// brightness of the colors.
    ///
    /// [hue]: https://en.wikipedia.org/wiki/HSL_and_HSV#Formal_derivation
    pub hue: f32,

    /// Saturation adjustment applied after tonemapping.
    ///
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a
    /// grayscale image with luminance defined by ITU-R BT.709. Values above
    /// 1.0 increase saturation.
    pub post_saturation: f32,

    /// The luminance (brightness) ranges that are considered part of the
    /// "midtones" of the image.
    ///
    /// This affects which [`ColorGradingSection`]s apply to which colors. Note
    /// that the sections smoothly blend into one another, to avoid abrupt
    /// transitions.
    ///
    /// The default value is 0.2 to 0.7.
    pub midtones_range: Range<f32>,
}

/// The [`ColorGrading`] structure, packed into the most efficient form for the
/// GPU.
#[derive(Clone, Copy, Debug, ShaderType)]
pub struct ColorGradingUniform {
    pub balance: Mat3,
    pub saturation: Vec3,
    pub contrast: Vec3,
    pub gamma: Vec3,
    pub gain: Vec3,
    pub lift: Vec3,
    pub midtone_range: Vec2,
    pub exposure: f32,
    pub hue: f32,
    pub post_saturation: f32,
}

/// A section of color grading values that can be selectively applied to
/// shadows, midtones, and highlights.
#[derive(Reflect, Debug, Copy, Clone, PartialEq)]
#[reflect(Clone, PartialEq)]
pub struct ColorGradingSection {
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a
    /// grayscale image with luminance defined by ITU-R BT.709. Values above
    /// 1.0 increase saturation.
    pub saturation: f32,

    /// Adjusts the range of colors.
    ///
    /// A value of 1.0 applies no changes. Values below 1.0 move the colors more
    /// toward a neutral gray. Values above 1.0 spread the colors out away from
    /// the neutral gray.
    pub contrast: f32,

    /// A nonlinear luminance adjustment, mainly affecting the high end of the
    /// range.
    ///
    /// This is the *n* exponent in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gamma: f32,

    /// A linear luminance adjustment, mainly affecting the middle part of the
    /// range.
    ///
    /// This is the *s* factor in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gain: f32,

    /// A fixed luminance adjustment, mainly affecting the lower part of the
    /// range.
    ///
    /// This is the *o* term in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub lift: f32,
}

impl Default for ColorGradingGlobal {
    fn default() -> Self {
        Self {
            exposure: 0.0,
            temperature: 0.0,
            tint: 0.0,
            hue: 0.0,
            post_saturation: 1.0,
            midtones_range: 0.2..0.7,
        }
    }
}

impl Default for ColorGradingSection {
    fn default() -> Self {
        Self {
            saturation: 1.0,
            contrast: 1.0,
            gamma: 1.0,
            gain: 1.0,
            lift: 0.0,
        }
    }
}

impl ColorGrading {
    /// Creates a new [`ColorGrading`] instance in which shadows, midtones, and
    /// highlights all have the same set of color grading values.
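    ///
    /// For instance (a sketch; the field values are arbitrary):
    ///
    /// ```ignore
    /// let grading = ColorGrading::with_identical_sections(
    ///     ColorGradingGlobal { exposure: 1.0, ..Default::default() },
    ///     ColorGradingSection { saturation: 1.2, ..Default::default() },
    /// );
    /// ```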
    pub fn with_identical_sections(
        global: ColorGradingGlobal,
        section: ColorGradingSection,
    ) -> ColorGrading {
        ColorGrading {
            global,
            highlights: section,
            midtones: section,
            shadows: section,
        }
    }

    /// Returns an iterator that visits the shadows, midtones, and highlights
    /// sections, in that order.
    pub fn all_sections(&self) -> impl Iterator<Item = &ColorGradingSection> {
        [&self.shadows, &self.midtones, &self.highlights].into_iter()
    }

    /// Returns an iterator that visits mutable references to the shadows,
    /// midtones, and highlights sections, in that order.
    pub fn all_sections_mut(&mut self) -> impl Iterator<Item = &mut ColorGradingSection> {
        [&mut self.shadows, &mut self.midtones, &mut self.highlights].into_iter()
    }
}

#[derive(Clone, ShaderType)]
pub struct ViewUniform {
    pub clip_from_world: Mat4,
    pub unjittered_clip_from_world: Mat4,
    pub world_from_clip: Mat4,
    pub world_from_view: Mat4,
    pub view_from_world: Mat4,
    /// Typically a column-major right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0     0 ⎤
    /// ⎢          0  f   0     0 ⎥
    /// ⎢          0  0   0  near ⎥
    /// ⎣          0  0  -1     0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = far - near
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡ 2 / w      0      0   cw / w ⎤
    /// ⎢     0  2 / h      0   ch / h ⎥
    /// ⎢     0      0  1 / d  far / d ⎥
    /// ⎣     0      0      0        1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check whether a
    /// projection is orthographic.
    ///
    /// Glam matrices are column-major, so, for example, the near plane of a
    /// perspective projection is `clip_from_view[3][2]`.
    ///
    /// However, custom projections are also possible.
    pub clip_from_view: Mat4,
    pub view_from_clip: Mat4,
    pub world_position: Vec3,
    pub exposure: f32,
    /// viewport(x_origin, y_origin, width, height)
    pub viewport: Vec4,
    pub main_pass_viewport: Vec4,
    /// 6 world-space half spaces (normal: vec3, distance: f32) ordered left, right, top, bottom, near, far.
    /// The normal vectors point towards the interior of the frustum.
    /// A half space contains `p` if `normal.dot(p) + distance > 0.`
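    ///
    /// A point-in-frustum sketch under that convention (`p` here is a
    /// hypothetical world-space `Vec3`):
    ///
    /// ```ignore
    /// let inside = frustum
    ///     .iter()
    ///     .all(|half_space| half_space.xyz().dot(p) + half_space.w > 0.0);
    /// ```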
    pub frustum: [Vec4; 6],
    pub color_grading: ColorGradingUniform,
    pub mip_bias: f32,
    pub frame_count: u32,
}

#[derive(Resource)]
pub struct ViewUniforms {
    pub uniforms: DynamicUniformBuffer<ViewUniform>,
}

impl FromWorld for ViewUniforms {
    fn from_world(world: &mut World) -> Self {
        let mut uniforms = DynamicUniformBuffer::default();
        uniforms.set_label(Some("view_uniforms_buffer"));

        let render_device = world.resource::<RenderDevice>();
        if render_device.limits().max_storage_buffers_per_shader_stage > 0 {
            uniforms.add_usages(BufferUsages::STORAGE);
        }

        Self { uniforms }
    }
}

#[derive(Component)]
pub struct ViewUniformOffset {
    pub offset: u32,
}

#[derive(Component, Clone)]
pub struct ViewTarget {
    main_textures: MainTargetTextures,
    main_texture_format: TextureFormat,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`.
    /// This is shared across view targets with the same render target.
    main_texture: Arc<AtomicUsize>,
    out_texture: OutputColorAttachment,
}

/// Contains [`OutputColorAttachment`] used for each target present on any view in the current
/// frame, after being prepared by [`prepare_view_attachments`]. Users that want to override
/// the default output color attachment for a specific target can do so by adding a
/// [`OutputColorAttachment`] to this resource before [`prepare_view_targets`] is called.
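///
/// A sketch of that override (a hypothetical system; `target`, `my_view`, and
/// `my_format` stand in for a real render target, texture view, and format):
///
/// ```ignore
/// fn override_attachment(mut attachments: ResMut<ViewTargetAttachments>) {
///     attachments.insert(target, OutputColorAttachment::new(my_view, my_format));
/// }
/// ```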
#[derive(Resource, Default, Deref, DerefMut)]
pub struct ViewTargetAttachments(HashMap<NormalizedRenderTarget, OutputColorAttachment>);

pub struct PostProcessWrite<'a> {
    pub source: &'a TextureView,
    pub source_texture: &'a Texture,
    pub destination: &'a TextureView,
    pub destination_texture: &'a Texture,
}

impl From<ColorGrading> for ColorGradingUniform {
    fn from(component: ColorGrading) -> Self {
        // Compute the balance matrix that will be used to apply the white
        // balance adjustment to an RGB color. Our general approach will be to
        // convert both the color and the developer-supplied white point to the
        // LMS color space, apply the conversion, and then convert back.
        //
        // First, we start with the CIE 1931 *xy* values of the standard D65
        // illuminant:
        // <https://en.wikipedia.org/wiki/Standard_illuminant#D65_values>
        //
        // We then adjust them based on the developer's requested white balance.
        let white_point_xy = D65_XY + vec2(-component.global.temperature, component.global.tint);

        // Convert the white point from CIE 1931 *xy* to LMS. First, we convert
        // to XYZ (with luminance normalized to Y = 1):
        //
        //     Y = 1    X = x / y    Z = (1 - x - y) / y
        //
        // Then we convert from XYZ to LMS color space, using the CAM16 matrix
        // from <https://en.wikipedia.org/wiki/LMS_color_space#Later_CIECAMs>:
        //
        //     ⎡ L ⎤   ⎡  0.401  0.650  -0.051 ⎤ ⎡ X ⎤
        //     ⎢ M ⎥ = ⎢ -0.250  1.204   0.046 ⎥ ⎢ Y ⎥
        //     ⎣ S ⎦   ⎣ -0.002  0.049   0.953 ⎦ ⎣ Z ⎦
        //
        // The following formula is just a simplification of the above.
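        //
        // Concretely, substituting X = x / y and Z = (1 - x - y) / y into the
        // matrix product and collecting the terms over `y` gives, for the L
        // component:
        //
        //     L = 0.401 * X + 0.650 * Y - 0.051 * Z
        //       = 0.701634 + (-0.051461 + 0.452749 * x) / y
        //
        // and likewise for M and S, which is where the constants below come
        // from (the extra digits reflect the unrounded CAM16 entries).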

        let white_point_lms = vec3(0.701634, 1.15856, -0.904175)
            + (vec3(-0.051461, 0.045854, 0.953127)
                + vec3(0.452749, -0.296122, -0.955206) * white_point_xy.x)
                / white_point_xy.y;

        // Now that we're in LMS space, perform the white point scaling.
        let white_point_adjustment = Mat3::from_diagonal(D65_LMS / white_point_lms);

        // Finally, combine the RGB → LMS → corrected LMS → corrected RGB
        // pipeline into a single 3×3 matrix.
        let balance = LMS_TO_RGB * white_point_adjustment * RGB_TO_LMS;

        Self {
            balance,
            saturation: vec3(
                component.shadows.saturation,
                component.midtones.saturation,
                component.highlights.saturation,
            ),
            contrast: vec3(
                component.shadows.contrast,
                component.midtones.contrast,
                component.highlights.contrast,
            ),
            gamma: vec3(
                component.shadows.gamma,
                component.midtones.gamma,
                component.highlights.gamma,
            ),
            gain: vec3(
                component.shadows.gain,
                component.midtones.gain,
                component.highlights.gain,
            ),
            lift: vec3(
                component.shadows.lift,
                component.midtones.lift,
                component.highlights.lift,
            ),
            midtone_range: vec2(
                component.global.midtones_range.start,
                component.global.midtones_range.end,
            ),
            exposure: component.global.exposure,
            hue: component.global.hue,
            post_saturation: component.global.post_saturation,
        }
    }
}

/// Add this component to a camera to disable *indirect mode*.
///
/// Indirect mode, automatically enabled on supported hardware, allows Bevy to
/// offload transform and cull operations to the GPU, reducing CPU overhead.
/// Doing this, however, reduces the amount of control that your app has over
/// instancing decisions. In certain circumstances, you may want to disable
/// indirect drawing so that your app can manually instance meshes as it sees
/// fit. See the `custom_shader_instancing` example.
///
/// The vast majority of applications will not need to use this component, as
/// disabling indirect mode generally reduces rendering performance.
///
/// Note: This component should only be added when initially spawning a camera.
/// Adding or removing it after spawn can result in unspecified behavior.
#[derive(Component, Default)]
pub struct NoIndirectDrawing;

impl ViewTarget {
    pub const TEXTURE_FORMAT_HDR: TextureFormat = TextureFormat::Rgba16Float;

    /// Retrieve this target's main texture's color attachment.
    pub fn get_color_attachment(&self) -> RenderPassColorAttachment<'_> {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            self.main_textures.a.get_attachment()
        } else {
            self.main_textures.b.get_attachment()
        }
    }

    /// Retrieve this target's "unsampled" main texture's color attachment.
    pub fn get_unsampled_color_attachment(&self) -> RenderPassColorAttachment<'_> {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            self.main_textures.a.get_unsampled_attachment()
        } else {
            self.main_textures.b.get_unsampled_attachment()
        }
    }

    /// The "main" unsampled texture.
    pub fn main_texture(&self) -> &Texture {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.a.texture.texture
        } else {
            &self.main_textures.b.texture.texture
        }
    }

    /// The _other_ "main" unsampled texture.
    /// In most cases you should use [`Self::main_texture`] instead and never this.
    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
    ///
    /// A use case for this is to be able to prepare a bind group for all main textures
    /// ahead of time.
    pub fn main_texture_other(&self) -> &Texture {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.b.texture.texture
        } else {
            &self.main_textures.a.texture.texture
        }
    }

    /// The "main" unsampled texture view.
    pub fn main_texture_view(&self) -> &TextureView {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.a.texture.default_view
        } else {
            &self.main_textures.b.texture.default_view
        }
    }

    /// The _other_ "main" unsampled texture view.
    /// In most cases you should use [`Self::main_texture_view`] instead and never this.
    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
    ///
    /// A use case for this is to be able to prepare a bind group for all main textures
    /// ahead of time.
    pub fn main_texture_other_view(&self) -> &TextureView {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.b.texture.default_view
        } else {
            &self.main_textures.a.texture.default_view
        }
    }

    /// The "main" sampled texture.
    pub fn sampled_main_texture(&self) -> Option<&Texture> {
        self.main_textures
            .a
            .resolve_target
            .as_ref()
            .map(|sampled| &sampled.texture)
    }

    /// The "main" sampled texture view.
    pub fn sampled_main_texture_view(&self) -> Option<&TextureView> {
        self.main_textures
            .a
            .resolve_target
            .as_ref()
            .map(|sampled| &sampled.default_view)
    }

    #[inline]
    pub fn main_texture_format(&self) -> TextureFormat {
        self.main_texture_format
    }

    /// Returns `true` if and only if the main texture is [`Self::TEXTURE_FORMAT_HDR`].
    #[inline]
    pub fn is_hdr(&self) -> bool {
        self.main_texture_format == ViewTarget::TEXTURE_FORMAT_HDR
    }

    /// The final texture this view will render to.
    #[inline]
    pub fn out_texture(&self) -> &TextureView {
        &self.out_texture.view
    }

    pub fn out_texture_color_attachment(
        &self,
        clear_color: Option<LinearRgba>,
    ) -> RenderPassColorAttachment<'_> {
        self.out_texture.get_attachment(clear_color)
    }

    /// Whether the final texture this view will render to needs to be presented.
    pub fn needs_present(&self) -> bool {
        self.out_texture.needs_present()
    }

    /// The format of the final texture this view will render to.
    #[inline]
    pub fn out_texture_view_format(&self) -> TextureFormat {
        self.out_texture.view_format
    }

    /// This will start a new "post process write", which assumes that the caller
    /// will write the [`PostProcessWrite`]'s `source` to the `destination`.
    ///
    /// `source` is the "current" main texture. This will internally flip this
    /// [`ViewTarget`]'s main texture to the `destination` texture, so the caller
    /// _must_ ensure `source` is copied to `destination`, with or without modifications.
    /// Failing to do so will cause the current main texture information to be lost.
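    ///
    /// A sketch of the usual pattern inside a post-processing render node
    /// (abridged; pipeline and bind group setup are elided):
    ///
    /// ```ignore
    /// let post_process = view_target.post_process_write();
    /// // Bind `post_process.source` as a texture, then draw a fullscreen
    /// // triangle into `post_process.destination`.
    /// ```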
    pub fn post_process_write(&self) -> PostProcessWrite<'_> {
        let old_is_a_main_texture = self.main_texture.fetch_xor(1, Ordering::SeqCst);
        // If the old main texture is `a`, then the post process must write from `a` to `b`.
        if old_is_a_main_texture == 0 {
            self.main_textures.b.mark_as_cleared();
            PostProcessWrite {
                source: &self.main_textures.a.texture.default_view,
                source_texture: &self.main_textures.a.texture.texture,
                destination: &self.main_textures.b.texture.default_view,
                destination_texture: &self.main_textures.b.texture.texture,
            }
        } else {
            self.main_textures.a.mark_as_cleared();
            PostProcessWrite {
                source: &self.main_textures.b.texture.default_view,
                source_texture: &self.main_textures.b.texture.texture,
                destination: &self.main_textures.a.texture.default_view,
                destination_texture: &self.main_textures.a.texture.texture,
            }
        }
    }
}

#[derive(Component)]
pub struct ViewDepthTexture {
    pub texture: Texture,
    attachment: DepthAttachment,
}

impl ViewDepthTexture {
    pub fn new(texture: CachedTexture, clear_value: Option<f32>) -> Self {
        Self {
            texture: texture.texture,
            attachment: DepthAttachment::new(texture.default_view, clear_value),
        }
    }

    pub fn get_attachment(&self, store: StoreOp) -> RenderPassDepthStencilAttachment<'_> {
        self.attachment.get_attachment(store)
    }

    pub fn view(&self) -> &TextureView {
        &self.attachment.view
    }
}

pub fn prepare_view_uniforms(
    mut commands: Commands,
    render_device: Res<RenderDevice>,
    render_queue: Res<RenderQueue>,
    mut view_uniforms: ResMut<ViewUniforms>,
    views: Query<(
        Entity,
        Option<&ExtractedCamera>,
        &ExtractedView,
        Option<&Frustum>,
        Option<&TemporalJitter>,
        Option<&MipBias>,
        Option<&MainPassResolutionOverride>,
    )>,
    frame_count: Res<FrameCount>,
) {
    let view_iter = views.iter();
    let view_count = view_iter.len();
    let Some(mut writer) =
        view_uniforms
            .uniforms
            .get_writer(view_count, &render_device, &render_queue)
    else {
        return;
    };
    for (
        entity,
        extracted_camera,
        extracted_view,
        frustum,
        temporal_jitter,
        mip_bias,
        resolution_override,
    ) in &views
    {
        let viewport = extracted_view.viewport.as_vec4();
        let mut main_pass_viewport = viewport;
        if let Some(resolution_override) = resolution_override {
            main_pass_viewport.z = resolution_override.0.x as f32;
            main_pass_viewport.w = resolution_override.0.y as f32;
        }

        let unjittered_projection = extracted_view.clip_from_view;
        let mut clip_from_view = unjittered_projection;

        if let Some(temporal_jitter) = temporal_jitter {
            temporal_jitter.jitter_projection(&mut clip_from_view, main_pass_viewport.zw());
        }

        let view_from_clip = clip_from_view.inverse();
        let world_from_view = extracted_view.world_from_view.to_matrix();
        let view_from_world = world_from_view.inverse();

        let clip_from_world = if temporal_jitter.is_some() {
            clip_from_view * view_from_world
        } else {
            extracted_view
                .clip_from_world
                .unwrap_or_else(|| clip_from_view * view_from_world)
        };

        // Map the `Frustum` type to the shader's `array<vec4<f32>, 6>`.
        let frustum = frustum
            .map(|frustum| frustum.half_spaces.map(|h| h.normal_d()))
            .unwrap_or([Vec4::ZERO; 6]);

        let view_uniforms = ViewUniformOffset {
            offset: writer.write(&ViewUniform {
                clip_from_world,
                unjittered_clip_from_world: unjittered_projection * view_from_world,
                world_from_clip: world_from_view * view_from_clip,
                world_from_view,
                view_from_world,
                clip_from_view,
                view_from_clip,
                world_position: extracted_view.world_from_view.translation(),
                exposure: extracted_camera
                    .map(|c| c.exposure)
                    .unwrap_or_else(|| Exposure::default().exposure()),
                viewport,
                main_pass_viewport,
                frustum,
                color_grading: extracted_view.color_grading.clone().into(),
                mip_bias: mip_bias.unwrap_or(&MipBias(0.0)).0,
                frame_count: frame_count.0,
            }),
        };

        commands.entity(entity).insert(view_uniforms);
    }
}

#[derive(Clone)]
struct MainTargetTextures {
    a: ColorAttachment,
    b: ColorAttachment,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`.
    /// This is shared across view targets with the same render target.
    main_texture: Arc<AtomicUsize>,
}

/// Prepares the view target [`OutputColorAttachment`] for each view in the current frame.
pub fn prepare_view_attachments(
    windows: Res<ExtractedWindows>,
    images: Res<RenderAssets<GpuImage>>,
    manual_texture_views: Res<ManualTextureViews>,
    cameras: Query<&ExtractedCamera>,
    mut view_target_attachments: ResMut<ViewTargetAttachments>,
) {
    for camera in cameras.iter() {
        let Some(target) = &camera.target else {
            continue;
        };

        match view_target_attachments.entry(target.clone()) {
            Entry::Occupied(_) => {}
            Entry::Vacant(entry) => {
                let Some(attachment) = target
                    .get_texture_view(&windows, &images, &manual_texture_views)
                    .cloned()
                    .zip(target.get_texture_view_format(&windows, &images, &manual_texture_views))
                    .map(|(view, format)| OutputColorAttachment::new(view.clone(), format))
                else {
                    continue;
                };
                entry.insert(attachment);
            }
        };
    }
}

/// Clears the view target [`OutputColorAttachment`]s.
pub fn clear_view_attachments(mut view_target_attachments: ResMut<ViewTargetAttachments>) {
    view_target_attachments.clear();
}

pub fn cleanup_view_targets_for_resize(
    mut commands: Commands,
    windows: Res<ExtractedWindows>,
    cameras: Query<(Entity, &ExtractedCamera), With<ViewTarget>>,
) {
    for (entity, camera) in &cameras {
        if let Some(NormalizedRenderTarget::Window(window_ref)) = &camera.target
            && let Some(window) = windows.get(&window_ref.entity())
            && (window.size_changed || window.present_mode_changed)
        {
            commands.entity(entity).remove::<ViewTarget>();
        }
    }
}

pub fn prepare_view_targets(
    mut commands: Commands,
    clear_color_global: Res<ClearColor>,
    render_device: Res<RenderDevice>,
    mut texture_cache: ResMut<TextureCache>,
    cameras: Query<(
        Entity,
        &ExtractedCamera,
        &ExtractedView,
        &CameraMainTextureUsages,
        &Msaa,
    )>,
    view_target_attachments: Res<ViewTargetAttachments>,
) {
    let mut textures = <HashMap<_, _>>::default();
    for (entity, camera, view, texture_usage, msaa) in cameras.iter() {
        let (Some(target_size), Some(out_attachment)) = (
            camera.physical_target_size,
            camera
                .target
                .as_ref()
                .and_then(|target| view_target_attachments.get(target)),
        ) else {
            // If we can't find an output attachment, we need to remove the
            // `ViewTarget` component to make sure the camera doesn't try
            // rendering to an invalid output attachment.
            commands.entity(entity).try_remove::<ViewTarget>();

            continue;
        };

        let main_texture_format = if view.hdr {
            ViewTarget::TEXTURE_FORMAT_HDR
        } else {
            TextureFormat::bevy_default()
        };

        let clear_color = match camera.clear_color {
            ClearColorConfig::Custom(color) => Some(color),
            ClearColorConfig::None => None,
            _ => Some(clear_color_global.0),
        };

        let (a, b, sampled, main_texture) = textures
            .entry((camera.target.clone(), texture_usage.0, view.hdr, msaa))
            .or_insert_with(|| {
                let descriptor = TextureDescriptor {
                    label: None,
                    size: target_size.to_extents(),
                    mip_level_count: 1,
                    sample_count: 1,
                    dimension: TextureDimension::D2,
                    format: main_texture_format,
                    usage: texture_usage.0,
                    view_formats: match main_texture_format {
                        TextureFormat::Bgra8Unorm => &[TextureFormat::Bgra8UnormSrgb],
                        TextureFormat::Rgba8Unorm => &[TextureFormat::Rgba8UnormSrgb],
                        _ => &[],
                    },
                };
                let a = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_a"),
                        ..descriptor
                    },
                );
                let b = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_b"),
                        ..descriptor
                    },
                );
                let sampled = if msaa.samples() > 1 {
                    let sampled = texture_cache.get(
                        &render_device,
                        TextureDescriptor {
                            label: Some("main_texture_sampled"),
                            size: target_size.to_extents(),
                            mip_level_count: 1,
                            sample_count: msaa.samples(),
                            dimension: TextureDimension::D2,
                            format: main_texture_format,
                            usage: TextureUsages::RENDER_ATTACHMENT,
                            view_formats: descriptor.view_formats,
                        },
                    );
                    Some(sampled)
                } else {
                    None
                };
                let main_texture = Arc::new(AtomicUsize::new(0));
                (a, b, sampled, main_texture)
            });

        let converted_clear_color = clear_color.map(Into::into);

        let main_textures = MainTargetTextures {
            a: ColorAttachment::new(a.clone(), sampled.clone(), None, converted_clear_color),
            b: ColorAttachment::new(b.clone(), sampled.clone(), None, converted_clear_color),
            main_texture: main_texture.clone(),
        };

        commands.entity(entity).insert(ViewTarget {
            main_texture: main_textures.main_texture.clone(),
            main_textures,
            main_texture_format,
            out_texture: out_attachment.clone(),
        });
    }
}