GitHub Repository: bevyengine/bevy
Path: blob/main/crates/bevy_render/src/camera.rs
use crate::{
    batching::gpu_preprocessing::{GpuPreprocessingMode, GpuPreprocessingSupport},
    extract_component::{ExtractComponent, ExtractComponentPlugin},
    extract_resource::{ExtractResource, ExtractResourcePlugin},
    render_asset::RenderAssets,
    render_graph::{CameraDriverNode, InternedRenderSubGraph, RenderGraph, RenderSubGraph},
    render_resource::TextureView,
    sync_world::{RenderEntity, SyncToRenderWorld},
    texture::{GpuImage, ManualTextureViews},
    view::{
        ColorGrading, ExtractedView, ExtractedWindows, Hdr, Msaa, NoIndirectDrawing,
        RenderVisibleEntities, RetainedViewEntity, ViewUniformOffset,
    },
    Extract, ExtractSchedule, Render, RenderApp, RenderSystems,
};

use bevy_app::{App, Plugin, PostStartup, PostUpdate};
use bevy_asset::{AssetEvent, AssetEventSystems, AssetId, Assets};
use bevy_camera::{
    primitives::Frustum,
    visibility::{self, RenderLayers, VisibleEntities},
    Camera, Camera2d, Camera3d, CameraMainTextureUsages, CameraOutputMode, CameraUpdateSystems,
    ClearColor, ClearColorConfig, Exposure, ManualTextureViewHandle, NormalizedRenderTarget,
    Projection, RenderTargetInfo, Viewport,
};
use bevy_derive::{Deref, DerefMut};
use bevy_ecs::{
    change_detection::DetectChanges,
    component::Component,
    entity::{ContainsEntity, Entity},
    error::BevyError,
    event::EventReader,
    lifecycle::HookContext,
    prelude::With,
    query::{Has, QueryItem},
    reflect::ReflectComponent,
    resource::Resource,
    schedule::IntoScheduleConfigs,
    system::{Commands, Query, Res, ResMut},
    world::DeferredWorld,
};
use bevy_image::Image;
use bevy_math::{uvec2, vec2, Mat4, URect, UVec2, UVec4, Vec2};
use bevy_platform::collections::{HashMap, HashSet};
use bevy_reflect::prelude::*;
use bevy_transform::components::GlobalTransform;
use bevy_window::{PrimaryWindow, Window, WindowCreated, WindowResized, WindowScaleFactorChanged};
use tracing::warn;
use wgpu::TextureFormat;

#[derive(Default)]
pub struct CameraPlugin;

impl Plugin for CameraPlugin {
    fn build(&self, app: &mut App) {
        app.register_required_components::<Camera, Msaa>()
            .register_required_components::<Camera, SyncToRenderWorld>()
            .register_required_components::<Camera3d, ColorGrading>()
            .register_required_components::<Camera3d, Exposure>()
            .add_plugins((
                ExtractResourcePlugin::<ClearColor>::default(),
                ExtractComponentPlugin::<CameraMainTextureUsages>::default(),
            ))
            .add_systems(PostStartup, camera_system.in_set(CameraUpdateSystems))
            .add_systems(
                PostUpdate,
                camera_system
                    .in_set(CameraUpdateSystems)
                    .before(AssetEventSystems)
                    .before(visibility::update_frusta),
            );
        app.world_mut()
            .register_component_hooks::<Camera>()
            .on_add(warn_on_no_render_graph);

        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .init_resource::<SortedCameras>()
                .add_systems(ExtractSchedule, extract_cameras)
                .add_systems(Render, sort_cameras.in_set(RenderSystems::ManageViews));
            let camera_driver_node = CameraDriverNode::new(render_app.world_mut());
            let mut render_graph = render_app.world_mut().resource_mut::<RenderGraph>();
            render_graph.add_node(crate::graph::CameraDriverLabel, camera_driver_node);
        }
    }
}

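// A minimal sketch of the same build pattern for a hypothetical third-party plugin
// (`MyRenderPlugin` and `my_extract_system` are illustrative, not part of Bevy):
// render-world setup is gated on `get_sub_app_mut(RenderApp)` succeeding, since the
// render sub-app may not exist (e.g. in headless configurations).
//
// struct MyRenderPlugin;
//
// impl Plugin for MyRenderPlugin {
//     fn build(&self, app: &mut App) {
//         if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
//             render_app.add_systems(ExtractSchedule, my_extract_system);
//         }
//     }
// }
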
fn warn_on_no_render_graph(world: DeferredWorld, HookContext { entity, caller, .. }: HookContext) {
    if !world.entity(entity).contains::<CameraRenderGraph>() {
        warn!(
            "{}Entity {entity} has a `Camera` component, but it doesn't have a render graph configured. Consider adding a `Camera2d` or `Camera3d` component, or manually adding a `CameraRenderGraph` component if you need a custom render graph.",
            caller.map(|location| format!("{location}: ")).unwrap_or_default()
        );
    }
}

impl ExtractResource for ClearColor {
    type Source = Self;

    fn extract_resource(source: &Self::Source) -> Self {
        source.clone()
    }
}
impl ExtractComponent for CameraMainTextureUsages {
    type QueryData = &'static Self;
    type QueryFilter = ();
    type Out = Self;

    fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
        Some(*item)
    }
}
impl ExtractComponent for Camera2d {
    type QueryData = &'static Self;
    type QueryFilter = With<Camera>;
    type Out = Self;

    fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
        Some(item.clone())
    }
}
impl ExtractComponent for Camera3d {
    type QueryData = &'static Self;
    type QueryFilter = With<Camera>;
    type Out = Self;

    fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
        Some(item.clone())
    }
}

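// A minimal sketch of extracting a hypothetical user component the same way
// (`MyCameraTag` is illustrative): implement `ExtractComponent` and register an
// `ExtractComponentPlugin` so the component is copied into the render world each
// frame, mirroring the `Camera2d`/`Camera3d` impls above.
//
// #[derive(Component, Clone)]
// struct MyCameraTag;
//
// impl ExtractComponent for MyCameraTag {
//     type QueryData = &'static Self;
//     type QueryFilter = With<Camera>;
//     type Out = Self;
//
//     fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
//         Some(item.clone())
//     }
// }
//
// app.add_plugins(ExtractComponentPlugin::<MyCameraTag>::default());
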
/// Configures the [`RenderGraph`] name assigned to be run for a given [`Camera`] entity.
#[derive(Component, Debug, Deref, DerefMut, Reflect, Clone)]
#[reflect(opaque)]
#[reflect(Component, Debug, Clone)]
pub struct CameraRenderGraph(InternedRenderSubGraph);

impl CameraRenderGraph {
    /// Creates a new [`CameraRenderGraph`] from any [`RenderSubGraph`] label.
    #[inline]
    pub fn new<T: RenderSubGraph>(name: T) -> Self {
        Self(name.intern())
    }

    /// Sets the graph name.
    #[inline]
    pub fn set<T: RenderSubGraph>(&mut self, name: T) {
        self.0 = name.intern();
    }
}

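// A minimal usage sketch (`MyCustomGraph` and the spawning system are illustrative):
// a label type deriving `RenderSubGraph` can be handed to `CameraRenderGraph::new`,
// the same way `Camera2d`/`Camera3d` point cameras at the core sub-graphs.
//
// #[derive(RenderSubGraph, Debug, Hash, PartialEq, Eq, Clone)]
// struct MyCustomGraph;
//
// fn spawn_custom_camera(mut commands: Commands) {
//     commands.spawn((Camera::default(), CameraRenderGraph::new(MyCustomGraph)));
// }
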
pub trait NormalizedRenderTargetExt {
    fn get_texture_view<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<&'a TextureView>;

    /// Retrieves the [`TextureFormat`] of this render target, if it exists.
    fn get_texture_format<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<TextureFormat>;

    fn get_render_target_info<'a>(
        &self,
        resolutions: impl IntoIterator<Item = (Entity, &'a Window)>,
        images: &Assets<Image>,
        manual_texture_views: &ManualTextureViews,
    ) -> Result<RenderTargetInfo, MissingRenderTargetInfoError>;

    // Check if this render target is contained in the given changed windows or images.
    fn is_changed(
        &self,
        changed_window_ids: &HashSet<Entity>,
        changed_image_handles: &HashSet<&AssetId<Image>>,
    ) -> bool;
}

impl NormalizedRenderTargetExt for NormalizedRenderTarget {
    fn get_texture_view<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<&'a TextureView> {
        match self {
            NormalizedRenderTarget::Window(window_ref) => windows
                .get(&window_ref.entity())
                .and_then(|window| window.swap_chain_texture_view.as_ref()),
            NormalizedRenderTarget::Image(image_target) => images
                .get(&image_target.handle)
                .map(|image| &image.texture_view),
            NormalizedRenderTarget::TextureView(id) => {
                manual_texture_views.get(id).map(|tex| &tex.texture_view)
            }
            NormalizedRenderTarget::None { .. } => None,
        }
    }

    /// Retrieves the [`TextureFormat`] of this render target, if it exists.
    fn get_texture_format<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<TextureFormat> {
        match self {
            NormalizedRenderTarget::Window(window_ref) => windows
                .get(&window_ref.entity())
                .and_then(|window| window.swap_chain_texture_format),
            NormalizedRenderTarget::Image(image_target) => images
                .get(&image_target.handle)
                .map(|image| image.texture_format),
            NormalizedRenderTarget::TextureView(id) => {
                manual_texture_views.get(id).map(|tex| tex.format)
            }
            NormalizedRenderTarget::None { .. } => None,
        }
    }

    fn get_render_target_info<'a>(
        &self,
        resolutions: impl IntoIterator<Item = (Entity, &'a Window)>,
        images: &Assets<Image>,
        manual_texture_views: &ManualTextureViews,
    ) -> Result<RenderTargetInfo, MissingRenderTargetInfoError> {
        match self {
            NormalizedRenderTarget::Window(window_ref) => resolutions
                .into_iter()
                .find(|(entity, _)| *entity == window_ref.entity())
                .map(|(_, window)| RenderTargetInfo {
                    physical_size: window.physical_size(),
                    scale_factor: window.resolution.scale_factor(),
                })
                .ok_or(MissingRenderTargetInfoError::Window {
                    window: window_ref.entity(),
                }),
            NormalizedRenderTarget::Image(image_target) => images
                .get(&image_target.handle)
                .map(|image| RenderTargetInfo {
                    physical_size: image.size(),
                    scale_factor: image_target.scale_factor.0,
                })
                .ok_or(MissingRenderTargetInfoError::Image {
                    image: image_target.handle.id(),
                }),
            NormalizedRenderTarget::TextureView(id) => manual_texture_views
                .get(id)
                .map(|tex| RenderTargetInfo {
                    physical_size: tex.size,
                    scale_factor: 1.0,
                })
                .ok_or(MissingRenderTargetInfoError::TextureView { texture_view: *id }),
            NormalizedRenderTarget::None { width, height } => Ok(RenderTargetInfo {
                physical_size: uvec2(*width, *height),
                scale_factor: 1.0,
            }),
        }
    }

    // Check if this render target is contained in the given changed windows or images.
    fn is_changed(
        &self,
        changed_window_ids: &HashSet<Entity>,
        changed_image_handles: &HashSet<&AssetId<Image>>,
    ) -> bool {
        match self {
            NormalizedRenderTarget::Window(window_ref) => {
                changed_window_ids.contains(&window_ref.entity())
            }
            NormalizedRenderTarget::Image(image_target) => {
                changed_image_handles.contains(&image_target.handle.id())
            }
            NormalizedRenderTarget::TextureView(_) => true,
            NormalizedRenderTarget::None { .. } => false,
        }
    }
}

#[derive(Debug, thiserror::Error)]
pub enum MissingRenderTargetInfoError {
    #[error("RenderTarget::Window missing ({window:?}): Make sure the provided entity has a Window component.")]
    Window { window: Entity },
    #[error("RenderTarget::Image missing ({image:?}): Make sure the Image's usages include RenderAssetUsages::MAIN_WORLD.")]
    Image { image: AssetId<Image> },
    #[error("RenderTarget::TextureView missing ({texture_view:?}): Make sure the texture view handle was not removed.")]
    TextureView {
        texture_view: ManualTextureViewHandle,
    },
}

/// System in charge of updating a [`Camera`] when its window or projection changes.
///
/// The system detects window creation, resize, and scale factor change events to update the camera
/// [`Projection`] if needed.
///
/// ## World Resources
///
/// [`Res<Assets<Image>>`](Assets<Image>) -- For cameras that render to an image, this resource is used to
/// inspect information about the render target. This system will not access any other image assets.
///
/// [`OrthographicProjection`]: bevy_camera::OrthographicProjection
/// [`PerspectiveProjection`]: bevy_camera::PerspectiveProjection
pub fn camera_system(
    mut window_resized_events: EventReader<WindowResized>,
    mut window_created_events: EventReader<WindowCreated>,
    mut window_scale_factor_changed_events: EventReader<WindowScaleFactorChanged>,
    mut image_asset_events: EventReader<AssetEvent<Image>>,
    primary_window: Query<Entity, With<PrimaryWindow>>,
    windows: Query<(Entity, &Window)>,
    images: Res<Assets<Image>>,
    manual_texture_views: Res<ManualTextureViews>,
    mut cameras: Query<(&mut Camera, &mut Projection)>,
) -> Result<(), BevyError> {
    let primary_window = primary_window.iter().next();

    let mut changed_window_ids = <HashSet<_>>::default();
    changed_window_ids.extend(window_created_events.read().map(|event| event.window));
    changed_window_ids.extend(window_resized_events.read().map(|event| event.window));
    let scale_factor_changed_window_ids: HashSet<_> = window_scale_factor_changed_events
        .read()
        .map(|event| event.window)
        .collect();
    changed_window_ids.extend(scale_factor_changed_window_ids.clone());

    let changed_image_handles: HashSet<&AssetId<Image>> = image_asset_events
        .read()
        .filter_map(|event| match event {
            AssetEvent::Modified { id } | AssetEvent::Added { id } => Some(id),
            _ => None,
        })
        .collect();

    for (mut camera, mut camera_projection) in &mut cameras {
        let mut viewport_size = camera
            .viewport
            .as_ref()
            .map(|viewport| viewport.physical_size);

        if let Some(normalized_target) = &camera.target.normalize(primary_window)
            && (normalized_target.is_changed(&changed_window_ids, &changed_image_handles)
                || camera.is_added()
                || camera_projection.is_changed()
                || camera.computed.old_viewport_size != viewport_size
                || camera.computed.old_sub_camera_view != camera.sub_camera_view)
        {
            let new_computed_target_info = normalized_target.get_render_target_info(
                windows,
                &images,
                &manual_texture_views,
            )?;
            // Check for the scale factor changing, and resize the viewport if needed.
            // This can happen when the window is moved between monitors with different DPIs.
            // Without this, a viewport on a window moved to a higher-DPI monitor would take
            // up a smaller portion of that window.
            if normalized_target.is_changed(&scale_factor_changed_window_ids, &HashSet::default())
                && let Some(old_scale_factor) = camera
                    .computed
                    .target_info
                    .as_ref()
                    .map(|info| info.scale_factor)
            {
                let resize_factor = new_computed_target_info.scale_factor / old_scale_factor;
                if let Some(ref mut viewport) = camera.viewport {
                    let resize = |vec: UVec2| (vec.as_vec2() * resize_factor).as_uvec2();
                    viewport.physical_position = resize(viewport.physical_position);
                    viewport.physical_size = resize(viewport.physical_size);
                    viewport_size = Some(viewport.physical_size);
                }
            }
            // This check is needed because when changing WindowMode to Fullscreen, the viewport
            // may have invalid arguments due to a sudden change in the window size to a lower
            // value. If the window is now smaller, the viewport is clamped to that smaller size.
            if let Some(viewport) = &mut camera.viewport {
                viewport.clamp_to_size(new_computed_target_info.physical_size);
            }
            camera.computed.target_info = Some(new_computed_target_info);
            if let Some(size) = camera.logical_viewport_size()
                && size.x != 0.0
                && size.y != 0.0
            {
                camera_projection.update(size.x, size.y);
                camera.computed.clip_from_view = match &camera.sub_camera_view {
                    Some(sub_view) => camera_projection.get_clip_from_view_for_sub(sub_view),
                    None => camera_projection.get_clip_from_view(),
                }
            }
        }

        if camera.computed.old_viewport_size != viewport_size {
            camera.computed.old_viewport_size = viewport_size;
        }

        if camera.computed.old_sub_camera_view != camera.sub_camera_view {
            camera.computed.old_sub_camera_view = camera.sub_camera_view;
        }
    }
    Ok(())
}

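// A minimal ordering sketch (`log_viewport_size` is illustrative): user systems that
// read the computed target info should run after `CameraUpdateSystems` in
// `PostUpdate`, so they observe the values written by `camera_system` above.
//
// fn log_viewport_size(cameras: Query<&Camera>) {
//     for camera in &cameras {
//         if let Some(size) = camera.logical_viewport_size() {
//             tracing::info!("viewport size: {size}");
//         }
//     }
// }
//
// app.add_systems(PostUpdate, log_viewport_size.after(CameraUpdateSystems));
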
#[derive(Component, Debug)]
pub struct ExtractedCamera {
    pub target: Option<NormalizedRenderTarget>,
    pub physical_viewport_size: Option<UVec2>,
    pub physical_target_size: Option<UVec2>,
    pub viewport: Option<Viewport>,
    pub render_graph: InternedRenderSubGraph,
    pub order: isize,
    pub output_mode: CameraOutputMode,
    pub msaa_writeback: bool,
    pub clear_color: ClearColorConfig,
    pub sorted_camera_index_for_target: usize,
    pub exposure: f32,
    pub hdr: bool,
}

pub fn extract_cameras(
    mut commands: Commands,
    query: Extract<
        Query<(
            Entity,
            RenderEntity,
            &Camera,
            &CameraRenderGraph,
            &GlobalTransform,
            &VisibleEntities,
            &Frustum,
            Has<Hdr>,
            Option<&ColorGrading>,
            Option<&Exposure>,
            Option<&TemporalJitter>,
            Option<&MipBias>,
            Option<&RenderLayers>,
            Option<&Projection>,
            Has<NoIndirectDrawing>,
        )>,
    >,
    primary_window: Extract<Query<Entity, With<PrimaryWindow>>>,
    gpu_preprocessing_support: Res<GpuPreprocessingSupport>,
    mapper: Extract<Query<&RenderEntity>>,
) {
    let primary_window = primary_window.iter().next();
    type ExtractedCameraComponents = (
        ExtractedCamera,
        ExtractedView,
        RenderVisibleEntities,
        TemporalJitter,
        MipBias,
        RenderLayers,
        Projection,
        NoIndirectDrawing,
        ViewUniformOffset,
    );
    for (
        main_entity,
        render_entity,
        camera,
        camera_render_graph,
        transform,
        visible_entities,
        frustum,
        hdr,
        color_grading,
        exposure,
        temporal_jitter,
        mip_bias,
        render_layers,
        projection,
        no_indirect_drawing,
    ) in query.iter()
    {
        if !camera.is_active {
            commands
                .entity(render_entity)
                .remove::<ExtractedCameraComponents>();
            continue;
        }

        let color_grading = color_grading.unwrap_or(&ColorGrading::default()).clone();

        if let (
            Some(URect {
                min: viewport_origin,
                ..
            }),
            Some(viewport_size),
            Some(target_size),
        ) = (
            camera.physical_viewport_rect(),
            camera.physical_viewport_size(),
            camera.physical_target_size(),
        ) {
            if target_size.x == 0 || target_size.y == 0 {
                commands
                    .entity(render_entity)
                    .remove::<ExtractedCameraComponents>();
                continue;
            }

            let render_visible_entities = RenderVisibleEntities {
                entities: visible_entities
                    .entities
                    .iter()
                    .map(|(type_id, entities)| {
                        let entities = entities
                            .iter()
                            .map(|entity| {
                                let render_entity = mapper
                                    .get(*entity)
                                    .cloned()
                                    .map(|entity| entity.id())
                                    .unwrap_or(Entity::PLACEHOLDER);
                                (render_entity, (*entity).into())
                            })
                            .collect();
                        (*type_id, entities)
                    })
                    .collect(),
            };

            let mut commands = commands.entity(render_entity);
            commands.insert((
                ExtractedCamera {
                    target: camera.target.normalize(primary_window),
                    viewport: camera.viewport.clone(),
                    physical_viewport_size: Some(viewport_size),
                    physical_target_size: Some(target_size),
                    render_graph: camera_render_graph.0,
                    order: camera.order,
                    output_mode: camera.output_mode,
                    msaa_writeback: camera.msaa_writeback,
                    clear_color: camera.clear_color,
                    // This will be set in `sort_cameras`.
                    sorted_camera_index_for_target: 0,
                    exposure: exposure
                        .map(Exposure::exposure)
                        .unwrap_or_else(|| Exposure::default().exposure()),
                    hdr,
                },
                ExtractedView {
                    retained_view_entity: RetainedViewEntity::new(main_entity.into(), None, 0),
                    clip_from_view: camera.clip_from_view(),
                    world_from_view: *transform,
                    clip_from_world: None,
                    hdr,
                    viewport: UVec4::new(
                        viewport_origin.x,
                        viewport_origin.y,
                        viewport_size.x,
                        viewport_size.y,
                    ),
                    color_grading,
                },
                render_visible_entities,
                *frustum,
            ));

            if let Some(temporal_jitter) = temporal_jitter {
                commands.insert(temporal_jitter.clone());
            } else {
                commands.remove::<TemporalJitter>();
            }

            if let Some(mip_bias) = mip_bias {
                commands.insert(mip_bias.clone());
            } else {
                commands.remove::<MipBias>();
            }

            if let Some(render_layers) = render_layers {
                commands.insert(render_layers.clone());
            } else {
                commands.remove::<RenderLayers>();
            }

            if let Some(projection) = projection {
                commands.insert(projection.clone());
            } else {
                commands.remove::<Projection>();
            }

            if no_indirect_drawing
                || !matches!(
                    gpu_preprocessing_support.max_supported_mode,
                    GpuPreprocessingMode::Culling
                )
            {
                commands.insert(NoIndirectDrawing);
            } else {
                commands.remove::<NoIndirectDrawing>();
            }
        }
    }
}

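// A minimal toggle sketch (`pause_all_cameras` is illustrative): setting
// `Camera::is_active` to `false` in the main world makes the next `extract_cameras`
// run remove the extracted components, so the camera stops rendering without being
// despawned.
//
// fn pause_all_cameras(mut cameras: Query<&mut Camera>) {
//     for mut camera in &mut cameras {
//         camera.is_active = false;
//     }
// }
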
/// Cameras sorted by their order field. This is updated in the [`sort_cameras`] system.
#[derive(Resource, Default)]
pub struct SortedCameras(pub Vec<SortedCamera>);

pub struct SortedCamera {
    pub entity: Entity,
    pub order: isize,
    pub target: Option<NormalizedRenderTarget>,
    pub hdr: bool,
}

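// A minimal consumption sketch (`log_camera_order` is illustrative): render-world
// systems scheduled after `RenderSystems::ManageViews` can read `SortedCameras` to
// walk cameras in draw order.
//
// fn log_camera_order(sorted: Res<SortedCameras>) {
//     for sorted_camera in &sorted.0 {
//         tracing::debug!(entity = ?sorted_camera.entity, order = sorted_camera.order);
//     }
// }
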
pub fn sort_cameras(
    mut sorted_cameras: ResMut<SortedCameras>,
    mut cameras: Query<(Entity, &mut ExtractedCamera)>,
) {
    sorted_cameras.0.clear();
    for (entity, camera) in cameras.iter() {
        sorted_cameras.0.push(SortedCamera {
            entity,
            order: camera.order,
            target: camera.target.clone(),
            hdr: camera.hdr,
        });
    }
    // Sort by order, and ensure that within an order, RenderTargets of the same type are
    // packed together.
    sorted_cameras
        .0
        .sort_by(|c1, c2| (c1.order, &c1.target).cmp(&(c2.order, &c2.target)));
    let mut previous_order_target = None;
    let mut ambiguities = <HashSet<_>>::default();
    let mut target_counts = <HashMap<_, _>>::default();
    for sorted_camera in &mut sorted_cameras.0 {
        let new_order_target = (sorted_camera.order, sorted_camera.target.clone());
        if let Some(previous_order_target) = previous_order_target
            && previous_order_target == new_order_target
        {
            ambiguities.insert(new_order_target.clone());
        }
        if let Some(target) = &sorted_camera.target {
            let count = target_counts
                .entry((target.clone(), sorted_camera.hdr))
                .or_insert(0usize);
            let (_, mut camera) = cameras.get_mut(sorted_camera.entity).unwrap();
            camera.sorted_camera_index_for_target = *count;
            *count += 1;
        }
        previous_order_target = Some(new_order_target);
    }

    if !ambiguities.is_empty() {
        warn!(
            "Camera order ambiguities detected for active cameras with the following priorities: {:?}. \
            To fix this, ensure there is exactly one Camera entity spawned with a given order for a given RenderTarget. \
            Ambiguities should be resolved because either (1) multiple active cameras were spawned accidentally, which will \
            result in rendering multiple instances of the scene, or (2) for cases where multiple active cameras are intentional, \
            ambiguities could result in unpredictable render results.",
            ambiguities
        );
    }
}

/// A subpixel offset to jitter a perspective camera's frustum by.
///
/// Useful for temporal rendering techniques.
///
/// Do not use with [`OrthographicProjection`].
///
/// [`OrthographicProjection`]: bevy_camera::OrthographicProjection
#[derive(Component, Clone, Default, Reflect)]
#[reflect(Default, Component, Clone)]
pub struct TemporalJitter {
    /// Offset is in range [-0.5, 0.5].
    pub offset: Vec2,
}

impl TemporalJitter {
    pub fn jitter_projection(&self, clip_from_view: &mut Mat4, view_size: Vec2) {
        if clip_from_view.w_axis.w == 1.0 {
            warn!(
                "TemporalJitter not supported with OrthographicProjection. Use PerspectiveProjection instead."
            );
            return;
        }

        // https://github.com/GPUOpen-LibrariesAndSDKs/FidelityFX-SDK/blob/d7531ae47d8b36a5d4025663e731a47a38be882f/docs/techniques/media/super-resolution-temporal/jitter-space.svg
        let jitter = (self.offset * vec2(2.0, -2.0)) / view_size;

        clip_from_view.z_axis.x += jitter.x;
        clip_from_view.z_axis.y += jitter.y;
    }
}

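// A minimal per-frame sketch (`update_jitter` and its constants are illustrative,
// not Bevy's built-in TAA sequence): temporal techniques typically cycle `offset`
// through a low-discrepancy sequence each frame. `FrameCount` is assumed to come
// from `bevy_diagnostic`.
//
// fn update_jitter(mut jitters: Query<&mut TemporalJitter>, frames: Res<FrameCount>) {
//     // First four Halton(2, 3) points, shifted from [0, 1) into [-0.5, 0.5).
//     const SAMPLES: [Vec2; 4] = [
//         Vec2::new(0.0, -0.167),
//         Vec2::new(-0.25, 0.167),
//         Vec2::new(0.25, -0.389),
//         Vec2::new(-0.375, -0.056),
//     ];
//     let sample = SAMPLES[frames.0 as usize % SAMPLES.len()];
//     for mut jitter in &mut jitters {
//         jitter.offset = sample;
//     }
// }
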
/// Camera component specifying a mip bias to apply when sampling from material textures.
///
/// Often used in conjunction with antialiasing post-process effects to reduce texture blurriness.
#[derive(Component, Reflect, Clone)]
#[reflect(Default, Component)]
pub struct MipBias(pub f32);

impl Default for MipBias {
    fn default() -> Self {
        Self(-1.0)
    }
}

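// A minimal usage sketch (the spawn bundle is illustrative): a negative bias
// sharpens texture sampling, which is commonly paired with temporal antialiasing to
// offset its blur.
//
// commands.spawn((Camera3d::default(), TemporalJitter::default(), MipBias(-0.5)));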