Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
bevyengine
GitHub Repository: bevyengine/bevy
Path: blob/main/crates/bevy_render/src/camera.rs
9315 views
1
use crate::{
2
batching::gpu_preprocessing::{GpuPreprocessingMode, GpuPreprocessingSupport},
3
extract_component::{ExtractComponent, ExtractComponentPlugin},
4
extract_resource::{ExtractResource, ExtractResourcePlugin},
5
render_asset::RenderAssets,
6
render_resource::TextureView,
7
sync_component::SyncComponent,
8
sync_world::{RenderEntity, SyncToRenderWorld},
9
texture::{GpuImage, ManualTextureViews},
10
view::{
11
ColorGrading, ExtractedView, ExtractedWindows, Msaa, NoIndirectDrawing,
12
RenderVisibleEntities, RetainedViewEntity, ViewUniformOffset,
13
},
14
Extract, ExtractSchedule, Render, RenderApp, RenderSystems,
15
};
16
17
use bevy_app::{App, Plugin, PostStartup, PostUpdate};
18
use bevy_asset::{AssetEvent, AssetEventSystems, AssetId, Assets};
19
use bevy_camera::{
20
primitives::Frustum,
21
visibility::{self, RenderLayers, VisibleEntities},
22
Camera, Camera2d, Camera3d, CameraMainTextureUsages, CameraOutputMode, CameraUpdateSystems,
23
ClearColor, ClearColorConfig, Exposure, Hdr, ManualTextureViewHandle, MsaaWriteback,
24
NormalizedRenderTarget, Projection, RenderTarget, RenderTargetInfo, Viewport,
25
};
26
use bevy_derive::{Deref, DerefMut};
27
use bevy_ecs::{
28
change_detection::DetectChanges,
29
component::Component,
30
entity::{ContainsEntity, Entity},
31
error::BevyError,
32
lifecycle::HookContext,
33
message::MessageReader,
34
prelude::With,
35
query::{Has, QueryItem},
36
reflect::ReflectComponent,
37
resource::Resource,
38
schedule::{InternedScheduleLabel, IntoScheduleConfigs, ScheduleLabel},
39
system::{Commands, Query, Res, ResMut},
40
world::DeferredWorld,
41
};
42
use bevy_image::Image;
43
use bevy_log::warn;
44
use bevy_log::warn_once;
45
use bevy_math::{uvec2, vec2, Mat4, URect, UVec2, UVec4, Vec2};
46
use bevy_platform::collections::{HashMap, HashSet};
47
use bevy_reflect::prelude::*;
48
use bevy_transform::components::GlobalTransform;
49
use bevy_window::{PrimaryWindow, Window, WindowCreated, WindowResized, WindowScaleFactorChanged};
50
use wgpu::TextureFormat;
51
52
/// Plugin that wires up camera processing: required components, extraction of
/// cameras into the render world, and camera sorting.
#[derive(Default)]
pub struct CameraPlugin;

impl Plugin for CameraPlugin {
    fn build(&self, app: &mut App) {
        app.register_required_components::<Camera, Msaa>()
            .register_required_components::<Camera, SyncToRenderWorld>()
            .register_required_components::<Camera3d, ColorGrading>()
            .register_required_components::<Camera3d, Exposure>()
            .add_plugins((
                ExtractResourcePlugin::<ClearColor>::default(),
                ExtractComponentPlugin::<CameraMainTextureUsages>::default(),
            ))
            // Run once at startup so cameras have valid target info before the first update.
            .add_systems(PostStartup, camera_system.in_set(CameraUpdateSystems))
            .add_systems(
                PostUpdate,
                camera_system
                    .in_set(CameraUpdateSystems)
                    // Must observe this frame's asset events before they are cleared,
                    // and must update projections before frusta are recomputed from them.
                    .before(AssetEventSystems)
                    .before(visibility::update_frusta),
            );
        // Warn at spawn time if a `Camera` entity has no render graph configured.
        app.world_mut()
            .register_component_hooks::<Camera>()
            .on_add(warn_on_no_render_graph);

        // Render-world half: extract cameras each frame and sort them for view management.
        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .init_resource::<SortedCameras>()
                .add_systems(ExtractSchedule, extract_cameras)
                .add_systems(Render, sort_cameras.in_set(RenderSystems::ManageViews));
        }
    }
}
85
86
/// Component hook run when a [`Camera`] is added: warns if the entity has no
/// [`CameraRenderGraph`], since such a camera cannot render anything.
fn warn_on_no_render_graph(world: DeferredWorld, HookContext { entity, caller, .. }: HookContext) {
    if !world.entity(entity).contains::<CameraRenderGraph>() {
        // The first `{}` is the caller's source location (when available), used as a prefix.
        warn!("{}Entity {entity} has a `Camera` component, but it doesn't have a render graph configured. Usually, adding a `Camera2d` or `Camera3d` component will work.
However, you may instead need to enable `bevy_core_pipeline`, or may want to manually add a `CameraRenderGraph` component to create a custom render graph.", caller.map(|location|format!("{location}: ")).unwrap_or_default());
    }
}
92
93
impl ExtractResource for ClearColor {
    type Source = Self;

    /// Copies the main-world [`ClearColor`] resource into the render world.
    fn extract_resource(source: &Self::Source) -> Self {
        Clone::clone(source)
    }
}
100
101
impl SyncComponent for CameraMainTextureUsages {
    // The render-world representation is the component itself, unchanged.
    type Out = Self;
}
104
105
impl ExtractComponent for CameraMainTextureUsages {
106
type QueryData = &'static Self;
107
type QueryFilter = ();
108
109
fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
110
Some(*item)
111
}
112
}
113
114
impl SyncComponent for Camera2d {
    // The render-world representation is the component itself, unchanged.
    type Out = Self;
}
117
118
impl ExtractComponent for Camera2d {
119
type QueryData = &'static Self;
120
type QueryFilter = With<Camera>;
121
122
fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
123
Some(item.clone())
124
}
125
}
126
127
impl SyncComponent for Camera3d {
    // The render-world representation is the component itself, unchanged.
    type Out = Self;
}
130
131
impl ExtractComponent for Camera3d {
132
type QueryData = &'static Self;
133
type QueryFilter = With<Camera>;
134
135
fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
136
Some(item.clone())
137
}
138
}
139
140
/// Configures the render schedule to be run for a given [`Camera`] entity.
///
/// Wraps an [`InternedScheduleLabel`]; [`Deref`]/[`DerefMut`] expose the label directly.
#[derive(Component, Debug, Deref, DerefMut, Reflect, Clone)]
#[reflect(opaque)]
#[reflect(Component, Debug, Clone)]
pub struct CameraRenderGraph(pub InternedScheduleLabel);
145
146
impl CameraRenderGraph {
147
/// Creates a new [`CameraRenderGraph`] from a schedule label.
148
#[inline]
149
pub fn new<T: ScheduleLabel>(schedule: T) -> Self {
150
Self(schedule.intern())
151
}
152
153
/// Sets the schedule.
154
#[inline]
155
pub fn set<T: ScheduleLabel>(&mut self, schedule: T) {
156
self.0 = schedule.intern();
157
}
158
}
159
160
/// Render-world extension methods for [`NormalizedRenderTarget`], resolving a target
/// into concrete GPU resources and metadata.
pub trait NormalizedRenderTargetExt {
    /// Resolves this render target to the [`TextureView`] to render into, if it exists.
    fn get_texture_view<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<&'a TextureView>;

    /// Retrieves the [`TextureFormat`] of this render target, if it exists.
    fn get_texture_view_format<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<TextureFormat>;

    /// Computes the physical size and scale factor of this render target, or an error
    /// if the backing window/image/texture view cannot be found.
    fn get_render_target_info<'a>(
        &self,
        resolutions: impl IntoIterator<Item = (Entity, &'a Window)>,
        images: &Assets<Image>,
        manual_texture_views: &ManualTextureViews,
    ) -> Result<RenderTargetInfo, MissingRenderTargetInfoError>;

    // Check if this render target is contained in the given changed windows or images.
    fn is_changed(
        &self,
        changed_window_ids: &HashSet<Entity>,
        changed_image_handles: &HashSet<&AssetId<Image>>,
    ) -> bool;
}
190
191
impl NormalizedRenderTargetExt for NormalizedRenderTarget {
    /// Resolves this render target to the [`TextureView`] to render into, if it exists.
    fn get_texture_view<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<&'a TextureView> {
        match self {
            // Window targets render into the window's current swap-chain texture.
            NormalizedRenderTarget::Window(window_ref) => windows
                .get(&window_ref.entity())
                .and_then(|window| window.swap_chain_texture_view.as_ref()),
            NormalizedRenderTarget::Image(image_target) => images
                .get(&image_target.handle)
                .map(|image| &image.texture_view),
            NormalizedRenderTarget::TextureView(id) => {
                manual_texture_views.get(id).map(|tex| &tex.texture_view)
            }
            // A `None` target has no backing texture.
            NormalizedRenderTarget::None { .. } => None,
        }
    }

    /// Retrieves the texture view's [`TextureFormat`] of this render target, if it exists.
    fn get_texture_view_format<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<TextureFormat> {
        match self {
            NormalizedRenderTarget::Window(window_ref) => windows
                .get(&window_ref.entity())
                .and_then(|window| window.swap_chain_texture_view_format),
            NormalizedRenderTarget::Image(image_target) => {
                images.get(&image_target.handle).map(GpuImage::view_format)
            }
            NormalizedRenderTarget::TextureView(id) => {
                manual_texture_views.get(id).map(|tex| tex.view_format)
            }
            NormalizedRenderTarget::None { .. } => None,
        }
    }

    /// Computes the physical size and scale factor of this render target.
    fn get_render_target_info<'a>(
        &self,
        resolutions: impl IntoIterator<Item = (Entity, &'a Window)>,
        images: &Assets<Image>,
        manual_texture_views: &ManualTextureViews,
    ) -> Result<RenderTargetInfo, MissingRenderTargetInfoError> {
        match self {
            // Look the window up among the provided (entity, window) pairs.
            NormalizedRenderTarget::Window(window_ref) => resolutions
                .into_iter()
                .find(|(entity, _)| *entity == window_ref.entity())
                .map(|(_, window)| RenderTargetInfo {
                    physical_size: window.physical_size(),
                    scale_factor: window.resolution.scale_factor(),
                })
                .ok_or(MissingRenderTargetInfoError::Window {
                    window: window_ref.entity(),
                }),
            NormalizedRenderTarget::Image(image_target) => images
                .get(&image_target.handle)
                .map(|image| RenderTargetInfo {
                    physical_size: image.size(),
                    scale_factor: image_target.scale_factor,
                })
                .ok_or(MissingRenderTargetInfoError::Image {
                    image: image_target.handle.id(),
                }),
            // Manual texture views carry no DPI information; scale factor is fixed at 1.0.
            NormalizedRenderTarget::TextureView(id) => manual_texture_views
                .get(id)
                .map(|tex| RenderTargetInfo {
                    physical_size: tex.size,
                    scale_factor: 1.0,
                })
                .ok_or(MissingRenderTargetInfoError::TextureView { texture_view: *id }),
            // A `None` target reports its configured logical dimensions.
            NormalizedRenderTarget::None { width, height } => Ok(RenderTargetInfo {
                physical_size: uvec2(*width, *height),
                scale_factor: 1.0,
            }),
        }
    }

    // Check if this render target is contained in the given changed windows or images.
    fn is_changed(
        &self,
        changed_window_ids: &HashSet<Entity>,
        changed_image_handles: &HashSet<&AssetId<Image>>,
    ) -> bool {
        match self {
            NormalizedRenderTarget::Window(window_ref) => {
                changed_window_ids.contains(&window_ref.entity())
            }
            NormalizedRenderTarget::Image(image_target) => {
                changed_image_handles.contains(&image_target.handle.id())
            }
            // Manual texture views have no change tracking here, so they are
            // conservatively always reported as changed.
            NormalizedRenderTarget::TextureView(_) => true,
            NormalizedRenderTarget::None { .. } => false,
        }
    }
}
291
292
/// Error returned by [`NormalizedRenderTargetExt::get_render_target_info`] when the
/// window, image, or manual texture view backing a render target cannot be found.
#[derive(Debug, thiserror::Error)]
pub enum MissingRenderTargetInfoError {
    #[error("RenderTarget::Window missing ({window:?}): Make sure the provided entity has a Window component.")]
    Window { window: Entity },
    #[error("RenderTarget::Image missing ({image:?}): Make sure the Image's usages include RenderAssetUsages::MAIN_WORLD.")]
    Image { image: AssetId<Image> },
    #[error("RenderTarget::TextureView missing ({texture_view:?}): make sure the texture view handle was not removed.")]
    TextureView {
        texture_view: ManualTextureViewHandle,
    },
}
303
304
/// System in charge of updating a [`Camera`] when its window or projection changes.
305
///
306
/// The system detects window creation, resize, and scale factor change events to update the camera
307
/// [`Projection`] if needed.
308
///
309
/// ## World Resources
310
///
311
/// [`Res<Assets<Image>>`](Assets<Image>) -- For cameras that render to an image, this resource is used to
312
/// inspect information about the render target. This system will not access any other image assets.
313
///
314
/// [`OrthographicProjection`]: bevy_camera::OrthographicProjection
315
/// [`PerspectiveProjection`]: bevy_camera::PerspectiveProjection
316
pub fn camera_system(
317
mut window_resized_reader: MessageReader<WindowResized>,
318
mut window_created_reader: MessageReader<WindowCreated>,
319
mut window_scale_factor_changed_reader: MessageReader<WindowScaleFactorChanged>,
320
mut image_asset_event_reader: MessageReader<AssetEvent<Image>>,
321
primary_window: Query<Entity, With<PrimaryWindow>>,
322
windows: Query<(Entity, &Window)>,
323
images: Res<Assets<Image>>,
324
manual_texture_views: Res<ManualTextureViews>,
325
mut cameras: Query<(&mut Camera, &RenderTarget, &mut Projection)>,
326
) -> Result<(), BevyError> {
327
let primary_window = primary_window.iter().next();
328
329
let mut changed_window_ids = <HashSet<_>>::default();
330
changed_window_ids.extend(window_created_reader.read().map(|event| event.window));
331
changed_window_ids.extend(window_resized_reader.read().map(|event| event.window));
332
let scale_factor_changed_window_ids: HashSet<_> = window_scale_factor_changed_reader
333
.read()
334
.map(|event| event.window)
335
.collect();
336
changed_window_ids.extend(scale_factor_changed_window_ids.clone());
337
338
let changed_image_handles: HashSet<&AssetId<Image>> = image_asset_event_reader
339
.read()
340
.filter_map(|event| match event {
341
AssetEvent::Modified { id } | AssetEvent::Added { id } => Some(id),
342
_ => None,
343
})
344
.collect();
345
346
for (mut camera, render_target, mut camera_projection) in &mut cameras {
347
let mut viewport_size = camera
348
.viewport
349
.as_ref()
350
.map(|viewport| viewport.physical_size);
351
352
if let Some(normalized_target) = render_target.normalize(primary_window)
353
&& (normalized_target.is_changed(&changed_window_ids, &changed_image_handles)
354
|| camera.is_added()
355
|| camera_projection.is_changed()
356
|| camera.computed.old_viewport_size != viewport_size
357
|| camera.computed.old_sub_camera_view != camera.sub_camera_view)
358
{
359
let new_computed_target_info = normalized_target.get_render_target_info(
360
windows,
361
&images,
362
&manual_texture_views,
363
)?;
364
// Check for the scale factor changing, and resize the viewport if needed.
365
// This can happen when the window is moved between monitors with different DPIs.
366
// Without this, the viewport will take a smaller portion of the window moved to
367
// a higher DPI monitor.
368
if normalized_target.is_changed(&scale_factor_changed_window_ids, &HashSet::default())
369
&& let Some(old_scale_factor) = camera
370
.computed
371
.target_info
372
.as_ref()
373
.map(|info| info.scale_factor)
374
{
375
let resize_factor = new_computed_target_info.scale_factor / old_scale_factor;
376
if let Some(ref mut viewport) = camera.viewport {
377
let resize = |vec: UVec2| (vec.as_vec2() * resize_factor).as_uvec2();
378
viewport.physical_position = resize(viewport.physical_position);
379
viewport.physical_size = resize(viewport.physical_size);
380
viewport_size = Some(viewport.physical_size);
381
}
382
}
383
// This check is needed because when changing WindowMode to Fullscreen, the viewport may have invalid
384
// arguments due to a sudden change on the window size to a lower value.
385
// If the size of the window is lower, the viewport will match that lower value.
386
if let Some(viewport) = &mut camera.viewport {
387
viewport.clamp_to_size(new_computed_target_info.physical_size);
388
}
389
camera.computed.target_info = Some(new_computed_target_info);
390
if let Some(size) = camera.logical_viewport_size()
391
&& size.x != 0.0
392
&& size.y != 0.0
393
{
394
camera_projection.update(size.x, size.y);
395
camera.computed.clip_from_view = match &camera.sub_camera_view {
396
Some(sub_view) => camera_projection.get_clip_from_view_for_sub(sub_view),
397
None => camera_projection.get_clip_from_view(),
398
}
399
}
400
}
401
402
if camera.computed.old_viewport_size != viewport_size {
403
camera.computed.old_viewport_size = viewport_size;
404
}
405
406
if camera.computed.old_sub_camera_view != camera.sub_camera_view {
407
camera.computed.old_sub_camera_view = camera.sub_camera_view;
408
}
409
}
410
Ok(())
411
}
412
413
/// Render-world snapshot of a main-world [`Camera`], produced by [`extract_cameras`].
#[derive(Component, Debug)]
pub struct ExtractedCamera {
    /// The normalized render target, if one could be resolved during extraction.
    pub target: Option<NormalizedRenderTarget>,
    pub physical_viewport_size: Option<UVec2>,
    pub physical_target_size: Option<UVec2>,
    pub viewport: Option<Viewport>,
    /// The render schedule (render graph) to run for this camera.
    pub schedule: InternedScheduleLabel,
    pub order: isize,
    pub output_mode: CameraOutputMode,
    pub msaa_writeback: MsaaWriteback,
    pub clear_color: ClearColorConfig,
    /// Index among cameras sharing the same (target, hdr); assigned by [`sort_cameras`].
    pub sorted_camera_index_for_target: usize,
    pub exposure: f32,
    pub hdr: bool,
}
428
429
/// Extracts every active main-world camera into the render world, inserting the
/// render-world components needed to render its view, and removing them again for
/// cameras that are inactive or have a degenerate (zero-sized) target.
pub fn extract_cameras(
    mut commands: Commands,
    query: Extract<
        Query<(
            Entity,
            RenderEntity,
            &Camera,
            &RenderTarget,
            &CameraRenderGraph,
            &GlobalTransform,
            &VisibleEntities,
            &Frustum,
            (
                Has<Hdr>,
                Option<&ColorGrading>,
                Option<&Exposure>,
                Option<&TemporalJitter>,
                Option<&MipBias>,
                Option<&RenderLayers>,
                Option<&Projection>,
                Has<NoIndirectDrawing>,
            ),
        )>,
    >,
    primary_window: Extract<Query<Entity, With<PrimaryWindow>>>,
    gpu_preprocessing_support: Res<GpuPreprocessingSupport>,
    // Maps main-world entities to their render-world counterparts.
    mapper: Extract<Query<&RenderEntity>>,
) {
    let primary_window = primary_window.iter().next();
    // Everything that may be inserted below; removed as one bundle when a camera
    // should not render this frame.
    type ExtractedCameraComponents = (
        ExtractedCamera,
        ExtractedView,
        RenderVisibleEntities,
        TemporalJitter,
        MipBias,
        RenderLayers,
        Projection,
        NoIndirectDrawing,
        ViewUniformOffset,
    );
    for (
        main_entity,
        render_entity,
        camera,
        render_target,
        camera_render_graph,
        transform,
        visible_entities,
        frustum,
        (
            hdr,
            color_grading,
            exposure,
            temporal_jitter,
            mip_bias,
            render_layers,
            projection,
            no_indirect_drawing,
        ),
    ) in query.iter()
    {
        // Inactive cameras get their previously-extracted components stripped.
        if !camera.is_active {
            commands
                .entity(render_entity)
                .remove::<ExtractedCameraComponents>();
            continue;
        }

        let color_grading = color_grading.unwrap_or(&ColorGrading::default()).clone();

        if let (
            Some(URect {
                min: viewport_origin,
                ..
            }),
            Some(viewport_size),
            Some(target_size),
        ) = (
            camera.physical_viewport_rect(),
            camera.physical_viewport_size(),
            camera.physical_target_size(),
        ) {
            // A zero-sized target cannot be rendered to; strip components and skip.
            if target_size.x == 0 || target_size.y == 0 {
                commands
                    .entity(render_entity)
                    .remove::<ExtractedCameraComponents>();
                continue;
            }

            // Remap the visible-entity lists to render-world entity ids; entities
            // without a render counterpart become `Entity::PLACEHOLDER`.
            let render_visible_entities = RenderVisibleEntities {
                entities: visible_entities
                    .entities
                    .iter()
                    .map(|(type_id, entities)| {
                        let entities = entities
                            .iter()
                            .map(|entity| {
                                let render_entity = mapper
                                    .get(*entity)
                                    .cloned()
                                    .map(|entity| entity.id())
                                    .unwrap_or(Entity::PLACEHOLDER);
                                (render_entity, (*entity).into())
                            })
                            .collect();
                        (*type_id, entities)
                    })
                    .collect(),
            };

            let mut commands = commands.entity(render_entity);
            commands.insert((
                ExtractedCamera {
                    target: render_target.normalize(primary_window),
                    viewport: camera.viewport.clone(),
                    physical_viewport_size: Some(viewport_size),
                    physical_target_size: Some(target_size),
                    schedule: camera_render_graph.0,
                    order: camera.order,
                    output_mode: camera.output_mode,
                    msaa_writeback: camera.msaa_writeback,
                    clear_color: camera.clear_color,
                    // this will be set in sort_cameras
                    sorted_camera_index_for_target: 0,
                    exposure: exposure
                        .map(Exposure::exposure)
                        .unwrap_or_else(|| Exposure::default().exposure()),
                    hdr,
                },
                ExtractedView {
                    retained_view_entity: RetainedViewEntity::new(main_entity.into(), None, 0),
                    clip_from_view: camera.clip_from_view(),
                    world_from_view: *transform,
                    clip_from_world: None,
                    hdr,
                    // Viewport packed as (origin.x, origin.y, size.x, size.y).
                    viewport: UVec4::new(
                        viewport_origin.x,
                        viewport_origin.y,
                        viewport_size.x,
                        viewport_size.y,
                    ),
                    color_grading,
                    invert_culling: camera.invert_culling,
                },
                render_visible_entities,
                *frustum,
            ));

            // Optional components are mirrored: inserted when present on the main-world
            // entity, removed when absent, so stale values never linger across frames.
            if let Some(temporal_jitter) = temporal_jitter {
                commands.insert(temporal_jitter.clone());
            } else {
                commands.remove::<TemporalJitter>();
            }

            if let Some(mip_bias) = mip_bias {
                commands.insert(mip_bias.clone());
            } else {
                commands.remove::<MipBias>();
            }

            if let Some(render_layers) = render_layers {
                commands.insert(render_layers.clone());
            } else {
                commands.remove::<RenderLayers>();
            }

            if let Some(projection) = projection {
                commands.insert(projection.clone());
            } else {
                commands.remove::<Projection>();
            }

            // Use direct drawing when this view opted out of indirect drawing or
            // GPU preprocessing cannot perform culling.
            if no_indirect_drawing
                || !matches!(
                    gpu_preprocessing_support.max_supported_mode,
                    GpuPreprocessingMode::Culling
                )
            {
                commands.insert(NoIndirectDrawing);
            } else {
                commands.remove::<NoIndirectDrawing>();
            }
        };
    }
}
614
615
/// Cameras sorted by their order field. This is updated in the [`sort_cameras`] system.
#[derive(Resource, Default)]
pub struct SortedCameras(pub Vec<SortedCamera>);
618
619
/// A single entry in [`SortedCameras`]: the sort keys and target of one extracted camera.
pub struct SortedCamera {
    pub entity: Entity,
    /// Primary sort key; cameras with lower order render first.
    pub order: isize,
    pub target: Option<NormalizedRenderTarget>,
    pub hdr: bool,
}
625
626
/// Rebuilds [`SortedCameras`] from the extracted cameras, sorted by `(order, target)`,
/// assigns each camera its index among cameras sharing the same `(target, hdr)` pair,
/// and warns once if two active cameras share the same `(order, target)`.
pub fn sort_cameras(
    mut sorted_cameras: ResMut<SortedCameras>,
    mut cameras: Query<(Entity, &mut ExtractedCamera)>,
) {
    sorted_cameras.0.clear();
    for (entity, camera) in cameras.iter() {
        sorted_cameras.0.push(SortedCamera {
            entity,
            order: camera.order,
            target: camera.target.clone(),
            hdr: camera.hdr,
        });
    }
    // sort by order and ensure within an order, RenderTargets of the same type are packed together
    sorted_cameras
        .0
        .sort_by(|c1, c2| (c1.order, &c1.target).cmp(&(c2.order, &c2.target)));
    let mut previous_order_target = None;
    let mut ambiguities = <HashSet<_>>::default();
    let mut target_counts = <HashMap<_, _>>::default();
    for sorted_camera in &mut sorted_cameras.0 {
        // After sorting, duplicates of (order, target) are adjacent, so comparing
        // against the previous entry is enough to detect every ambiguity.
        let new_order_target = (sorted_camera.order, sorted_camera.target.clone());
        if let Some(previous_order_target) = previous_order_target
            && previous_order_target == new_order_target
        {
            ambiguities.insert(new_order_target.clone());
        }
        if let Some(target) = &sorted_camera.target {
            // Running count per (target, hdr) becomes each camera's index on that target.
            let count = target_counts
                .entry((target.clone(), sorted_camera.hdr))
                .or_insert(0usize);
            let (_, mut camera) = cameras.get_mut(sorted_camera.entity).unwrap();
            camera.sorted_camera_index_for_target = *count;
            *count += 1;
        }
        previous_order_target = Some(new_order_target);
    }

    if !ambiguities.is_empty() {
        warn_once!(
            "Camera order ambiguities detected for active cameras with the following priorities: {:?}. \
            To fix this, ensure there is exactly one Camera entity spawned with a given order for a given RenderTarget. \
            Ambiguities should be resolved because either (1) multiple active cameras were spawned accidentally, which will \
            result in rendering multiple instances of the scene or (2) for cases where multiple active cameras is intentional, \
            ambiguities could result in unpredictable render results.",
            ambiguities
        );
    }
}
675
676
/// A subpixel offset to jitter a perspective camera's frustum by.
///
/// Useful for temporal rendering techniques.
///
/// Applied to a projection matrix via [`TemporalJitter::jitter_projection`].
#[derive(Component, Clone, Default, Reflect)]
#[reflect(Default, Component, Clone)]
pub struct TemporalJitter {
    /// Offset is in range [-0.5, 0.5].
    pub offset: Vec2,
}
685
686
impl TemporalJitter {
687
pub fn jitter_projection(&self, clip_from_view: &mut Mat4, view_size: Vec2) {
688
// https://github.com/GPUOpen-LibrariesAndSDKs/FidelityFX-SDK/blob/d7531ae47d8b36a5d4025663e731a47a38be882f/docs/techniques/media/super-resolution-temporal/jitter-space.svg
689
let mut jitter = (self.offset * vec2(2.0, -2.0)) / view_size;
690
691
// orthographic
692
if clip_from_view.w_axis.w == 1.0 {
693
jitter *= vec2(clip_from_view.x_axis.x, clip_from_view.y_axis.y) * 0.5;
694
}
695
696
clip_from_view.z_axis.x += jitter.x;
697
clip_from_view.z_axis.y += jitter.y;
698
}
699
}
700
701
/// Camera component specifying a mip bias to apply when sampling from material textures.
///
/// Often used in conjunction with antialiasing post-process effects to reduce textures blurriness.
#[derive(Component, Reflect, Clone)]
#[reflect(Default, Component)]
pub struct MipBias(pub f32);

impl Default for MipBias {
    // Defaults to a bias of -1.0.
    fn default() -> Self {
        Self(-1.0)
    }
}
713
714