pub mod visibility;
pub mod window;

use bevy_camera::{
    primitives::Frustum, CameraMainTextureUsages, ClearColor, ClearColorConfig, Exposure,
    MainPassResolutionOverride, NormalizedRenderTarget,
};
use bevy_diagnostic::FrameCount;
pub use visibility::*;
pub use window::*;

use crate::{
    camera::{ExtractedCamera, MipBias, NormalizedRenderTargetExt as _, TemporalJitter},
    experimental::occlusion_culling::OcclusionCulling,
    extract_component::ExtractComponentPlugin,
    render_asset::RenderAssets,
    render_phase::ViewRangefinder3d,
    render_resource::{DynamicUniformBuffer, ShaderType, Texture, TextureView},
    renderer::{RenderDevice, RenderQueue},
    sync_world::MainEntity,
    texture::{
        CachedTexture, ColorAttachment, DepthAttachment, GpuImage, ManualTextureViews,
        OutputColorAttachment, TextureCache,
    },
    Render, RenderApp, RenderSystems,
};
use alloc::sync::Arc;
use bevy_app::{App, Plugin};
use bevy_color::LinearRgba;
use bevy_derive::{Deref, DerefMut};
use bevy_ecs::prelude::*;
use bevy_image::{BevyDefault as _, ToExtents};
use bevy_math::{mat3, vec2, vec3, Mat3, Mat4, UVec4, Vec2, Vec3, Vec4, Vec4Swizzles};
use bevy_platform::collections::{hash_map::Entry, HashMap};
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
use bevy_render_macros::ExtractComponent;
use bevy_shader::load_shader_library;
use bevy_transform::components::GlobalTransform;
use core::{
    ops::Range,
    sync::atomic::{AtomicUsize, Ordering},
};
use wgpu::{
    BufferUsages, RenderPassColorAttachment, RenderPassDepthStencilAttachment, StoreOp,
    TextureDescriptor, TextureDimension, TextureFormat, TextureUsages,
};

/// The matrix that converts from the RGB to the LMS color space.
///
/// To derive this, first we convert from RGB to [CIE 1931 XYZ]:
///
/// ```text
/// ⎡ X ⎤   ⎡ 0.490  0.310  0.200 ⎤ ⎡ R ⎤
/// ⎢ Y ⎥ = ⎢ 0.177  0.812  0.011 ⎥ ⎢ G ⎥
/// ⎣ Z ⎦   ⎣ 0.000  0.010  0.990 ⎦ ⎣ B ⎦
/// ```
///
/// Then we convert to LMS according to the [CAM16 standard matrix]:
///
/// ```text
/// ⎡ L ⎤   ⎡  0.401  0.650 -0.051 ⎤ ⎡ X ⎤
/// ⎢ M ⎥ = ⎢ -0.250  1.204  0.046 ⎥ ⎢ Y ⎥
/// ⎣ S ⎦   ⎣ -0.002  0.049  0.953 ⎦ ⎣ Z ⎦
/// ```
///
/// The resulting matrix is just the concatenation of these two matrices, to do
/// the conversion in one step.
///
/// [CIE 1931 XYZ]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [CAM16 standard matrix]: https://en.wikipedia.org/wiki/LMS_color_space
static RGB_TO_LMS: Mat3 = mat3(
    vec3(0.311692, 0.0905138, 0.00764433),
    vec3(0.652085, 0.901341, 0.0486554),
    vec3(0.0362225, 0.00814478, 0.943700),
);

/// The inverse of the [`RGB_TO_LMS`] matrix, converting from the LMS color
/// space back to RGB.
static LMS_TO_RGB: Mat3 = mat3(
    vec3(4.06305, -0.40791, -0.0118812),
    vec3(-2.93241, 1.40437, -0.0486532),
    vec3(-0.130646, 0.00353630, 1.0605344),
);

/// The [CIE 1931] *xy* chromaticity coordinates of the [D65 white point].
///
/// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_XY: Vec2 = vec2(0.31272, 0.32903);

/// The [D65 white point] in [LMS color space].
///
/// [LMS color space]: https://en.wikipedia.org/wiki/LMS_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_LMS: Vec3 = vec3(0.975538, 1.01648, 1.08475);
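// A minimal sketch (test-only, not part of this module's API) verifying the
// derivation above: since `LMS_TO_RGB` is documented as the inverse of
// `RGB_TO_LMS`, their product should be the identity, up to the precision of
// the hand-computed constants.
#[cfg(test)]
mod lms_matrix_tests {
    use super::{LMS_TO_RGB, RGB_TO_LMS};
    use bevy_math::Mat3;

    #[test]
    fn rgb_lms_round_trip_is_identity() {
        // RGB → LMS → RGB should leave a color unchanged.
        let round_trip = LMS_TO_RGB * RGB_TO_LMS;
        assert!(round_trip.abs_diff_eq(Mat3::IDENTITY, 1e-4));
    }
}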
pub struct ViewPlugin;

impl Plugin for ViewPlugin {
    fn build(&self, app: &mut App) {
        load_shader_library!(app, "view.wgsl");

        app
            // NOTE: windows.is_changed() handles cases where a window was resized
            .add_plugins((
                ExtractComponentPlugin::<Hdr>::default(),
                ExtractComponentPlugin::<Msaa>::default(),
                ExtractComponentPlugin::<OcclusionCulling>::default(),
                RenderVisibilityRangePlugin,
            ));

        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app.add_systems(
                Render,
                (
                    // `TextureView`s need to be dropped before reconfiguring window surfaces.
                    clear_view_attachments
                        .in_set(RenderSystems::ManageViews)
                        .before(create_surfaces),
                    prepare_view_attachments
                        .in_set(RenderSystems::ManageViews)
                        .before(prepare_view_targets)
                        .after(prepare_windows),
                    prepare_view_targets
                        .in_set(RenderSystems::ManageViews)
                        .after(prepare_windows)
                        .after(crate::render_asset::prepare_assets::<GpuImage>)
                        .ambiguous_with(crate::camera::sort_cameras), // doesn't use `sorted_camera_index_for_target`
                    prepare_view_uniforms.in_set(RenderSystems::PrepareResources),
                ),
            );
        }
    }

    fn finish(&self, app: &mut App) {
        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .init_resource::<ViewUniforms>()
                .init_resource::<ViewTargetAttachments>();
        }
    }
}

/// Component for configuring the number of samples for [Multi-Sample Anti-Aliasing](https://en.wikipedia.org/wiki/Multisample_anti-aliasing)
/// for a [`Camera`](bevy_camera::Camera).
///
/// Defaults to 4 samples. A higher number of samples results in smoother edges.
///
/// Some advanced rendering features may require that MSAA is disabled.
///
/// Note that the web currently only supports 1 or 4 samples.
#[derive(
    Component,
    Default,
    Clone,
    Copy,
    ExtractComponent,
    Reflect,
    PartialEq,
    PartialOrd,
    Eq,
    Hash,
    Debug,
)]
#[reflect(Component, Default, PartialEq, Hash, Debug)]
pub enum Msaa {
    Off = 1,
    Sample2 = 2,
    #[default]
    Sample4 = 4,
    Sample8 = 8,
}

impl Msaa {
    #[inline]
    pub fn samples(&self) -> u32 {
        *self as u32
    }

    pub fn from_samples(samples: u32) -> Self {
        match samples {
            1 => Msaa::Off,
            2 => Msaa::Sample2,
            4 => Msaa::Sample4,
            8 => Msaa::Sample8,
            _ => panic!("Unsupported MSAA sample count: {samples}"),
        }
    }
}

/// If this component is added to a camera, the camera will use an intermediate
/// "high dynamic range" render texture. This allows rendering with a wider
/// range of lighting values. Note that this does *not* enable HDR display
/// output (which Bevy does not currently support); it only affects the
/// intermediate render texture.
#[derive(
    Component, Default, Copy, Clone, ExtractComponent, Reflect, PartialEq, Eq, Hash, Debug,
)]
#[reflect(Component, Default, PartialEq, Hash, Debug)]
pub struct Hdr;
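// A minimal sketch (test-only) of the `Msaa` API above: `from_samples` and
// `samples` round-trip for the supported counts, and the default is 4 samples.
// Note that `from_samples` panics on unsupported counts, per its implementation.
#[cfg(test)]
mod msaa_tests {
    use super::Msaa;

    #[test]
    fn sample_count_round_trip() {
        for samples in [1, 2, 4, 8] {
            assert_eq!(Msaa::from_samples(samples).samples(), samples);
        }
        assert_eq!(Msaa::default().samples(), 4);
    }
}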
/// An identifier for a view that is stable across frames.
///
/// We can't use [`Entity`] for this because render world entities aren't
/// stable, and we can't use just [`MainEntity`] because some main world views
/// extract to multiple render world views. For example, a directional light
/// extracts to one render world view per cascade, and a point light extracts
/// to one render world view per cubemap face. So we pair the main entity with
/// an *auxiliary entity* and a *subview index*, which *together* uniquely
/// identify a view in the render world in a way that's stable from frame to
/// frame.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct RetainedViewEntity {
    /// The main entity that this view corresponds to.
    pub main_entity: MainEntity,

    /// Another entity associated with the view entity.
    ///
    /// This is currently used for shadow cascades. If there are multiple
    /// cameras, each camera needs to have its own set of shadow cascades. Thus
    /// the light and subview index aren't themselves enough to uniquely
    /// identify a shadow cascade: we need the camera that the cascade is
    /// associated with as well. This entity stores that camera.
    ///
    /// If not present, this will be `MainEntity(Entity::PLACEHOLDER)`.
    pub auxiliary_entity: MainEntity,

    /// The index of the view corresponding to the entity.
    ///
    /// For example, for point lights that cast shadows, this is the index of
    /// the cubemap face (0 through 5 inclusive). For directional lights, this
    /// is the index of the cascade.
    pub subview_index: u32,
}

impl RetainedViewEntity {
    /// Creates a new [`RetainedViewEntity`] from the given main world entity,
    /// auxiliary main world entity, and subview index.
    ///
    /// See [`RetainedViewEntity::auxiliary_entity`] and
    /// [`RetainedViewEntity::subview_index`] for an explanation of what
    /// `auxiliary_entity` and `subview_index` are.
    pub fn new(
        main_entity: MainEntity,
        auxiliary_entity: Option<MainEntity>,
        subview_index: u32,
    ) -> Self {
        Self {
            main_entity,
            auxiliary_entity: auxiliary_entity.unwrap_or(Entity::PLACEHOLDER.into()),
            subview_index,
        }
    }
}
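// A minimal sketch (test-only) of the stability contract described above: a
// single main-world light entity yields six distinct retained view
// identities, one per cubemap face, because the subview index participates in
// equality and hashing. `Entity::PLACEHOLDER` stands in for a real light here.
#[cfg(test)]
mod retained_view_entity_tests {
    use super::RetainedViewEntity;
    use crate::sync_world::MainEntity;
    use bevy_ecs::prelude::Entity;

    #[test]
    fn cubemap_faces_are_distinct_views() {
        let light: MainEntity = Entity::PLACEHOLDER.into();
        let faces: [RetainedViewEntity; 6] =
            core::array::from_fn(|face| RetainedViewEntity::new(light, None, face as u32));
        // Same main entity, same (placeholder) auxiliary entity, but each
        // subview index identifies a different view.
        assert!(faces.iter().all(|view| view.main_entity == light));
        assert_ne!(faces[0], faces[1]);
    }
}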
/// Describes a camera in the render world.
///
/// Each entity in the main world can potentially extract to multiple subviews,
/// each of which has a [`RetainedViewEntity::subview_index`]. For instance, 3D
/// cameras extract to both a 3D camera subview with index 0 and a special UI
/// subview with index 1. Likewise, point lights with shadows extract to 6
/// subviews, one for each side of the shadow cubemap.
#[derive(Component)]
pub struct ExtractedView {
    /// The entity in the main world corresponding to this render world view.
    pub retained_view_entity: RetainedViewEntity,
    /// Typically a column-major right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0     0 ⎤
    /// ⎢          0  f   0     0 ⎥
    /// ⎢          0  0   0  near ⎥
    /// ⎣          0  0  -1     0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = far - near
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡ 2 / w      0      0   cw / w ⎤
    /// ⎢     0  2 / h      0   ch / h ⎥
    /// ⎢     0      0  1 / d  far / d ⎥
    /// ⎣     0      0      0        1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check if a
    /// projection is orthographic.
    ///
    /// Glam matrices are column-major, so, for example, getting the near plane
    /// of a perspective projection is `clip_from_view[3][2]`.
    ///
    /// Custom projections are also possible, however.
    pub clip_from_view: Mat4,
    pub world_from_view: GlobalTransform,
    /// The view-projection matrix. When provided, it is used instead of
    /// deriving it from `clip_from_view` and `world_from_view`, which can be
    /// helpful in cases where numerical stability matters and there is a more
    /// direct way to derive the view-projection matrix.
    pub clip_from_world: Option<Mat4>,
    pub hdr: bool,
    // uvec4(origin.x, origin.y, width, height)
    pub viewport: UVec4,
    pub color_grading: ColorGrading,
}

impl ExtractedView {
    /// Creates a 3D rangefinder for a view.
    pub fn rangefinder3d(&self) -> ViewRangefinder3d {
        ViewRangefinder3d::from_world_from_view(&self.world_from_view.affine())
    }
}
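// A minimal sketch (test-only) of the projection conventions documented on
// `clip_from_view` above: glam is column-major, so `w_axis` is
// `clip_from_view[3]`, the near plane of a perspective projection sits at
// `[3][2]`, and `[3][3] == 1.0` distinguishes orthographic from perspective.
#[cfg(test)]
mod projection_convention_tests {
    use bevy_math::Mat4;

    #[test]
    fn near_plane_and_orthographic_check() {
        let near = 0.1;
        let clip_from_view = Mat4::perspective_infinite_reverse_rh(
            core::f32::consts::FRAC_PI_4, // fov_y_radians
            16.0 / 9.0,                   // aspect
            near,
        );
        // `w_axis.z` is `clip_from_view[3][2]`: the near plane.
        assert_eq!(clip_from_view.w_axis.z, near);
        // Perspective projections have 0.0 in `[3][3]`...
        assert_eq!(clip_from_view.w_axis.w, 0.0);
        // ...while orthographic projections have 1.0 there.
        let ortho = Mat4::orthographic_rh(-1.0, 1.0, -1.0, 1.0, 0.0, 100.0);
        assert_eq!(ortho.w_axis.w, 1.0);
    }
}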
/// Configures filmic color grading parameters to adjust the image appearance.
///
/// Color grading is applied just before tonemapping for a given
/// [`Camera`](bevy_camera::Camera) entity, with the sole exception of the
/// `post_saturation` value in [`ColorGradingGlobal`], which is applied after
/// tonemapping.
#[derive(Component, Reflect, Debug, Default, Clone)]
#[reflect(Component, Default, Debug, Clone)]
pub struct ColorGrading {
    /// Filmic color grading values applied to the image as a whole (as opposed
    /// to individual sections, like shadows and highlights).
    pub global: ColorGradingGlobal,

    /// Color grading values that are applied to the darker parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub shadows: ColorGradingSection,

    /// Color grading values that are applied to the parts of the image with
    /// intermediate brightness.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub midtones: ColorGradingSection,

    /// Color grading values that are applied to the lighter parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub highlights: ColorGradingSection,
}

/// Filmic color grading values applied to the image as a whole (as opposed to
/// individual sections, like shadows and highlights).
#[derive(Clone, Debug, Reflect)]
#[reflect(Default, Clone)]
pub struct ColorGradingGlobal {
    /// Exposure value (EV) offset, measured in stops.
    pub exposure: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *x* value.
    ///
    /// Positive values make the colors redder. Negative values make the colors
    /// bluer. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub temperature: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *y* value.
    ///
    /// Positive values make the colors more magenta. Negative values make the
    /// colors greener. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub tint: f32,

    /// An adjustment to the [hue], in radians.
    ///
    /// Adjusting this value changes the perceived colors in the image: red to
    /// yellow to green to blue, etc. It has no effect on the saturation or
    /// brightness of the colors.
    ///
    /// [hue]: https://en.wikipedia.org/wiki/HSL_and_HSV#Formal_derivation
    pub hue: f32,

    /// Saturation adjustment applied after tonemapping.
    ///
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a
    /// grayscale image with luminance defined by ITU-R BT.709. Values above
    /// 1.0 increase saturation.
    pub post_saturation: f32,

    /// The luminance (brightness) ranges that are considered part of the
    /// "midtones" of the image.
    ///
    /// This affects which [`ColorGradingSection`]s apply to which colors. Note
    /// that the sections smoothly blend into one another, to avoid abrupt
    /// transitions.
    ///
    /// The default value is 0.2 to 0.7.
    pub midtones_range: Range<f32>,
}

/// The [`ColorGrading`] structure, packed into the most efficient form for the
/// GPU.
#[derive(Clone, Copy, Debug, ShaderType)]
pub struct ColorGradingUniform {
    pub balance: Mat3,
    pub saturation: Vec3,
    pub contrast: Vec3,
    pub gamma: Vec3,
    pub gain: Vec3,
    pub lift: Vec3,
    pub midtone_range: Vec2,
    pub exposure: f32,
    pub hue: f32,
    pub post_saturation: f32,
}

/// A section of color grading values that can be selectively applied to
/// shadows, midtones, and highlights.
#[derive(Reflect, Debug, Copy, Clone, PartialEq)]
#[reflect(Clone, PartialEq)]
pub struct ColorGradingSection {
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a
    /// grayscale image with luminance defined by ITU-R BT.709. Values above
    /// 1.0 increase saturation.
    pub saturation: f32,

    /// Adjusts the range of colors.
    ///
    /// A value of 1.0 applies no changes. Values below 1.0 move the colors more
    /// toward a neutral gray. Values above 1.0 spread the colors out away from
    /// the neutral gray.
    pub contrast: f32,

    /// A nonlinear luminance adjustment, mainly affecting the high end of the
    /// range.
    ///
    /// This is the *n* exponent in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gamma: f32,

    /// A linear luminance adjustment, mainly affecting the middle part of the
    /// range.
    ///
    /// This is the *s* factor in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gain: f32,

    /// A fixed luminance adjustment, mainly affecting the lower part of the
    /// range.
    ///
    /// This is the *o* term in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub lift: f32,
}

impl Default for ColorGradingGlobal {
    fn default() -> Self {
        Self {
            exposure: 0.0,
            temperature: 0.0,
            tint: 0.0,
            hue: 0.0,
            post_saturation: 1.0,
            midtones_range: 0.2..0.7,
        }
    }
}

impl Default for ColorGradingSection {
    fn default() -> Self {
        Self {
            saturation: 1.0,
            contrast: 1.0,
            gamma: 1.0,
            gain: 1.0,
            lift: 0.0,
        }
    }
}
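// A minimal sketch (test-only) of the ASC CDL formula referenced in the
// `ColorGradingSection` docs above. The `asc_cdl` helper is hypothetical (the
// real transform lives in shader code); it only illustrates how `gain` (s),
// `lift` (o), and `gamma` (n) combine, and that the defaults are an identity.
#[cfg(test)]
mod asc_cdl_tests {
    use super::ColorGradingSection;

    /// `out = (i × s + o)ⁿ`, applied to a single luminance value.
    fn asc_cdl(section: &ColorGradingSection, input: f32) -> f32 {
        (input * section.gain + section.lift).powf(section.gamma)
    }

    #[test]
    fn default_section_is_identity() {
        let section = ColorGradingSection::default();
        // s = 1, o = 0, n = 1 leaves the input unchanged.
        assert!((asc_cdl(&section, 0.5) - 0.5).abs() < 1e-6);
    }
}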
impl ColorGrading {
    /// Creates a new [`ColorGrading`] instance in which shadows, midtones, and
    /// highlights all have the same set of color grading values.
    pub fn with_identical_sections(
        global: ColorGradingGlobal,
        section: ColorGradingSection,
    ) -> ColorGrading {
        ColorGrading {
            global,
            highlights: section,
            midtones: section,
            shadows: section,
        }
    }

    /// Returns an iterator that visits the shadows, midtones, and highlights
    /// sections, in that order.
    pub fn all_sections(&self) -> impl Iterator<Item = &ColorGradingSection> {
        [&self.shadows, &self.midtones, &self.highlights].into_iter()
    }

    /// Returns an iterator that visits mutable references to the shadows,
    /// midtones, and highlights sections, in that order.
    pub fn all_sections_mut(&mut self) -> impl Iterator<Item = &mut ColorGradingSection> {
        [&mut self.shadows, &mut self.midtones, &mut self.highlights].into_iter()
    }
}
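// A minimal sketch (test-only) of the constructor above: building a
// `ColorGrading` with one shared section and confirming via `all_sections`
// that shadows, midtones, and highlights all received it.
#[cfg(test)]
mod color_grading_tests {
    use super::{ColorGrading, ColorGradingGlobal, ColorGradingSection};

    #[test]
    fn identical_sections() {
        let section = ColorGradingSection {
            saturation: 1.2,
            ..Default::default()
        };
        let grading =
            ColorGrading::with_identical_sections(ColorGradingGlobal::default(), section);
        assert!(grading.all_sections().all(|s| *s == section));
    }
}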
#[derive(Clone, ShaderType)]
pub struct ViewUniform {
    pub clip_from_world: Mat4,
    pub unjittered_clip_from_world: Mat4,
    pub world_from_clip: Mat4,
    pub world_from_view: Mat4,
    pub view_from_world: Mat4,
    /// Typically a column-major right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0     0 ⎤
    /// ⎢          0  f   0     0 ⎥
    /// ⎢          0  0   0  near ⎥
    /// ⎣          0  0  -1     0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = far - near
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡ 2 / w      0      0   cw / w ⎤
    /// ⎢     0  2 / h      0   ch / h ⎥
    /// ⎢     0      0  1 / d  far / d ⎥
    /// ⎣     0      0      0        1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check if a
    /// projection is orthographic.
    ///
    /// Glam matrices are column-major, so, for example, getting the near plane
    /// of a perspective projection is `clip_from_view[3][2]`.
    ///
    /// Custom projections are also possible, however.
    pub clip_from_view: Mat4,
    pub view_from_clip: Mat4,
    pub world_position: Vec3,
    pub exposure: f32,
    // viewport(x_origin, y_origin, width, height)
    pub viewport: Vec4,
    pub main_pass_viewport: Vec4,
    /// 6 world-space half spaces (normal: vec3, distance: f32) ordered left,
    /// right, top, bottom, near, far. The normal vectors point towards the
    /// interior of the frustum. A half space contains `p` if
    /// `normal.dot(p) + distance > 0.`
    pub frustum: [Vec4; 6],
    pub color_grading: ColorGradingUniform,
    pub mip_bias: f32,
    pub frame_count: u32,
}

#[derive(Resource)]
pub struct ViewUniforms {
    pub uniforms: DynamicUniformBuffer<ViewUniform>,
}

impl FromWorld for ViewUniforms {
    fn from_world(world: &mut World) -> Self {
        let mut uniforms = DynamicUniformBuffer::default();
        uniforms.set_label(Some("view_uniforms_buffer"));

        let render_device = world.resource::<RenderDevice>();
        if render_device.limits().max_storage_buffers_per_shader_stage > 0 {
            uniforms.add_usages(BufferUsages::STORAGE);
        }

        Self { uniforms }
    }
}

#[derive(Component)]
pub struct ViewUniformOffset {
    pub offset: u32,
}

#[derive(Component)]
pub struct ViewTarget {
    main_textures: MainTargetTextures,
    main_texture_format: TextureFormat,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`.
    /// This is shared across view targets with the same render target.
    main_texture: Arc<AtomicUsize>,
    out_texture: OutputColorAttachment,
}

/// Contains the [`OutputColorAttachment`] used for each target present on any
/// view in the current frame, after being prepared by
/// [`prepare_view_attachments`]. Users that want to override the default
/// output color attachment for a specific target can do so by adding an
/// [`OutputColorAttachment`] to this resource before [`prepare_view_targets`]
/// is called.
#[derive(Resource, Default, Deref, DerefMut)]
pub struct ViewTargetAttachments(HashMap<NormalizedRenderTarget, OutputColorAttachment>);

pub struct PostProcessWrite<'a> {
    pub source: &'a TextureView,
    pub source_texture: &'a Texture,
    pub destination: &'a TextureView,
    pub destination_texture: &'a Texture,
}

impl From<ColorGrading> for ColorGradingUniform {
    fn from(component: ColorGrading) -> Self {
        // Compute the balance matrix that will be used to apply the white
        // balance adjustment to an RGB color. Our general approach will be to
        // convert both the color and the developer-supplied white point to the
        // LMS color space, apply the adjustment, and then convert back.
        //
        // First, we start with the CIE 1931 *xy* values of the standard D65
        // illuminant:
        // <https://en.wikipedia.org/wiki/Standard_illuminant#D65_values>
        //
        // We then adjust them based on the developer's requested white balance.
        let white_point_xy = D65_XY + vec2(-component.global.temperature, component.global.tint);

        // Convert the white point from CIE 1931 *xy* to LMS. First, we convert to XYZ:
        //
        //                  Y            Y
        //     Y = 1    X = ─ x      Z = ─ (1 - x - y)
        //                  y            y
        //
        // Then we convert from XYZ to LMS color space, using the CAM16 matrix
        // from <https://en.wikipedia.org/wiki/LMS_color_space#Later_CIECAMs>:
        //
        //     ⎡ L ⎤   ⎡  0.401  0.650 -0.051 ⎤ ⎡ X ⎤
        //     ⎢ M ⎥ = ⎢ -0.250  1.204  0.046 ⎥ ⎢ Y ⎥
        //     ⎣ S ⎦   ⎣ -0.002  0.049  0.953 ⎦ ⎣ Z ⎦
        //
        // The following formula is just a simplification of the above.

        let white_point_lms = vec3(0.701634, 1.15856, -0.904175)
            + (vec3(-0.051461, 0.045854, 0.953127)
                + vec3(0.452749, -0.296122, -0.955206) * white_point_xy.x)
                / white_point_xy.y;

        // Now that we're in LMS space, perform the white point scaling.
        let white_point_adjustment = Mat3::from_diagonal(D65_LMS / white_point_lms);

        // Finally, combine the RGB → LMS → corrected LMS → corrected RGB
        // pipeline into a single 3×3 matrix.
        let balance = LMS_TO_RGB * white_point_adjustment * RGB_TO_LMS;

        Self {
            balance,
            saturation: vec3(
                component.shadows.saturation,
                component.midtones.saturation,
                component.highlights.saturation,
            ),
            contrast: vec3(
                component.shadows.contrast,
                component.midtones.contrast,
                component.highlights.contrast,
            ),
            gamma: vec3(
                component.shadows.gamma,
                component.midtones.gamma,
                component.highlights.gamma,
            ),
            gain: vec3(
                component.shadows.gain,
                component.midtones.gain,
                component.highlights.gain,
            ),
            lift: vec3(
                component.shadows.lift,
                component.midtones.lift,
                component.highlights.lift,
            ),
            midtone_range: vec2(
                component.global.midtones_range.start,
                component.global.midtones_range.end,
            ),
            exposure: component.global.exposure,
            hue: component.global.hue,
            post_saturation: component.global.post_saturation,
        }
    }
}
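// A minimal sketch (test-only) checking the simplified xy → LMS closed form
// used in the conversion above against the precomputed `D65_LMS` constant:
// with zero temperature and tint, the white point formula evaluated at
// `D65_XY` should reproduce the stored D65 white point.
#[cfg(test)]
mod white_point_tests {
    use super::{D65_LMS, D65_XY};
    use bevy_math::vec3;

    #[test]
    fn d65_xy_maps_to_d65_lms() {
        let lms = vec3(0.701634, 1.15856, -0.904175)
            + (vec3(-0.051461, 0.045854, 0.953127)
                + vec3(0.452749, -0.296122, -0.955206) * D65_XY.x)
                / D65_XY.y;
        assert!((lms - D65_LMS).abs().max_element() < 1e-3);
    }
}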
/// Add this component to a camera to disable *indirect mode*.
///
/// Indirect mode, automatically enabled on supported hardware, allows Bevy to
/// offload transform and cull operations to the GPU, reducing CPU overhead.
/// Doing this, however, reduces the amount of control that your app has over
/// instancing decisions. In certain circumstances, you may want to disable
/// indirect drawing so that your app can manually instance meshes as it sees
/// fit. See the `custom_shader_instancing` example.
///
/// The vast majority of applications will not need to use this component, as it
/// generally reduces rendering performance.
///
/// Note: This component should only be added when initially spawning a camera.
/// Adding or removing it after spawn can result in unspecified behavior.
#[derive(Component, Default)]
pub struct NoIndirectDrawing;
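// A minimal sketch (test-only) of the spawn-time contract above: the marker
// is inserted together with the rest of the camera's components rather than
// toggled later. A bare `World` stands in for a full app with a camera.
#[cfg(test)]
mod no_indirect_drawing_tests {
    use super::NoIndirectDrawing;
    use bevy_ecs::world::World;

    #[test]
    fn added_at_spawn() {
        let mut world = World::new();
        let camera = world.spawn(NoIndirectDrawing).id();
        assert!(world.entity(camera).contains::<NoIndirectDrawing>());
    }
}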
impl ViewTarget {
    pub const TEXTURE_FORMAT_HDR: TextureFormat = TextureFormat::Rgba16Float;

    /// Retrieve this target's main texture's color attachment.
    pub fn get_color_attachment(&self) -> RenderPassColorAttachment<'_> {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            self.main_textures.a.get_attachment()
        } else {
            self.main_textures.b.get_attachment()
        }
    }

    /// Retrieve this target's "unsampled" main texture's color attachment.
    pub fn get_unsampled_color_attachment(&self) -> RenderPassColorAttachment<'_> {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            self.main_textures.a.get_unsampled_attachment()
        } else {
            self.main_textures.b.get_unsampled_attachment()
        }
    }

    /// The "main" unsampled texture.
    pub fn main_texture(&self) -> &Texture {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.a.texture.texture
        } else {
            &self.main_textures.b.texture.texture
        }
    }

    /// The _other_ "main" unsampled texture.
    ///
    /// In most cases you should use [`Self::main_texture`] instead and never this.
    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
    ///
    /// A use case for this is to be able to prepare a bind group for all main textures
    /// ahead of time.
    pub fn main_texture_other(&self) -> &Texture {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.b.texture.texture
        } else {
            &self.main_textures.a.texture.texture
        }
    }

    /// The "main" unsampled texture view.
    pub fn main_texture_view(&self) -> &TextureView {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.a.texture.default_view
        } else {
            &self.main_textures.b.texture.default_view
        }
    }

    /// The _other_ "main" unsampled texture view.
    ///
    /// In most cases you should use [`Self::main_texture_view`] instead and never this.
    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
    ///
    /// A use case for this is to be able to prepare a bind group for all main textures
    /// ahead of time.
    pub fn main_texture_other_view(&self) -> &TextureView {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.b.texture.default_view
        } else {
            &self.main_textures.a.texture.default_view
        }
    }

    /// The "main" sampled texture.
    pub fn sampled_main_texture(&self) -> Option<&Texture> {
        self.main_textures
            .a
            .resolve_target
            .as_ref()
            .map(|sampled| &sampled.texture)
    }

    /// The "main" sampled texture view.
    pub fn sampled_main_texture_view(&self) -> Option<&TextureView> {
        self.main_textures
            .a
            .resolve_target
            .as_ref()
            .map(|sampled| &sampled.default_view)
    }

    #[inline]
    pub fn main_texture_format(&self) -> TextureFormat {
        self.main_texture_format
    }

    /// Returns `true` if and only if the main texture is [`Self::TEXTURE_FORMAT_HDR`].
    #[inline]
    pub fn is_hdr(&self) -> bool {
        self.main_texture_format == ViewTarget::TEXTURE_FORMAT_HDR
    }

    /// The final texture this view will render to.
    #[inline]
    pub fn out_texture(&self) -> &TextureView {
        &self.out_texture.view
    }

    pub fn out_texture_color_attachment(
        &self,
        clear_color: Option<LinearRgba>,
    ) -> RenderPassColorAttachment<'_> {
        self.out_texture.get_attachment(clear_color)
    }

    /// The format of the final texture this view will render to.
    #[inline]
    pub fn out_texture_format(&self) -> TextureFormat {
        self.out_texture.format
    }

    /// This will start a new "post process write", which assumes that the caller
    /// will write the [`PostProcessWrite`]'s `source` to the `destination`.
    ///
    /// `source` is the "current" main texture. This will internally flip this
    /// [`ViewTarget`]'s main texture to the `destination` texture, so the caller
    /// _must_ ensure `source` is copied to `destination`, with or without modifications.
    /// Failing to do so will cause the current main texture information to be lost.
    pub fn post_process_write(&self) -> PostProcessWrite<'_> {
        let old_is_a_main_texture = self.main_texture.fetch_xor(1, Ordering::SeqCst);
        // If the old main texture is `a`, then the post processing must write from `a` to `b`.
        if old_is_a_main_texture == 0 {
            self.main_textures.b.mark_as_cleared();
            PostProcessWrite {
                source: &self.main_textures.a.texture.default_view,
                source_texture: &self.main_textures.a.texture.texture,
                destination: &self.main_textures.b.texture.default_view,
                destination_texture: &self.main_textures.b.texture.texture,
            }
        } else {
            self.main_textures.a.mark_as_cleared();
            PostProcessWrite {
                source: &self.main_textures.b.texture.default_view,
                source_texture: &self.main_textures.b.texture.texture,
                destination: &self.main_textures.a.texture.default_view,
                destination_texture: &self.main_textures.a.texture.texture,
            }
        }
    }
}

#[derive(Component)]
pub struct ViewDepthTexture {
    pub texture: Texture,
    attachment: DepthAttachment,
}

impl ViewDepthTexture {
    pub fn new(texture: CachedTexture, clear_value: Option<f32>) -> Self {
        Self {
            texture: texture.texture,
            attachment: DepthAttachment::new(texture.default_view, clear_value),
        }
    }

    pub fn get_attachment(&self, store: StoreOp) -> RenderPassDepthStencilAttachment<'_> {
        self.attachment.get_attachment(store)
    }

    pub fn view(&self) -> &TextureView {
        &self.attachment.view
    }
}
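// A minimal sketch (test-only) of the a/b ping-pong that
// `ViewTarget::post_process_write` performs: a shared `AtomicUsize` is
// XOR-flipped between 0 (`a`) and 1 (`b`), so each post-process pass reads the
// previous destination and writes the other texture. This models only the
// index flip, not the texture handles themselves.
#[cfg(test)]
mod main_texture_flip_tests {
    use alloc::sync::Arc;
    use core::sync::atomic::{AtomicUsize, Ordering};

    #[test]
    fn ping_pong() {
        let main_texture = Arc::new(AtomicUsize::new(0));
        // First write: source is `a` (0), destination is `b` (1).
        assert_eq!(main_texture.fetch_xor(1, Ordering::SeqCst), 0);
        // Second write: source is `b` (1), destination is `a` (0).
        assert_eq!(main_texture.fetch_xor(1, Ordering::SeqCst), 1);
        assert_eq!(main_texture.load(Ordering::SeqCst), 0);
    }
}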
pub fn prepare_view_uniforms(
    mut commands: Commands,
    render_device: Res<RenderDevice>,
    render_queue: Res<RenderQueue>,
    mut view_uniforms: ResMut<ViewUniforms>,
    views: Query<(
        Entity,
        Option<&ExtractedCamera>,
        &ExtractedView,
        Option<&Frustum>,
        Option<&TemporalJitter>,
        Option<&MipBias>,
        Option<&MainPassResolutionOverride>,
    )>,
    frame_count: Res<FrameCount>,
) {
    let view_iter = views.iter();
    let view_count = view_iter.len();
    let Some(mut writer) =
        view_uniforms
            .uniforms
            .get_writer(view_count, &render_device, &render_queue)
    else {
        return;
    };
    for (
        entity,
        extracted_camera,
        extracted_view,
        frustum,
        temporal_jitter,
        mip_bias,
        resolution_override,
    ) in &views
    {
        let viewport = extracted_view.viewport.as_vec4();
        let mut main_pass_viewport = viewport;
        if let Some(resolution_override) = resolution_override {
            main_pass_viewport.z = resolution_override.0.x as f32;
            main_pass_viewport.w = resolution_override.0.y as f32;
        }

        let unjittered_projection = extracted_view.clip_from_view;
        let mut clip_from_view = unjittered_projection;

        if let Some(temporal_jitter) = temporal_jitter {
            temporal_jitter.jitter_projection(&mut clip_from_view, main_pass_viewport.zw());
        }

        let view_from_clip = clip_from_view.inverse();
        let world_from_view = extracted_view.world_from_view.to_matrix();
        let view_from_world = world_from_view.inverse();

        let clip_from_world = if temporal_jitter.is_some() {
            clip_from_view * view_from_world
        } else {
            extracted_view
                .clip_from_world
                .unwrap_or_else(|| clip_from_view * view_from_world)
        };

        // Map the `Frustum` type to the shader's `array<vec4<f32>, 6>`.
        let frustum = frustum
            .map(|frustum| frustum.half_spaces.map(|h| h.normal_d()))
            .unwrap_or([Vec4::ZERO; 6]);

        let view_uniforms = ViewUniformOffset {
            offset: writer.write(&ViewUniform {
                clip_from_world,
                unjittered_clip_from_world: unjittered_projection * view_from_world,
                world_from_clip: world_from_view * view_from_clip,
                world_from_view,
                view_from_world,
                clip_from_view,
                view_from_clip,
                world_position: extracted_view.world_from_view.translation(),
                exposure: extracted_camera
                    .map(|c| c.exposure)
                    .unwrap_or_else(|| Exposure::default().exposure()),
                viewport,
                main_pass_viewport,
                frustum,
                color_grading: extracted_view.color_grading.clone().into(),
                mip_bias: mip_bias.unwrap_or(&MipBias(0.0)).0,
                frame_count: frame_count.0,
            }),
        };

        commands.entity(entity).insert(view_uniforms);
    }
}

#[derive(Clone)]
struct MainTargetTextures {
    a: ColorAttachment,
    b: ColorAttachment,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`.
    /// This is shared across view targets with the same render target.
    main_texture: Arc<AtomicUsize>,
}

/// Prepares the view target [`OutputColorAttachment`] for each view in the current frame.
pub fn prepare_view_attachments(
    windows: Res<ExtractedWindows>,
    images: Res<RenderAssets<GpuImage>>,
    manual_texture_views: Res<ManualTextureViews>,
    cameras: Query<&ExtractedCamera>,
    mut view_target_attachments: ResMut<ViewTargetAttachments>,
) {
    for camera in cameras.iter() {
        let Some(target) = &camera.target else {
            continue;
        };

        match view_target_attachments.entry(target.clone()) {
            Entry::Occupied(_) => {}
            Entry::Vacant(entry) => {
                let Some(attachment) = target
                    .get_texture_view(&windows, &images, &manual_texture_views)
                    .cloned()
                    .zip(target.get_texture_format(&windows, &images, &manual_texture_views))
                    .map(|(view, format)| {
                        OutputColorAttachment::new(view.clone(), format.add_srgb_suffix())
                    })
                else {
                    continue;
                };
                entry.insert(attachment);
            }
        };
    }
}

/// Clears the view target [`OutputColorAttachment`]s.
pub fn clear_view_attachments(mut view_target_attachments: ResMut<ViewTargetAttachments>) {
    view_target_attachments.clear();
}
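// A minimal sketch (test-only) of the matrix bookkeeping in
// `prepare_view_uniforms` above: `world_from_clip` is assembled as
// `world_from_view * view_from_clip`, which should agree with inverting
// `clip_from_world` directly, up to floating-point error.
#[cfg(test)]
mod view_matrix_tests {
    use bevy_math::{Mat4, Vec3};

    #[test]
    fn world_from_clip_matches_inverse() {
        let clip_from_view = Mat4::perspective_infinite_reverse_rh(1.0, 16.0 / 9.0, 0.1);
        let world_from_view = Mat4::from_translation(Vec3::new(1.0, 2.0, 3.0));
        let view_from_world = world_from_view.inverse();

        let clip_from_world = clip_from_view * view_from_world;
        let world_from_clip = world_from_view * clip_from_view.inverse();

        assert!(world_from_clip.abs_diff_eq(clip_from_world.inverse(), 1e-4));
    }
}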
pub fn prepare_view_targets(
    mut commands: Commands,
    clear_color_global: Res<ClearColor>,
    render_device: Res<RenderDevice>,
    mut texture_cache: ResMut<TextureCache>,
    cameras: Query<(
        Entity,
        &ExtractedCamera,
        &ExtractedView,
        &CameraMainTextureUsages,
        &Msaa,
    )>,
    view_target_attachments: Res<ViewTargetAttachments>,
) {
    let mut textures = <HashMap<_, _>>::default();
    for (entity, camera, view, texture_usage, msaa) in cameras.iter() {
        let (Some(target_size), Some(target)) = (camera.physical_target_size, &camera.target)
        else {
            continue;
        };

        let Some(out_attachment) = view_target_attachments.get(target) else {
            continue;
        };

        let main_texture_format = if view.hdr {
            ViewTarget::TEXTURE_FORMAT_HDR
        } else {
            TextureFormat::bevy_default()
        };

        let clear_color = match camera.clear_color {
            ClearColorConfig::Custom(color) => Some(color),
            ClearColorConfig::None => None,
            _ => Some(clear_color_global.0),
        };

        let (a, b, sampled, main_texture) = textures
            .entry((camera.target.clone(), texture_usage.0, view.hdr, msaa))
            .or_insert_with(|| {
                let descriptor = TextureDescriptor {
                    label: None,
                    size: target_size.to_extents(),
                    mip_level_count: 1,
                    sample_count: 1,
                    dimension: TextureDimension::D2,
                    format: main_texture_format,
                    usage: texture_usage.0,
                    view_formats: match main_texture_format {
                        TextureFormat::Bgra8Unorm => &[TextureFormat::Bgra8UnormSrgb],
                        TextureFormat::Rgba8Unorm => &[TextureFormat::Rgba8UnormSrgb],
                        _ => &[],
                    },
                };
                let a = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_a"),
                        ..descriptor
                    },
                );
                let b = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_b"),
                        ..descriptor
                    },
                );
                let sampled = if msaa.samples() > 1 {
                    let sampled = texture_cache.get(
                        &render_device,
                        TextureDescriptor {
                            label: Some("main_texture_sampled"),
                            size: target_size.to_extents(),
                            mip_level_count: 1,
                            sample_count: msaa.samples(),
                            dimension: TextureDimension::D2,
                            format: main_texture_format,
                            usage: TextureUsages::RENDER_ATTACHMENT,
                            view_formats: descriptor.view_formats,
                        },
                    );
                    Some(sampled)
                } else {
                    None
                };
                let main_texture = Arc::new(AtomicUsize::new(0));
                (a, b, sampled, main_texture)
            });

        let converted_clear_color = clear_color.map(Into::into);

        let main_textures = MainTargetTextures {
            a: ColorAttachment::new(a.clone(), sampled.clone(), converted_clear_color),
            b: ColorAttachment::new(b.clone(), sampled.clone(), converted_clear_color),
            main_texture: main_texture.clone(),
        };

        commands.entity(entity).insert(ViewTarget {
            main_texture: main_textures.main_texture.clone(),
            main_textures,
            main_texture_format,
            out_texture: out_attachment.clone(),
        });
    }
}
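// A minimal sketch (test-only) of the clear-color resolution performed in
// `prepare_view_targets` above. The `resolve` helper is hypothetical; it
// mirrors the `match` in the system: a camera's `ClearColorConfig` overrides
// the global `ClearColor` resource only when it is `Custom`, and `None`
// suppresses clearing entirely.
#[cfg(test)]
mod clear_color_tests {
    use bevy_camera::{ClearColor, ClearColorConfig};
    use bevy_color::Color;

    fn resolve(config: &ClearColorConfig, global: &ClearColor) -> Option<Color> {
        match config {
            ClearColorConfig::Custom(color) => Some(*color),
            ClearColorConfig::None => None,
            _ => Some(global.0),
        }
    }

    #[test]
    fn custom_overrides_global() {
        let global = ClearColor(Color::BLACK);
        assert_eq!(
            resolve(&ClearColorConfig::Custom(Color::WHITE), &global),
            Some(Color::WHITE)
        );
        assert_eq!(resolve(&ClearColorConfig::None, &global), None);
        assert_eq!(
            resolve(&ClearColorConfig::Default, &global),
            Some(Color::BLACK)
        );
    }
}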