pub mod visibility;
pub mod window;

use bevy_camera::{
    primitives::Frustum, CameraMainTextureUsages, ClearColor, ClearColorConfig, Exposure,
    MainPassResolutionOverride, NormalizedRenderTarget,
};
use bevy_diagnostic::FrameCount;
pub use visibility::*;
pub use window::*;

use crate::{
    camera::{ExtractedCamera, MipBias, NormalizedRenderTargetExt as _, TemporalJitter},
    extract_component::ExtractComponentPlugin,
    occlusion_culling::OcclusionCulling,
    render_asset::RenderAssets,
    render_phase::ViewRangefinder3d,
    render_resource::{DynamicUniformBuffer, ShaderType, Texture, TextureView},
    renderer::{RenderDevice, RenderQueue},
    sync_world::MainEntity,
    texture::{
        CachedTexture, ColorAttachment, DepthAttachment, GpuImage, ManualTextureViews,
        OutputColorAttachment, TextureCache,
    },
    Render, RenderApp, RenderSystems,
};
use alloc::sync::Arc;
use bevy_app::{App, Plugin};
use bevy_color::LinearRgba;
use bevy_derive::{Deref, DerefMut};
use bevy_ecs::prelude::*;
use bevy_image::{BevyDefault as _, ToExtents};
use bevy_math::{mat3, vec2, vec3, Mat3, Mat4, UVec4, Vec2, Vec3, Vec4, Vec4Swizzles};
use bevy_platform::collections::{hash_map::Entry, HashMap};
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
use bevy_render_macros::ExtractComponent;
use bevy_shader::load_shader_library;
use bevy_transform::components::GlobalTransform;
use core::{
    ops::Range,
    sync::atomic::{AtomicUsize, Ordering},
};
use wgpu::{
    BufferUsages, RenderPassColorAttachment, RenderPassDepthStencilAttachment, StoreOp,
    TextureDescriptor, TextureDimension, TextureFormat, TextureUsages,
};

/// The matrix that converts from the RGB to the LMS color space.
///
/// To derive this, first we convert from RGB to [CIE 1931 XYZ]:
///
/// ```text
/// ⎡ X ⎤   ⎡ 0.490  0.310  0.200 ⎤ ⎡ R ⎤
/// ⎢ Y ⎥ = ⎢ 0.177  0.812  0.011 ⎥ ⎢ G ⎥
/// ⎣ Z ⎦   ⎣ 0.000  0.010  0.990 ⎦ ⎣ B ⎦
/// ```
///
/// Then we convert to LMS according to the [CAM16 standard matrix]:
///
/// ```text
/// ⎡ L ⎤   ⎡  0.401  0.650 -0.051 ⎤ ⎡ X ⎤
/// ⎢ M ⎥ = ⎢ -0.250  1.204  0.046 ⎥ ⎢ Y ⎥
/// ⎣ S ⎦   ⎣ -0.002  0.049  0.953 ⎦ ⎣ Z ⎦
/// ```
///
/// The resulting matrix is just the concatenation of these two matrices, to do
/// the conversion in one step.
///
/// [CIE 1931 XYZ]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [CAM16 standard matrix]: https://en.wikipedia.org/wiki/LMS_color_space
static RGB_TO_LMS: Mat3 = mat3(
    vec3(0.311692, 0.0905138, 0.00764433),
    vec3(0.652085, 0.901341, 0.0486554),
    vec3(0.0362225, 0.00814478, 0.943700),
);

/// The inverse of the [`RGB_TO_LMS`] matrix, converting from the LMS color
/// space back to RGB.
static LMS_TO_RGB: Mat3 = mat3(
    vec3(4.06305, -0.40791, -0.0118812),
    vec3(-2.93241, 1.40437, -0.0486532),
    vec3(-0.130646, 0.00353630, 1.0605344),
);

/// The [CIE 1931] *xy* chromaticity coordinates of the [D65 white point].
///
/// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_XY: Vec2 = vec2(0.31272, 0.32903);

/// The [D65 white point] in [LMS color space].
///
/// [LMS color space]: https://en.wikipedia.org/wiki/LMS_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_LMS: Vec3 = vec3(0.975538, 1.01648, 1.08475);
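// A minimal verification sketch of the derivation described above:
// `RGB_TO_LMS` should be the product of the two quoted matrices. The
// three-decimal matrices from the doc comment are written here in glam's
// column-major order (each row above becomes a column argument), so the
// comparison uses a correspondingly loose tolerance.
#[cfg(test)]
mod rgb_to_lms_derivation_tests {
    use super::*;

    #[test]
    fn rgb_to_lms_is_the_concatenation_of_both_matrices() {
        // The RGB → XYZ matrix from the doc comment, column-major.
        let xyz_from_rgb = mat3(
            vec3(0.490, 0.177, 0.000),
            vec3(0.310, 0.812, 0.010),
            vec3(0.200, 0.011, 0.990),
        );
        // The CAM16 XYZ → LMS matrix from the doc comment, column-major.
        let lms_from_xyz = mat3(
            vec3(0.401, -0.250, -0.002),
            vec3(0.650, 1.204, 0.049),
            vec3(-0.051, 0.046, 0.953),
        );
        // The quoted entries are rounded to three decimals, so compare loosely.
        assert!((lms_from_xyz * xyz_from_rgb).abs_diff_eq(RGB_TO_LMS, 1e-2));
    }
}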
pub struct ViewPlugin;

impl Plugin for ViewPlugin {
    fn build(&self, app: &mut App) {
        load_shader_library!(app, "view.wgsl");

        app
            // NOTE: windows.is_changed() handles cases where a window was resized
            .add_plugins((
                ExtractComponentPlugin::<Msaa>::default(),
                ExtractComponentPlugin::<OcclusionCulling>::default(),
                RenderVisibilityRangePlugin,
            ));

        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app.add_systems(
                Render,
                (
                    // `TextureView`s need to be dropped before reconfiguring window surfaces.
                    clear_view_attachments
                        .in_set(RenderSystems::ManageViews)
                        .before(create_surfaces),
                    cleanup_view_targets_for_resize
                        .in_set(RenderSystems::ManageViews)
                        .before(create_surfaces),
                    prepare_view_attachments
                        .in_set(RenderSystems::ManageViews)
                        .before(prepare_view_targets)
                        .after(prepare_windows),
                    prepare_view_targets
                        .in_set(RenderSystems::ManageViews)
                        .after(prepare_windows)
                        .after(crate::render_asset::prepare_assets::<GpuImage>)
                        .ambiguous_with(crate::camera::sort_cameras), // doesn't use `sorted_camera_index_for_target`
                    prepare_view_uniforms.in_set(RenderSystems::PrepareResources),
                ),
            );
        }
    }

    fn finish(&self, app: &mut App) {
        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .init_resource::<ViewUniforms>()
                .init_resource::<ViewTargetAttachments>();
        }
    }
}

/// Component for configuring the number of samples for [Multi-Sample Anti-Aliasing](https://en.wikipedia.org/wiki/Multisample_anti-aliasing)
/// for a [`Camera`](bevy_camera::Camera).
///
/// Defaults to 4 samples. A higher number of samples results in smoother edges.
///
/// Some advanced rendering features may require that MSAA is disabled.
///
/// Note that the web currently only supports 1 or 4 samples.
#[derive(
    Component,
    Default,
    Clone,
    Copy,
    ExtractComponent,
    Reflect,
    PartialEq,
    PartialOrd,
    Eq,
    Hash,
    Debug,
)]
#[reflect(Component, Default, PartialEq, Hash, Debug)]
pub enum Msaa {
    Off = 1,
    Sample2 = 2,
    #[default]
    Sample4 = 4,
    Sample8 = 8,
}

impl Msaa {
    #[inline]
    pub fn samples(&self) -> u32 {
        *self as u32
    }

    pub fn from_samples(samples: u32) -> Self {
        match samples {
            1 => Msaa::Off,
            2 => Msaa::Sample2,
            4 => Msaa::Sample4,
            8 => Msaa::Sample8,
            _ => panic!("Unsupported MSAA sample count: {samples}"),
        }
    }
}
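// A minimal illustrative sketch of the `Msaa` discriminant convention:
// each variant's discriminant doubles as its sample count, so `samples`
// and `from_samples` round-trip, and the documented default is 4.
#[cfg(test)]
mod msaa_tests {
    use super::*;

    #[test]
    fn sample_counts_round_trip() {
        for msaa in [Msaa::Off, Msaa::Sample2, Msaa::Sample4, Msaa::Sample8] {
            assert_eq!(Msaa::from_samples(msaa.samples()), msaa);
        }
        assert_eq!(Msaa::default().samples(), 4);
    }
}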
/// An identifier for a view that is stable across frames.
///
/// We can't use [`Entity`] for this because render world entities aren't
/// stable, and we can't use just [`MainEntity`] because some main world views
/// extract to multiple render world views. For example, a directional light
/// extracts to one render world view per cascade, and a point light extracts to
/// one render world view per cubemap face. So we pair the main entity with an
/// *auxiliary entity* and a *subview index*, which *together* uniquely identify
/// a view in the render world in a way that's stable from frame to frame.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct RetainedViewEntity {
    /// The main entity that this view corresponds to.
    pub main_entity: MainEntity,

    /// Another entity associated with the view entity.
    ///
    /// This is currently used for shadow cascades. If there are multiple
    /// cameras, each camera needs to have its own set of shadow cascades. Thus
    /// the light and subview index aren't themselves enough to uniquely
    /// identify a shadow cascade: we need the camera that the cascade is
    /// associated with as well. This entity stores that camera.
    ///
    /// If not present, this will be `MainEntity(Entity::PLACEHOLDER)`.
    pub auxiliary_entity: MainEntity,

    /// The index of the view corresponding to the entity.
    ///
    /// For example, for point lights that cast shadows, this is the index of
    /// the cubemap face (0 through 5 inclusive). For directional lights, this
    /// is the index of the cascade.
    pub subview_index: u32,
}

impl RetainedViewEntity {
    /// Creates a new [`RetainedViewEntity`] from the given main world entity,
    /// auxiliary main world entity, and subview index.
    ///
    /// See [`RetainedViewEntity::auxiliary_entity`] and
    /// [`RetainedViewEntity::subview_index`] for explanations of what
    /// `auxiliary_entity` and `subview_index` are.
    pub fn new(
        main_entity: MainEntity,
        auxiliary_entity: Option<MainEntity>,
        subview_index: u32,
    ) -> Self {
        Self {
            main_entity,
            auxiliary_entity: auxiliary_entity.unwrap_or(Entity::PLACEHOLDER.into()),
            subview_index,
        }
    }
}
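// An illustrative sketch of the identity scheme described above: a single
// main-world entity, such as a shadow-casting point light, yields six
// distinct but frame-stable view identities, one per cubemap face.
#[cfg(test)]
mod retained_view_entity_tests {
    use super::*;

    #[test]
    fn cubemap_faces_are_distinct_views_of_one_entity() {
        let light: MainEntity = Entity::PLACEHOLDER.into();
        let views: [RetainedViewEntity; 6] =
            core::array::from_fn(|face| RetainedViewEntity::new(light, None, face as u32));
        // Every pair of faces compares (and therefore hashes) as distinct.
        for (i, a) in views.iter().enumerate() {
            for b in &views[i + 1..] {
                assert_ne!(a, b);
            }
        }
    }
}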
/// Describes a camera in the render world.
///
/// Each entity in the main world can potentially extract to multiple subviews,
/// each of which has a [`RetainedViewEntity::subview_index`]. For instance, 3D
/// cameras extract to both a 3D camera subview with index 0 and a special UI
/// subview with index 1. Likewise, point lights with shadows extract to 6
/// subviews, one for each side of the shadow cubemap.
#[derive(Component)]
pub struct ExtractedView {
    /// The stable identifier of the main world entity (and subview) that this
    /// render world view corresponds to.
    pub retained_view_entity: RetainedViewEntity,
    /// Typically a column-major right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0     0 ⎤
    /// ⎢          0  f   0     0 ⎥
    /// ⎢          0  0   0  near ⎥
    /// ⎣          0  0  -1     0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = far - near
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡ 2 / w      0      0   cw / w ⎤
    /// ⎢     0  2 / h      0   ch / h ⎥
    /// ⎢     0      0  1 / d  far / d ⎥
    /// ⎣     0      0      0        1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check if a projection is orthographic
    ///
    /// Glam matrices are column major, so for example getting the near plane of a perspective projection is `clip_from_view[3][2]`
    ///
    /// However, custom projections are also possible.
    pub clip_from_view: Mat4,
    pub world_from_view: GlobalTransform,
    // The view-projection matrix. When provided it is used instead of deriving it from the
    // `clip_from_view` and `world_from_view` fields, which can be helpful in cases where numerical
    // stability matters and there is a more direct way to derive the view-projection matrix.
    pub clip_from_world: Option<Mat4>,
    pub hdr: bool,
    // uvec4(origin.x, origin.y, width, height)
    pub viewport: UVec4,
    pub color_grading: ColorGrading,

    /// Whether to switch culling mode so that materials that request backface
    /// culling cull front faces, and vice versa.
    ///
    /// This is typically used for cameras that mirror the world that they
    /// render across a plane, because doing that flips the winding of each
    /// polygon.
    ///
    /// This setting doesn't affect materials that disable backface culling.
    pub invert_culling: bool,
}

impl ExtractedView {
    /// Creates a 3D rangefinder for a view
    pub fn rangefinder3d(&self) -> ViewRangefinder3d {
        ViewRangefinder3d::from_world_from_view(&self.world_from_view.affine())
    }
}
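// A minimal sketch of the projection conventions documented on
// `clip_from_view`: `clip_from_view[3][3] == 1.0` flags an orthographic
// projection, and column 3, row 2 of an infinite reverse-z perspective
// projection carries the near plane. `col(3)` is used as the indexing-free
// spelling of `clip_from_view[3]`; the projection parameters are arbitrary.
#[cfg(test)]
mod projection_convention_tests {
    use super::*;

    #[test]
    fn orthographic_and_perspective_conventions_hold() {
        let ortho = Mat4::orthographic_rh(-1.0, 1.0, -1.0, 1.0, 0.1, 100.0);
        // `clip_from_view[3][3] == 1.0` for orthographic projections.
        assert_eq!(ortho.col(3).w, 1.0);

        let near = 0.1;
        let persp = Mat4::perspective_infinite_reverse_rh(1.0, 16.0 / 9.0, near);
        assert_ne!(persp.col(3).w, 1.0);
        // `clip_from_view[3][2]` recovers the near plane.
        assert_eq!(persp.col(3).z, near);
    }
}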
/// Configures filmic color grading parameters to adjust the image appearance.
///
/// Color grading is applied just before tonemapping for a given
/// [`Camera`](bevy_camera::Camera) entity, with the sole exception of the
/// `post_saturation` value in [`ColorGradingGlobal`], which is applied after
/// tonemapping.
#[derive(Component, Reflect, Debug, Default, Clone)]
#[reflect(Component, Default, Debug, Clone)]
pub struct ColorGrading {
    /// Filmic color grading values applied to the image as a whole (as opposed
    /// to individual sections, like shadows and highlights).
    pub global: ColorGradingGlobal,

    /// Color grading values that are applied to the darker parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub shadows: ColorGradingSection,

    /// Color grading values that are applied to the parts of the image with
    /// intermediate brightness.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub midtones: ColorGradingSection,

    /// Color grading values that are applied to the lighter parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub highlights: ColorGradingSection,
}

/// Filmic color grading values applied to the image as a whole (as opposed to
/// individual sections, like shadows and highlights).
#[derive(Clone, Debug, Reflect)]
#[reflect(Default, Clone)]
pub struct ColorGradingGlobal {
    /// Exposure value (EV) offset, measured in stops.
    pub exposure: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *x* value.
    ///
    /// Positive values make the colors redder. Negative values make the colors
    /// bluer. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub temperature: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *y* value.
    ///
    /// Positive values make the colors more magenta. Negative values make the
    /// colors greener. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub tint: f32,

    /// An adjustment to the [hue], in radians.
    ///
    /// Adjusting this value changes the perceived colors in the image: red to
    /// yellow to green to blue, etc. It has no effect on the saturation or
    /// brightness of the colors.
    ///
    /// [hue]: https://en.wikipedia.org/wiki/HSL_and_HSV#Formal_derivation
    pub hue: f32,

    /// Saturation adjustment applied after tonemapping.
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709.
    /// Values above 1.0 increase saturation.
    pub post_saturation: f32,

    /// The luminance (brightness) ranges that are considered part of the
    /// "midtones" of the image.
    ///
    /// This affects which [`ColorGradingSection`]s apply to which colors. Note
    /// that the sections smoothly blend into one another, to avoid abrupt
    /// transitions.
    ///
    /// The default value is 0.2 to 0.7.
    pub midtones_range: Range<f32>,
}

/// The [`ColorGrading`] structure, packed into the most efficient form for the
/// GPU.
#[derive(Clone, Copy, Debug, ShaderType)]
pub struct ColorGradingUniform {
    pub balance: Mat3,
    pub saturation: Vec3,
    pub contrast: Vec3,
    pub gamma: Vec3,
    pub gain: Vec3,
    pub lift: Vec3,
    pub midtone_range: Vec2,
    pub exposure: f32,
    pub hue: f32,
    pub post_saturation: f32,
}

/// A section of color grading values that can be selectively applied to
/// shadows, midtones, and highlights.
#[derive(Reflect, Debug, Copy, Clone, PartialEq)]
#[reflect(Clone, PartialEq)]
pub struct ColorGradingSection {
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709.
    /// Values above 1.0 increase saturation.
    pub saturation: f32,

    /// Adjusts the range of colors.
    ///
    /// A value of 1.0 applies no changes. Values below 1.0 move the colors more
    /// toward a neutral gray. Values above 1.0 spread the colors out away from
    /// the neutral gray.
    pub contrast: f32,

    /// A nonlinear luminance adjustment, mainly affecting the high end of the
    /// range.
    ///
    /// This is the *n* exponent in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gamma: f32,

    /// A linear luminance adjustment, mainly affecting the middle part of the
    /// range.
    ///
    /// This is the *s* factor in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gain: f32,

    /// A fixed luminance adjustment, mainly affecting the lower part of the
    /// range.
    ///
    /// This is the *o* term in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub lift: f32,
}
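// A minimal sketch of the ASC CDL formula `out = (i × s + o)ⁿ` referenced
// by the `gamma`, `gain`, and `lift` docs above, with s = gain, o = lift,
// and n = gamma. It assumes `bevy_math::ops::powf` as the `no_std`-friendly
// power function; the input value is arbitrary.
#[cfg(test)]
mod asc_cdl_tests {
    use super::*;

    #[test]
    fn default_section_is_an_identity_cdl() {
        // The default section has s = 1, o = 0, n = 1, leaving input unchanged.
        let section = ColorGradingSection::default();
        let input: f32 = 0.5;
        let out = bevy_math::ops::powf(input * section.gain + section.lift, section.gamma);
        assert!((out - input).abs() < 1e-6);
    }
}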
impl Default for ColorGradingGlobal {
    fn default() -> Self {
        Self {
            exposure: 0.0,
            temperature: 0.0,
            tint: 0.0,
            hue: 0.0,
            post_saturation: 1.0,
            midtones_range: 0.2..0.7,
        }
    }
}

impl Default for ColorGradingSection {
    fn default() -> Self {
        Self {
            saturation: 1.0,
            contrast: 1.0,
            gamma: 1.0,
            gain: 1.0,
            lift: 0.0,
        }
    }
}

impl ColorGrading {
    /// Creates a new [`ColorGrading`] instance in which shadows, midtones, and
    /// highlights all have the same set of color grading values.
    pub fn with_identical_sections(
        global: ColorGradingGlobal,
        section: ColorGradingSection,
    ) -> ColorGrading {
        ColorGrading {
            global,
            highlights: section,
            midtones: section,
            shadows: section,
        }
    }

    /// Returns an iterator that visits the shadows, midtones, and highlights
    /// sections, in that order.
    pub fn all_sections(&self) -> impl Iterator<Item = &ColorGradingSection> {
        [&self.shadows, &self.midtones, &self.highlights].into_iter()
    }

    /// Returns an iterator that visits mutable references to the shadows,
    /// midtones, and highlights sections, in that order.
    pub fn all_sections_mut(&mut self) -> impl Iterator<Item = &mut ColorGradingSection> {
        [&mut self.shadows, &mut self.midtones, &mut self.highlights].into_iter()
    }
}
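// An illustrative usage sketch: build a grading in which all three tonal
// sections share one set of values, then visit them with `all_sections`.
// The saturation value is arbitrary.
#[cfg(test)]
mod color_grading_tests {
    use super::*;

    #[test]
    fn identical_sections_share_values() {
        let section = ColorGradingSection {
            saturation: 1.2,
            ..Default::default()
        };
        let grading =
            ColorGrading::with_identical_sections(ColorGradingGlobal::default(), section);
        assert!(grading.all_sections().all(|s| *s == section));
    }
}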
#[derive(Clone, ShaderType)]
pub struct ViewUniform {
    pub clip_from_world: Mat4,
    pub unjittered_clip_from_world: Mat4,
    pub world_from_clip: Mat4,
    pub world_from_view: Mat4,
    pub view_from_world: Mat4,
    /// Typically a column-major right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0     0 ⎤
    /// ⎢          0  f   0     0 ⎥
    /// ⎢          0  0   0  near ⎥
    /// ⎣          0  0  -1     0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = far - near
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡ 2 / w      0      0   cw / w ⎤
    /// ⎢     0  2 / h      0   ch / h ⎥
    /// ⎢     0      0  1 / d  far / d ⎥
    /// ⎣     0      0      0        1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check if a projection is orthographic
    ///
    /// Glam matrices are column major, so for example getting the near plane of a perspective projection is `clip_from_view[3][2]`
    ///
    /// However, custom projections are also possible.
    pub clip_from_view: Mat4,
    pub view_from_clip: Mat4,
    pub world_position: Vec3,
    pub exposure: f32,
    // viewport(x_origin, y_origin, width, height)
    pub viewport: Vec4,
    pub main_pass_viewport: Vec4,
    /// 6 world-space half spaces (normal: vec3, distance: f32) ordered left, right, top, bottom, near, far.
    /// The normal vectors point towards the interior of the frustum.
    /// A half space contains `p` if `normal.dot(p) + distance > 0.`
    pub frustum: [Vec4; 6],
    pub color_grading: ColorGradingUniform,
    pub mip_bias: f32,
    pub frame_count: u32,
}
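// A minimal sketch of the half-space convention documented on
// `ViewUniform::frustum`: with the normal pointing into the frustum, a
// point `p` is inside the half space when `normal.dot(p) + distance > 0`.
// The plane below is a hypothetical near plane at z = -0.1 for a camera
// at the origin looking down -Z.
#[cfg(test)]
mod frustum_half_space_tests {
    use super::*;

    #[test]
    fn half_space_containment() {
        // (normal: vec3, distance: f32) packed as a Vec4, as in the uniform.
        let half_space = Vec4::new(0.0, 0.0, -1.0, -0.1);
        let (normal, distance) = (half_space.truncate(), half_space.w);

        let inside = vec3(0.0, 0.0, -1.0);
        let outside = vec3(0.0, 0.0, 1.0);
        assert!(normal.dot(inside) + distance > 0.0);
        assert!(normal.dot(outside) + distance <= 0.0);
    }
}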
#[derive(Resource)]
pub struct ViewUniforms {
    pub uniforms: DynamicUniformBuffer<ViewUniform>,
}

impl FromWorld for ViewUniforms {
    fn from_world(world: &mut World) -> Self {
        let mut uniforms = DynamicUniformBuffer::default();
        uniforms.set_label(Some("view_uniforms_buffer"));

        let render_device = world.resource::<RenderDevice>();
        if render_device.limits().max_storage_buffers_per_shader_stage > 0 {
            uniforms.add_usages(BufferUsages::STORAGE);
        }

        Self { uniforms }
    }
}

#[derive(Component)]
pub struct ViewUniformOffset {
    pub offset: u32,
}

#[derive(Component, Clone)]
pub struct ViewTarget {
    main_textures: MainTargetTextures,
    main_texture_format: TextureFormat,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
    out_texture: OutputColorAttachment,
}

/// Contains the [`OutputColorAttachment`] used for each target present on any view in the
/// current frame, after being prepared by [`prepare_view_attachments`]. Users that want to
/// override the default output color attachment for a specific target can do so by adding an
/// [`OutputColorAttachment`] to this resource before [`prepare_view_targets`] is called.
#[derive(Resource, Default, Deref, DerefMut)]
pub struct ViewTargetAttachments(HashMap<NormalizedRenderTarget, OutputColorAttachment>);

pub struct PostProcessWrite<'a> {
    pub source: &'a TextureView,
    pub source_texture: &'a Texture,
    pub destination: &'a TextureView,
    pub destination_texture: &'a Texture,
}

impl From<ColorGrading> for ColorGradingUniform {
    fn from(component: ColorGrading) -> Self {
        // Compute the balance matrix that will be used to apply the white
        // balance adjustment to an RGB color. Our general approach will be to
        // convert both the color and the developer-supplied white point to the
        // LMS color space, apply the correction there, and then convert back.
        //
        // First, we start with the CIE 1931 *xy* values of the standard D65
        // illuminant:
        // <https://en.wikipedia.org/wiki/Standard_illuminant#D65_values>
        //
        // We then adjust them based on the developer's requested white balance.
        let white_point_xy = D65_XY + vec2(-component.global.temperature, component.global.tint);

        // Convert the white point from CIE 1931 *xy* to LMS. First, we convert to XYZ:
        //
        //               Y       Y
        //     Y = 1 X = ─ x Z = ─ (1 - x - y)
        //               y       y
        //
        // Then we convert from XYZ to LMS color space, using the CAM16 matrix
        // from <https://en.wikipedia.org/wiki/LMS_color_space#Later_CIECAMs>:
        //
        //     ⎡ L ⎤   ⎡  0.401  0.650 -0.051 ⎤ ⎡ X ⎤
        //     ⎢ M ⎥ = ⎢ -0.250  1.204  0.046 ⎥ ⎢ Y ⎥
        //     ⎣ S ⎦   ⎣ -0.002  0.049  0.953 ⎦ ⎣ Z ⎦
        //
        // The following formula is just a simplification of the above.

        let white_point_lms = vec3(0.701634, 1.15856, -0.904175)
            + (vec3(-0.051461, 0.045854, 0.953127)
                + vec3(0.452749, -0.296122, -0.955206) * white_point_xy.x)
                / white_point_xy.y;

        // Now that we're in LMS space, perform the white point scaling.
        let white_point_adjustment = Mat3::from_diagonal(D65_LMS / white_point_lms);

        // Finally, combine the RGB → LMS → corrected LMS → corrected RGB
        // pipeline into a single 3×3 matrix.
        let balance = LMS_TO_RGB * white_point_adjustment * RGB_TO_LMS;

        Self {
            balance,
            saturation: vec3(
                component.shadows.saturation,
                component.midtones.saturation,
                component.highlights.saturation,
            ),
            contrast: vec3(
                component.shadows.contrast,
                component.midtones.contrast,
                component.highlights.contrast,
            ),
            gamma: vec3(
                component.shadows.gamma,
                component.midtones.gamma,
                component.highlights.gamma,
            ),
            gain: vec3(
                component.shadows.gain,
                component.midtones.gain,
                component.highlights.gain,
            ),
            lift: vec3(
                component.shadows.lift,
                component.midtones.lift,
                component.highlights.lift,
            ),
            midtone_range: vec2(
                component.global.midtones_range.start,
                component.global.midtones_range.end,
            ),
            exposure: component.global.exposure,
            hue: component.global.hue,
            post_saturation: component.global.post_saturation,
        }
    }
}
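// A minimal verification sketch of the "simplification" mentioned in the
// comments above: the one-expression xy → LMS formula should agree with
// the explicit xy → XYZ → CAM16 route. Both are evaluated at the D65 white
// point and also checked against the `D65_LMS` constant; tolerances are
// loose because the quoted CAM16 matrix is rounded to three decimals.
#[cfg(test)]
mod white_point_lms_tests {
    use super::*;

    #[test]
    fn simplified_formula_matches_explicit_conversion() {
        let (x, y) = (D65_XY.x, D65_XY.y);

        // Explicit route: xy → XYZ (with Y = 1), then the CAM16 matrix
        // (written column-major).
        let xyz = vec3(x / y, 1.0, (1.0 - x - y) / y);
        let lms_from_xyz = mat3(
            vec3(0.401, -0.250, -0.002),
            vec3(0.650, 1.204, 0.049),
            vec3(-0.051, 0.046, 0.953),
        );
        let explicit = lms_from_xyz * xyz;

        // Simplified route, exactly as written in the `From` impl above.
        let simplified = vec3(0.701634, 1.15856, -0.904175)
            + (vec3(-0.051461, 0.045854, 0.953127)
                + vec3(0.452749, -0.296122, -0.955206) * x)
                / y;

        assert!(explicit.abs_diff_eq(simplified, 1e-3));
        assert!(simplified.abs_diff_eq(D65_LMS, 1e-3));
    }
}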
/// Add this component to a camera to disable *indirect mode*.
///
/// Indirect mode, automatically enabled on supported hardware, allows Bevy to
/// offload transform and cull operations to the GPU, reducing CPU overhead.
/// Doing this, however, reduces the amount of control that your app has over
/// instancing decisions. In certain circumstances, you may want to disable
/// indirect drawing so that your app can manually instance meshes as it sees
/// fit. See the `custom_shader_instancing` example.
///
/// The vast majority of applications will not need to use this component, as it
/// generally reduces rendering performance.
///
/// Note: This component should only be added when initially spawning a camera.
/// Adding or removing it after spawn can result in unspecified behavior.
#[derive(Component, Default)]
pub struct NoIndirectDrawing;

impl ViewTarget {
    pub const TEXTURE_FORMAT_HDR: TextureFormat = TextureFormat::Rgba16Float;

    /// Retrieve this target's main texture's color attachment.
    pub fn get_color_attachment(&self) -> RenderPassColorAttachment<'_> {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            self.main_textures.a.get_attachment()
        } else {
            self.main_textures.b.get_attachment()
        }
    }

    /// Retrieve this target's "unsampled" main texture's color attachment.
    pub fn get_unsampled_color_attachment(&self) -> RenderPassColorAttachment<'_> {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            self.main_textures.a.get_unsampled_attachment()
        } else {
            self.main_textures.b.get_unsampled_attachment()
        }
    }

    /// The "main" unsampled texture.
    pub fn main_texture(&self) -> &Texture {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.a.texture.texture
        } else {
            &self.main_textures.b.texture.texture
        }
    }

    /// The _other_ "main" unsampled texture.
    /// In most cases you should use [`Self::main_texture`] instead of this.
    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
    ///
    /// A use case for this is to be able to prepare a bind group for all main textures
    /// ahead of time.
    pub fn main_texture_other(&self) -> &Texture {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.b.texture.texture
        } else {
            &self.main_textures.a.texture.texture
        }
    }

    /// The "main" unsampled texture view.
    pub fn main_texture_view(&self) -> &TextureView {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.a.texture.default_view
        } else {
            &self.main_textures.b.texture.default_view
        }
    }

    /// The _other_ "main" unsampled texture view.
    /// In most cases you should use [`Self::main_texture_view`] instead of this.
    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
    ///
    /// A use case for this is to be able to prepare a bind group for all main textures
    /// ahead of time.
    pub fn main_texture_other_view(&self) -> &TextureView {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.b.texture.default_view
        } else {
            &self.main_textures.a.texture.default_view
        }
    }

    /// The "main" sampled texture.
    pub fn sampled_main_texture(&self) -> Option<&Texture> {
        self.main_textures
            .a
            .resolve_target
            .as_ref()
            .map(|sampled| &sampled.texture)
    }

    /// The "main" sampled texture view.
    pub fn sampled_main_texture_view(&self) -> Option<&TextureView> {
        self.main_textures
            .a
            .resolve_target
            .as_ref()
            .map(|sampled| &sampled.default_view)
    }

    #[inline]
    pub fn main_texture_format(&self) -> TextureFormat {
        self.main_texture_format
    }

    /// Returns `true` if and only if the main texture is [`Self::TEXTURE_FORMAT_HDR`]
    #[inline]
    pub fn is_hdr(&self) -> bool {
        self.main_texture_format == ViewTarget::TEXTURE_FORMAT_HDR
    }

    /// The final texture this view will render to.
    #[inline]
    pub fn out_texture(&self) -> &TextureView {
        &self.out_texture.view
    }

    pub fn out_texture_color_attachment(
        &self,
        clear_color: Option<LinearRgba>,
    ) -> RenderPassColorAttachment<'_> {
        self.out_texture.get_attachment(clear_color)
    }

    /// Whether the final texture this view will render to needs to be presented.
    pub fn needs_present(&self) -> bool {
        self.out_texture.needs_present()
    }

    /// The format of the final texture this view will render to
    #[inline]
    pub fn out_texture_view_format(&self) -> TextureFormat {
        self.out_texture.view_format
    }

    /// This will start a new "post process write", which assumes that the caller
    /// will write the [`PostProcessWrite`]'s `source` to the `destination`.
    ///
    /// `source` is the "current" main texture. This will internally flip this
    /// [`ViewTarget`]'s main texture to the `destination` texture, so the caller
    /// _must_ ensure `source` is copied to `destination`, with or without modifications.
    /// Failing to do so will cause the current main texture information to be lost.
    pub fn post_process_write(&self) -> PostProcessWrite<'_> {
        let old_is_a_main_texture = self.main_texture.fetch_xor(1, Ordering::SeqCst);
        // If the old main texture is `a`, then the post processing must write from `a` to `b`.
        if old_is_a_main_texture == 0 {
            self.main_textures.b.mark_as_cleared();
            PostProcessWrite {
                source: &self.main_textures.a.texture.default_view,
                source_texture: &self.main_textures.a.texture.texture,
                destination: &self.main_textures.b.texture.default_view,
                destination_texture: &self.main_textures.b.texture.texture,
            }
        } else {
            self.main_textures.a.mark_as_cleared();
            PostProcessWrite {
                source: &self.main_textures.b.texture.default_view,
                source_texture: &self.main_textures.b.texture.texture,
                destination: &self.main_textures.a.texture.default_view,
                destination_texture: &self.main_textures.a.texture.texture,
            }
        }
    }
}
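// A minimal sketch of the atomic ping-pong scheme behind
// `post_process_write`: `fetch_xor(1)` flips the shared index between
// texture `a` (0) and texture `b` (1) and returns which one was current,
// so each write reads from the previous write's destination.
#[cfg(test)]
mod post_process_flip_tests {
    use super::*;

    #[test]
    fn fetch_xor_alternates_the_main_texture() {
        let main_texture = Arc::new(AtomicUsize::new(0));
        // First write: `a` (0) was current, so it reads `a` and writes `b`.
        assert_eq!(main_texture.fetch_xor(1, Ordering::SeqCst), 0);
        // Second write: `b` (1) was current, so it reads `b` and writes `a`.
        assert_eq!(main_texture.fetch_xor(1, Ordering::SeqCst), 1);
        assert_eq!(main_texture.load(Ordering::SeqCst), 0);
    }
}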
#[derive(Component)]
pub struct ViewDepthTexture {
    pub texture: Texture,
    attachment: DepthAttachment,
}

impl ViewDepthTexture {
    pub fn new(texture: CachedTexture, clear_value: Option<f32>) -> Self {
        Self {
            texture: texture.texture,
            attachment: DepthAttachment::new(texture.default_view, clear_value),
        }
    }

    pub fn get_attachment(&self, store: StoreOp) -> RenderPassDepthStencilAttachment<'_> {
        self.attachment.get_attachment(store)
    }

    pub fn view(&self) -> &TextureView {
        &self.attachment.view
    }
}

pub fn prepare_view_uniforms(
    mut commands: Commands,
    render_device: Res<RenderDevice>,
    render_queue: Res<RenderQueue>,
    mut view_uniforms: ResMut<ViewUniforms>,
    views: Query<(
        Entity,
        Option<&ExtractedCamera>,
        &ExtractedView,
        Option<&Frustum>,
        Option<&TemporalJitter>,
        Option<&MipBias>,
        Option<&MainPassResolutionOverride>,
    )>,
    frame_count: Res<FrameCount>,
) {
    let view_iter = views.iter();
    let view_count = view_iter.len();
    let Some(mut writer) =
        view_uniforms
            .uniforms
            .get_writer(view_count, &render_device, &render_queue)
    else {
        return;
    };
    for (
        entity,
        extracted_camera,
        extracted_view,
        frustum,
        temporal_jitter,
        mip_bias,
        resolution_override,
    ) in &views
    {
        let viewport = extracted_view.viewport.as_vec4();
        let mut main_pass_viewport = viewport;
        if let Some(resolution_override) = resolution_override {
            main_pass_viewport.z = resolution_override.0.x as f32;
            main_pass_viewport.w = resolution_override.0.y as f32;
        }

        let unjittered_projection = extracted_view.clip_from_view;
        let mut clip_from_view = unjittered_projection;

        if let Some(temporal_jitter) = temporal_jitter {
            temporal_jitter.jitter_projection(&mut clip_from_view, main_pass_viewport.zw());
        }

        let view_from_clip = clip_from_view.inverse();
        let world_from_view = extracted_view.world_from_view.to_matrix();
        let view_from_world = world_from_view.inverse();

        let clip_from_world = if temporal_jitter.is_some() {
            clip_from_view * view_from_world
        } else {
            extracted_view
                .clip_from_world
                .unwrap_or_else(|| clip_from_view * view_from_world)
        };

        // Map Frustum type to shader array<vec4<f32>, 6>
        let frustum = frustum
            .map(|frustum| frustum.half_spaces.map(|h| h.normal_d()))
            .unwrap_or([Vec4::ZERO; 6]);

        let view_uniforms = ViewUniformOffset {
            offset: writer.write(&ViewUniform {
                clip_from_world,
                unjittered_clip_from_world: unjittered_projection * view_from_world,
                world_from_clip: world_from_view * view_from_clip,
                world_from_view,
                view_from_world,
                clip_from_view,
                view_from_clip,
                world_position: extracted_view.world_from_view.translation(),
                exposure: extracted_camera
                    .map(|c| c.exposure)
                    .unwrap_or_else(|| Exposure::default().exposure()),
                viewport,
                main_pass_viewport,
                frustum,
                color_grading: extracted_view.color_grading.clone().into(),
                mip_bias: mip_bias.unwrap_or(&MipBias(0.0)).0,
                frame_count: frame_count.0,
            }),
        };

        commands.entity(entity).insert(view_uniforms);
    }
}
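// A minimal sketch of the matrix algebra `prepare_view_uniforms` performs:
// `clip_from_world` is the projection composed with the inverse of the
// camera's world transform, so a world-space point lands in view space and
// then in clip space. The transform and projection below are arbitrary.
#[cfg(test)]
mod view_matrix_tests {
    use super::*;

    #[test]
    fn clip_from_world_is_projection_times_inverse_world_transform() {
        let world_from_view = GlobalTransform::from_translation(vec3(1.0, 2.0, 3.0)).to_matrix();
        let view_from_world = world_from_view.inverse();
        let clip_from_view = Mat4::perspective_infinite_reverse_rh(1.0, 1.0, 0.1);
        let clip_from_world = clip_from_view * view_from_world;

        // The camera's own position maps through `view_from_world` to the
        // view-space origin, so both routes below agree.
        let camera_position = world_from_view * Vec4::new(0.0, 0.0, 0.0, 1.0);
        let via_world = clip_from_world * camera_position;
        let via_view = clip_from_view * Vec4::new(0.0, 0.0, 0.0, 1.0);
        assert!(via_world.abs_diff_eq(via_view, 1e-5));
    }
}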
#[derive(Clone)]
struct MainTargetTextures {
    a: ColorAttachment,
    b: ColorAttachment,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
}

/// Prepares the view target [`OutputColorAttachment`] for each view in the current frame.
pub fn prepare_view_attachments(
    windows: Res<ExtractedWindows>,
    images: Res<RenderAssets<GpuImage>>,
    manual_texture_views: Res<ManualTextureViews>,
    cameras: Query<&ExtractedCamera>,
    mut view_target_attachments: ResMut<ViewTargetAttachments>,
) {
    for camera in cameras.iter() {
        let Some(target) = &camera.target else {
            continue;
        };

        match view_target_attachments.entry(target.clone()) {
            Entry::Occupied(_) => {}
            Entry::Vacant(entry) => {
                let Some(attachment) = target
                    .get_texture_view(&windows, &images, &manual_texture_views)
                    .cloned()
                    .zip(target.get_texture_view_format(&windows, &images, &manual_texture_views))
                    .map(|(view, format)| OutputColorAttachment::new(view.clone(), format))
                else {
                    continue;
                };
                entry.insert(attachment);
            }
        };
    }
}

/// Clears the view target [`OutputColorAttachment`]s.
pub fn clear_view_attachments(mut view_target_attachments: ResMut<ViewTargetAttachments>) {
    view_target_attachments.clear();
}

pub fn cleanup_view_targets_for_resize(
    mut commands: Commands,
    windows: Res<ExtractedWindows>,
    cameras: Query<(Entity, &ExtractedCamera), With<ViewTarget>>,
) {
    for (entity, camera) in &cameras {
        if let Some(NormalizedRenderTarget::Window(window_ref)) = &camera.target
            && let Some(window) = windows.get(&window_ref.entity())
            && (window.size_changed || window.present_mode_changed)
        {
            commands.entity(entity).remove::<ViewTarget>();
        }
    }
}
pub fn prepare_view_targets(
    mut commands: Commands,
    clear_color_global: Res<ClearColor>,
    render_device: Res<RenderDevice>,
    mut texture_cache: ResMut<TextureCache>,
    cameras: Query<(
        Entity,
        &ExtractedCamera,
        &ExtractedView,
        &CameraMainTextureUsages,
        &Msaa,
    )>,
    view_target_attachments: Res<ViewTargetAttachments>,
) {
    let mut textures = <HashMap<_, _>>::default();
    for (entity, camera, view, texture_usage, msaa) in cameras.iter() {
        let (Some(target_size), Some(out_attachment)) = (
            camera.physical_target_size,
            camera
                .target
                .as_ref()
                .and_then(|target| view_target_attachments.get(target)),
        ) else {
            // If we can't find an output attachment we need to remove the ViewTarget
            // component to make sure the camera doesn't try rendering to an invalid
            // output attachment.
            commands.entity(entity).try_remove::<ViewTarget>();

            continue;
        };

        let main_texture_format = if view.hdr {
            ViewTarget::TEXTURE_FORMAT_HDR
        } else {
            TextureFormat::bevy_default()
        };

        let clear_color = match camera.clear_color {
            ClearColorConfig::Custom(color) => Some(color),
            ClearColorConfig::None => None,
            _ => Some(clear_color_global.0),
        };

        let (a, b, sampled, main_texture) = textures
            .entry((camera.target.clone(), texture_usage.0, view.hdr, msaa))
            .or_insert_with(|| {
                let descriptor = TextureDescriptor {
                    label: None,
                    size: target_size.to_extents(),
                    mip_level_count: 1,
                    sample_count: 1,
                    dimension: TextureDimension::D2,
                    format: main_texture_format,
                    usage: texture_usage.0,
                    view_formats: match main_texture_format {
                        TextureFormat::Bgra8Unorm => &[TextureFormat::Bgra8UnormSrgb],
                        TextureFormat::Rgba8Unorm => &[TextureFormat::Rgba8UnormSrgb],
                        _ => &[],
                    },
                };
                let a = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_a"),
                        ..descriptor
                    },
                );
                let b = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_b"),
                        ..descriptor
                    },
                );
                let sampled = if msaa.samples() > 1 {
                    let sampled = texture_cache.get(
                        &render_device,
                        TextureDescriptor {
                            label: Some("main_texture_sampled"),
                            size: target_size.to_extents(),
                            mip_level_count: 1,
                            sample_count: msaa.samples(),
                            dimension: TextureDimension::D2,
                            format: main_texture_format,
                            usage: TextureUsages::RENDER_ATTACHMENT,
                            view_formats: descriptor.view_formats,
                        },
                    );
                    Some(sampled)
                } else {
                    None
                };
                let main_texture = Arc::new(AtomicUsize::new(0));
                (a, b, sampled, main_texture)
            });

        let converted_clear_color = clear_color.map(Into::into);

        let main_textures = MainTargetTextures {
            a: ColorAttachment::new(a.clone(), sampled.clone(), None, converted_clear_color),
            b: ColorAttachment::new(b.clone(), sampled.clone(), None, converted_clear_color),
            main_texture: main_texture.clone(),
        };

        commands.entity(entity).insert(ViewTarget {
            main_texture: main_textures.main_texture.clone(),
            main_textures,
            main_texture_format,
            out_texture: out_attachment.clone(),
        });
    }
}
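// A minimal sketch of the clear-color fallback `prepare_view_targets`
// applies above: a per-camera custom color wins, `ClearColorConfig::None`
// disables clearing, and anything else falls back to the global
// `ClearColor` resource. The `resolve` helper is hypothetical and simply
// mirrors the `match` in the system.
#[cfg(test)]
mod clear_color_fallback_tests {
    use super::*;
    use bevy_color::Color;

    fn resolve(config: ClearColorConfig, global: &ClearColor) -> Option<Color> {
        match config {
            ClearColorConfig::Custom(color) => Some(color),
            ClearColorConfig::None => None,
            _ => Some(global.0),
        }
    }

    #[test]
    fn per_camera_color_wins_then_global() {
        let global = ClearColor(Color::BLACK);
        assert_eq!(
            resolve(ClearColorConfig::Custom(Color::WHITE), &global),
            Some(Color::WHITE)
        );
        assert_eq!(resolve(ClearColorConfig::None, &global), None);
        assert_eq!(
            resolve(ClearColorConfig::Default, &global),
            Some(Color::BLACK)
        );
    }
}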