// Path: blob/main/examples/shader_advanced/custom_post_processing.rs
//! This example shows how to create a custom post-processing effect that runs after the main pass1//! and reads the texture generated by the main pass.2//!3//! The example shader is a very simple implementation of chromatic aberration.4//! To adapt this example for 2D, replace all instances of 3D structures (such as `Core3d`, etc.) with their corresponding 2D counterparts.5//!6//! This is a fairly low level example and assumes some familiarity with rendering concepts and wgpu.78use bevy::{9core_pipeline::{schedule::Core3d, Core3dSystems, FullscreenShader},10prelude::*,11render::{12extract_component::{13ComponentUniforms, DynamicUniformIndex, ExtractComponent, ExtractComponentPlugin,14UniformComponentPlugin,15},16render_resource::{17binding_types::{sampler, texture_2d, uniform_buffer},18*,19},20renderer::{RenderContext, RenderDevice, ViewQuery},21view::ViewTarget,22RenderApp, RenderStartup,23},24};2526/// This example uses a shader source file from the assets subdirectory27const SHADER_ASSET_PATH: &str = "shaders/post_processing.wgsl";2829fn main() {30App::new()31.add_plugins((DefaultPlugins, PostProcessPlugin))32.add_systems(Startup, setup)33.add_systems(Update, (rotate, update_settings))34.run();35}3637/// It is generally encouraged to set up post processing effects as a plugin38struct PostProcessPlugin;3940impl Plugin for PostProcessPlugin {41fn build(&self, app: &mut App) {42app.add_plugins((43// The settings will be a component that lives in the main world but will44// be extracted to the render world every frame.45// This makes it possible to control the effect from the main world.46// This plugin will take care of extracting it automatically.47// It's important to derive [`ExtractComponent`] on [`PostProcessSettings`]48// for this plugin to work correctly.49ExtractComponentPlugin::<PostProcessSettings>::default(),50// The settings will also be the data used in the shader.51// This plugin will prepare the component for the GPU by creating a uniform buffer52// 
and writing the data to that buffer every frame.53UniformComponentPlugin::<PostProcessSettings>::default(),54));5556// We need to get the render app from the main app57let Some(render_app) = app.get_sub_app_mut(RenderApp) else {58return;59};6061render_app.add_systems(RenderStartup, init_post_process_pipeline);62render_app.add_systems(63Core3d,64post_process_system.in_set(Core3dSystems::PostProcess),65);66}67}6869#[derive(Default)]70struct PostProcessBindGroupCache {71cached: Option<(TextureViewId, BindGroup)>,72}7374fn post_process_system(75view: ViewQuery<(76&ViewTarget,77&PostProcessSettings,78&DynamicUniformIndex<PostProcessSettings>,79)>,80post_process_pipeline: Option<Res<PostProcessPipeline>>,81pipeline_cache: Res<PipelineCache>,82settings_uniforms: Res<ComponentUniforms<PostProcessSettings>>,83mut cache: Local<PostProcessBindGroupCache>,84mut ctx: RenderContext,85) {86let Some(post_process_pipeline) = post_process_pipeline else {87return;88};8990let (view_target, _post_process_settings, settings_index) = view.into_inner();9192let Some(pipeline) = pipeline_cache.get_render_pipeline(post_process_pipeline.pipeline_id)93else {94return;95};9697let Some(settings_binding) = settings_uniforms.uniforms().binding() else {98return;99};100101// This will start a new "post process write", obtaining two texture102// views from the view target - a `source` and a `destination`.103// `source` is the "current" main texture and you _must_ write into104// `destination` because calling `post_process_write()` on the105// [`ViewTarget`] will internally flip the [`ViewTarget`]'s main106// texture to the `destination` texture. 
Failing to do so will cause107// the current main texture information to be lost.108let post_process = view_target.post_process_write();109110let bind_group = match &mut cache.cached {111Some((texture_id, bind_group)) if post_process.source.id() == *texture_id => bind_group,112cached => {113// The bind_group gets created each frame.114//115// Normally, you would create a bind_group in the Queue set,116// but this doesn't work with the post_process_write().117// The reason it doesn't work is because each post_process_write will alternate the source/destination.118// The only way to have the correct source/destination for the bind_group119// is to make sure you get it during the node execution.120let bind_group = ctx.render_device().create_bind_group(121"post_process_bind_group",122&pipeline_cache.get_bind_group_layout(&post_process_pipeline.layout),123// It's important for this to match the BindGroupLayout defined in the PostProcessPipeline124&BindGroupEntries::sequential((125// Make sure to use the source view126post_process.source,127// Use the sampler created for the pipeline128&post_process_pipeline.sampler,129// Set the settings binding130settings_binding.clone(),131)),132);133134let (_, bind_group) = cached.insert((post_process.source.id(), bind_group));135bind_group136}137};138139let mut render_pass = ctx140.command_encoder()141.begin_render_pass(&RenderPassDescriptor {142label: Some("post_process_pass"),143color_attachments: &[Some(RenderPassColorAttachment {144// We need to specify the post process destination view here145// to make sure we write to the appropriate texture.146view: post_process.destination,147depth_slice: None,148resolve_target: None,149ops: Operations::default(),150})],151depth_stencil_attachment: None,152timestamp_writes: None,153occlusion_query_set: None,154multiview_mask: None,155});156157render_pass.set_pipeline(pipeline);158// By passing in the index of the post process settings on this view, we ensure159// that in the event that 
multiple settings were sent to the GPU (as would be the160// case with multiple cameras), we use the correct one.161render_pass.set_bind_group(0, bind_group, &[settings_index.index()]);162render_pass.draw(0..3, 0..1);163}164165// This contains global data used by the render pipeline. This will be created once on startup.166#[derive(Resource)]167struct PostProcessPipeline {168layout: BindGroupLayoutDescriptor,169sampler: Sampler,170pipeline_id: CachedRenderPipelineId,171}172173fn init_post_process_pipeline(174mut commands: Commands,175render_device: Res<RenderDevice>,176asset_server: Res<AssetServer>,177fullscreen_shader: Res<FullscreenShader>,178pipeline_cache: Res<PipelineCache>,179) {180// We need to define the bind group layout used for our pipeline181let layout = BindGroupLayoutDescriptor::new(182"post_process_bind_group_layout",183&BindGroupLayoutEntries::sequential(184// The layout entries will only be visible in the fragment stage185ShaderStages::FRAGMENT,186(187// The screen texture188texture_2d(TextureSampleType::Float { filterable: true }),189// The sampler that will be used to sample the screen texture190sampler(SamplerBindingType::Filtering),191// The settings uniform that will control the effect192uniform_buffer::<PostProcessSettings>(true),193),194),195);196// We can create the sampler here since it won't change at runtime and doesn't depend on the view197let sampler = render_device.create_sampler(&SamplerDescriptor::default());198199// Get the shader handle200let shader = asset_server.load(SHADER_ASSET_PATH);201// This will setup a fullscreen triangle for the vertex state.202let vertex_state = fullscreen_shader.to_vertex_state();203let pipeline_id = pipeline_cache204// This will add the pipeline to the cache and queue its creation205.queue_render_pipeline(RenderPipelineDescriptor {206label: Some("post_process_pipeline".into()),207layout: vec![layout.clone()],208vertex: vertex_state,209fragment: Some(FragmentState {210shader,211// Make sure this 
matches the entry point of your shader.212// It can be anything as long as it matches here and in the shader.213// Use `format: ViewTarget::TEXTURE_FORMAT_HDR` for HDR cameras.214targets: vec![Some(ColorTargetState {215format: TextureFormat::bevy_default(),216blend: None,217write_mask: ColorWrites::ALL,218})],219..default()220}),221..default()222});223commands.insert_resource(PostProcessPipeline {224layout,225sampler,226pipeline_id,227});228}229230// This is the component that will get passed to the shader231#[derive(Component, Default, Clone, Copy, ExtractComponent, ShaderType)]232struct PostProcessSettings {233intensity: f32,234// WebGL2 structs must be 16 byte aligned.235#[cfg(feature = "webgl2")]236_webgl2_padding: Vec3,237}238239/// Set up a simple 3D scene240fn setup(241mut commands: Commands,242mut meshes: ResMut<Assets<Mesh>>,243mut materials: ResMut<Assets<StandardMaterial>>,244) {245// camera246// Make sure you change the TextureFormat of the ColorTargetState247// if you enable Hdr directly or through features like Bloom.248commands.spawn((249Camera3d::default(),250Transform::from_translation(Vec3::new(0.0, 0.0, 5.0)).looking_at(Vec3::default(), Vec3::Y),251Camera {252clear_color: Color::WHITE.into(),253..default()254},255// Add the setting to the camera.256// This component is also used to determine on which camera to run the post processing effect.257PostProcessSettings {258intensity: 0.02,259..default()260},261));262263// cube264commands.spawn((265Mesh3d(meshes.add(Cuboid::default())),266MeshMaterial3d(materials.add(Color::srgb(0.8, 0.7, 0.6))),267Transform::from_xyz(0.0, 0.5, 0.0),268Rotates,269));270// light271commands.spawn(DirectionalLight {272illuminance: 1_000.,273..default()274});275}276277#[derive(Component)]278struct Rotates;279280/// Rotates any entity around the x and y axis281fn rotate(time: Res<Time>, mut query: Query<&mut Transform, With<Rotates>>) {282for mut transform in &mut query {283transform.rotate_x(0.55 * 
time.delta_secs());284transform.rotate_z(0.15 * time.delta_secs());285}286}287288// Change the intensity over time to show that the effect is controlled from the main world289fn update_settings(mut settings: Query<&mut PostProcessSettings>, time: Res<Time>) {290for mut setting in &mut settings {291let mut intensity = ops::sin(time.elapsed_secs());292// Make it loop periodically293intensity = ops::sin(intensity);294// Remap it to 0..1 because the intensity can't be negative295intensity = intensity * 0.5 + 0.5;296// Scale it to a more reasonable level297intensity *= 0.015;298299// Set the intensity.300// This will then be extracted to the render world and uploaded to the GPU automatically by the [`UniformComponentPlugin`]301setting.intensity = intensity;302}303}304305306