
spec_v2: Auto Exposure #20125

Open · wants to merge 13 commits into main
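
This PR ports the auto exposure and bloom downsampling pipelines to the spec_v2 specialization API: the per-pipeline `SpecializedComputePipelines<T>` / `SpecializedRenderPipelines<T>` resources go away, each pipeline resource owns a `SpecializedCache` built from a base descriptor plus a `Specializer`, keys derive `SpecializerKey`, and the queue/prepare systems become fallible, returning `Result<(), BevyError>`. The sketch below condenses that pattern as it appears in the diffs; the import paths and the `MyPass` / `MySpecializer` / `MyPipeline` names are illustrative assumptions, not code from this PR.

```rust
// A condensed sketch of the spec_v2 pattern used in this PR. Assumes the
// spec_v2 types are exported from bevy_render::render_resource; adjust the
// paths as needed for your Bevy version.
use bevy_ecs::{error::BevyError, prelude::*};
use bevy_render::render_resource::{
    CachedComputePipelineId, Canonical, ComputePipeline, ComputePipelineDescriptor,
    PipelineCache, SpecializedCache, Specializer, SpecializerKey,
};

// Keys derive `SpecializerKey` in addition to the usual Hash/Eq/Clone traits.
#[derive(PartialEq, Eq, Hash, Clone, SpecializerKey)]
enum MyPass {
    Histogram,
    Average,
}

struct MySpecializer;

impl Specializer<ComputePipeline> for MySpecializer {
    type Key = MyPass;

    // Rather than building a full descriptor per key, the specializer mutates
    // a shared base descriptor and returns the canonical key.
    fn specialize(
        &self,
        key: Self::Key,
        descriptor: &mut ComputePipelineDescriptor,
    ) -> Result<Canonical<Self::Key>, BevyError> {
        descriptor.entry_point = Some(match key {
            MyPass::Histogram => "compute_histogram".into(),
            MyPass::Average => "compute_average".into(),
        });
        Ok(key)
    }
}

// The pipeline resource owns its cache, created once (e.g. in `FromWorld`) via
// `SpecializedCache::new(MySpecializer, None, base_descriptor)`; the plugin no
// longer initializes a separate `SpecializedComputePipelines<T>` resource.
#[derive(Resource)]
struct MyPipeline {
    variants: SpecializedCache<ComputePipeline, MySpecializer>,
}

// Queue systems take the pipeline mutably, specialize through its cache, and
// are fallible: `?` propagates specialization errors as `BevyError`.
fn queue_my_pipelines(
    pipeline_cache: Res<PipelineCache>,
    mut pipeline: ResMut<MyPipeline>,
) -> Result<(), BevyError> {
    let _id: CachedComputePipelineId = pipeline
        .variants
        .specialize(&pipeline_cache, MyPass::Histogram)?;
    Ok(())
}
```

Render pipelines follow the same shape; there the specializer edits the fragment state in place via `descriptor.fragment_mut()?`, as the bloom downsampling changes below show.
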
21 changes: 10 additions & 11 deletions crates/bevy_core_pipeline/src/auto_exposure/mod.rs
@@ -5,9 +5,7 @@ use bevy_render::{
extract_component::ExtractComponentPlugin,
render_asset::RenderAssetPlugin,
render_graph::RenderGraphExt,
render_resource::{
Buffer, BufferDescriptor, BufferUsages, PipelineCache, SpecializedComputePipelines,
},
render_resource::{Buffer, BufferDescriptor, BufferUsages, PipelineCache},
renderer::RenderDevice,
ExtractSchedule, Render, RenderApp, RenderSystems,
};
@@ -59,7 +57,6 @@ impl Plugin for AutoExposurePlugin {
};

render_app
.init_resource::<SpecializedComputePipelines<AutoExposurePipeline>>()
.init_resource::<AutoExposureBuffers>()
.add_systems(ExtractSchedule, extract_buffers)
.add_systems(
@@ -104,15 +101,16 @@ impl FromWorld for AutoExposureResources {
fn queue_view_auto_exposure_pipelines(
mut commands: Commands,
pipeline_cache: Res<PipelineCache>,
mut compute_pipelines: ResMut<SpecializedComputePipelines<AutoExposurePipeline>>,
pipeline: Res<AutoExposurePipeline>,
mut auto_exposure_pipeline: ResMut<AutoExposurePipeline>,
view_targets: Query<(Entity, &AutoExposure)>,
) {
) -> Result<(), BevyError> {
for (entity, auto_exposure) in view_targets.iter() {
let histogram_pipeline =
compute_pipelines.specialize(&pipeline_cache, &pipeline, AutoExposurePass::Histogram);
let average_pipeline =
compute_pipelines.specialize(&pipeline_cache, &pipeline, AutoExposurePass::Average);
let histogram_pipeline = auto_exposure_pipeline
.variants
.specialize(&pipeline_cache, AutoExposurePass::Histogram)?;
let average_pipeline = auto_exposure_pipeline
.variants
.specialize(&pipeline_cache, AutoExposurePass::Average)?;

commands.entity(entity).insert(ViewAutoExposurePipeline {
histogram_pipeline,
@@ -121,4 +119,5 @@ fn queue_view_auto_exposure_pipelines(
metering_mask: auto_exposure.metering_mask.clone(),
});
}
Ok(())
}
2 changes: 1 addition & 1 deletion crates/bevy_core_pipeline/src/auto_exposure/node.rs
@@ -100,7 +100,7 @@ impl Node for AutoExposureNode {

let compute_bind_group = render_context.render_device().create_bind_group(
None,
&pipeline.histogram_layout,
&pipeline.layout,
&BindGroupEntries::sequential((
&globals_buffer.buffer,
&auto_exposure_buffers.settings,
89 changes: 54 additions & 35 deletions crates/bevy_core_pipeline/src/auto_exposure/pipeline.rs
@@ -11,14 +11,16 @@ use bevy_render::{
view::ViewUniform,
};
use bevy_utils::default;
use core::num::NonZero;
use core::{num::NonZero, result::Result};

#[derive(Resource)]
pub struct AutoExposurePipeline {
pub histogram_layout: BindGroupLayout,
pub histogram_shader: Handle<Shader>,
pub layout: BindGroupLayout,
pub variants: SpecializedCache<ComputePipeline, AutoExposureSpecializer>,
}

pub struct AutoExposureSpecializer;

#[derive(Component)]
pub struct ViewAutoExposurePipeline {
pub histogram_pipeline: CachedComputePipelineId,
@@ -39,7 +41,7 @@ pub struct AutoExposureUniform {
pub(super) exponential_transition_distance: f32,
}

#[derive(PartialEq, Eq, Hash, Clone)]
#[derive(PartialEq, Eq, Hash, Clone, SpecializerKey)]
pub enum AutoExposurePass {
Histogram,
Average,
@@ -51,43 +53,60 @@ impl FromWorld for AutoExposurePipeline {
fn from_world(world: &mut World) -> Self {
let render_device = world.resource::<RenderDevice>();

Self {
histogram_layout: render_device.create_bind_group_layout(
"compute histogram bind group",
&BindGroupLayoutEntries::sequential(
ShaderStages::COMPUTE,
(
uniform_buffer::<GlobalsUniform>(false),
uniform_buffer::<AutoExposureUniform>(false),
texture_2d(TextureSampleType::Float { filterable: false }),
texture_2d(TextureSampleType::Float { filterable: false }),
texture_1d(TextureSampleType::Float { filterable: false }),
uniform_buffer::<AutoExposureCompensationCurveUniform>(false),
storage_buffer_sized(false, NonZero::<u64>::new(HISTOGRAM_BIN_COUNT * 4)),
storage_buffer_sized(false, NonZero::<u64>::new(4)),
storage_buffer::<ViewUniform>(true),
),
let layout = render_device.create_bind_group_layout(
"compute histogram bind group",
&BindGroupLayoutEntries::sequential(
ShaderStages::COMPUTE,
(
uniform_buffer::<GlobalsUniform>(false),
uniform_buffer::<AutoExposureUniform>(false),
texture_2d(TextureSampleType::Float { filterable: false }),
texture_2d(TextureSampleType::Float { filterable: false }),
texture_1d(TextureSampleType::Float { filterable: false }),
uniform_buffer::<AutoExposureCompensationCurveUniform>(false),
storage_buffer_sized(false, NonZero::<u64>::new(HISTOGRAM_BIN_COUNT * 4)),
storage_buffer_sized(false, NonZero::<u64>::new(4)),
storage_buffer::<ViewUniform>(true),
),
),
histogram_shader: load_embedded_asset!(world, "auto_exposure.wgsl"),
}
);

let shader = load_embedded_asset!(world, "auto_exposure.wgsl");

let base_descriptor = ComputePipelineDescriptor {
layout: vec![layout.clone()],
shader,
..default()
};

let variants = SpecializedCache::new(AutoExposureSpecializer, None, base_descriptor);

Self { layout, variants }
}
}

impl SpecializedComputePipeline for AutoExposurePipeline {
impl Specializer<ComputePipeline> for AutoExposureSpecializer {
type Key = AutoExposurePass;

fn specialize(&self, pass: AutoExposurePass) -> ComputePipelineDescriptor {
ComputePipelineDescriptor {
label: Some("luminance compute pipeline".into()),
layout: vec![self.histogram_layout.clone()],
shader: self.histogram_shader.clone(),
shader_defs: vec![],
entry_point: Some(match pass {
AutoExposurePass::Histogram => "compute_histogram".into(),
AutoExposurePass::Average => "compute_average".into(),
}),
..default()
}
fn specialize(
&self,
key: Self::Key,
descriptor: &mut ComputePipelineDescriptor,
) -> Result<Canonical<Self::Key>, BevyError> {
let (label, entry_point) = match key {
AutoExposurePass::Histogram => (
"auto_exposure_compute_histogram".into(),
"compute_histogram".into(),
),
AutoExposurePass::Average => (
"auto_exposure_compute_average".into(),
"compute_average".into(),
),
};

descriptor.label = Some(label);
descriptor.entry_point = Some(entry_point);

Ok(key)
}
}
105 changes: 55 additions & 50 deletions crates/bevy_core_pipeline/src/bloom/downsampling_pipeline.rs
@@ -1,8 +1,9 @@
use crate::FullscreenShader;

use super::{Bloom, BLOOM_TEXTURE_FORMAT};
use bevy_asset::{load_embedded_asset, Handle};
use bevy_asset::load_embedded_asset;
use bevy_ecs::{
error::BevyError,
prelude::{Component, Entity},
resource::Resource,
system::{Commands, Query, Res, ResMut},
@@ -29,14 +30,13 @@ pub struct BloomDownsamplingPipeline {
/// Layout with a texture, a sampler, and uniforms
pub bind_group_layout: BindGroupLayout,
pub sampler: Sampler,
/// The asset handle for the fullscreen vertex shader.
pub fullscreen_shader: FullscreenShader,
/// The fragment shader asset handle.
pub fragment_shader: Handle<Shader>,
pub specialized_cache: SpecializedCache<RenderPipeline, BloomDownsamplingSpecializer>,
}

#[derive(PartialEq, Eq, Hash, Clone)]
pub struct BloomDownsamplingPipelineKeys {
pub struct BloomDownsamplingSpecializer;

#[derive(PartialEq, Eq, Hash, Clone, SpecializerKey)]
pub struct BloomDownsamplingKey {
prefilter: bool,
first_downsample: bool,
uniform_scale: bool,
@@ -82,28 +82,57 @@ impl FromWorld for BloomDownsamplingPipeline {
..Default::default()
});

let fullscreen_shader = world.resource::<FullscreenShader>().clone();
let fragment_shader = load_embedded_asset!(world, "bloom.wgsl");
let base_descriptor = RenderPipelineDescriptor {
layout: vec![bind_group_layout.clone()],
vertex: fullscreen_shader.to_vertex_state(),
fragment: Some(FragmentState {
shader: fragment_shader.clone(),
targets: vec![Some(ColorTargetState {
format: BLOOM_TEXTURE_FORMAT,
blend: None,
write_mask: ColorWrites::ALL,
})],
..default()
}),
..default()
};

let specialized_cache =
SpecializedCache::new(BloomDownsamplingSpecializer, None, base_descriptor);

BloomDownsamplingPipeline {
bind_group_layout,
sampler,
fullscreen_shader: world.resource::<FullscreenShader>().clone(),
fragment_shader: load_embedded_asset!(world, "bloom.wgsl"),
specialized_cache,
}
}
}

impl SpecializedRenderPipeline for BloomDownsamplingPipeline {
type Key = BloomDownsamplingPipelineKeys;
impl Specializer<RenderPipeline> for BloomDownsamplingSpecializer {
type Key = BloomDownsamplingKey;

fn specialize(
&self,
key: Self::Key,
descriptor: &mut RenderPipelineDescriptor,
) -> Result<Canonical<Self::Key>, BevyError> {
descriptor.label = Some(if key.first_downsample {
"bloom_downsampling_pipeline_first".into()
} else {
"bloom_downsampling_pipeline".into()
});

fn specialize(&self, key: Self::Key) -> RenderPipelineDescriptor {
let layout = vec![self.bind_group_layout.clone()];
let fragment = descriptor.fragment_mut()?;

let entry_point = if key.first_downsample {
fragment.entry_point = Some(if key.first_downsample {
"downsample_first".into()
} else {
"downsample".into()
};
});

let mut shader_defs = vec![];
let shader_defs = &mut fragment.shader_defs;

if key.first_downsample {
shader_defs.push("FIRST_DOWNSAMPLE".into());
@@ -117,61 +146,36 @@ impl SpecializedRenderPipeline for BloomDownsamplingPipeline {
shader_defs.push("UNIFORM_SCALE".into());
}

RenderPipelineDescriptor {
label: Some(
if key.first_downsample {
"bloom_downsampling_pipeline_first"
} else {
"bloom_downsampling_pipeline"
}
.into(),
),
layout,
vertex: self.fullscreen_shader.to_vertex_state(),
fragment: Some(FragmentState {
shader: self.fragment_shader.clone(),
shader_defs,
entry_point: Some(entry_point),
targets: vec![Some(ColorTargetState {
format: BLOOM_TEXTURE_FORMAT,
blend: None,
write_mask: ColorWrites::ALL,
})],
}),
..default()
}
Ok(key)
}
}

pub fn prepare_downsampling_pipeline(
mut commands: Commands,
pipeline_cache: Res<PipelineCache>,
mut pipelines: ResMut<SpecializedRenderPipelines<BloomDownsamplingPipeline>>,
pipeline: Res<BloomDownsamplingPipeline>,
mut pipeline: ResMut<BloomDownsamplingPipeline>,
views: Query<(Entity, &Bloom)>,
) {
) -> Result<(), BevyError> {
for (entity, bloom) in &views {
let prefilter = bloom.prefilter.threshold > 0.0;

let pipeline_id = pipelines.specialize(
let pipeline_id = pipeline.specialized_cache.specialize(
&pipeline_cache,
&pipeline,
BloomDownsamplingPipelineKeys {
BloomDownsamplingKey {
prefilter,
first_downsample: false,
uniform_scale: bloom.scale == Vec2::ONE,
},
);
)?;

let pipeline_first_id = pipelines.specialize(
let pipeline_first_id = pipeline.specialized_cache.specialize(
&pipeline_cache,
&pipeline,
BloomDownsamplingPipelineKeys {
BloomDownsamplingKey {
prefilter,
first_downsample: true,
uniform_scale: bloom.scale == Vec2::ONE,
},
);
)?;

commands
.entity(entity)
@@ -180,4 +184,5 @@ pub fn prepare_downsampling_pipeline(
main: pipeline_id,
});
}
Ok(())
}
2 changes: 0 additions & 2 deletions crates/bevy_core_pipeline/src/bloom/mod.rs
@@ -57,8 +57,6 @@ impl Plugin for BloomPlugin {
return;
};
render_app
.init_resource::<SpecializedRenderPipelines<BloomDownsamplingPipeline>>()
.init_resource::<SpecializedRenderPipelines<BloomUpsamplingPipeline>>()
.add_systems(
Render,
(