ray-tracing2/ray-tracing-egui/src/render.rs

use std::sync::{
mpsc::{Receiver, Sender},
Arc,
};
use rand::{rngs::SmallRng, SeedableRng};
use ray_tracing_core::{camera::BasicCamera, renderer::ClassicalRenderer, scene::Scene};
use ray_tracing_renderer::{
depth_renderer::DepthRenderer, next_event_estimation::NextEventEstimation,
path_tracer::PathTracer, path_tracer_importance::PathTracerImportance,
};
use ray_tracing_scene::examples::{self, example_scenes, ExampleScene};
use rayon::{
iter::{IndexedParallelIterator, ParallelIterator},
slice::ParallelSliceMut,
};
use vulkano::{
buffer::{Buffer, BufferCreateInfo, BufferUsage, Subbuffer},
memory::allocator::{
AllocationCreateInfo, FreeListAllocator, GenericMemoryAllocator, MemoryAllocator,
MemoryTypeFilter,
},
};
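
/// Trait object for any renderer the UI can drive: it renders with a `SmallRng`,
/// a boxed dynamic `Scene`, and a `BasicCamera`, and must be `Sync` so rayon
/// worker threads can share it.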
type DynRenderer =
dyn ClassicalRenderer<SmallRng, Box<dyn Scene<SmallRng> + Sync>, BasicCamera> + Sync;
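
/// Renderers selectable in the UI: each entry pairs a display name with a
/// constructor taking the image width and height.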
pub const RENDERER: [(&str, fn(u32, u32) -> Box<DynRenderer>); 4] = [
("Depth", |w, h| {
Box::new(DepthRenderer::new(w, h)) as Box<DynRenderer>
}),
("Path tracer", |w, h| {
Box::new(PathTracer::new(w, h)) as Box<DynRenderer>
}),
("Path tracer importance", |w, h| {
Box::new(PathTracerImportance::new(w, h)) as Box<DynRenderer>
}),
("Next event estimation", |w, h| {
Box::new(NextEventEstimation::new(w, h)) as Box<DynRenderer>
}),
];
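
/// Render parameters chosen in the UI and sent to the render thread.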
#[derive(Debug, Clone)]
pub struct RenderSettings {
pub width: u32,
pub height: u32,
pub scene: &'static str,
pub renderer_id: usize,
}
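
/// Control messages for the render thread (currently just a settings update).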
pub enum ControlMessages {
SetSettings(RenderSettings),
}
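
/// One finished accumulation pass: the image size, how many samples have been
/// accumulated so far, and the summed RGB values (divide by `samples` to average).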
pub struct Data {
pub width: u32,
pub height: u32,
pub samples: u32,
pub buffer: Subbuffer<[f32]>,
}
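
/// Progressive render loop meant to run on its own thread.
///
/// Each iteration first drains pending `RenderSettings` from `rx` (rebuilding
/// the scene, camera, renderer, and accumulation buffer on a change), then
/// renders one sample per pixel in parallel with rayon, adds it to the running
/// sums, uploads the result into a Vulkan buffer, and sends it to the UI
/// through `tx`. The loop never returns.
///
/// A minimal wiring sketch (hypothetical values; assumes the caller already
/// owns a vulkano memory allocator `allocator` from its device setup):
///
/// ```ignore
/// use std::sync::mpsc::channel;
///
/// let (settings_tx, settings_rx) = channel::<RenderSettings>();
/// let (data_tx, data_rx) = channel::<Data>();
/// let initial = RenderSettings {
///     width: 800,
///     height: 600,
///     scene: "cornell_box", // hypothetical scene key
///     renderer_id: 1,       // "Path tracer"
/// };
/// std::thread::spawn(move || render_thread(initial, settings_rx, data_tx, allocator));
///
/// // UI side: push new settings and poll for finished passes.
/// // settings_tx.send(new_settings).unwrap();
/// // if let Ok(data) = data_rx.try_recv() { /* display data.buffer */ }
/// ```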
pub fn render_thread(
s: RenderSettings,
rx: Receiver<RenderSettings>,
tx: Sender<Data>,
allocator: Arc<impl MemoryAllocator>,
) {
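    // Build the initial accumulation buffer (RGB running sums), scene, camera,
    // and renderer from the starting settings.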
let mut settings = s;
let mut buffer = vec![0.0; settings.width as usize * settings.height as usize * 3];
let example_scenes = examples::example_scenes::<SmallRng>();
let e = example_scenes[settings.scene]();
let mut scene = (e.scene)();
let mut camera = BasicCamera::new(
settings.width,
settings.height,
e.camera_pos,
e.camera_dir,
e.camera_up,
e.horizontal_fov,
);
let mut renderer = (RENDERER[settings.renderer_id].1)(settings.width, settings.height);
let mut samples = 0;
loop {
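        // Drain any pending settings changes; each one resets the scene, camera,
        // renderer, and accumulation buffer, so the last message wins.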
while let Ok(s) = rx.try_recv() {
println!("Settings changed.");
settings = s;
let e = example_scenes[settings.scene]();
scene = (e.scene)();
camera = BasicCamera::new(
settings.width,
settings.height,
e.camera_pos,
e.camera_dir,
e.camera_up,
e.horizontal_fov,
);
buffer = vec![0.0; settings.width as usize * settings.height as usize * 3];
renderer = (RENDERER[settings.renderer_id].1)(settings.width, settings.height);
samples = 0;
}
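
        // Render one more sample per pixel in parallel. Each 3-float chunk is one
        // RGB pixel, stored row-major, and the new sample is added to the running sum.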
buffer.par_chunks_mut(3).enumerate().for_each(|(i, c)| {
let x = (i % settings.width as usize) as u32;
            let y = (i / settings.width as usize) as u32;
            // Deterministic seed per pixel and per pass, so every pass uses a
            // fresh but reproducible RNG stream for each pixel.
            let mut rng = SmallRng::seed_from_u64(
(x + y * settings.width) as u64
+ (settings.width as u64 * settings.height as u64 * samples as u64),
);
let r = renderer.render_pixel(&scene, &camera, x, y, &mut rng);
c[0] += r.r();
c[1] += r.g();
c[2] += r.b();
});
samples += 1;
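
        // Upload the accumulated sums into a new Vulkan buffer and hand it to the
        // UI thread together with the current sample count.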
let data = Data {
width: settings.width,
height: settings.height,
samples,
buffer: Buffer::from_iter(
allocator.clone(),
BufferCreateInfo {
usage: BufferUsage::STORAGE_BUFFER,
..Default::default()
},
AllocationCreateInfo {
memory_type_filter: MemoryTypeFilter::PREFER_DEVICE
| MemoryTypeFilter::HOST_SEQUENTIAL_WRITE,
..Default::default()
},
buffer.iter().copied(),
)
.unwrap(),
};
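        // Ignore send errors: if the receiver has been dropped, the pass is simply discarded.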
let _ = tx.send(data);
}
}