// ray-tracing2/ray-tracing-egui/src/render.rs
// 2025-01-14 16:39:57 +01:00
//
// 143 lines
// 4.4 KiB
// Rust

use std::sync::{
mpsc::{Receiver, Sender},
Arc,
};
use rand::{rngs::SmallRng, SeedableRng};
use ray_tracing_core::{
camera::BasicCamera, prelude::*, renderer::ClassicalRenderer, scene::Scene,
};
use ray_tracing_renderer::{
depth_renderer::DepthRenderer, mis::MIS, next_event_estimation::NextEventEstimation,
path_tracer::PathTracer, path_tracer_importance::PathTracerImportance,
};
use ray_tracing_scene::examples;
use rayon::{
iter::{IndexedParallelIterator, ParallelIterator},
slice::ParallelSliceMut,
};
use vulkano::{
buffer::{Buffer, BufferCreateInfo, BufferUsage, Subbuffer},
memory::allocator::{AllocationCreateInfo, MemoryAllocator, MemoryTypeFilter},
};
/// Object-safe alias for any renderer the UI can drive: renders with a
/// `SmallRng`, over a boxed dynamic `Scene`, through a `BasicCamera`.
/// `Sync` is required so rayon worker threads can share a `&` to it.
type DynRenderer =
    dyn ClassicalRenderer<SmallRng, Box<dyn Scene<SmallRng> + Sync>, BasicCamera> + Sync;
/// The selectable renderers: (display label, factory taking image width and
/// height in pixels). `RenderSettings::renderer_id` is an index into this
/// array, so entry order is part of the UI contract — append, don't reorder.
pub const RENDERER: [(&str, fn(u32, u32) -> Box<DynRenderer>); 5] = [
    // Presumably a depth/distance visualization — useful as a fast sanity
    // check of scene and camera setup (TODO confirm against DepthRenderer).
    ("Depth", |w, h| {
        Box::new(DepthRenderer::new(w, h)) as Box<DynRenderer>
    }),
    ("Path tracer", |w, h| {
        Box::new(PathTracer::new(w, h)) as Box<DynRenderer>
    }),
    ("Path tracer importance", |w, h| {
        Box::new(PathTracerImportance::new(w, h)) as Box<DynRenderer>
    }),
    ("Next event estimation", |w, h| {
        Box::new(NextEventEstimation::new(w, h)) as Box<DynRenderer>
    }),
    // MIS = multiple importance sampling (name suggests; see the MIS crate).
    ("MIS", |w, h| Box::new(MIS::new(w, h)) as Box<DynRenderer>),
];
/// Everything the render thread needs to (re)build its state: output size,
/// which example scene and renderer to use, and the camera parameters.
/// Sent whole over a channel on every settings change (see `render_thread`).
#[derive(Debug, Clone)]
pub struct RenderSettings {
    /// Output image width in pixels.
    pub width: u32,
    /// Output image height in pixels.
    pub height: u32,
    /// Key of the example scene, looked up in
    /// `ray_tracing_scene::examples::example_scenes()`.
    pub scene: &'static str,
    /// Index into the `RENDERER` array selecting the rendering algorithm.
    pub renderer_id: usize,
    /// Camera position in world space.
    pub camera_pos: Pos3,
    /// World-space point the camera looks at.
    pub camera_look_at: Pos3,
    /// Camera up direction.
    pub camera_up: Dir3,
    /// Horizontal field of view — units (degrees vs. radians) are defined by
    /// `BasicCamera::from_look_at`; TODO confirm there.
    pub camera_horizontal_fov: Float,
}
/// One accumulated pass published by the render thread to the UI.
pub struct Data {
    /// Frame width in pixels.
    pub width: u32,
    /// Frame height in pixels.
    pub height: u32,
    /// Number of samples accumulated so far. `buffer` holds the running
    /// *sum* of per-pixel radiance (the render loop only ever `+=`s), so the
    /// consumer is expected to divide by this to get the average.
    pub samples: u32,
    /// GPU-visible storage buffer of `width * height * 3` f32s, RGB per
    /// pixel, row-major (pixel index = x + y * width).
    pub buffer: Subbuffer<[f32]>,
}
/// Progressive render loop, intended to run on a dedicated thread.
///
/// Each iteration adds one sample per pixel (in parallel via rayon) into a
/// CPU-side RGB sum buffer, then uploads the whole buffer into a fresh
/// vulkano storage buffer and publishes it as `Data` on `tx`.
///
/// * `s` — initial render settings.
/// * `rx` — settings updates; any pending message rebuilds the scene,
///   camera and renderer and resets the accumulation to zero samples.
/// * `tx` — channel on which each completed pass is published.
/// * `allocator` — vulkano memory allocator used for the per-pass upload.
///
/// The loop never exits. Send errors on `tx` are discarded, so the thread
/// keeps rendering even after the receiver is dropped.
/// NOTE(review): once `rx` disconnects, `try_recv` just returns `Err` and
/// the loop renders forever with the last settings — confirm this thread is
/// torn down with the process.
pub fn render_thread(
    s: RenderSettings,
    rx: Receiver<RenderSettings>,
    tx: Sender<Data>,
    allocator: Arc<impl MemoryAllocator>,
) {
    let mut settings = s;
    // Running RGB *sum* per pixel: 3 f32s each, row-major. Never divided
    // here — the consumer presumably normalizes by `Data::samples`.
    let mut buffer = vec![0.0; settings.width as usize * settings.height as usize * 3];
    let example_scenes = examples::example_scenes::<SmallRng>();
    // Lookup by scene name — presumably a map keyed by `&str`; an unknown
    // key would panic here (TODO confirm `example_scenes`'s Index impl).
    let mut scene = example_scenes[settings.scene].get_scene();
    let mut camera = BasicCamera::from_look_at(
        settings.width,
        settings.height,
        settings.camera_pos,
        settings.camera_look_at,
        settings.camera_up,
        settings.camera_horizontal_fov,
    );
    // Panics if `renderer_id` is out of bounds of the RENDERER table.
    let mut renderer = (RENDERER[settings.renderer_id].1)(settings.width, settings.height);
    let mut samples = 0;
    loop {
        // Drain every queued settings change before rendering the next pass;
        // each message rebuilds all derived state, so only the last survives.
        while let Ok(s) = rx.try_recv() {
            println!("Settings changed.");
            settings = s;
            scene = example_scenes[settings.scene].get_scene();
            camera = BasicCamera::from_look_at(
                settings.width,
                settings.height,
                settings.camera_pos,
                settings.camera_look_at,
                settings.camera_up,
                settings.camera_horizontal_fov,
            );
            // Resolution may have changed — restart accumulation from zero.
            buffer = vec![0.0; settings.width as usize * settings.height as usize * 3];
            renderer = (RENDERER[settings.renderer_id].1)(settings.width, settings.height);
            samples = 0;
        }
        // One sample per pixel, in parallel: each 3-element chunk is one
        // pixel's RGB accumulator, chunk index i maps back to (x, y).
        buffer.par_chunks_mut(3).enumerate().for_each(|(i, c)| {
            let x = (i % settings.width as usize) as u32;
            let y = (i / settings.width as usize) as u32;
            // Deterministic seed: pixel index plus a per-pass offset of
            // width*height*samples, so every pass draws a distinct stream
            // and reruns are reproducible.
            // NOTE(review): `x + y * settings.width` is evaluated in u32 and
            // could wrap for very large images before the cast to u64.
            let mut rng = SmallRng::seed_from_u64(
                (x + y * settings.width) as u64
                    + (settings.width as u64 * settings.height as u64 * samples as u64),
            );
            let r = renderer.render_pixel(&scene, &camera, x, y, &mut rng);
            // Accumulate, don't average — see `Data::samples`.
            c[0] += r.r();
            c[1] += r.g();
            c[2] += r.b();
        });
        samples += 1;
        let data = Data {
            width: settings.width,
            height: settings.height,
            samples,
            // Upload the full accumulation buffer into a brand-new
            // device-preferred, host-writable storage buffer.
            // NOTE(review): this allocates a fresh GPU buffer every pass;
            // if allocation shows up in profiles, consider reusing buffers.
            buffer: Buffer::from_iter(
                allocator.clone(),
                BufferCreateInfo {
                    usage: BufferUsage::STORAGE_BUFFER,
                    ..Default::default()
                },
                AllocationCreateInfo {
                    memory_type_filter: MemoryTypeFilter::PREFER_DEVICE
                        | MemoryTypeFilter::HOST_SEQUENTIAL_WRITE,
                    ..Default::default()
                },
                buffer.iter().copied(),
            )
            .unwrap(),
        };
        // Best-effort publish: if the UI side is gone, just drop the frame.
        let _ = tx.send(data);
    }
}