// ray-tracing2/ray-tracing-egui/src/main.rs

use egui::Widget;
use egui_winit_vulkano::{Gui, GuiConfig};
use ray_tracing_core::prelude::*;
use ray_tracing_scene::examples::example_scenes;
use render::{render_thread, RENDERER};
use setup::{get_compute_pipeline, get_framebuffers, SetupResult};
use std::{collections::HashSet, sync::Arc};
use vulkano::{
buffer::{Buffer, BufferCreateInfo, BufferUsage},
command_buffer::{allocator::StandardCommandBufferAllocator, AutoCommandBufferBuilder},
descriptor_set::{
allocator::StandardDescriptorSetAllocator, PersistentDescriptorSet, WriteDescriptorSet,
},
memory::allocator::{AllocationCreateInfo, MemoryTypeFilter, StandardMemoryAllocator},
pipeline::Pipeline,
swapchain::{self, SwapchainCreateInfo, SwapchainPresentInfo},
sync::{self, future::FenceSignalFuture, GpuFuture},
Validated, VulkanError,
};
use winit::{
event::{Event, WindowEvent},
event_loop::ControlFlow,
};
mod render;
mod setup;
// Compute shader compiled at build time by `vulkano_shaders::shader!`.
//
// Bindings (set 0):
//   0: writeonly storage image  — the swapchain image to write final pixels to
//   1: uniform GPUSettings      — sample count used to average accumulated color
//   2: storage buffer Data      — flat f32 RGB accumulation buffer from the CPU renderer
//
// Each 8x8 workgroup invocation converts one pixel: it reads three floats at
// `(y * width + x) * 3`, divides by `settings.samples`, and stores the result
// with alpha = 1.0. The bounds check guards the partial workgroups at the
// right/bottom edges when the image size is not a multiple of 8.
mod cs {
    vulkano_shaders::shader! {
        ty: "compute",
        src: "
#version 460
layout(local_size_x = 8, local_size_y = 8, local_size_z = 1) in;
layout(set = 0, binding = 0) uniform writeonly image2D outputImage;
layout(set = 0, binding = 1) uniform GPUSettings {
uint samples;
} settings;
layout(set = 0, binding = 2) buffer Data {
float data[];
} buf;
void main() {
uvec2 size = imageSize(outputImage);
uvec2 pos = gl_GlobalInvocationID.xy;
if (pos.x < size.x && pos.y < size.y) {
uint index = (pos.y * size.x + pos.x) * 3;
vec3 color = vec3(buf.data[index], buf.data[index + 1], buf.data[index + 2]);
vec4 to_write = vec4(color / settings.samples, 1.0);
imageStore(outputImage, ivec2(pos), to_write);
}
}
"
    }
}
/// Application entry point.
///
/// Sets up Vulkan (device, swapchain, compute pipeline) and winit, spawns the
/// CPU ray-tracing thread, then runs the event loop. Every frame it:
///   1. forwards keyboard/UI settings changes to the render thread,
///   2. pulls the newest accumulated pixel buffer off `data_rx`,
///   3. runs the `cs` compute shader to average samples into the swapchain image,
///   4. draws the egui overlay on top and presents.
fn main() {
    let SetupResult(event_loop, physical_device, device, queue, window, surface) = setup::setup();
    let (mut swapchain, images) = setup::create_swapchain(
        physical_device.clone(),
        device.clone(),
        surface.clone(),
        window.inner_size(),
    );
    // egui integration; rendered as an overlay on top of the compute output.
    let mut gui = Gui::new(
        &event_loop,
        surface.clone(),
        queue.clone(),
        swapchain.image_format(),
        GuiConfig {
            is_overlay: true,
            ..Default::default()
        },
    );
    let memory_allocator = Arc::new(StandardMemoryAllocator::new_default(device.clone()));
    let command_buffer_allocator =
        StandardCommandBufferAllocator::new(device.clone(), Default::default());
    let descriptor_set_allocator =
        StandardDescriptorSetAllocator::new(device.clone(), Default::default());
    // control_tx -> render thread (settings updates); data_rx <- render thread
    // (finished/accumulating pixel buffers).
    let (control_tx, control_rx) = std::sync::mpsc::channel();
    let (data_tx, data_rx) = std::sync::mpsc::channel();
    let scenes = example_scenes::<rand::rngs::SmallRng>();
    // Initial render settings are derived from the first example scene and the
    // current window size.
    let mut settings = {
        let scene = scenes.iter().next().unwrap();
        let e = scene.1;
        render::RenderSettings {
            width: window.inner_size().width,
            height: window.inner_size().height,
            scene: scene.0,
            renderer_id: 0,
            camera_pos: e.get_camera_pos(),
            camera_look_at: e.get_camera_look_at(),
            camera_up: e.get_camera_up(),
            camera_horizontal_fov: e.get_horizontal_fov(),
        }
    };
    // Spawn the CPU ray tracer; it owns the far ends of both channels.
    let send_settings = settings.clone();
    let send_memory_allocator = memory_allocator.clone();
    std::thread::spawn(move || {
        render_thread(send_settings, control_rx, data_tx, send_memory_allocator)
    });
    let render_pass = setup::get_render_pass(device.clone(), &swapchain);
    let mut framebuffers = setup::get_framebuffers(&images, &render_pass);
    let mut recreate_swapchain = false;
    // One in-flight fence slot per swapchain image.
    let mut fences: Vec<Option<Arc<FenceSignalFuture<_>>>> = vec![None; images.len()];
    let mut previous_fence_i = 0;
    // Most recent pixel buffer from the render thread (None until one arrives,
    // or after a resize invalidates it).
    let mut buffer = None;
    let cs = cs::load(device.clone()).unwrap();
    // Set of currently held keys, maintained from KeyboardInput events.
    let mut keypressed = HashSet::new();
    // Per-swapchain-image uniform buffers holding the sample count the shader
    // divides by (cs binding 1).
    let cpu_buffers: Vec<_> = framebuffers
        .iter()
        .map(|_| {
            Buffer::from_data(
                memory_allocator.clone(),
                BufferCreateInfo {
                    usage: BufferUsage::UNIFORM_BUFFER,
                    ..Default::default()
                },
                AllocationCreateInfo {
                    memory_type_filter: MemoryTypeFilter::PREFER_DEVICE
                        | MemoryTypeFilter::HOST_SEQUENTIAL_WRITE,
                    ..Default::default()
                },
                cs::GPUSettings { samples: 1 },
            )
            .unwrap()
        })
        .collect();
    let mut pipeline = get_compute_pipeline(device.clone(), cs.clone());
    event_loop.run(move |event, _, control_flow| match event {
        Event::WindowEvent {
            window_id: _,
            event,
        } => {
            // Only track keys that egui did not consume.
            if !gui.update(&event) {
                if let WindowEvent::KeyboardInput { input, .. } = event {
                    if let Some(virt) = input.virtual_keycode {
                        match input.state {
                            winit::event::ElementState::Pressed => keypressed.insert(virt),
                            winit::event::ElementState::Released => keypressed.remove(&virt),
                        };
                    };
                };
            }
            match event {
                WindowEvent::Resized(_) => {
                    recreate_swapchain = true;
                    window.request_redraw();
                    settings.width = window.inner_size().width;
                    settings.height = window.inner_size().height;
                    // Render thread may have exited; ignore a closed channel.
                    let _ = control_tx.send(settings.clone());
                }
                WindowEvent::CloseRequested => {
                    *control_flow = ControlFlow::Exit;
                }
                _ => (),
            }
        }
        Event::MainEventsCleared => {
            if recreate_swapchain {
                // NOTE: `expect` does not interpolate `{e}`; use
                // `unwrap_or_else` so the actual error is included in the panic.
                let (new_swapchain, new_images) = swapchain
                    .recreate(SwapchainCreateInfo {
                        image_extent: window.inner_size().into(),
                        ..swapchain.create_info()
                    })
                    .unwrap_or_else(|e| panic!("failed to recreate swapchain: {e}"));
                swapchain = new_swapchain;
                framebuffers = get_framebuffers(&new_images, &render_pass);
                pipeline = get_compute_pipeline(device.clone(), cs.clone());
            }
            // Camera strafing: move along the camera's right vector
            // (forward x up) while Left/Right are held.
            let mut settings_changed = false;
            if keypressed.contains(&winit::event::VirtualKeyCode::Right) {
                let translation = Dir3::cross(
                    settings.camera_look_at - settings.camera_pos,
                    settings.camera_up,
                )
                .normalize();
                settings.camera_pos = settings.camera_pos + translation * 0.1;
                settings.camera_look_at = settings.camera_look_at + translation * 0.1;
                settings_changed = true;
            }
            if keypressed.contains(&winit::event::VirtualKeyCode::Left) {
                let translation = Dir3::cross(
                    settings.camera_look_at - settings.camera_pos,
                    settings.camera_up,
                )
                .normalize();
                settings.camera_pos = settings.camera_pos - translation * 0.1;
                settings.camera_look_at = settings.camera_look_at - translation * 0.1;
                settings_changed = true;
            }
            // Settings panel: scene selector, renderer selector, FOV slider.
            gui.immediate_ui(|gui| {
                let ctx = gui.context();
                egui::Window::new("panel").show(&ctx, |ui| {
                    ui.collapsing("Scene Settings:", |ui| {
                        egui::ComboBox::from_label("Scene")
                            .selected_text(settings.scene)
                            .show_ui(ui, |ui| {
                                let mut scene_changed = false;
                                for &label in scenes.keys() {
                                    scene_changed |= ui
                                        .selectable_value(&mut settings.scene, label, label)
                                        .changed();
                                }
                                if scene_changed {
                                    // Reset the camera to the new scene's defaults.
                                    settings_changed = true;
                                    settings.camera_pos = scenes[settings.scene].get_camera_pos();
                                    settings.camera_look_at =
                                        scenes[settings.scene].get_camera_look_at();
                                    settings.camera_up = scenes[settings.scene].get_camera_up();
                                    settings.camera_horizontal_fov =
                                        scenes[settings.scene].get_horizontal_fov();
                                }
                            });
                    });
                    settings_changed |= egui::ComboBox::from_label("Renderer")
                        .selected_text(RENDERER[settings.renderer_id])
                        .show_index(ui, &mut settings.renderer_id, RENDERER.len(), |i| {
                            RENDERER[i]
                        })
                        .changed();
                    settings_changed |= egui::Slider::new(
                        &mut settings.camera_horizontal_fov,
                        0.1..=FloatConsts::PI - 0.1,
                    )
                    .ui(ui)
                    .changed();
                    ui.collapsing("Render Settings:", |ui| {
                        ui.label(format!("{:#?}", &settings));
                    });
                });
            });
            if settings_changed {
                let _ = control_tx.send(settings.clone());
            }
            let (image_i, suboptimal, acquire_future) =
                match swapchain::acquire_next_image(swapchain.clone(), None)
                    .map_err(Validated::unwrap)
                {
                    Ok(r) => r,
                    // OutOfDate is expected during resizes: flag a rebuild and
                    // skip this frame instead of panicking.
                    Err(VulkanError::OutOfDate) => {
                        recreate_swapchain = true;
                        return;
                    }
                    Err(e) => panic!("failed to acquire next image: {e}"),
                };
            let previous_future = match fences[previous_fence_i as usize].clone() {
                // Create a NowFuture
                None => {
                    let mut now = sync::now(device.clone());
                    now.cleanup_finished();
                    now.boxed()
                }
                // Use the existing FenceSignalFuture
                Some(fence) => fence.boxed(),
            };
            if suboptimal {
                recreate_swapchain = true;
            }
            // Wait for the previous submission that used this image.
            if let Some(image_fence) = &fences[image_i as usize] {
                image_fence.wait(None).unwrap();
            }
            // Drain the channel, keeping only the newest buffer.
            while let Ok(b) = data_rx.try_recv() {
                buffer = Some(b);
            }
            // Drop stale buffers whose dimensions no longer match the window.
            if buffer.as_ref().is_some_and(|b| {
                b.width != window.inner_size().width || b.height != window.inner_size().height
            }) {
                buffer = None;
            }
            let compute_future = match &buffer {
                Some(b) => {
                    // Update the sample count the shader divides by; if the GPU
                    // still holds the buffer, skip this update rather than block.
                    match cpu_buffers[image_i as usize].write() {
                        Ok(mut local) => {
                            local.samples = b.samples;
                        }
                        Err(e) => {
                            println!("{}", e);
                        }
                    }
                    let mut builder = AutoCommandBufferBuilder::primary(
                        &command_buffer_allocator,
                        queue.queue_family_index(),
                        vulkano::command_buffer::CommandBufferUsage::OneTimeSubmit,
                    )
                    .unwrap();
                    let pipeline_layout = pipeline.layout();
                    let descriptor_set_layouts = pipeline_layout.set_layouts();
                    let descriptor_set_layout = descriptor_set_layouts.first().unwrap();
                    // Bindings must match the `cs` shader: 0 = output image,
                    // 1 = GPUSettings uniform, 2 = accumulation buffer.
                    let descriptor_set = PersistentDescriptorSet::new(
                        &descriptor_set_allocator,
                        descriptor_set_layout.clone(),
                        [
                            WriteDescriptorSet::image_view(
                                0,
                                framebuffers[image_i as usize].attachments()[0].clone(),
                            ),
                            WriteDescriptorSet::buffer(1, cpu_buffers[image_i as usize].clone()),
                            WriteDescriptorSet::buffer(2, b.buffer.clone()),
                        ],
                        [],
                    )
                    .unwrap();
                    builder
                        .bind_pipeline_compute(pipeline.clone())
                        .unwrap()
                        .bind_descriptor_sets(
                            vulkano::pipeline::PipelineBindPoint::Compute,
                            pipeline_layout.clone(),
                            0,
                            descriptor_set,
                        )
                        .unwrap()
                        // One 8x8 workgroup per tile; div_ceil covers partial edges.
                        .dispatch([b.width.div_ceil(8), b.height.div_ceil(8), 1])
                        .unwrap();
                    let command = builder.build().unwrap();
                    acquire_future
                        .join(previous_future)
                        .then_execute(queue.clone(), command)
                        .unwrap()
                        .then_signal_fence()
                        .boxed()
                }
                // No pixel data yet: present without running the compute pass.
                None => acquire_future.join(previous_future).boxed(),
            };
            // Draw the egui overlay onto the same image, then present.
            let gui_future = gui.draw_on_image(
                compute_future,
                framebuffers[image_i as usize].attachments()[0].clone(),
            );
            let future = gui_future
                .then_swapchain_present(
                    queue.clone(),
                    SwapchainPresentInfo::swapchain_image_index(swapchain.clone(), image_i),
                )
                .then_signal_fence_and_flush();
            fences[image_i as usize] = match future.map_err(Validated::unwrap) {
                #[allow(clippy::arc_with_non_send_sync)]
                Ok(value) => Some(Arc::new(value)),
                Err(VulkanError::OutOfDate) => {
                    recreate_swapchain = true;
                    None
                }
                Err(e) => {
                    println!("failed to flush future: {e}");
                    None
                }
            };
            previous_fence_i = image_i;
        }
        _ => (),
    })
}