diff --git a/ray-tracing-pbrt-scene/src/lib.rs b/ray-tracing-pbrt-scene/src/lib.rs index 8629f2a..f6a3751 100644 --- a/ray-tracing-pbrt-scene/src/lib.rs +++ b/ray-tracing-pbrt-scene/src/lib.rs @@ -1,7 +1,7 @@ use crate::{ scene::PbrtScene, shape::{Shape, ShapeAlpha, ShapeType}, - tokenizer::Tokenizer, + tokenizer::{Token, Tokenizer}, }; use material::PbrtMaterial; use miette::{IntoDiagnostic, Result, bail, miette}; @@ -76,7 +76,7 @@ enum Statement { CoordinateSystem(String), CoordSysTransform(String), Shape(ShapeType, ShapeAlpha), - Unknown(String, Vec), + Unknown(String, Vec), Transform(AffineTransform), Texture(String, Arc), Material(Arc), @@ -96,10 +96,12 @@ fn parse_look_at(iter: &mut Tokenizer) -> Result { } fn parse_shape(iter: &mut Tokenizer) -> Result { - let shape_type = iter.next().ok_or(miette!("unable to get shape type"))??; + let shape_type = iter + .next_if_string_value() + .ok_or(miette!("unable to get shape type"))??; match shape_type.as_str() { - "\"sphere\"" => { + "sphere" => { let t = parse_dict!(iter => radius, Float, 1.0; zmin, Float, {-radius}; @@ -107,12 +109,12 @@ fn parse_shape(iter: &mut Tokenizer) -> Result { phimax, Float, 360.0; alpha, ShapeAlpha, ShapeAlpha::None => - radius, "\"float radius\"", iter.parse_parameter()?; - zmin, "\"float zmin\"", iter.parse_parameter()?; - zmax, "\"float zmax\"", iter.parse_parameter()?; - phimax, "\"float phimax\"", iter.parse_parameter()?; - alpha, "\"float alpha\"", ShapeAlpha::Value(iter.parse_parameter()?); - alpha, "\"texture alpha\"", ShapeAlpha::Texture(iter.parse_parameter()?) + radius, "float radius", iter.parse_parameter()?; + zmin, "float zmin", iter.parse_parameter()?; + zmax, "float zmax", iter.parse_parameter()?; + phimax, "float phimax", iter.parse_parameter()?; + alpha, "float alpha", ShapeAlpha::Value(iter.parse_parameter()?); + alpha, "texture alpha", ShapeAlpha::Texture(iter.parse_parameter()?) 
); Ok(Statement::Shape( ShapeType::Sphere { @@ -124,7 +126,7 @@ fn parse_shape(iter: &mut Tokenizer) -> Result { t.alpha, )) } - "\"trianglemesh\"" => { + "trianglemesh" => { let t = parse_dict!(iter => p, Vec, Vec::new(); n, Vec, Vec::new(); @@ -133,13 +135,13 @@ fn parse_shape(iter: &mut Tokenizer) -> Result { indices, Vec, Vec::new(); alpha, ShapeAlpha, ShapeAlpha::None => - p, "\"point3 P\"", iter.parse_list_3(Pos3::new)?; - n, "\"normal N\"", iter.parse_list_3(Dir3::new)?; - s, "\"normal S\"", iter.parse_list_3(Dir3::new)?; - uv, "\"point2 uv\"", iter.parse_list_2(|u, v| [u, v])?; - indices, "\"integer indices\"", iter.parse_list()?; - alpha, "\"float alpha\"", ShapeAlpha::Value(iter.parse_parameter()?); - alpha, "\"texture alpha\"", ShapeAlpha::Texture(iter.parse_parameter()?) + p, "point3 P", iter.parse_list_3(Pos3::new)?; + n, "normal N", iter.parse_list_3(Dir3::new)?; + s, "normal S", iter.parse_list_3(Dir3::new)?; + uv, "point2 uv", iter.parse_list_2(|u, v| [u, v])?; + indices, "integer indices", iter.parse_list()?; + alpha, "float alpha", ShapeAlpha::Value(iter.parse_parameter()?); + alpha, "texture alpha", ShapeAlpha::Texture(iter.parse_parameter()?) ); if t.p.len() < 3 { @@ -178,7 +180,7 @@ fn parse_shape(iter: &mut Tokenizer) -> Result { t.alpha, )) } - "\"bilinearmesh\"" => { + "bilinearmesh" => { let t = parse_dict!(iter => p, Vec, Vec::new(); n, Vec, Vec::new(); @@ -186,12 +188,12 @@ fn parse_shape(iter: &mut Tokenizer) -> Result { indices, Vec, Vec::new(); alpha, ShapeAlpha, ShapeAlpha::None => - p, "\"point3 P\"", iter.parse_list_3(Pos3::new)?; - n, "\"normal N\"", iter.parse_list_3(Dir3::new)?; - uv, "\"point2 uv\"", iter.parse_list_2(|u, v| [u, v])?; - indices, "\"integer indices\"", iter.parse_list()?; - alpha, "\"float alpha\"", ShapeAlpha::Value(iter.parse_parameter()?); - alpha, "\"texture alpha\"", ShapeAlpha::Texture(iter.parse_parameter()?) 
+ p, "point3 P", iter.parse_list_3(Pos3::new)?; + n, "normal N", iter.parse_list_3(Dir3::new)?; + uv, "point2 uv", iter.parse_list_2(|u, v| [u, v])?; + indices, "integer indices", iter.parse_list()?; + alpha, "float alpha", ShapeAlpha::Value(iter.parse_parameter()?); + alpha, "texture alpha", ShapeAlpha::Texture(iter.parse_parameter()?) ); if t.p.len() < 4 { @@ -226,18 +228,18 @@ fn parse_shape(iter: &mut Tokenizer) -> Result { t.alpha, )) } - "\"loopsubdiv\"" => { + "loopsubdiv" => { let t = parse_dict!(iter => levels, u32, 3; indices, Vec, Vec::new(); p, Vec, Vec::new(); alpha, ShapeAlpha, ShapeAlpha::None => - levels, "\"integer levels\"", iter.parse_parameter()?; - indices, "\"integer indices\"", iter.parse_list()?; - p, "\"point3 P\"", iter.parse_list_3(Pos3::new)?; - alpha, "\"float alpha\"", ShapeAlpha::Value(iter.parse_parameter()?); - alpha, "\"texture alpha\"", ShapeAlpha::Texture(iter.parse_parameter()?) + levels, "integer levels", iter.parse_parameter()?; + indices, "integer indices", iter.parse_list()?; + p, "point3 P", iter.parse_list_3(Pos3::new)?; + alpha, "float alpha", ShapeAlpha::Value(iter.parse_parameter()?); + alpha, "texture alpha", ShapeAlpha::Texture(iter.parse_parameter()?) ); if t.indices.is_empty() { @@ -257,7 +259,7 @@ fn parse_shape(iter: &mut Tokenizer) -> Result { t.alpha, )) } - "\"disk\"" => { + "disk" => { let t = parse_dict!(iter => height, Float, 0.0; radius, Float, 1.0; @@ -265,12 +267,12 @@ fn parse_shape(iter: &mut Tokenizer) -> Result { phimax, Float, 360.0; alpha, ShapeAlpha, ShapeAlpha::None => - height, "\"float height\"", iter.parse_parameter()?; - radius, "\"float radius\"", iter.parse_parameter()?; - innerradius, "\"float innerradius\"", iter.parse_parameter()?; - phimax, "\"float phimax\"", iter.parse_parameter()?; - alpha, "\"float alpha\"", ShapeAlpha::Value(iter.parse_parameter()?); - alpha, "\"texture alpha\"", ShapeAlpha::Texture(iter.parse_parameter()?) 
+ height, "float height", iter.parse_parameter()?; + radius, "float radius", iter.parse_parameter()?; + innerradius, "float innerradius", iter.parse_parameter()?; + phimax, "float phimax", iter.parse_parameter()?; + alpha, "float alpha", ShapeAlpha::Value(iter.parse_parameter()?); + alpha, "texture alpha", ShapeAlpha::Texture(iter.parse_parameter()?) ); Ok(Statement::Shape( @@ -283,18 +285,18 @@ fn parse_shape(iter: &mut Tokenizer) -> Result { t.alpha, )) } - "\"plymesh\"" => { + "plymesh" => { let t = parse_dict!(iter => filename, String, String::new(); displacement, Option, None; edgelength, Float, 1.0; alpha, ShapeAlpha, ShapeAlpha::None => - filename, "\"string filename\"", iter.parse_parameter()?; - displacement, "\"string displacement\"", Some(iter.parse_parameter()?); - edgelength, "\"float edgelength\"", iter.parse_parameter()?; - alpha, "\"float alpha\"", ShapeAlpha::Value(iter.parse_parameter()?); - alpha, "\"texture alpha\"", ShapeAlpha::Texture(iter.parse_parameter()?) + filename, "string filename", iter.parse_parameter()?; + displacement, "string displacement", Some(iter.parse_parameter()?); + edgelength, "float edgelength", iter.parse_parameter()?; + alpha, "float alpha", ShapeAlpha::Value(iter.parse_parameter()?); + alpha, "texture alpha", ShapeAlpha::Texture(iter.parse_parameter()?) 
); Ok(Statement::Shape( @@ -312,11 +314,11 @@ fn parse_shape(iter: &mut Tokenizer) -> Result { fn parse_camera(tokenizer: &mut Tokenizer) -> Result { let camera_type = tokenizer - .next() + .next_if_string_value() .ok_or(miette!("unable to get shape type"))??; match camera_type.as_str() { - "\"orthographic\"" => { + "orthographic" => { let t = parse_dict!(tokenizer => shutteropen, Float, 0.0; shutterclose, Float, 1.0; @@ -325,12 +327,12 @@ fn parse_camera(tokenizer: &mut Tokenizer) -> Result { lens_radius, Float, 0.0; focal_distance, Float, Float::powi(10.0, 30) => - shutteropen, "\"float shutteropen\"", tokenizer.parse_parameter()?; - shutterclose, "\"float shutterclose\"", tokenizer.parse_parameter()?; - frame_aspect_ratio, "\"float frameaspectratio\"", Some(tokenizer.parse_parameter()?); - screen_window, "\"float screenwindow\"", Some(tokenizer.parse_parameter()?); - lens_radius, "\"float lensradius\"", tokenizer.parse_parameter()?; - focal_distance, "\"float focaldistance\"", tokenizer.parse_parameter()? + shutteropen, "float shutteropen", tokenizer.parse_parameter()?; + shutterclose, "float shutterclose", tokenizer.parse_parameter()?; + frame_aspect_ratio, "float frameaspectratio", Some(tokenizer.parse_parameter()?); + screen_window, "float screenwindow", Some(tokenizer.parse_parameter()?); + lens_radius, "float lensradius", tokenizer.parse_parameter()?; + focal_distance, "float focaldistance", tokenizer.parse_parameter()? 
); Ok(Statement::Camera(PbrtCamera { @@ -344,7 +346,7 @@ fn parse_camera(tokenizer: &mut Tokenizer) -> Result { shutter_close: t.shutterclose, })) } - "\"perspective\"" => { + "perspective" => { let t = parse_dict!(tokenizer => shutteropen, Float, 0.0; shutterclose, Float, 1.0; @@ -354,13 +356,13 @@ fn parse_camera(tokenizer: &mut Tokenizer) -> Result { focal_distance, Float, Float::powi(10.0, 30); fov, Float, 90.0 => - shutteropen, "\"float shutteropen\"", tokenizer.parse_parameter()?; - shutterclose, "\"float shutterclose\"", tokenizer.parse_parameter()?; - frame_aspect_ratio, "\"float frameaspectratio\"", Some(tokenizer.parse_parameter()?); - screen_window, "\"float screenwindow\"", Some(tokenizer.parse_parameter()?); - lens_radius, "\"float lensradius\"", tokenizer.parse_parameter()?; - focal_distance, "\"float focaldistance\"", tokenizer.parse_parameter()?; - fov, "\"float fov\"", tokenizer.parse_parameter()? + shutteropen, "float shutteropen", tokenizer.parse_parameter()?; + shutterclose, "float shutterclose", tokenizer.parse_parameter()?; + frame_aspect_ratio, "float frameaspectratio", Some(tokenizer.parse_parameter()?); + screen_window, "float screenwindow", Some(tokenizer.parse_parameter()?); + lens_radius, "float lensradius", tokenizer.parse_parameter()?; + focal_distance, "float focaldistance", tokenizer.parse_parameter()?; + fov, "float fov", tokenizer.parse_parameter()? 
); Ok(Statement::Camera(PbrtCamera { @@ -382,16 +384,15 @@ fn parse_camera(tokenizer: &mut Tokenizer) -> Result { impl Lexer { fn next(&mut self, context: &PbrtContext) -> Option> { match self.input.next() { - Some(Ok(s)) => match s.as_str() { + Some(Ok(Token::Identifier(s))) => match s.as_str() { "AttributeBegin" => Some(Ok(Statement::AttributeBegin)), "AttributeEnd" => Some(Ok(Statement::AttributeEnd)), "Include" => { let s = self .input - .next() + .next_if_string_value() .unwrap() .unwrap() - .trim_matches('"') .to_string(); Some(Ok(Statement::Include(s))) @@ -434,9 +435,8 @@ impl Lexer { } else { let mut v = Vec::new(); - while let Some(p) = self - .input - .next_if(|s| !s.starts_with(|c: char| c.is_ascii_alphabetic())) + while let Some(p) = + self.input.next_if(|s| !matches!(s, Token::Identifier(_))) { match p { Ok(c) => v.push(c), @@ -448,6 +448,10 @@ impl Lexer { } } }, + Some(Ok(s)) => Some(Err(miette!( + labels = vec![self.input.last_span_labeled(Some("here"))], + "expected identifier got {s:?}" + ))), Some(Err(e)) => Some(Err(e)), None => None, } @@ -455,29 +459,14 @@ impl Lexer { } fn parse_transform(input: &mut Tokenizer) -> Result { - if !input - .next() - .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "[")) - { - bail!("expected list.") - } - + input.next_expect_bracket_open()?; let mut v = [0.0; 16]; for i in &mut v { - *i = input - .next() - .ok_or(miette!("value expected"))?? - .parse::() - .into_diagnostic()?; + *i = input.parse_next()?; } - if !input - .next() - .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]")) - { - bail!("expected list end.") - } + input.next_expect_bracket_close()?; if v[3] != 0.0 || v[7] != 0.0 || v[11] != 0.0 || v[15] != 1.0 { bail!("invalid transform entry") @@ -509,24 +498,11 @@ fn parse_scale(iter: &mut Tokenizer) -> Result { } fn parse_rotate(iter: &mut Tokenizer) -> Result { - let angle = iter - .next() - .ok_or(miette!("missing argument"))?? 
- .parse() - .into_diagnostic()?; + let angle = iter.parse_parameter()?; let dir = Dir3::new( - iter.next() - .ok_or(miette!("missing argument"))?? - .parse() - .into_diagnostic()?, - iter.next() - .ok_or(miette!("missing argument"))?? - .parse() - .into_diagnostic()?, - iter.next() - .ok_or(miette!("missing argument"))?? - .parse() - .into_diagnostic()?, + iter.parse_parameter()?, + iter.parse_parameter()?, + iter.parse_parameter()?, ); Ok(Statement::ConcatTransform(AffineTransform::rotation( @@ -721,7 +697,7 @@ fn inner_parse_pbrt(path: impl AsRef + std::fmt::Debug) -> Result { return Err(miette!("The camera can only be set once.")); } camera = Some((c, context.get_ctm())); - named_transforms.insert(String::from("\"camera\""), context.get_ctm()); + named_transforms.insert(String::from("camera"), context.get_ctm()); } Statement::CoordinateSystem(s) => { named_transforms.insert(s, context.get_ctm()); diff --git a/ray-tracing-pbrt-scene/src/material.rs b/ray-tracing-pbrt-scene/src/material.rs index 933993d..dfc4649 100644 --- a/ray-tracing-pbrt-scene/src/material.rs +++ b/ray-tracing-pbrt-scene/src/material.rs @@ -13,9 +13,9 @@ pub fn parse_make_named_material( input: &mut Tokenizer, context: &PbrtContext, ) -> Result<(String, Arc)> { - let name = input.parse_next()?; + let name = input.next_string_value()?; - if input.parse_next::()?.as_str() != "\"string type\"" { + if input.next_string_value()?.as_str() != "string type" { return Err(miette!( "first element of make named material dict has to be type" )); @@ -71,7 +71,7 @@ fn parse_2d_float_texture( input: &mut Tokenizer, context: &PbrtContext, ) -> Result> { - let n = input.parse_parameter()?; + let n = input.next_string_value()?; context .get_texture(&n) @@ -83,7 +83,7 @@ fn parse_2d_spectrum_texture( input: &mut Tokenizer, context: &PbrtContext, ) -> Result> { - let n = input.parse_parameter()?; + let n = input.next_string_value()?; context .get_texture(&n) @@ -95,51 +95,51 @@ pub fn parse_material( input: &mut 
Tokenizer, context: &PbrtContext, ) -> Result> { - let material: String = input.parse_parameter()?; + let material: String = input.next_string_value()?; match material.as_str() { - "\"diffuse\"" => Ok(Arc::new(parse_dict2!(input, PbrtDiffuseMaterial; + "diffuse" => Ok(Arc::new(parse_dict2!(input, PbrtDiffuseMaterial; reflectance, Either::A(Color::black()), [ - "\"rgb reflectance\"", Either::A(texture::parse_rgb(input)?); - "\"spectrum reflectance\"", Either::A(texture::parse_spectrum(input)?); - "\"texture reflectance\"", Either::B(parse_2d_spectrum_texture(input, context)?) + "rgb reflectance", Either::A(texture::parse_rgb(input)?); + "spectrum reflectance", Either::A(texture::parse_spectrum(input)?); + "texture reflectance", Either::B(parse_2d_spectrum_texture(input, context)?) ] ))), - "\"coateddiffuse\"" => Ok(Arc::new(parse_dict2!(input, PbrtCoatedDiffuseMaterial; + "coateddiffuse" => Ok(Arc::new(parse_dict2!(input, PbrtCoatedDiffuseMaterial; albedo, Either::A(Color::black()), [ - "\"rgb albedo\"", Either::A(texture::parse_rgb(input)?); - "\"spectrum albedo\"", Either::A(texture::parse_spectrum(input)?); - "\"texture albedo\"", Either::B(parse_2d_spectrum_texture(input, context)?) + "rgb albedo", Either::A(texture::parse_rgb(input)?); + "spectrum albedo", Either::A(texture::parse_spectrum(input)?); + "texture albedo", Either::B(parse_2d_spectrum_texture(input, context)?) ]; g, Either::A(0.0), [ - "\"float g\"", Either::A(input.parse_parameter()?); - "\"texture g\"", Either::B(parse_2d_float_texture(input, context)?) + "float g", Either::A(input.parse_parameter()?); + "texture g", Either::B(parse_2d_float_texture(input, context)?) 
]; - maxdepth, 10, ["\"integer maxdepth\"", input.parse_parameter()?]; - nsamples, 1, ["\"integer nsamples\"", input.parse_parameter()?]; - thickness, 0.01, ["\"float thickness\"", input.parse_parameter()?]; + maxdepth, 10, ["integer maxdepth", input.parse_parameter()?]; + nsamples, 1, ["integer nsamples", input.parse_parameter()?]; + thickness, 0.01, ["float thickness", input.parse_parameter()?]; roughness, Either::A(0.0), [ - "\"float roughness\"", Either::A(input.parse_parameter()?); - "\"texture roughness\"", Either::B(parse_2d_float_texture(input, context)?) + "float roughness", Either::A(input.parse_parameter()?); + "texture roughness", Either::B(parse_2d_float_texture(input, context)?) ]; uroughness, Either::A(0.0), [ - "\"float uroughness\"", Either::A(input.parse_parameter()?); - "\"texture uroughness\"", Either::B(parse_2d_float_texture(input, context)?) + "float uroughness", Either::A(input.parse_parameter()?); + "texture uroughness", Either::B(parse_2d_float_texture(input, context)?) ]; vroughness, Either::A(0.0), [ - "\"float vroughness\"", Either::A(input.parse_parameter()?); - "\"texture vroughness\"", Either::B(parse_2d_float_texture(input, context)?) + "float vroughness", Either::A(input.parse_parameter()?); + "texture vroughness", Either::B(parse_2d_float_texture(input, context)?) ]; reflectance, Either::A(Color::black()), [ - "\"rgb reflectance\"", Either::A(texture::parse_rgb(input)?); - "\"spectrum reflectance\"", Either::A(texture::parse_spectrum(input)?); - "\"texture reflectance\"", Either::B(parse_2d_spectrum_texture(input, context)?) + "rgb reflectance", Either::A(texture::parse_rgb(input)?); + "spectrum reflectance", Either::A(texture::parse_spectrum(input)?); + "texture reflectance", Either::B(parse_2d_spectrum_texture(input, context)?) 
]; eta, Either::A(Either::A(1.5)), [ - "\"float eta\"", Either::A(Either::A(input.parse_parameter()?)); - "\"rgb eta\"", Either::B(Either::A(parse_rgb(input)?)); - "\"spectrum eta\"", Either::B(Either::A(texture::parse_spectrum(input)?)); - "\"texture eta\"", { + "float eta", Either::A(Either::A(input.parse_parameter()?)); + "rgb eta", Either::B(Either::A(parse_rgb(input)?)); + "spectrum eta", Either::B(Either::A(texture::parse_spectrum(input)?)); + "texture eta", { let n = input.parse_parameter()?; let t = context.get_texture(&n).ok_or_else(|| miette!("Unknown texture"))?; match Arc::clone(t).get_2d_spectrum_texture() { @@ -149,12 +149,12 @@ pub fn parse_material( } ] ))), - "\"dielectric\"" => Ok(Arc::new(parse_dict2!(input, PbrtDielectricMaterial; + "dielectric" => Ok(Arc::new(parse_dict2!(input, PbrtDielectricMaterial; eta, Either::A(Either::A(1.5)), [ - "\"float eta\"", Either::A(Either::A(input.parse_parameter()?)); - "\"rgb eta\"", Either::B(Either::A(parse_rgb(input)?)); - "\"spectrum eta\"", Either::B(Either::A(texture::parse_spectrum(input)?)); - "\"texture eta\"", { + "float eta", Either::A(Either::A(input.parse_parameter()?)); + "rgb eta", Either::B(Either::A(parse_rgb(input)?)); + "spectrum eta", Either::B(Either::A(texture::parse_spectrum(input)?)); + "texture eta", { let n = input.parse_parameter()?; let t = context.get_texture(&n).ok_or_else(|| miette!("Unknown texture"))?; match Arc::clone(t).get_2d_spectrum_texture() { @@ -164,16 +164,16 @@ pub fn parse_material( } ]; roughness, Either::A(0.0), [ - "\"float roughness\"", Either::A(input.parse_parameter()?); - "\"texture roughness\"", Either::B(parse_2d_float_texture(input, context)?) + "float roughness", Either::A(input.parse_parameter()?); + "texture roughness", Either::B(parse_2d_float_texture(input, context)?) ]; uroughness, Either::A(0.0), [ - "\"float uroughness\"", Either::A(input.parse_parameter()?); - "\"texture uroughness\"", Either::B(parse_2d_float_texture(input, context)?) 
+ "float uroughness", Either::A(input.parse_parameter()?); + "texture uroughness", Either::B(parse_2d_float_texture(input, context)?) ]; vroughness, Either::A(0.0), [ - "\"float vroughness\"", Either::A(input.parse_parameter()?); - "\"texture vroughness\"", Either::B(parse_2d_float_texture(input, context)?) + "float vroughness", Either::A(input.parse_parameter()?); + "texture vroughness", Either::B(parse_2d_float_texture(input, context)?) ] ))), _ => Err(miette!("Unknown material {material}")), diff --git a/ray-tracing-pbrt-scene/src/texture.rs b/ray-tracing-pbrt-scene/src/texture.rs index ef4afd8..8549f6d 100644 --- a/ray-tracing-pbrt-scene/src/texture.rs +++ b/ray-tracing-pbrt-scene/src/texture.rs @@ -48,8 +48,8 @@ enum TextureMapping { impl TextureMapping { fn new(x: String) -> Result { match x.as_str() { - "\"uv\"" => Ok(TextureMapping::UV), - "\"spherical\"" => Ok(TextureMapping::Spherical), + "uv" => Ok(TextureMapping::UV), + "spherical" => Ok(TextureMapping::Spherical), _ => Err(miette!("Error")), } } @@ -69,10 +69,10 @@ fn parse_float_texture( input: &mut Tokenizer, context: &PbrtContext, ) -> Result> { - let texture_class: String = input.parse_next()?; + let texture_class = input.next_string_value()?; match texture_class.as_str() { - "\"checkerboard\"" => { + "checkerboard" => { let t = parse_dict!(input => mapping, TextureMapping, TextureMapping::UV; uscale, Float, 1.0; @@ -83,16 +83,16 @@ fn parse_float_texture( tex1, Either, Either::A(1.0); tex2, Either, Either::A(0.0) => - mapping, "\"string mapping\"", TextureMapping::new(input.parse_parameter()?)?; - uscale, "\"float uscale\"", input.parse_parameter()?; - vscale, "\"float vscale\"", input.parse_parameter()?; - udelta, "\"float udelta\"", input.parse_parameter()?; - vdelta, "\"float vdelta\"", input.parse_parameter()?; - dimension, "\"integer dimension\"", TextureDimension::new(input.parse_parameter()?)?; - tex1, "\"float tex1\"", Either::A(input.parse_parameter()?); - tex1, "\"texture tex1\"", 
Either::B(input.parse_parameter()?); - tex2, "\"float tex2\"", Either::A(input.parse_parameter()?); - tex2, "\"texture tex2\"", Either::B(input.parse_parameter()?) + mapping, "string mapping", TextureMapping::new(input.next_string_value()?)?; + uscale, "float uscale", input.parse_parameter()?; + vscale, "float vscale", input.parse_parameter()?; + udelta, "float udelta", input.parse_parameter()?; + vdelta, "float vdelta", input.parse_parameter()?; + dimension, "integer dimension", TextureDimension::new(input.parse_parameter()?)?; + tex1, "float tex1", Either::A(input.parse_parameter()?); + tex1, "texture tex1", Either::B(input.next_string_value()?); + tex2, "float tex2", Either::A(input.parse_parameter()?); + tex2, "texture tex2", Either::B(input.next_string_value()?) ); @@ -130,7 +130,11 @@ fn parse_float_texture( } } - _ => Err(miette!("unknown error {texture_class}")), + _ => Err(miette!( + labels = vec![input.last_span_labeled(Some("here"))], + "unknown error {texture_class}" + ) + .with_source_code(input.get_src())), } } @@ -138,9 +142,9 @@ fn parse_spectrum_texture( input: &mut Tokenizer, context: &PbrtContext, ) -> Result> { - let texture_class: String = input.parse_next()?; + let texture_class = input.next_string_value()?; match texture_class.as_str() { - "\"checkerboard\"" => { + "checkerboard" => { let t = parse_dict!(input => mapping, TextureMapping, TextureMapping::UV; uscale, Float, 1.0; @@ -151,18 +155,18 @@ fn parse_spectrum_texture( tex1, Either, Either::A(Color::white()); tex2, Either, Either::A(Color::black()) => - mapping, "\"string mapping\"", TextureMapping::new(input.parse_parameter()?)?; - uscale, "\"float uscale\"", input.parse_parameter()?; - vscale, "\"float vscale\"", input.parse_parameter()?; - udelta, "\"float udelta\"", input.parse_parameter()?; - vdelta, "\"float vdelta\"", input.parse_parameter()?; - dimension, "\"integer dimension\"", TextureDimension::new(input.parse_parameter()?)?; - tex1, "\"rgb tex1\"", 
Either::A(parse_rgb(input)?); - tex1, "\"spectrum tex1\"", Either::A(parse_spectrum(input)?); - tex1, "\"texture tex1\"", Either::B(input.parse_parameter()?); - tex2, "\"rgb tex2\"", Either::A(parse_rgb(input)?); - tex2, "\"spectrum tex2\"", Either::A(parse_spectrum(input)?); - tex2, "\"texture tex2\"", Either::B(input.parse_parameter()?) + mapping, "string mapping", TextureMapping::new(input.next_string_value()?)?; + uscale, "float uscale", input.parse_parameter()?; + vscale, "float vscale", input.parse_parameter()?; + udelta, "float udelta", input.parse_parameter()?; + vdelta, "float vdelta", input.parse_parameter()?; + dimension, "integer dimension", TextureDimension::new(input.parse_parameter()?)?; + tex1, "rgb tex1", Either::A(parse_rgb(input)?); + tex1, "spectrum tex1", Either::A(parse_spectrum(input)?); + tex1, "texture tex1", Either::B(input.next_string_value()?); + tex2, "rgb tex2", Either::A(parse_rgb(input)?); + tex2, "spectrum tex2", Either::A(parse_spectrum(input)?); + tex2, "texture tex2", Either::B(input.next_string_value()?) ); @@ -201,7 +205,7 @@ fn parse_spectrum_texture( } } } - "\"imagemap\"" => { + "imagemap" => { let t = parse_dict!(input => mapping, TextureMapping, TextureMapping::UV; uscale, Float, 1.0; @@ -213,22 +217,22 @@ fn parse_spectrum_texture( encoding, ImageMapEncoding, ImageMapEncoding::SRGB; scale, Float, 1.0 => - mapping, "\"string mapping\"", TextureMapping::new(input.parse_parameter()?)?; - uscale, "\"float uscale\"", input.parse_parameter()?; - vscale, "\"float vscale\"", input.parse_parameter()?; - udelta, "\"float udelta\"", input.parse_parameter()?; - vdelta, "\"float vdelta\"", input.parse_parameter()?; - filename, "\"string filename\"", input.parse_parameter()?; - wrap, "\"string wrap\"", ImageMapWrap::new(input.parse_parameter()?)?; - encoding, "\"string encoding\"", ImageMapEncoding::new(input.parse_parameter()?)?; - scale, "\"float scale\"", input.parse_parameter()? 
+ mapping, "string mapping", TextureMapping::new(input.next_string_value()?)?; + uscale, "float uscale", input.parse_parameter()?; + vscale, "float vscale", input.parse_parameter()?; + udelta, "float udelta", input.parse_parameter()?; + vdelta, "float vdelta", input.parse_parameter()?; + filename, "string filename", input.next_string_value()?; + wrap, "string wrap", ImageMapWrap::new(input.next_string_value()?)?; + encoding, "string encoding", ImageMapEncoding::new(input.next_string_value()?)?; + scale, "float scale", input.parse_parameter()? ); let mapping = UVMapping::new(t.mapping, t.uscale, t.vscale, t.udelta, t.vdelta); - let path = dbg!(input.get_base_path()).join(t.filename.trim_matches('\"')); + let path = dbg!(input.get_base_path()).join(t.filename); Ok(Arc::new(SpectrumImageMapTexture::new( mapping, @@ -239,16 +243,16 @@ fn parse_spectrum_texture( dbg!(path), )?)) } - "\"scale\"" => { + "scale" => { let t = parse_dict!(input => tex, Either, Either::A(Color::white()); scale, Either, Either::A(1.0) => - tex, "\"rgb tex\"", Either::A(parse_rgb(input)?); - tex, "\"spectrum tex\"", Either::A(parse_spectrum(input)?); - tex, "\"texture tex\"", Either::B(input.parse_parameter()?); - scale, "\"float scale\"", Either::A(input.parse_parameter()?); - scale, "\"texture scale\"", Either::B(input.parse_parameter()?) + tex, "rgb tex", Either::A(parse_rgb(input)?); + tex, "spectrum tex", Either::A(parse_spectrum(input)?); + tex, "texture tex", Either::B(input.next_string_value()?); + scale, "float scale", Either::A(input.parse_parameter()?); + scale, "texture scale", Either::B(input.next_string_value()?) 
); Ok(Arc::new(SpectrumScaleTexture2d { @@ -284,12 +288,12 @@ pub fn parse_texture( input: &mut Tokenizer, context: &PbrtContext, ) -> Result<(String, Arc)> { - let texture_name: String = input.parse_next()?; - let texture_type: String = input.parse_next()?; + let texture_name: String = input.next_string_value()?; + let texture_type: String = input.next_string_value()?; match texture_type.as_str() { - "\"spectrum\"" => parse_spectrum_texture(input, context).map(|t| (texture_name, t)), - "\"float\"" => parse_float_texture(input, context).map(|t| (texture_name, t)), + "spectrum" => parse_spectrum_texture(input, context).map(|t| (texture_name, t)), + "float" => parse_float_texture(input, context).map(|t| (texture_name, t)), _ => Err(miette!("Texture type has to be spectrum or float")), } } @@ -303,6 +307,18 @@ struct UVMapping { mapping: TextureMapping, } +impl std::default::Default for UVMapping { + fn default() -> Self { + Self { + uscale: 1.0, + vscale: 1.0, + udelta: 0.0, + vdelta: 0.0, + mapping: TextureMapping::UV, + } + } +} + impl UVMapping { fn new( mapping: TextureMapping, diff --git a/ray-tracing-pbrt-scene/src/texture/imagemap.rs b/ray-tracing-pbrt-scene/src/texture/imagemap.rs index a892179..1e40957 100644 --- a/ray-tracing-pbrt-scene/src/texture/imagemap.rs +++ b/ray-tracing-pbrt-scene/src/texture/imagemap.rs @@ -14,9 +14,9 @@ pub(super) enum ImageMapWrap { impl ImageMapWrap { pub(super) fn new(x: String) -> Result { match x.as_str() { - "\"repeat\"" => Ok(Self::Repeat), - "\"black\"" => Ok(Self::Black), - "\"clamp\"" => Ok(Self::Clamp), + "repeat" => Ok(Self::Repeat), + "black" => Ok(Self::Black), + "clamp" => Ok(Self::Clamp), _ => Err(miette!("error image map wrap")), } } @@ -33,15 +33,11 @@ pub(super) enum ImageMapEncoding { impl ImageMapEncoding { pub(super) fn new(x: String) -> Result { match x.as_str() { - "\"sRGB\"" => Ok(Self::SRGB), - "\"linear\"" => Ok(Self::Linear), - s if s.starts_with("\"gamma ") => Ok(Self::Gamma( - s.split_at(7) - .1 - 
 .trim_matches('\"') - .parse() - .into_diagnostic()?, - )), + "sRGB" => Ok(Self::SRGB), + "linear" => Ok(Self::Linear), + s if s.starts_with("gamma ") => { + Ok(Self::Gamma(s.split_at(6).1.parse().into_diagnostic()?)) + } _ => Err(miette!("error image map encoding")), } } diff --git a/ray-tracing-pbrt-scene/src/tokenizer.rs b/ray-tracing-pbrt-scene/src/tokenizer.rs index 7fa1403..9b75ac9 100644 --- a/ray-tracing-pbrt-scene/src/tokenizer.rs +++ b/ray-tracing-pbrt-scene/src/tokenizer.rs @@ -1,5 +1,5 @@ use crate::{BytesToChar, error::SourceFile}; -use miette::{Diagnostic, IntoDiagnostic, Report, Result, SourceSpan}; +use miette::{Diagnostic, IntoDiagnostic, LabeledSpan, Report, Result, SourceSpan, miette}; use std::{ fs::File, io::{BufReader, Bytes, Read}, @@ -12,7 +12,7 @@ pub struct Tokenizer { inner: InnerTokenizer, path: PathBuf, base_path: PathBuf, - peeked: Option>>, + peeked: Option>>, last_span: SourceSpan, } @@ -47,8 +47,17 @@ impl InnerTokenizer { } } +#[derive(Debug)] +pub enum Token { + Identifier(String), + BracketOpen, + BracketClose, + Value(String), + StringValue(String), +} + impl InnerTokenizer { - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option> { while self.input_iterator.peek().is_some_and(|c| { c.as_ref() .is_ok_and(|&(_, c)| c.is_whitespace() || c == '#') }) { @@ -72,10 +81,10 @@ impl InnerTokenizer { } match self.input_iterator.next() { - Some(Ok((i, '['))) => Some(Ok((String::from('['), SourceSpan::new(i.into(), 1)))), - Some(Ok((i, ']'))) => Some(Ok((String::from(']'), SourceSpan::new(i.into(), 1)))), + Some(Ok((i, '['))) => Some(Ok((Token::BracketOpen, SourceSpan::new(i.into(), 1)))), + Some(Ok((i, ']'))) => Some(Ok((Token::BracketClose, SourceSpan::new(i.into(), 1)))), Some(Ok((i, '"'))) => { - let mut r = String::from('"'); + let mut r = String::new(); while let Some(p) = self .input_iterator .next_if(|c| c.as_ref().is_ok_and(|&(_, c)| c != '"')) { @@ -97,9 +106,11 @@ impl InnerTokenizer { bad_bit: SourceSpan::new(i.into(), 
r.len()), }))); }; - r.push('"'); let len = r.len(); - Some(Ok((r, SourceSpan::new(i.into(), len)))) + Some(Ok(( + Token::StringValue(r), + SourceSpan::new(i.into(), len + 2), + ))) } Some(Ok((i, c))) => { let mut r = String::new(); @@ -114,7 +125,12 @@ impl InnerTokenizer { } } let len = r.len(); - Some(Ok((r, SourceSpan::new(i.into(), len)))) + if r.starts_with(|c: char| c.is_ascii_alphabetic()) && (r != "true" && r != "false") + { + Some(Ok((Token::Identifier(r), SourceSpan::new(i.into(), len)))) + } else { + Some(Ok((Token::Value(r), SourceSpan::new(i.into(), len)))) + } } Some(Err(e)) => Some(Err(e)), None => None, } @@ -123,14 +139,14 @@ impl InnerTokenizer { } impl Tokenizer { - pub fn next_inner(&mut self) -> Option> { + pub fn next_inner(&mut self) -> Option> { match self.peeked.take() { Some(v) => v, None => self.inner.next(), } } - pub fn next(&mut self) -> Option> { + pub fn next(&mut self) -> Option> { let v = self.next_inner(); if let Some(Ok((_, s))) = v { self.last_span = s; @@ -138,9 +154,66 @@ impl Tokenizer { v.map(|o| o.map(|i| i.0)) } - pub fn next_if(&mut self, func: impl FnOnce(&String) -> bool) -> Option> { + pub fn next_if(&mut self, f: impl FnOnce(&Token) -> bool) -> Option> { match self.next_inner() { - Some(Ok((matched, s))) if func(&matched) => { + Some(Ok((matched, s))) if f(&matched) => { + self.last_span = s; + Some(Ok(matched)) + } + other => { + assert!(self.peeked.is_none()); + self.peeked = Some(other); + None + } + } + } + + pub fn next_if_value(&mut self) -> Option> { + match self.next_inner() { + Some(Ok((Token::Value(matched), s))) => { + self.last_span = s; + Some(Ok(matched)) + } + other => { + assert!(self.peeked.is_none()); + self.peeked = Some(other); + None + } + } + } + + pub fn next_string_value(&mut self) -> Result { + self.next_if_string_value().ok_or_else(|| { + miette!( + labels = vec![self.last_span_labeled(Some("here"))], + "expected a string value" + ) + .with_source_code(self.get_src()) + })? 
+ } + + pub fn next_if_string_value(&mut self) -> Option> { + match self.next_inner() { + Some(Ok((Token::BracketOpen, s1))) => match self.next_inner() { + Some(Ok((Token::StringValue(s), s2))) => { + if let Err(e) = self.next_expect_bracket_close() { + Some(Err(e)) + } else { + self.last_span = SourceSpan::new( + s1.offset().into(), + s2.offset() - s1.offset() + s2.len(), + ); + Some(Ok(s)) + } + } + Some(Ok((t, s2))) => Some(Err(miette!( + labels = vec![LabeledSpan::new_with_span(Some("here".to_string()), s2)], + "expected string value got {t:?}" + ))), + Some(Err(e)) => Some(Err(e)), + None => None, + }, + Some(Ok((Token::StringValue(matched), s))) => { self.last_span = s; Some(Ok(matched)) } @@ -158,32 +231,79 @@ impl Tokenizer { ::Err: std::error::Error + std::marker::Send + std::marker::Sync + 'static, { - let s = self.next().ok_or_else(|| MissingValueError { + if let Token::Value(s) = self.next().ok_or_else(|| MissingValueError { src: SourceFile { path: self.path.to_path_buf(), }, bad_bit: self.last_span(), - })??; - - s.parse::().into_diagnostic().map_err(|e| { - ParsingError { - src: SourceFile { - path: self.path.clone(), - }, - bad_bit: self.last_span, - error: Some(e), - } - .into() - }) + })?? 
{ + s.parse::().into_diagnostic().map_err(|e| { + ParsingError { + src: SourceFile { + path: self.path.clone(), + }, + bad_bit: self.last_span, + error: Some(e), + } + .into() + }) + } else { + Err(miette!( + labels = vec![self.last_span_labeled(Some("here"))], + "expected a value" + ) + .with_source_code(self.get_src())) + } } - pub fn parse_next_if(&mut self, func: impl FnOnce(&String) -> bool) -> Option> + pub fn next_expect_bracket_open(&mut self) -> Result<()> { + let p = self.next().ok_or_else(|| { + miette!( + labels = vec![self.last_span_labeled(Some("here"))], + "expected opening brackets" + ) + .with_source_code(self.get_src()) + })??; + + match &p { + Token::BracketOpen => Ok(()), + s => Err(miette!( + labels = vec![self.last_span_labeled(Some("here"))], + "expected opening brackets got {s:?}" + ) + .with_source_code(self.get_src())), + } + } + + pub fn next_expect_bracket_close(&mut self) -> Result<()> { + let p = self.next().ok_or_else(|| { + miette!( + labels = vec![LabeledSpan::new_with_span( + Some("here".to_string()), + self.last_span() + )], + "expected closing brackets" + ) + .with_source_code(self.get_src()) + })??; + + match &p { + Token::BracketClose => Ok(()), + s => Err(miette!( + labels = vec![self.last_span_labeled(Some("here"))], + "expected closing brackets got {s:?}" + ) + .with_source_code(self.get_src())), + } + } + + pub fn parse_next_if_value(&mut self) -> Option> where T: std::str::FromStr, ::Err: std::error::Error + std::marker::Send + std::marker::Sync + 'static, { - let s = self.next_if(func)?; + let s = self.next_if_value()?; Some(match s { Ok(s) => s.parse::().into_diagnostic().map_err(|e| { @@ -204,6 +324,10 @@ impl Tokenizer { self.last_span } + pub fn last_span_labeled(&self, label: Option) -> LabeledSpan { + LabeledSpan::new_with_span(label.map(|s| s.to_string()), self.last_span()) + } + pub fn parse_parameter(&mut self) -> Result where T: std::str::FromStr, @@ -217,25 +341,23 @@ impl Tokenizer { bad_bit: self.last_span(), 
})??; - match p.as_str() { - "[" => { + match p { + Token::BracketOpen => { let d = self.parse_next()?; - if !self - .next() - .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]")) - { - return Err(ListEndError { - src: SourceFile { - path: self.path.to_path_buf(), - }, - bad_bit: self.last_span(), - } - .into()); - } + + self.next_expect_bracket_close()?; Ok(d) } - s => Ok(s.parse::().into_diagnostic()?), + Token::Value(s) => Ok(s.parse::().into_diagnostic()?), + _ => Err(miette!( + labels = vec![LabeledSpan::new_with_span( + Some("here".to_string()), + self.last_span() + )], + "expected value or [ got {p:?}" + ) + .with_source_code(self.get_src())), } } @@ -246,35 +368,14 @@ impl Tokenizer { std::marker::Send + std::marker::Sync + std::error::Error + 'static, { let mut data = Vec::new(); - if !self - .next() - .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "[")) - { - return Err(ListBeginError { - src: SourceFile { - path: self.path.to_path_buf(), - }, - bad_bit: self.last_span(), - } - .into()); - } - while let Some(p) = self.parse_next_if(|p| p != "]").transpose()? { + self.next_expect_bracket_open()?; + + while let Some(p) = self.parse_next_if_value().transpose()? { data.push(p); } - if !self - .next() - .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]")) - { - return Err(ListEndError { - src: SourceFile { - path: self.path.to_path_buf(), - }, - bad_bit: self.last_span(), - } - .into()); - } + self.next_expect_bracket_close()?; Ok(data) } @@ -287,21 +388,11 @@ impl Tokenizer { F: Fn(T, T) -> P, { let mut data = Vec::new(); - if !self - .next() - .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "[")) - { - return Err(ListBeginError { - src: SourceFile { - path: self.path.to_path_buf(), - }, - bad_bit: self.last_span(), - } - .into()); - } - while let Some(pa) = self.parse_next_if(|p| p != "]").transpose()? { - if let Some(pb) = self.parse_next_if(|p| p != "]").transpose()? 
{ + self.next_expect_bracket_open()?; + + while let Some(pa) = self.parse_next_if_value().transpose()? { + if let Some(pb) = self.parse_next_if_value().transpose()? { data.push(f(pa, pb)); } else { return Err(UncompleteError { @@ -314,18 +405,7 @@ impl Tokenizer { } } - if !self - .next() - .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]")) - { - return Err(ListEndError { - src: SourceFile { - path: self.path.to_path_buf(), - }, - bad_bit: self.last_span(), - } - .into()); - } + self.next_expect_bracket_close()?; Ok(data) } @@ -338,22 +418,12 @@ impl Tokenizer { F: Fn(T, T, T) -> P, { let mut data = Vec::new(); - if !self - .next() - .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "[")) - { - return Err(ListBeginError { - src: SourceFile { - path: self.path.to_path_buf(), - }, - bad_bit: self.last_span(), - } - .into()); - } - while let Some(pa) = self.parse_next_if(|p| p != "]").transpose()? { - if let Some(pb) = self.parse_next_if(|p| p != "]").transpose()? { - if let Some(pc) = self.parse_next_if(|p| p != "]").transpose()? { + self.next_expect_bracket_open()?; + + while let Some(pa) = self.parse_next_if_value().transpose()? { + if let Some(pb) = self.parse_next_if_value().transpose()? { + if let Some(pc) = self.parse_next_if_value().transpose()? { data.push(f(pa, pb, pc)); } else { return Err(UncompleteError { @@ -375,18 +445,7 @@ impl Tokenizer { } } - if !self - .next() - .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]")) - { - return Err(ListEndError { - src: SourceFile { - path: self.path.to_path_buf(), - }, - bad_bit: self.last_span(), - } - .into()); - } + self.next_expect_bracket_close()?; Ok(data) } @@ -406,7 +465,7 @@ macro_rules! parse_dict { let mut $name_decl = None; )+ - while let Some(p) = $tokenizer.next_if(|p| p.starts_with('"')).transpose()? { + while let Some(p) = $tokenizer.next_if_string_value().transpose()? { match p.as_str() { $( $expr => { @@ -449,7 +508,7 @@ macro_rules! 
parse_dict2 { let mut $name = None; )+ - while let Some(p) = $tokenizer.next_if(|p| p.starts_with('"')).transpose()? { + while let Some(p) = $tokenizer.next_if_string_value().transpose()? { match p.as_str() { $( $( @@ -548,17 +607,6 @@ struct ListBeginError { bad_bit: SourceSpan, } -#[derive(Error, Debug, Diagnostic)] -#[error("list error")] -#[diagnostic(help("expected list to end"))] -struct ListEndError { - #[source_code] - src: SourceFile, - - #[label("Here")] - bad_bit: SourceSpan, -} - #[derive(Error, Debug, Diagnostic)] #[error("value expected")] #[diagnostic(help("expected a value"))]