From 5b5e491cb29d109651e147f46c4a89b54617b2fb Mon Sep 17 00:00:00 2001 From: hal8174 Date: Fri, 8 Aug 2025 12:57:46 +0200 Subject: [PATCH] Add more error handling --- ray-tracing-pbrt-scene/example.pbrt | 2 - ray-tracing-pbrt-scene/src/error.rs | 2 +- ray-tracing-pbrt-scene/src/lib.rs | 30 +---- ray-tracing-pbrt-scene/src/tokenizer.rs | 167 ++++++++++++++++++++---- 4 files changed, 144 insertions(+), 57 deletions(-) diff --git a/ray-tracing-pbrt-scene/example.pbrt b/ray-tracing-pbrt-scene/example.pbrt index dbc11c6..87e1f7b 100644 --- a/ray-tracing-pbrt-scene/example.pbrt +++ b/ray-tracing-pbrt-scene/example.pbrt @@ -34,5 +34,3 @@ AttributeBegin "point3 P" [ -20 -20 0 20 -20 0 -20 20 0 20 20 0 ] "point2 uv" [ 0 0 1 0 1 1 0 1 ] AttributeEnd - -" fd diff --git a/ray-tracing-pbrt-scene/src/error.rs b/ray-tracing-pbrt-scene/src/error.rs index 51f3613..1371295 100644 --- a/ray-tracing-pbrt-scene/src/error.rs +++ b/ray-tracing-pbrt-scene/src/error.rs @@ -62,7 +62,7 @@ impl SourceCode for SourceFile { } }) .map(|x| x + span.offset()) - .unwrap_or(data[span.offset()..].len()); + .unwrap_or(data.len()); let compact_data = &data[start_pos..end_pos]; diff --git a/ray-tracing-pbrt-scene/src/lib.rs b/ray-tracing-pbrt-scene/src/lib.rs index b6e514c..ac4b750 100644 --- a/ray-tracing-pbrt-scene/src/lib.rs +++ b/ray-tracing-pbrt-scene/src/lib.rs @@ -12,18 +12,6 @@ use thiserror::Error; mod tokenizer; mod error; - -#[derive(Error, Debug, Diagnostic)] -#[error("oops!")] -#[diagnostic(help("justus ist doof"))] -struct MyBad { - #[source_code] - src: SourceFile, - - #[label("Here")] - bad_bit: SourceSpan, -} - struct Lexer { input: Tokenizer, } @@ -40,6 +28,7 @@ impl Lexer { enum Statement { AttributeBegin, AttributeEnd, + WorldBegin, Include(String), ConcatTransform(AffineTransform), Shape(ShapeType), @@ -295,6 +284,7 @@ impl Lexer { "ConcatTransform" => { Some(parse_transform(&mut self.input).map(Statement::ConcatTransform)) } + "WorldBegin" => Some(Ok(Statement::WorldBegin)), _ => { if s.chars().any(|c| !c.is_ascii_alphabetic()) { Some(Err(miette!("malformed identifier"))) @@ -359,20 +349,7 @@ fn parse_transform(input: &mut Tokenizer) -> Result { } fn parse_translate(iter: &mut Tokenizer) -> Result { - let pos = Pos3::new( - iter.next() - .ok_or(miette!("missing argument"))?? - .parse() - .into_diagnostic()?, - iter.next() - .ok_or(miette!("missing argument"))?? - .parse() - .into_diagnostic()?, - iter.next() - .ok_or(miette!("missing argument"))?? 
- .parse() - .into_diagnostic()?, - ); + let pos = Pos3::new(iter.parse_next()?, iter.parse_next()?, iter.parse_next()?); Ok(Statement::ConcatTransform(AffineTransform::translation( pos, @@ -592,6 +569,7 @@ fn inner_parse_pbrt( Statement::Unknown(s, items) => { eprintln!("Unknown statement: {s}") } + Statement::WorldBegin => (), } } diff --git a/ray-tracing-pbrt-scene/src/tokenizer.rs b/ray-tracing-pbrt-scene/src/tokenizer.rs index f291e61..1cbbc7c 100644 --- a/ray-tracing-pbrt-scene/src/tokenizer.rs +++ b/ray-tracing-pbrt-scene/src/tokenizer.rs @@ -7,7 +7,7 @@ use std::{ }; use thiserror::Error; -use crate::{BytesToChar, MyBad, SourceFile}; +use crate::{BytesToChar, SourceFile}; pub struct Tokenizer { inner: InnerTokenizer, @@ -85,11 +85,11 @@ impl InnerTokenizer { .next() .is_none_or(|c| !c.is_ok_and(|(_, c)| c == '"')) { - return Some(Err(From::from(MyBad { + return Some(Err(From::from(UnfinishedString { src: SourceFile { path: PathBuf::from("ray-tracing-pbrt-scene/example.pbrt"), }, - bad_bit: SourceSpan::new(40.into(), 4), + bad_bit: SourceSpan::new(i.into(), r.len()), }))); }; r.push('"'); @@ -153,7 +153,12 @@ impl Tokenizer { ::Err: std::error::Error + std::marker::Send + std::marker::Sync + 'static, { - let s = self.next().ok_or_else(|| miette!("Value expected"))??; + let s = self.next().ok_or_else(|| MissingValueError { + src: SourceFile { + path: self.path.to_path_buf(), + }, + bad_bit: self.last_span(), + })??; s.parse::().into_diagnostic().map_err(|e| { ParsingError { @@ -167,6 +172,29 @@ impl Tokenizer { }) } + pub fn parse_next_if(&mut self, func: impl FnOnce(&String) -> bool) -> Option> + where + T: std::str::FromStr, + ::Err: + std::error::Error + std::marker::Send + std::marker::Sync + 'static, + { + let s = self.next_if(func)?; + + Some(match s { + Ok(s) => s.parse::().into_diagnostic().map_err(|e| { + ParsingError { + src: SourceFile { + path: self.path.clone(), + }, + bad_bit: self.last_span, + error: Some(e), + } + .into() + }), + Err(e) => Err(e), + }) + } + pub fn last_span(&self) -> SourceSpan { self.last_span } @@ -177,7 +205,12 @@ impl Tokenizer { ::Err: std::marker::Send + std::marker::Sync + std::error::Error + 'static, { - let p = self.next().ok_or(miette!("value expected"))??; + let p = self.next().ok_or_else(|| ListBeginError { + src: SourceFile { + path: self.path.to_path_buf(), + }, + bad_bit: self.last_span(), + })??; match p.as_str() { "[" => { @@ -186,7 +219,13 @@ impl Tokenizer { .next() .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]")) { - bail!("expected closing bracket.") + return Err(ListEndError { + src: SourceFile { + path: self.path.to_path_buf(), + }, + bad_bit: self.last_span(), + } + .into()); } Ok(d) @@ -205,18 +244,30 @@ impl Tokenizer { .next() .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "[")) { - bail!("expected list.") + return Err(ListBeginError { + src: SourceFile { + path: self.path.to_path_buf(), + }, + bad_bit: self.last_span(), + } + .into()); } - while let Some(p) = self.next_if(|p| p != "]").transpose()? { - data.push(p.parse().into_diagnostic()?); + while let Some(p) = self.parse_next_if(|p| p != "]").transpose()? 
{ + data.push(p); } if !self .next() .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]")) { - bail!("expected list end.") + return Err(ListEndError { + src: SourceFile { + path: self.path.to_path_buf(), + }, + bad_bit: self.last_span(), + } + .into()); } Ok(()) @@ -233,15 +284,18 @@ impl Tokenizer { .next() .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "[")) { - bail!("expected list.") + return Err(ListBeginError { + src: SourceFile { + path: self.path.to_path_buf(), + }, + bad_bit: self.last_span(), + } + .into()); } - while let Some(pa) = self.next_if(|p| p != "]").transpose()? { - if let Some(pb) = self.next_if(|p| p != "]").transpose()? { - data.push(f( - pa.parse().into_diagnostic()?, - pb.parse().into_diagnostic()?, - )); + while let Some(pa) = self.parse_next_if(|p| p != "]").transpose()? { + if let Some(pb) = self.parse_next_if(|p| p != "]").transpose()? { + data.push(f(pa, pb)); } else { return Err(UncompleteError { src: SourceFile { @@ -257,7 +311,13 @@ impl Tokenizer { .next() .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]")) { - bail!("expected list end.") + return Err(ListEndError { + src: SourceFile { + path: self.path.to_path_buf(), + }, + bad_bit: self.last_span(), + } + .into()); } Ok(()) @@ -274,17 +334,19 @@ impl Tokenizer { .next() .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "[")) { - bail!("expected list.") + return Err(ListBeginError { + src: SourceFile { + path: self.path.to_path_buf(), + }, + bad_bit: self.last_span(), + } + .into()); } - while let Some(pa) = self.next_if(|p| p != "]").transpose()? { - if let Some(pb) = self.next_if(|p| p != "]").transpose()? { - if let Some(pc) = self.next_if(|p| p != "]").transpose()? { - data.push(f( - pa.parse().into_diagnostic()?, - pb.parse().into_diagnostic()?, - pc.parse().into_diagnostic()?, - )); + while let Some(pa) = self.parse_next_if(|p| p != "]").transpose()? { + if let Some(pb) = self.parse_next_if(|p| p != "]").transpose()? { + if let Some(pc) = self.parse_next_if(|p| p != "]").transpose()? { + data.push(f(pa, pb, pc)); } else { return Err(UncompleteError { src: SourceFile { @@ -309,7 +371,13 @@ impl Tokenizer { .next() .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]")) { - bail!("expected list end.") + return Err(ListEndError { + src: SourceFile { + path: self.path.to_path_buf(), + }, + bad_bit: self.last_span(), + } + .into()); } Ok(()) @@ -340,3 +408,46 @@ struct UncompleteError { #[label("Here")] bad_bit: SourceSpan, } + +#[derive(Error, Debug, Diagnostic)] +#[error("Unfinished string")] +struct UnfinishedString { + #[source_code] + src: SourceFile, + + #[label("Here")] + bad_bit: SourceSpan, +} + +#[derive(Error, Debug, Diagnostic)] +#[error("list error")] +#[diagnostic(help("expected list"))] +struct ListBeginError { + #[source_code] + src: SourceFile, + + #[label("Here")] + bad_bit: SourceSpan, +} + +#[derive(Error, Debug, Diagnostic)] +#[error("list error")] +#[diagnostic(help("expected list to end"))] +struct ListEndError { + #[source_code] + src: SourceFile, + + #[label("Here")] + bad_bit: SourceSpan, +} + +#[derive(Error, Debug, Diagnostic)] +#[error("value expected")] +#[diagnostic(help("expected a value"))] +struct MissingValueError { + #[source_code] + src: SourceFile, + + #[label("Here")] + bad_bit: SourceSpan, +}
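
A note on the pattern these changes settle on: UnfinishedString, ListBeginError, ListEndError and MissingValueError all share the same miette shape, a #[source_code] field plus a #[label]led SourceSpan, so the rendered report can point at the offending token instead of only printing a message like the miette!()/bail!() calls they replace. Below is a minimal, self-contained sketch of that shape, not part of the patch: it swaps the crate's SourceFile for miette's NamedSource so it compiles on its own, and the struct name, input text and offsets are invented for illustration.

    use miette::{Diagnostic, NamedSource, Report, SourceSpan};
    use thiserror::Error;

    // Sketch of the span-carrying diagnostic used throughout this patch.
    // `UnfinishedStringDemo` and the values in `main` are made up; the real
    // tokenizer attaches its own `SourceFile` as the #[source_code] instead.
    #[derive(Error, Debug, Diagnostic)]
    #[error("Unfinished string")]
    #[diagnostic(help("string literals must be terminated with a closing quote"))]
    struct UnfinishedStringDemo {
        // The source text the span below points into.
        #[source_code]
        src: NamedSource<String>,

        // Byte offset and length of the unterminated literal.
        #[label("Here")]
        bad_bit: SourceSpan,
    }

    fn main() {
        let input = String::from("Shape \"sphere\" \"float radius");

        // Pretend the tokenizer saw the opening quote at byte 15 and then hit
        // end of input: the remaining 13 bytes are the unterminated token,
        // mirroring `SourceSpan::new(i.into(), r.len())` in the tokenizer change.
        let err = UnfinishedStringDemo {
            src: NamedSource::new("example.pbrt", input),
            bad_bit: SourceSpan::new(15.into(), 13),
        };

        // Debug-printing a `Report` renders the labelled snippet (graphically,
        // when miette's "fancy" feature is enabled).
        println!("{:?}", Report::new(err));
    }

Keeping the span in a dedicated struct is what lets the renderer underline the exact token; a plain formatted message would lose that once the tokenizer state moves on.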