Add more error handling

hal8174 2025-08-08 12:57:46 +02:00
parent 9065bfd7b4
commit 5b5e491cb2
Signed by: hal8174
SSH key fingerprint: SHA256:NN98ZYwnrreQLSOV/g+amY7C3yL/mS1heD7bi5t6PPw
4 changed files with 144 additions and 57 deletions

View file

@@ -34,5 +34,3 @@ AttributeBegin
     "point3 P" [ -20 -20 0 20 -20 0 -20 20 0 20 20 0 ]
     "point2 uv" [ 0 0 1 0 1 1 0 1 ]
 AttributeEnd
-" fd
-

View file

@@ -62,7 +62,7 @@ impl SourceCode for SourceFile {
         }
     })
     .map(|x| x + span.offset())
-    .unwrap_or(data[span.offset()..].len());
+    .unwrap_or(data.len());
     let compact_data = &data[start_pos..end_pos];
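The new fallback matters here: `data[span.offset()..].len()` is the length of the tail slice, i.e. an index relative to `span.offset()`, while the search above maps its hit back to an absolute position via `.map(|x| x + span.offset())`. On a miss, the old code therefore returned a relative index where an absolute one was needed. A minimal sketch of the bug, assuming the search is for a line break (names here are illustrative, not the crate's):

fn end_pos(data: &str, offset: usize) -> usize {
    data[offset..]
        .find('\n')
        .map(|x| x + offset) // hit: relative index made absolute
        // The old fallback, data[offset..].len(), stayed relative, so spans
        // near the end of the buffer produced a too-small end position.
        .unwrap_or(data.len()) // miss: absolute end of the buffer
}

fn main() {
    let data = "first line\nlast line"; // no trailing newline
    assert_eq!(end_pos(data, 11), data.len()); // 20, not the old result of 9
}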

View file

@@ -12,18 +12,6 @@ use thiserror::Error;
 mod tokenizer;
 mod error;
-#[derive(Error, Debug, Diagnostic)]
-#[error("oops!")]
-#[diagnostic(help("justus ist doof"))]
-struct MyBad {
-    #[source_code]
-    src: SourceFile,
-    #[label("Here")]
-    bad_bit: SourceSpan,
-}
 struct Lexer {
     input: Tokenizer,
 }
@@ -40,6 +28,7 @@ impl Lexer {
 enum Statement {
     AttributeBegin,
     AttributeEnd,
+    WorldBegin,
     Include(String),
     ConcatTransform(AffineTransform),
     Shape(ShapeType),
@@ -295,6 +284,7 @@ impl Lexer {
             "ConcatTransform" => {
                 Some(parse_transform(&mut self.input).map(Statement::ConcatTransform))
             }
+            "WorldBegin" => Some(Ok(Statement::WorldBegin)),
             _ => {
                 if s.chars().any(|c| !c.is_ascii_alphabetic()) {
                     Some(Err(miette!("malformed identifier")))
@@ -359,20 +349,7 @@ fn parse_transform(input: &mut Tokenizer) -> Result<AffineTransform> {
 }
 fn parse_translate(iter: &mut Tokenizer) -> Result<Statement> {
-    let pos = Pos3::new(
-        iter.next()
-            .ok_or(miette!("missing argument"))??
-            .parse()
-            .into_diagnostic()?,
-        iter.next()
-            .ok_or(miette!("missing argument"))??
-            .parse()
-            .into_diagnostic()?,
-        iter.next()
-            .ok_or(miette!("missing argument"))??
-            .parse()
-            .into_diagnostic()?,
-    );
+    let pos = Pos3::new(iter.parse_next()?, iter.parse_next()?, iter.parse_next()?);
     Ok(Statement::ConcatTransform(AffineTransform::translation(
         pos,
@@ -592,6 +569,7 @@ fn inner_parse_pbrt(
             Statement::Unknown(s, items) => {
                 eprintln!("Unknown statement: {s}")
             }
+            Statement::WorldBegin => (),
         }
     }

View file

@@ -7,7 +7,7 @@ use std::{
 };
 use thiserror::Error;
-use crate::{BytesToChar, MyBad, SourceFile};
+use crate::{BytesToChar, SourceFile};
 pub struct Tokenizer {
     inner: InnerTokenizer,
@@ -85,11 +85,11 @@ impl InnerTokenizer {
             .next()
             .is_none_or(|c| !c.is_ok_and(|(_, c)| c == '"'))
         {
-            return Some(Err(From::from(MyBad {
+            return Some(Err(From::from(UnfinishedString {
                 src: SourceFile {
                     path: PathBuf::from("ray-tracing-pbrt-scene/example.pbrt"),
                 },
-                bad_bit: SourceSpan::new(40.into(), 4),
+                bad_bit: SourceSpan::new(i.into(), r.len()),
             })));
         };
         r.push('"');
@@ -153,7 +153,12 @@ impl Tokenizer {
         <T as std::str::FromStr>::Err:
             std::error::Error + std::marker::Send + std::marker::Sync + 'static,
     {
-        let s = self.next().ok_or_else(|| miette!("Value expected"))??;
+        let s = self.next().ok_or_else(|| MissingValueError {
+            src: SourceFile {
+                path: self.path.to_path_buf(),
+            },
+            bad_bit: self.last_span(),
+        })??;
         s.parse::<T>().into_diagnostic().map_err(|e| {
             ParsingError {
@@ -167,6 +172,29 @@ impl Tokenizer {
         })
     }
+    pub fn parse_next_if<T>(&mut self, func: impl FnOnce(&String) -> bool) -> Option<Result<T>>
+    where
+        T: std::str::FromStr,
+        <T as std::str::FromStr>::Err:
+            std::error::Error + std::marker::Send + std::marker::Sync + 'static,
+    {
+        let s = self.next_if(func)?;
+        Some(match s {
+            Ok(s) => s.parse::<T>().into_diagnostic().map_err(|e| {
+                ParsingError {
+                    src: SourceFile {
+                        path: self.path.clone(),
+                    },
+                    bad_bit: self.last_span,
+                    error: Some(e),
+                }
+                .into()
+            }),
+            Err(e) => Err(e),
+        })
+    }
     pub fn last_span(&self) -> SourceSpan {
         self.last_span
     }
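A note on the shape of the new `parse_next_if`: returning `Option<Result<T>>` lets the list parsers below treat the closing bracket as `None` while still surfacing tokenizer and parse errors, and `Option::transpose` flips that into `Result<Option<T>>` so `?` and `while let Some(..)` each handle one concern. A self-contained sketch of that control flow, with a plain string iterator standing in for the crate's `Tokenizer`:

// Stand-in for parse_next_if: None at the terminator, Some(Err(..)) on a bad
// token, Some(Ok(v)) otherwise.
fn parse_next_if(tokens: &mut impl Iterator<Item = String>) -> Option<Result<f64, String>> {
    let t = tokens.next().filter(|t| t != "]")?;
    Some(t.parse::<f64>().map_err(|e| e.to_string()))
}

fn read_list(tokens: &mut impl Iterator<Item = String>) -> Result<Vec<f64>, String> {
    let mut data = Vec::new();
    // transpose: Option<Result<T, E>> -> Result<Option<T>, E>, so `?`
    // propagates errors while `while let` stops cleanly at `]`.
    while let Some(v) = parse_next_if(tokens).transpose()? {
        data.push(v);
    }
    Ok(data)
}

fn main() {
    let mut toks = ["1", "2.5", "]"].iter().map(|s| s.to_string());
    assert_eq!(read_list(&mut toks), Ok(vec![1.0, 2.5]));
}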
@@ -177,7 +205,12 @@ impl Tokenizer {
         <T as std::str::FromStr>::Err:
             std::marker::Send + std::marker::Sync + std::error::Error + 'static,
     {
-        let p = self.next().ok_or(miette!("value expected"))??;
+        let p = self.next().ok_or_else(|| ListBeginError {
+            src: SourceFile {
+                path: self.path.to_path_buf(),
+            },
+            bad_bit: self.last_span(),
+        })??;
         match p.as_str() {
             "[" => {
@@ -186,7 +219,13 @@ impl Tokenizer {
                     .next()
                     .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]"))
                 {
-                    bail!("expected closing bracket.")
+                    return Err(ListEndError {
+                        src: SourceFile {
+                            path: self.path.to_path_buf(),
+                        },
+                        bad_bit: self.last_span(),
+                    }
+                    .into());
                 }
                 Ok(d)
@@ -205,18 +244,30 @@ impl Tokenizer {
             .next()
             .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "["))
         {
-            bail!("expected list.")
+            return Err(ListBeginError {
+                src: SourceFile {
+                    path: self.path.to_path_buf(),
+                },
+                bad_bit: self.last_span(),
+            }
+            .into());
         }
-        while let Some(p) = self.next_if(|p| p != "]").transpose()? {
-            data.push(p.parse().into_diagnostic()?);
+        while let Some(p) = self.parse_next_if(|p| p != "]").transpose()? {
+            data.push(p);
         }
         if !self
             .next()
             .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]"))
         {
-            bail!("expected list end.")
+            return Err(ListEndError {
+                src: SourceFile {
+                    path: self.path.to_path_buf(),
+                },
+                bad_bit: self.last_span(),
+            }
+            .into());
         }
         Ok(())
@@ -233,15 +284,18 @@ impl Tokenizer {
             .next()
             .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "["))
         {
-            bail!("expected list.")
+            return Err(ListBeginError {
+                src: SourceFile {
+                    path: self.path.to_path_buf(),
+                },
+                bad_bit: self.last_span(),
+            }
+            .into());
         }
-        while let Some(pa) = self.next_if(|p| p != "]").transpose()? {
-            if let Some(pb) = self.next_if(|p| p != "]").transpose()? {
-                data.push(f(
-                    pa.parse().into_diagnostic()?,
-                    pb.parse().into_diagnostic()?,
-                ));
+        while let Some(pa) = self.parse_next_if(|p| p != "]").transpose()? {
+            if let Some(pb) = self.parse_next_if(|p| p != "]").transpose()? {
+                data.push(f(pa, pb));
             } else {
                 return Err(UncompleteError {
                     src: SourceFile {
@@ -257,7 +311,13 @@ impl Tokenizer {
             .next()
             .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]"))
         {
-            bail!("expected list end.")
+            return Err(ListEndError {
+                src: SourceFile {
+                    path: self.path.to_path_buf(),
+                },
+                bad_bit: self.last_span(),
+            }
+            .into());
         }
         Ok(())
@@ -274,17 +334,19 @@ impl Tokenizer {
             .next()
             .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "["))
         {
-            bail!("expected list.")
+            return Err(ListBeginError {
+                src: SourceFile {
+                    path: self.path.to_path_buf(),
+                },
+                bad_bit: self.last_span(),
+            }
+            .into());
         }
-        while let Some(pa) = self.next_if(|p| p != "]").transpose()? {
-            if let Some(pb) = self.next_if(|p| p != "]").transpose()? {
-                if let Some(pc) = self.next_if(|p| p != "]").transpose()? {
-                    data.push(f(
-                        pa.parse().into_diagnostic()?,
-                        pb.parse().into_diagnostic()?,
-                        pc.parse().into_diagnostic()?,
-                    ));
+        while let Some(pa) = self.parse_next_if(|p| p != "]").transpose()? {
+            if let Some(pb) = self.parse_next_if(|p| p != "]").transpose()? {
+                if let Some(pc) = self.parse_next_if(|p| p != "]").transpose()? {
+                    data.push(f(pa, pb, pc));
                 } else {
                     return Err(UncompleteError {
                         src: SourceFile {
@@ -309,7 +371,13 @@ impl Tokenizer {
             .next()
             .is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]"))
         {
-            bail!("expected list end.")
+            return Err(ListEndError {
+                src: SourceFile {
+                    path: self.path.to_path_buf(),
+                },
+                bad_bit: self.last_span(),
+            }
+            .into());
         }
         Ok(())
@@ -340,3 +408,46 @@ struct UncompleteError {
     #[label("Here")]
     bad_bit: SourceSpan,
 }
+#[derive(Error, Debug, Diagnostic)]
+#[error("Unfinished string")]
+struct UnfinishedString {
+    #[source_code]
+    src: SourceFile,
+    #[label("Here")]
+    bad_bit: SourceSpan,
+}
+#[derive(Error, Debug, Diagnostic)]
+#[error("list error")]
+#[diagnostic(help("expected list"))]
+struct ListBeginError {
+    #[source_code]
+    src: SourceFile,
+    #[label("Here")]
+    bad_bit: SourceSpan,
+}
+#[derive(Error, Debug, Diagnostic)]
+#[error("list error")]
+#[diagnostic(help("expected list to end"))]
+struct ListEndError {
+    #[source_code]
+    src: SourceFile,
+    #[label("Here")]
+    bad_bit: SourceSpan,
+}
+#[derive(Error, Debug, Diagnostic)]
+#[error("value expected")]
+#[diagnostic(help("expected a value"))]
+struct MissingValueError {
+    #[source_code]
+    src: SourceFile,
+    #[label("Here")]
+    bad_bit: SourceSpan,
+}
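All five new structs follow the same thiserror + miette pattern: `#[source_code]` names the field miette reads the source text from, and `#[label]` pins the `SourceSpan` that gets underlined in the rendered report. A self-contained sketch of that pattern, using a plain `String` as the source instead of this crate's `SourceFile`:

use miette::{Diagnostic, Report, SourceSpan};
use thiserror::Error;

#[derive(Error, Debug, Diagnostic)]
#[error("list error")]
#[diagnostic(help("expected list to end"))]
struct ListEndDemo {
    // String implements miette::SourceCode, so it can stand in for SourceFile.
    #[source_code]
    src: String,
    #[label("Here")]
    bad_bit: SourceSpan,
}

fn main() {
    let err = ListEndDemo {
        src: "Shape \"trianglemesh\" [ 0 1 2".to_string(),
        bad_bit: SourceSpan::new(21.into(), 7), // points at the unterminated list
    };
    // Renders the snippet with the labeled span underlined.
    eprintln!("{:?}", Report::new(err));
}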