Some more refactoring.

This commit is contained in:
hal8174 2025-08-06 21:03:04 +02:00
parent 3873ba91d8
commit d799bb51bc
Signed by: hal8174
SSH key fingerprint: SHA256:NN98ZYwnrreQLSOV/g+amY7C3yL/mS1heD7bi5t6PPw
3 changed files with 253 additions and 224 deletions

View file

@ -8,6 +8,8 @@ Integrator "volpath"
Film "rgb" "string filename" "simple.png"
"integer xresolution" [400] "integer yresolution" [400]
Scale 123 123 -2421
WorldBegin
# uniform blue-ish illumination from all directions

View file

@ -6,10 +6,7 @@ use ray_tracing_core::{
math::{Dir3, Pos3},
prelude::Float,
};
use std::{
iter::Peekable,
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};
use thiserror::Error;
mod tokenizer;
@ -100,153 +97,6 @@ fn parse_look_at(iter: &mut Tokenizer) -> Result<Statement> {
))
}
/// Parses a single parameter value, which may be written either bare
/// (`value`) or wrapped in square brackets (`[ value ]`).
fn parse_parameter<T>(iter: &mut Tokenizer) -> Result<T>
where
    T: std::str::FromStr,
    <T as std::str::FromStr>::Err:
        std::marker::Send + std::marker::Sync + std::error::Error + 'static,
{
    let token = iter.next().ok_or(miette!("value expected"))??;
    if token.as_str() != "[" {
        // Bare value: parse the token directly.
        return token.parse::<T>().into_diagnostic();
    }
    // Bracketed form: read the value, then require the closing bracket.
    let value_token = iter.next().ok_or(miette!("value expected"))??;
    let value = value_token.parse::<T>().into_diagnostic()?;
    let closed = iter
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "]"));
    if !closed {
        bail!("expected closing bracket.")
    }
    Ok(value)
}
/// Reads a bracketed list `[ v1 v2 ... ]` and appends every parsed
/// element to `data`.
fn parse_list<T>(iter: &mut Tokenizer, data: &mut Vec<T>) -> Result<()>
where
    T: std::str::FromStr,
    <T as std::str::FromStr>::Err:
        std::marker::Send + std::marker::Sync + std::error::Error + 'static,
{
    // Opening bracket (end-of-input is tolerated by this check).
    let opened = iter
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "["));
    if !opened {
        bail!("expected list.")
    }
    // Consume and parse tokens until the closing bracket is seen.
    while let Some(element) = iter
        .next_if(|t| t.as_ref().is_ok_and(|t| t != "]"))
        .transpose()?
    {
        data.push(element.parse().into_diagnostic()?);
    }
    // Closing bracket.
    let closed = iter
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "]"));
    if !closed {
        bail!("expected list end.")
    }
    Ok(())
}
/// Reads a bracketed list of value *pairs* `[ a1 b1 a2 b2 ... ]` and
/// pushes `f(a, b)` for each pair into `data`.
fn parse_list_2<T, P, F>(iter: &mut Tokenizer, data: &mut Vec<P>, f: F) -> Result<()>
where
    T: std::str::FromStr,
    <T as std::str::FromStr>::Err:
        std::marker::Send + std::marker::Sync + std::error::Error + 'static,
    F: Fn(T, T) -> P,
{
    // Opening bracket (end-of-input is tolerated by this check).
    let opened = iter
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "["));
    if !opened {
        bail!("expected list.")
    }
    while let Some(first) = iter
        .next_if(|t| t.as_ref().is_ok_and(|t| t != "]"))
        .transpose()?
    {
        // A first element without its partner is a malformed group.
        let Some(second) = iter
            .next_if(|t| t.as_ref().is_ok_and(|t| t != "]"))
            .transpose()?
        else {
            bail!("Unfinished group")
        };
        data.push(f(
            first.parse().into_diagnostic()?,
            second.parse().into_diagnostic()?,
        ));
    }
    // Closing bracket.
    let closed = iter
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "]"));
    if !closed {
        bail!("expected list end.")
    }
    Ok(())
}
/// Reads a bracketed list of value *triples* `[ a1 b1 c1 a2 b2 c2 ... ]`
/// and pushes `f(a, b, c)` for each triple into `data`.
fn parse_list_3<T, P, F>(iter: &mut Tokenizer, data: &mut Vec<P>, f: F) -> Result<()>
where
    T: std::str::FromStr,
    <T as std::str::FromStr>::Err:
        std::marker::Send + std::marker::Sync + std::error::Error + 'static,
    F: Fn(T, T, T) -> P,
{
    // Opening bracket (end-of-input is tolerated by this check).
    let opened = iter
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "["));
    if !opened {
        bail!("expected list.")
    }
    while let Some(first) = iter
        .next_if(|t| t.as_ref().is_ok_and(|t| t != "]"))
        .transpose()?
    {
        // Each group must have exactly three members; anything less is an error.
        let Some(second) = iter
            .next_if(|t| t.as_ref().is_ok_and(|t| t != "]"))
            .transpose()?
        else {
            bail!("Unfinished group")
        };
        let Some(third) = iter
            .next_if(|t| t.as_ref().is_ok_and(|t| t != "]"))
            .transpose()?
        else {
            bail!("Unfinished group")
        };
        data.push(f(
            first.parse().into_diagnostic()?,
            second.parse().into_diagnostic()?,
            third.parse().into_diagnostic()?,
        ));
    }
    // Closing bracket.
    let closed = iter
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "]"));
    if !closed {
        bail!("expected list end.")
    }
    Ok(())
}
fn parse_shape(iter: &mut Tokenizer) -> Result<Statement> {
let shape_type = iter.next().ok_or(miette!("unable to get shape type"))??;
@ -257,13 +107,10 @@ fn parse_shape(iter: &mut Tokenizer) -> Result<Statement> {
let zmax = radius;
let phimax = 360.0;
while let Some(p) = iter
.next_if(|p| p.as_ref().is_ok_and(|p| p.starts_with('"')))
.transpose()?
{
while let Some(p) = iter.next_if(|p| p.starts_with('"')).transpose()? {
match p.as_str() {
"\"float radius\"" => {
radius = parse_parameter(iter)?;
radius = iter.parse_parameter()?;
}
_ => {
bail!("unknown argument {}", p)
@ -285,25 +132,22 @@ fn parse_shape(iter: &mut Tokenizer) -> Result<Statement> {
let mut s = Vec::new();
let mut uv = Vec::new();
while let Some(q) = iter
.next_if(|p| p.as_ref().is_ok_and(|p| p.starts_with('"')))
.transpose()?
{
while let Some(q) = iter.next_if(|p| p.starts_with('"')).transpose()? {
match q.as_str() {
"\"integer indices\"" => {
parse_list(iter, &mut indices)?;
iter.parse_list(&mut indices)?;
}
"\"point3 P\"" => {
parse_list_3(iter, &mut p, Pos3::new)?;
iter.parse_list_3(&mut p, Pos3::new)?;
}
"\"normal N\"" => {
parse_list_3(iter, &mut n, Dir3::new)?;
iter.parse_list_3(&mut n, Dir3::new)?;
}
"\"vector S\"" => {
parse_list_3(iter, &mut s, Dir3::new)?;
iter.parse_list_3(&mut s, Dir3::new)?;
}
"\"point2 uv\"" => {
parse_list_2(iter, &mut uv, |u, v| [u, v])?;
iter.parse_list_2(&mut uv, |u, v| [u, v])?;
}
_ => {
bail!("unknown argument {}", q)
@ -348,22 +192,19 @@ fn parse_shape(iter: &mut Tokenizer) -> Result<Statement> {
let mut n = Vec::new();
let mut uv = Vec::new();
while let Some(q) = iter
.next_if(|p| p.as_ref().is_ok_and(|p| p.starts_with('"')))
.transpose()?
{
while let Some(q) = iter.next_if(|p| p.starts_with('"')).transpose()? {
match q.as_str() {
"\"integer indices\"" => {
parse_list(iter, &mut indices)?;
iter.parse_list(&mut indices)?;
}
"\"point3 P\"" => {
parse_list_3(iter, &mut p, Pos3::new)?;
iter.parse_list_3(&mut p, Pos3::new)?;
}
"\"point2 uv\"" => {
parse_list_2(iter, &mut uv, |u, v| [u, v])?;
iter.parse_list_2(&mut uv, |u, v| [u, v])?;
}
"\"vector N\"" => {
parse_list_3(iter, &mut n, Dir3::new)?;
iter.parse_list_3(&mut n, Dir3::new)?;
}
_ => {
bail!("unknown argument {}", q)
@ -406,19 +247,16 @@ fn parse_shape(iter: &mut Tokenizer) -> Result<Statement> {
let mut p = Vec::new();
while let Some(q) = iter
.next_if(|p| p.as_ref().is_ok_and(|p| p.starts_with('"')))
.transpose()?
{
while let Some(q) = iter.next_if(|p| p.starts_with('"')).transpose()? {
match q.as_str() {
"\"point3 P\"" => {
parse_list_3(iter, &mut p, Pos3::new)?;
iter.parse_list_3(&mut p, Pos3::new)?;
}
"\"integer indices\"" => {
parse_list(iter, &mut indices)?;
iter.parse_list(&mut indices)?;
}
"\"integer levels\"" => {
levels = parse_parameter(iter)?;
levels = iter.parse_parameter()?;
}
_ => {
bail!("unknown argument {}", q)
@ -446,15 +284,12 @@ fn parse_shape(iter: &mut Tokenizer) -> Result<Statement> {
let mut innerradius = 0.0;
let mut phimax = 360.0;
while let Some(q) = iter
.next_if(|p| p.as_ref().is_ok_and(|p| p.starts_with('"')))
.transpose()?
{
while let Some(q) = iter.next_if(|p| p.starts_with('"')).transpose()? {
match q.as_str() {
"\"float height\"" => height = parse_parameter(iter)?,
"\"float radius\"" => radius = parse_parameter(iter)?,
"\"float innerradius\"" => innerradius = parse_parameter(iter)?,
"\"float phimax\"" => phimax = parse_parameter(iter)?,
"\"float height\"" => height = iter.parse_parameter()?,
"\"float radius\"" => radius = iter.parse_parameter()?,
"\"float innerradius\"" => innerradius = iter.parse_parameter()?,
"\"float phimax\"" => phimax = iter.parse_parameter()?,
_ => {
bail!("unknown argument {}", q)
}
@ -495,11 +330,9 @@ impl Lexer {
"Scale" => Some(parse_scale(&mut self.input)),
"Shape" => Some(parse_shape(&mut self.input)),
"Rotate" => Some(parse_rotate(&mut self.input)),
"Transform" => {
Some(parse_transform(&mut self.input).map(|a| Statement::Transform(a)))
}
"Transform" => Some(parse_transform(&mut self.input).map(Statement::Transform)),
"ConcatTransform" => {
Some(parse_transform(&mut self.input).map(|a| Statement::ConcatTransform(a)))
Some(parse_transform(&mut self.input).map(Statement::ConcatTransform))
}
_ => {
if s.chars().any(|c| !c.is_ascii_alphabetic()) {
@ -507,10 +340,10 @@ impl Lexer {
} else {
let mut v = Vec::new();
while let Some(p) = self.input.next_if(|s| {
!s.as_ref()
.is_ok_and(|s| s.starts_with(|c: char| c.is_ascii_alphabetic()))
}) {
while let Some(p) = self
.input
.next_if(|s| !s.starts_with(|c: char| c.is_ascii_alphabetic()))
{
match p {
Ok(c) => v.push(c),
Err(e) => return Some(Err(e)),
@ -587,18 +420,9 @@ fn parse_translate(iter: &mut Tokenizer) -> Result<Statement> {
fn parse_scale(iter: &mut Tokenizer) -> Result<Statement> {
Ok(Statement::ConcatTransform(AffineTransform::scale(
iter.next()
.ok_or(miette!("missing argument"))??
.parse()
.into_diagnostic()?,
iter.next()
.ok_or(miette!("missing argument"))??
.parse()
.into_diagnostic()?,
iter.next()
.ok_or(miette!("missing argument"))??
.parse()
.into_diagnostic()?,
iter.parse_next()?,
iter.parse_next()?,
iter.parse_next()?,
)))
}

View file

@ -1,16 +1,18 @@
use miette::{IntoDiagnostic, Result, SourceSpan};
use miette::{Diagnostic, IntoDiagnostic, Result, SourceSpan, bail, miette};
use std::{
fs::File,
io::{BufReader, Bytes, Read},
iter::Peekable,
path::{Path, PathBuf},
};
use thiserror::Error;
use crate::{BytesToChar, MyBad, SourceFile};
pub struct Tokenizer {
inner: InnerTokenizer,
peeked: Option<Option<Result<String>>>,
path: PathBuf,
peeked: Option<Option<Result<(String, SourceSpan)>>>,
last_span: SourceSpan,
}
@ -18,6 +20,7 @@ impl Tokenizer {
pub fn new(path: impl AsRef<Path>) -> Result<Self> {
Ok(Self {
peeked: None,
path: PathBuf::from(path.as_ref()),
inner: InnerTokenizer::new(path)?,
last_span: SourceSpan::new(0.into(), 0),
})
@ -40,7 +43,7 @@ impl InnerTokenizer {
}
impl InnerTokenizer {
fn next(&mut self) -> Option<Result<String>> {
fn next(&mut self) -> Option<Result<(String, SourceSpan)>> {
while self.input_iterator.peek().is_some_and(|c| {
c.as_ref()
.is_ok_and(|&(_, c)| c.is_whitespace() || c == '#')
@ -64,9 +67,9 @@ impl InnerTokenizer {
}
match self.input_iterator.next() {
Some(Ok((_, '['))) => Some(Ok(String::from('['))),
Some(Ok((_, ']'))) => Some(Ok(String::from(']'))),
Some(Ok((_, '"'))) => {
Some(Ok((i, '['))) => Some(Ok((String::from('['), SourceSpan::new(i.into(), 1)))),
Some(Ok((i, ']'))) => Some(Ok((String::from(']'), SourceSpan::new(i.into(), 1)))),
Some(Ok((i, '"'))) => {
let mut r = String::from('"');
while let Some(p) = self
.input_iterator
@ -90,9 +93,10 @@ impl InnerTokenizer {
})));
};
r.push('"');
Some(Ok(r))
let len = r.len();
Some(Ok((r, SourceSpan::new(i.into(), len))))
}
Some(Ok((_, c))) => {
Some(Ok((i, c))) => {
let mut r = String::new();
r.push(c);
while let Some(p) = self.input_iterator.next_if(|c| {
@ -104,7 +108,8 @@ impl InnerTokenizer {
Err(e) => return Some(Err(e)),
}
}
Some(Ok(r))
let len = r.len();
Some(Ok((r, SourceSpan::new(i.into(), len))))
}
Some(Err(e)) => Some(Err(e)),
None => None,
@ -113,19 +118,27 @@ impl InnerTokenizer {
}
impl Tokenizer {
pub fn next(&mut self) -> Option<Result<String>> {
pub fn next_inner(&mut self) -> Option<Result<(String, SourceSpan)>> {
match self.peeked.take() {
Some(v) => v,
None => self.inner.next(),
}
}
pub fn next_if(
&mut self,
func: impl FnOnce(&Result<String>) -> bool,
) -> Option<Result<String>> {
match self.next() {
Some(matched) if func(&matched) => Some(matched),
pub fn next(&mut self) -> Option<Result<String>> {
let v = self.next_inner();
if let Some(Ok((_, s))) = v {
self.last_span = s;
}
v.map(|o| o.map(|i| i.0))
}
pub fn next_if(&mut self, func: impl FnOnce(&String) -> bool) -> Option<Result<String>> {
match self.next_inner() {
Some(Ok((matched, s))) if func(&matched) => {
self.last_span = s;
Some(Ok(matched))
}
other => {
assert!(self.peeked.is_none());
self.peeked = Some(other);
@ -133,4 +146,194 @@ impl Tokenizer {
}
}
}
/// Pulls the next token and parses it into `T`.
///
/// On parse failure the underlying `FromStr` error is discarded and
/// replaced by a `ParsingError` pointing at the failing token's span in
/// the source file (`next()` records that span in `last_span`).
/// Returns a generic "Value expected" diagnostic when the input is
/// exhausted.
pub fn parse_next<T>(&mut self) -> Result<T>
where
    T: std::str::FromStr,
    <T as std::str::FromStr>::Err:
        std::error::Error + std::marker::Send + std::marker::Sync + 'static,
{
    let s = self.next().ok_or_else(|| miette!("Value expected"))??;
    s.parse::<T>().map_err(|_e| {
        ParsingError {
            src: SourceFile {
                path: self.path.clone(),
            },
            // Span of the token we just consumed via `next()`.
            bad_bit: self.last_span,
        }
        .into()
    })
}
/// Span of the most recently consumed token (zero-length span at offset
/// 0 before any token has been read).
pub fn last_span(&self) -> SourceSpan {
    self.last_span
}
pub fn parse_parameter<T>(&mut self) -> Result<T>
where
T: std::str::FromStr,
<T as std::str::FromStr>::Err:
std::marker::Send + std::marker::Sync + std::error::Error + 'static,
{
let p = self.next().ok_or(miette!("value expected"))??;
match p.as_str() {
"[" => {
let d = self.parse_next()?;
if !self
.next()
.is_none_or(|p| p.is_ok_and(|p| p.as_str() == "]"))
{
bail!("expected closing bracket.")
}
Ok(d)
}
s => Ok(s.parse::<T>().into_diagnostic()?),
}
}
/// Reads a bracketed list `[ v1 v2 ... ]` and appends every parsed
/// element to `data`.
pub fn parse_list<T>(&mut self, data: &mut Vec<T>) -> Result<()>
where
    T: std::str::FromStr,
    <T as std::str::FromStr>::Err:
        std::marker::Send + std::marker::Sync + std::error::Error + 'static,
{
    // Opening bracket (end-of-input is tolerated by this check).
    let opened = self
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "["));
    if !opened {
        bail!("expected list.")
    }
    // Consume and parse tokens until the closing bracket is seen.
    while let Some(element) = self.next_if(|t| t != "]").transpose()? {
        data.push(element.parse().into_diagnostic()?);
    }
    // Closing bracket.
    let closed = self
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "]"));
    if !closed {
        bail!("expected list end.")
    }
    Ok(())
}
/// Reads a bracketed list of value *pairs* `[ a1 b1 a2 b2 ... ]` and
/// pushes `f(a, b)` for each pair into `data`.
pub fn parse_list_2<T, P, F>(&mut self, data: &mut Vec<P>, f: F) -> Result<()>
where
    T: std::str::FromStr,
    <T as std::str::FromStr>::Err:
        std::marker::Send + std::marker::Sync + std::error::Error + 'static,
    F: Fn(T, T) -> P,
{
    // Opening bracket (end-of-input is tolerated by this check).
    let opened = self
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "["));
    if !opened {
        bail!("expected list.")
    }
    while let Some(first) = self.next_if(|t| t != "]").transpose()? {
        // A first element without its partner is a malformed group;
        // report it with the span of the last token we consumed.
        let Some(second) = self.next_if(|t| t != "]").transpose()? else {
            return Err(UncompleteError {
                src: SourceFile {
                    path: self.path.to_path_buf(),
                },
                bad_bit: self.last_span(),
            }
            .into());
        };
        data.push(f(
            first.parse().into_diagnostic()?,
            second.parse().into_diagnostic()?,
        ));
    }
    // Closing bracket.
    let closed = self
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "]"));
    if !closed {
        bail!("expected list end.")
    }
    Ok(())
}
/// Reads a bracketed list of value *triples* `[ a1 b1 c1 a2 b2 c2 ... ]`
/// and pushes `f(a, b, c)` for each triple into `data`.
pub fn parse_list_3<T, P, F>(&mut self, data: &mut Vec<P>, f: F) -> Result<()>
where
    T: std::str::FromStr,
    <T as std::str::FromStr>::Err:
        std::marker::Send + std::marker::Sync + std::error::Error + 'static,
    F: Fn(T, T, T) -> P,
{
    // Opening bracket (end-of-input is tolerated by this check).
    let opened = self
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "["));
    if !opened {
        bail!("expected list.")
    }
    while let Some(first) = self.next_if(|t| t != "]").transpose()? {
        // Each group must have exactly three members; a short group is
        // reported with the span of the last token we consumed.
        let Some(second) = self.next_if(|t| t != "]").transpose()? else {
            return Err(UncompleteError {
                src: SourceFile {
                    path: self.path.to_path_buf(),
                },
                bad_bit: self.last_span(),
            }
            .into());
        };
        let Some(third) = self.next_if(|t| t != "]").transpose()? else {
            return Err(UncompleteError {
                src: SourceFile {
                    path: self.path.to_path_buf(),
                },
                bad_bit: self.last_span(),
            }
            .into());
        };
        data.push(f(
            first.parse().into_diagnostic()?,
            second.parse().into_diagnostic()?,
            third.parse().into_diagnostic()?,
        ));
    }
    // Closing bracket.
    let closed = self
        .next()
        .is_none_or(|t| t.is_ok_and(|t| t.as_str() == "]"));
    if !closed {
        bail!("expected list end.")
    }
    Ok(())
}
}
/// Diagnostic emitted when a token cannot be parsed into the requested
/// value type; rendered by miette with the offending span highlighted.
#[derive(Error, Debug, Diagnostic)]
#[error("Parsing error")]
#[diagnostic(help("could not parse value"))]
struct ParsingError {
    // The file being parsed, used as the diagnostic's source code.
    #[source_code]
    src: SourceFile,
    // Span of the token that failed to parse.
    #[label("Here")]
    bad_bit: SourceSpan,
}
/// Diagnostic emitted when a grouped list (pairs/triples) ends before a
/// group is complete; rendered by miette with the offending span
/// highlighted.
#[derive(Error, Debug, Diagnostic)]
#[error("Unfinished group")]
#[diagnostic(help("groups are not completed"))]
struct UncompleteError {
    // The file being parsed, used as the diagnostic's source code.
    #[source_code]
    src: SourceFile,
    // Span of the last token consumed before the group broke off.
    #[label("Here")]
    bad_bit: SourceSpan,
}