Add statement support and a bug fix for arrays

This commit is contained in:
2024-10-19 00:22:40 -04:00
parent f2cfb03fa1
commit 44e5d74e5e
4 changed files with 180 additions and 145 deletions

View File

@@ -375,6 +375,7 @@ where
Value::Array(_, x) => Ok(x.last().ok_or(RuntimeError::EmptyArray)?.clone()),
t => Err(RuntimeError::NoOverloadForTypes("fini".into(), vec![t]))
},
ParseTree::Nop => Ok(Value::Nil),
}
}
}

View File

@@ -41,11 +41,11 @@ impl Function {
}
}
pub fn name(&self) -> Option<&str> {
pub(crate) fn name(&self) -> Option<&str> {
self.name.as_ref().map(|x| x.as_str())
}
pub fn get_type(&self) -> FunctionType {
pub(crate) fn get_type(&self) -> FunctionType {
self.t.clone()
}

View File

@@ -89,6 +89,7 @@ pub(crate) enum ParseTree {
// Misc
Print(Box<ParseTree>),
Nop,
}
/// Parses input tokens and produces ParseTrees for an Executor
@@ -133,9 +134,7 @@ impl<'a, I: Iterator<Item = Result<Token, TokenizeError>>> Parser<'a, I> {
}
fn parse(&mut self) -> Result<ParseTree, ParseError> {
match self.tokens.next() {
Some(Ok(token)) => {
match token {
match self.tokens.next().ok_or(ParseError::NoInput)?.map_err(|e| ParseError::TokenizeError(e))? {
Token::Constant(c) => Ok(ParseTree::Constant(c)),
Token::Identifier(ident) => {
match self.get_object_type(&ident)? {
@@ -250,6 +249,42 @@ impl<'a, I: Iterator<Item = Result<Token, TokenizeError>>> Parser<'a, I> {
Ok(tree)
}
Op::OpenStatement => {
let mut depth = 1;
// take tokens until we reach the end of this statement group
// if we don't collect them here, it causes rust to overflow while computing the iterator types
let tokens = self.tokens.by_ref().take_while(|t| match t {
Ok(Token::Operator(Op::OpenStatement)) => {
depth += 1;
true
},
Ok(Token::Operator(Op::CloseStatement)) => {
depth -= 1;
depth > 0
}
_ => true,
}).collect::<Result<Vec<_>, TokenizeError>>().map_err(|e| ParseError::TokenizeError(e))?;
let mut tokens = tokens
.into_iter()
.map(|t| Ok(t))
.collect::<Vec<Result<Token, TokenizeError>>>()
.into_iter()
.peekable();
let trees: Vec<ParseTree> = Parser::new(&mut tokens)
.globals(self.globals.to_owned())
.locals(self.locals.to_owned())
.collect::<Result<_, ParseError>>()?;
let tree = trees.into_iter().fold(
ParseTree::Nop,
|acc, x| ParseTree::Compose(Box::new(acc), Box::new(x.clone())),
);
Ok(tree)
}
Op::Empty => Ok(ParseTree::Constant(Value::Array(Type::Any, vec![]))),
Op::CloseArray => Err(ParseError::UnmatchedArrayClose),
Op::NotEqualTo => Ok(ParseTree::NotEqualTo(Box::new(self.parse()?), Box::new(self.parse()?))),
@@ -272,10 +307,6 @@ impl<'a, I: Iterator<Item = Result<Token, TokenizeError>>> Parser<'a, I> {
}
t => Err(ParseError::UnwantedToken(t)),
}
},
Some(Err(e)) => Err(ParseError::TokenizeError(e)),
None => Err(ParseError::NoInput),
}
}
fn parse_lambda(&mut self, arg_count: usize) -> Result<Function, ParseError> {
@@ -316,7 +347,6 @@ impl<'a, I: Iterator<Item = Result<Token, TokenizeError>>> Parser<'a, I> {
.map(|_| Self::parse_function_declaration_parameter(tokens))
.collect::<Result<_, _>>()?;
let (types, names): (Vec<_>, Vec<_>) = args.into_iter().unzip();
let mut ret = Type::Any;

View File

@@ -74,6 +74,8 @@ pub enum Op {
Print,
OpenArray,
CloseArray,
OpenStatement,
CloseStatement,
Empty,
And,
Or,
@@ -84,7 +86,7 @@ pub enum Op {
Fini,
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, PartialEq)]
pub enum Token {
Identifier(String),
Operator(Op),
@@ -196,6 +198,8 @@ impl<R: BufRead> Tokenizer<R> {
("!=", Op::NotEqualTo),
("[", Op::OpenArray),
("]", Op::CloseArray),
("(", Op::OpenStatement),
(")", Op::CloseStatement),
("!", Op::Not),
("&&", Op::And),
("||", Op::Or),