diff --git a/src/executor.rs b/src/executor.rs
index 88d2a30..ef6fc50 100644
--- a/src/executor.rs
+++ b/src/executor.rs
@@ -317,6 +317,8 @@ where
             },
             ParseTree::FunctionCall(ident, args) => {
                 let obj = self.get_object_mut(&ident)?;
+                let globals = Self::obj_globals(obj);
+                let locals = Self::obj_locals(obj);
                 let v = Self::eval(obj)?;
 
                 match v {
@@ -325,7 +327,7 @@ where
                             .map(|x| Object::variable(x, self.globals.clone(), self.locals.clone()))
                             .collect();
 
-                        f.call(Self::obj_globals(obj), Self::obj_locals(obj), args)
+                        f.call(globals, locals, args)
                     },
                     _ => Err(RuntimeError::FunctionUndefined(ident.clone()))
                 }
diff --git a/src/function.rs b/src/function.rs
index bf9fd7c..49db30a 100644
--- a/src/function.rs
+++ b/src/function.rs
@@ -18,7 +18,7 @@ impl Display for FunctionType {
 
 #[derive(Clone, Debug, PartialEq)]
 pub struct Function {
-    name: Option<String>,
+    pub(crate) name: Option<String>,
     t: FunctionType,
     arg_names: Vec<String>,
     body: Box<ParseTree>,
diff --git a/src/parser.rs b/src/parser.rs
index dac0235..a577859 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -112,13 +112,16 @@ impl<'a, I: Iterator<Item = Result<Token, TokenizeError>>> Parser<'a, I> {
         }
     }
 
+    pub fn add_global(self, k: String, v: Type) -> Self { self.globals.insert(k, v); self }
     pub fn add_globals<Items: Iterator<Item = (String, Type)>>(self, items: Items) -> Self {
-        items.for_each(|(name, t)| _ = self.globals.insert(name, t));
+        items.for_each(|(name, t)| {
+            self.globals.insert(name, t);
+        });
 
         self
     }
@@ -133,7 +136,9 @@ impl<'a, I: Iterator<Item = Result<Token, TokenizeError>>> Parser<'a, I> {
     }
 
     pub fn add_locals<Items: Iterator<Item = (String, Type)>>(mut self, items: Items) -> Self {
-        items.for_each(|(name, t)| _ = self.locals.insert(name, t));
+        items.for_each(|(name, t)| {
+            self.locals.insert(name, t);
+        });
 
         self
     }
@@ -369,22 +374,27 @@ impl<'a, I: Iterator<Item = Result<Token, TokenizeError>>> Parser<'a, I> {
             .locals(locals).parse()?)))
     }
 
-    fn parse_function_declaration(tokens: &mut Peekable<I>, arg_count: usize) -> Result<(FunctionType, Vec<String>), ParseError> {
+    fn parse_function_declaration(
+        tokens: &mut Peekable<I>,
+        arg_count: usize) -> Result<(FunctionType, Vec<String>), ParseError>
+    {
         let args: Vec<(Type, String)> = (0..arg_count)
             .map(|_| Self::parse_function_declaration_parameter(tokens))
             .collect::<Result<_, _>>()?;
 
         let (types, names): (Vec<_>, Vec<_>) = args.into_iter().unzip();
-        let mut ret = Type::Any;
-
-        if tokens.next_if(|x| matches!(x, Ok(Token::Operator(Op::Arrow)))).is_some() {
-            ret = Self::parse_type(tokens)?;
-        }
+        let ret = if tokens.next_if(|x| matches!(x, Ok(Token::Operator(Op::Arrow)))).is_some() {
+            Self::parse_type(tokens)?
+        } else {
+            Type::Any
+        };
 
         Ok((FunctionType(Box::new(ret), types), names))
     }
 
-    fn parse_function_declaration_parameter(mut tokens: &mut Peekable<I>) -> Result<(Type, String), ParseError> {
+    fn parse_function_declaration_parameter(
+        mut tokens: &mut Peekable<I>) -> Result<(Type, String), ParseError>
+    {
         match tokens.next() {
             // untyped variable
             Some(Ok(Token::Identifier(x))) => Ok((Type::Any, x)),
@@ -412,7 +422,7 @@ impl<'a, I: Iterator<Item = Result<Token, TokenizeError>>> Parser<'a, I> {
         let mut ret = Type::Any;
 
         // this is annoying
-        // inside of the next_if closure, we already can know that its an error
+        // inside the next_if closure, we can already know that it's an error
         // and return it, but we cannot return out of a closure
 
         if let Some(t) = tokens.next_if(|x| matches!(x, Ok(Token::Operator(Op::Arrow)))) {
@@ -433,20 +443,16 @@ impl<'a, I: Iterator<Item = Result<Token, TokenizeError>>> Parser<'a, I> {
         }
     }
 
-    fn parse_type(tokens: &mut Peekable<I>) -> Result<Type, ParseError> {
+    // for some dumbass reason,
+    // this is the only code that breaks if it doesn't take an impl Iterator instead of simply I ...
+    fn parse_type(tokens: &mut Peekable<impl Iterator<Item = Result<Token, TokenizeError>>>) -> Result<Type, ParseError> {
         match tokens.next() {
             Some(Ok(Token::Type(t))) => Ok(t),
-            Some(Ok(Token::Operator(Op::FunctionDefine(n)))) => {
-                let args: Vec<Type> = (0..n)
-                    .map(|_| Self::parse_type(tokens))
-                    .collect::<Result<_, _>>()?;
-
-                let rett = Self::parse_type(tokens)?;
-
-                Ok(Type::Function(FunctionType(Box::new(rett), args.clone())))
-            },
             Some(Ok(Token::Operator(Op::OpenArray))) => {
                 let mut depth = 1;
+
+                // take tokens until we reach the end of this array
+                // if we don't collect them here it causes rust to overflow computing the types
                 let array_tokens = tokens.by_ref().take_while(|t| match t {
                     Ok(Token::Operator(Op::OpenArray)) => {
                         depth += 1;
@@ -459,18 +465,25 @@ impl<'a, I: Iterator<Item = Result<Token, TokenizeError>>> Parser<'a, I> {
                     _ => true,
                 }).collect::<Result<Vec<Token>, TokenizeError>>().map_err(|e| ParseError::TokenizeError(e))?;
 
-                if array_tokens.len() == 0 {
-                    return Ok(Type::Array(Box::new(Type::Any)));
-                }
+                // ... thanks to this conversion here. The compiler complains that the types don't
+                // match. there is code elsewhere in this codebase that looks exactly like this and
+                // still simply uses &mut Peekable as the type. I don't understand why this code
+                // is special, but we have to do horribleness for it to work.
+                let mut array_tokens = array_tokens
+                    .into_iter()
+                    .map(|t| Ok(t))
+                    .collect::<Vec<Result<Token, TokenizeError>>>()
+                    .into_iter()
+                    .peekable();
 
-                let t = Self::parse_type(tokens)?;
-                let _ = match tokens.next() {
-                    Some(Ok(Token::Operator(Op::CloseArray))) => (),
-                    _ => return Err(ParseError::UnmatchedArrayClose),
+                let t = match Self::parse_type(&mut array_tokens) {
+                    Ok(t) => t,
+                    Err(ParseError::UnexpectedEndInput) => Type::Any,
+                    Err(e) => return Err(e),
                 };
 
                 Ok(Type::Array(Box::new(t)))
-            }
+            },
             Some(Ok(t)) => Err(ParseError::UnwantedToken(t.clone())),
             Some(Err(e)) => Err(ParseError::TokenizeError(e)),
             None => Err(ParseError::UnexpectedEndInput),
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index b116b42..34df5da 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -419,15 +419,28 @@ impl std::iter::Iterator for Tokenizer {
 #[cfg(test)]
 mod tests {
     use std::str::FromStr;
-
+    use crate::parser::Parser;
     use super::*;
 
     #[test]
-    fn a() {
-        let program = ":. map : f .? x [Any] -> [Any]";
+    fn tokenizer() {
+        let program = ": length ?. x [] -> Int ?? x + 1 length tail x 0 length [ 1 2 3 ]";
 
         let tokens: Vec<Token> = Tokenizer::from_str(program).unwrap().collect::<Result<_, _>>().unwrap();
 
-        println!("{tokens:?}");
+        println!("{tokens:#?}");
+    }
+
+    #[test]
+    fn a() {
+        let program = ": length ?. x [] -> Int ?? x + 1 length tail x 0 length [ 1 2 3 ]";
+
+        let mut tokenizer = Tokenizer::from_str(program).unwrap().peekable();
+
+        let mut globals = HashMap::new();
+        let mut parser = Parser::new(&mut tokenizer, &mut globals);
+
+        let tree = parser.next();
+        println!("{tree:#?}");
     }
 }
\ No newline at end of file
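Side note on the parse_type workaround above: the sketch below is a self-contained reproduction of the buffering trick, with Token, Type and ParseError trimmed down to hypothetical stand-ins (the real crate nests operators as Token::Operator(Op::OpenArray) and has more variants and error cases). It is only meant to show why collecting the bracketed tokens into a Vec and re-wrapping them as a fresh peekable iterator works: every recursion level then sees the same concrete iterator type, whereas recursing directly on the take_while adapter would wrap the iterator type once more per `[` and blow up type checking.

use std::iter::Peekable;

// Hypothetical, trimmed-down stand-ins for the crate's real types.
#[derive(Debug)]
enum Type {
    Any,
    Int,
    Array(Box<Type>),
}

#[derive(Debug)]
enum Token {
    Type(Type),
    OpenArray,
    CloseArray,
}

#[derive(Debug)]
#[allow(dead_code)] // only ever used as a type parameter in this sketch
struct TokenizeError;

#[derive(Debug)]
enum ParseError {
    TokenizeError(TokenizeError),
    UnwantedToken(Token),
    UnexpectedEndInput,
}

fn parse_type(
    tokens: &mut Peekable<impl Iterator<Item = Result<Token, TokenizeError>>>,
) -> Result<Type, ParseError> {
    match tokens.next() {
        Some(Ok(Token::Type(t))) => Ok(t),
        Some(Ok(Token::OpenArray)) => {
            // Buffer everything up to the matching `]` into a plain Vec<Token>.
            let mut depth = 1;
            let array_tokens = tokens
                .by_ref()
                .take_while(|t| match t {
                    Ok(Token::OpenArray) => { depth += 1; true }
                    Ok(Token::CloseArray) => { depth -= 1; depth > 0 }
                    _ => true,
                })
                .collect::<Result<Vec<Token>, TokenizeError>>()
                .map_err(ParseError::TokenizeError)?;

            // Re-wrap the buffer as `Result`s so the recursive call sees the same
            // `Item` type as the outer iterator; an empty `[]` falls back to `Any`.
            let mut array_tokens = array_tokens
                .into_iter()
                .map(Ok::<Token, TokenizeError>)
                .peekable();

            let t = match parse_type(&mut array_tokens) {
                Ok(t) => t,
                Err(ParseError::UnexpectedEndInput) => Type::Any,
                Err(e) => return Err(e),
            };

            Ok(Type::Array(Box::new(t)))
        }
        Some(Ok(t)) => Err(ParseError::UnwantedToken(t)),
        Some(Err(e)) => Err(ParseError::TokenizeError(e)),
        None => Err(ParseError::UnexpectedEndInput),
    }
}

fn main() {
    // "[[Int]]" as a token stream; prints `Ok(Array(Array(Int)))`.
    let tokens = vec![
        Token::OpenArray,
        Token::OpenArray,
        Token::Type(Type::Int),
        Token::CloseArray,
        Token::CloseArray,
    ];
    let mut tokens = tokens.into_iter().map(Ok::<Token, TokenizeError>).peekable();
    println!("{:?}", parse_type(&mut tokens));
}

Because the buffered iterator is the same concrete type at every depth (and the same type the sketch's caller builds in main), monomorphization stays finite, which is also why the signature can take an impl Iterator without threading the outer I through the recursion.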