fix many warnings
@@ -1,13 +1,7 @@
-use lamm::{Tokenizer, Parser, Executor};
-
 use std::io::{self, BufReader};
 
 fn main() {
-    let tokenizer = Tokenizer::new(BufReader::new(io::stdin()));
-    let parser = Parser::new(tokenizer);
-    let values = Executor::new(parser);
-
-    for value in values {
+    for value in lamm::evaluate(BufReader::new(io::stdin())) {
         match value {
             Ok(v) => println!("{v}"),
             Err(e) => eprintln!("{e}"),
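The hunk above (presumably src/main.rs; this view does not show the file name) replaces the hand-wired Tokenizer, Parser, Executor pipeline with the single lamm::evaluate entry point added in src/lib.rs further down. A minimal sketch of driving that same entry point from a file instead of stdin, assuming only the evaluate signature shown in the lib.rs hunk; the script path is made up:

    use std::fs::File;
    use std::io::BufReader;

    fn main() -> std::io::Result<()> {
        // "program.lamm" is a hypothetical path; evaluate accepts any BufRead reader.
        let script = File::open("program.lamm")?;
        for value in lamm::evaluate(BufReader::new(script)) {
            match value {
                Ok(v) => println!("{v}"),
                Err(e) => eprintln!("{e}"),
            }
        }
        Ok(())
    }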
@@ -1,6 +1,5 @@
 use super::{Value, Type, FunctionDeclaration};
 use super::parser::{ParseTree, ParseError};
-use super::tokenizer::Op;
 
 use std::collections::HashMap;
 use std::borrow::Cow;
@@ -186,8 +185,6 @@ impl<I: Iterator<Item = Result<ParseTree, ParseError>>> Executor<I> {
                     self.exec(*scope, &mut Cow::Borrowed(&locals))
                 }
             },
-            ParseTree::GlobalEqu(ident, body) => todo!(),
-            ParseTree::LazyGlobalEqu(ident, body) => todo!(),
            ParseTree::FunctionDefinition(ident, args, r, body, scope) => {
                 let existing = locals.get(&ident).or(self.globals.get(&ident)).cloned();
 
@@ -197,7 +194,7 @@ impl<I: Iterator<Item = Result<ParseTree, ParseError>>> Executor<I> {
                 let locals = locals.to_mut();
 
                 locals.insert(ident.clone(), Object::Function(Function {
-                    decl: FunctionDeclaration { name: ident.clone(), r, args },
+                    decl: FunctionDeclaration { _name: ident.clone(), _r: r, args },
                     body: Some(body)
                 }));
 
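The executor hunks above cover two kinds of cleanup: the now-unused use super::tokenizer::Op import and the todo!() arms for the removed GlobalEqu/LazyGlobalEqu variants are deleted, and the FunctionDeclaration initializer is updated for the renamed _name/_r fields. The renames lean on rustc's dead_code lint skipping items whose names begin with an underscore, so a field that is stored but never read stops producing a "field is never read" warning. A standalone illustration of that rule (not lamm code):

    struct Declaration {
        used: String,
        _kept_for_later: String, // never read, but the leading underscore silences dead_code
    }

    fn main() {
        let d = Declaration {
            used: "double".to_string(),
            _kept_for_later: "Float".to_string(),
        };
        println!("{}", d.used);
    }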
src/lib.rs (19 changed lines)
@@ -3,21 +3,18 @@ mod tokenizer;
 mod parser;
 mod executor;
 
-pub use tokenizer::{Tokenizer, TokenizeError};
-pub use parser::{Parser, ParseError};
-pub use executor::{Executor, RuntimeError};
-
 use std::fmt::Display;
+use std::io::BufRead;
 
 #[derive(Clone, Debug)]
-pub(crate) enum Type {
+pub enum Type {
     Float,
     Int,
     Bool,
     String,
     Nil,
     Any,
-    Function(Box<Type>, Vec<Type>),
+    _Function(Box<Type>, Vec<Type>),
 }
 
 impl Display for Type {
@@ -29,7 +26,7 @@ impl Display for Type {
             Self::String => "String".into(),
             Self::Nil => "Nil".into(),
             Self::Any => "Any".into(),
-            Self::Function(r, _) => format!("Function -> {}", *r)
+            Self::_Function(r, _) => format!("Function -> {}", *r)
         })
     }
 }
@@ -70,7 +67,11 @@ impl Display for Value {
 
 #[derive(Clone, Debug)]
 pub(crate) struct FunctionDeclaration {
-    name: String,
-    r: Type,
+    _name: String,
+    _r: Type,
     args: Vec<(String, Type)>,
 }
+
+pub fn evaluate<R: BufRead>(r: R) -> impl Iterator<Item = Result<Value, executor::RuntimeError>> {
+    executor::Executor::new(parser::Parser::new(tokenizer::Tokenizer::new(r)))
+}
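In src/lib.rs the blanket re-exports of Tokenizer, Parser and Executor are replaced by one public entry point, pub fn evaluate; Type is widened from pub(crate) to pub, and the never-constructed Function variant and the write-only FunctionDeclaration fields get underscore names to silence dead_code. With the re-exports gone, the pipeline types elsewhere in the crate can drop to pub(crate): they remain usable from any module in the crate, but they are no longer part of the public API, so the compiler can again warn when one of them is genuinely unused. A small sketch of that visibility pattern, compiled as a library crate and not taken from lamm:

    mod engine {
        pub(crate) struct Engine;          // visible to this crate only

        pub(crate) fn start() -> Engine {  // crate-internal constructor
            Engine
        }
    }

    pub fn run() {
        let _engine = engine::start();     // fine: same crate
    }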
@@ -34,7 +34,7 @@ impl Display for ParseError {
 impl error::Error for ParseError {}
 
 #[derive(Clone, Debug)]
-pub enum ParseTree {
+pub(crate) enum ParseTree {
     // Mathematical Operators
     Add(Box<ParseTree>, Box<ParseTree>),
     Sub(Box<ParseTree>, Box<ParseTree>),
@@ -54,8 +54,6 @@ pub enum ParseTree {
     // Defining Objects
     Equ(String, Box<ParseTree>, Box<ParseTree>),
     LazyEqu(String, Box<ParseTree>, Box<ParseTree>),
-    GlobalEqu(String, Box<ParseTree>),
-    LazyGlobalEqu(String, Box<ParseTree>),
     FunctionDefinition(String, Vec<(String, Type)>, Type, Box<ParseTree>, Box<ParseTree>),
 
     // Functional Operations
@@ -129,7 +127,7 @@ impl ParseTree {
                 Box::new(ParseTree::parse(tokens, globals, locals)?),
                 Box::new(ParseTree::parse(tokens, globals, locals)?)
             )),
-            Op::Equ | Op::LazyEqu | Op::GlobalEqu | Op::LazyGlobalEqu => {
+            Op::Equ | Op::LazyEqu => {
                 let token = tokens.next()
                     .ok_or(ParseError::UnexpectedEndInput)?
                     .map_err(|e| ParseError::TokenizeError(e))?;
@@ -144,12 +142,6 @@ impl ParseTree {
                         Box::new(ParseTree::parse(tokens, globals, locals)?),
                         Box::new(ParseTree::parse(tokens, globals, locals)?)
                     )),
-                    Op::GlobalEqu => Ok(ParseTree::GlobalEqu(ident.clone(),
-                        Box::new(ParseTree::parse(tokens, globals, locals)?)
-                    )),
-                    Op::LazyGlobalEqu => Ok(ParseTree::LazyGlobalEqu(ident.clone(),
-                        Box::new(ParseTree::parse(tokens, globals, locals)?)
-                    )),
                     _ => panic!("Operator literally changed under your nose"),
                 }
             } else {
@@ -173,8 +165,8 @@ impl ParseTree {
                 let locals = locals.to_mut();
 
                 locals.insert(ident.clone(), FunctionDeclaration {
-                    name: ident.clone(),
-                    r: Type::Any,
+                    _name: ident.clone(),
+                    _r: Type::Any,
                     args: args.clone(),
                 });
 
@@ -240,7 +232,7 @@ impl ParseTree {
 }
 
 /// Parses input tokens and produces ParseTrees for an Executor
-pub struct Parser<I: Iterator<Item = Result<Token, TokenizeError>>> {
+pub(crate) struct Parser<I: Iterator<Item = Result<Token, TokenizeError>>> {
     tokens: I,
 
     // These are used to keep track of functions in the current context
@@ -258,11 +250,6 @@ impl<I: Iterator<Item = Result<Token, TokenizeError>>> Parser<I> {
             locals: HashMap::new()
         }
     }
-
-    pub fn globals(mut self, g: HashMap<String, FunctionDeclaration>) -> Self {
-        self.globals = g;
-        self
-    }
 }
 
 impl<I: Iterator<Item = Result<Token, TokenizeError>>> Iterator for Parser<I> {
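The parser hunks remove the global-assignment feature end to end: the GlobalEqu/LazyGlobalEqu ParseTree variants, their cases in the parse matches, and the unused globals() builder method, while ParseTree and Parser themselves become pub(crate). Deleting never-constructed variants is one way to clear "variant is never constructed" warnings; if the feature were expected back soon, an explicit allow on the variant would keep the code in place instead. A small illustration of that alternative, which is not what this commit does:

    #[derive(Debug)]
    enum Op {
        Equ,
        LazyEqu,
        #[allow(dead_code)]
        GlobalEqu, // not constructed anywhere yet, kept without a warning
    }

    fn main() {
        println!("{:?}", Op::Equ);
        println!("{:?}", Op::LazyEqu);
    }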
@@ -33,7 +33,7 @@ impl Display for TokenizeError {
 impl error::Error for TokenizeError {}
 
 #[derive(Debug, Clone)]
-pub enum Op {
+pub(crate) enum Op {
     Add,
     Sub,
     Mul,
@@ -42,8 +42,6 @@ pub enum Op {
     Equ,
     Mod,
     LazyEqu,
-    GlobalEqu,
-    LazyGlobalEqu,
     FunctionDeclare(usize),
     Compose,
     Id,
@@ -62,7 +60,7 @@ pub enum Op {
 }
 
 #[derive(Debug, Clone)]
-pub enum Token {
+pub(crate) enum Token {
     Identifier(String),
     Operator(Op),
     Constant(Value),
@@ -100,8 +98,6 @@ impl Token {
             "%" => Ok(Token::Operator(Op::Mod)),
             "=" => Ok(Token::Operator(Op::Equ)),
             "." => Ok(Token::Operator(Op::LazyEqu)),
-            "=>" => Ok(Token::Operator(Op::GlobalEqu)),
-            ".>" => Ok(Token::Operator(Op::LazyGlobalEqu)),
             "~" => Ok(Token::Operator(Op::Compose)),
             "," => Ok(Token::Operator(Op::Id)),
             "?" => Ok(Token::Operator(Op::If)),
@@ -149,7 +145,7 @@ impl Token {
 }
 
 /// Tokenize an input stream of source code for a Parser
-pub struct Tokenizer<R: BufRead> {
+pub(crate) struct Tokenizer<R: BufRead> {
     reader: R,
     tokens: VecDeque<Token>,
 }