Compare commits
12 Commits
c29f689252...main
| Author | SHA1 | Date |
|---|---|---|
|  | 995be33854 |  |
|  | c872cc9341 |  |
|  | db08c6d0b5 |  |
|  | b00e1fd0b4 |  |
|  | 48d2c009fb |  |
|  | 4c614c1937 |  |
|  | a1ebbcac03 |  |
|  | 5ee06de1ba |  |
|  | db6750c481 |  |
|  | 83ac160a42 |  |
|  | c5dcf88f6b |  |
|  | 7a76943120 |  |
.github/workflows/notify-bot-updated.yml (vendored): 19 changed lines
@@ -1,19 +0,0 @@
-name: Trigger Bot Build
-
-on:
-  push:
-    branches:
-      - dev
-
-jobs:
-  trigger-bot-build:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Send repository dispatch
-        env:
-          GITHUB_TOKEN: ${{ secrets.LAMM_TOKEN }}
-        run: |
-          curl -X POST -H "Accept: application/vnd.github.everest-preview+json" \
-            -H "Authorization: token $GITHUB_TOKEN" \
-            https://api.github.com/repos/minneelyyyy/bot/dispatches \
-            -d '{"event_type": "lamm-updated"}'
Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "lamm"
-version = "0.3.0"
+version = "0.4.0"
 edition = "2021"
 license = "MIT"
 description = "a simple, functional paradigm programming language which uses Polish notation"
README.md

@@ -64,12 +64,12 @@ You can introduce a variable to global scope using the `export` builtin function
 ```
 # A very useful constant
 = pi 3.1415926
-export ["pi"]
+export pi
 
 # Some more useful constants
 = e 2.71828
 = phi 1.6180339887
-export ["e" "phi"]
+export (e phi)
 ```
 
 ## Functions
TODO.md: 6 changed lines
@@ -9,14 +9,10 @@
- tuples
- `extern "C"` functions
- modules (`import` function)
- a standard library
- structs
- data types (need an IO object for stateful functions to return)
- unpacking type parameters (`(x:xs)` in Haskell for example)
- type variables in function parameters and data types
- automatic Int to Float casting if a parameter expects a float
- `[x..y]` array generators
- `(+)` = `;.x y + x y`

## Maybe Add

- `/` for float division and `//` for integer division
src/error.rs: 10 changed lines
@@ -54,17 +54,21 @@ impl fmt::Display for Error {
 
         if let Some(code) = &self.code {
             let mut lines = code.lines();
-            let linect = match lines.nth(*line) {
+            let linect = match lines.nth(*line - 1) {
                 Some(l) => l,
                 None => return Ok(()), // there should probably be an error if the line number is somehow out of range
             };
 
-            write!(f, "\n| --> {filename}:{line}:{}\n| {linect}\n", loc.start)?;
+            let numspaces = " ".repeat((*line as f64).log10() as usize + 1);
+
+            write!(f, "\n --> {filename}:{line}:{}\n", loc.start)?;
+            write!(f, "{numspaces} |\n")?;
+            write!(f, "{line} | {linect}\n")?;
 
             let spaces = " ".repeat(loc.start);
             let pointers: String = loc.clone().map(|_| '^').collect();
 
-            write!(f, "|{spaces}{pointers}")?;
+            write!(f, "{numspaces} |{spaces}{pointers}")?;
 
             if let Some(note) = &self.note {
                 write!(f, " {note}")?;
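For context, the rewritten `Display` impl above now prints a rustc-style diagnostic: a ` --> file:line:col` header, a numbered gutter, the offending source line, and a row of `^` pointers followed by the optional note. A rough sketch of the resulting output shape (the file name, line number, source text, and note are made up for illustration; caret alignment follows the `write!` calls above only approximately):

```
 --> <stdin>:3:5
  |
3 | = pi 3.1415926
  |      ^^^^^^^^^ expected a value after this identifier
```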
src/executor.rs

@@ -20,7 +20,7 @@ impl Executor {
         }
     }
 
-    pub(crate) fn values<I>(mut self, iter: I) -> impl Iterator<Item = Result<Value, Error>>
+    pub(crate) fn _values<I>(mut self, iter: I) -> impl Iterator<Item = Result<Value, Error>>
     where
         I: Iterator<Item = Result<ParseTree, Error>>
     {

@@ -46,6 +46,11 @@
         self
     }
 
+    pub(crate) fn add_local_mut(&mut self, k: String, v: Arc<Mutex<Object>>) -> &mut Self {
+        self.locals.insert(k, v);
+        self
+    }
+
     fn _get_object(&self, ident: &String) -> Result<&Arc<Mutex<Object>>, Error> {
         self.locals.get(ident).or(self.globals.get(ident))
             .ok_or(Error::new(format!("undefined identifier {}", ident.clone())))
@@ -314,10 +319,12 @@
                     let value = self.exec(*body)?;
                     let g = self.globals.clone();
 
-                    Executor::new()
-                        .locals(self.locals.clone())
-                        .add_local(ident, Arc::new(Mutex::new(Object::value(value, g, self.locals.to_owned()))))
-                        .exec(*scope)
+                    let r = self.add_local_mut(ident.clone(), Arc::new(Mutex::new(Object::value(value, g, self.locals.to_owned()))))
+                        .exec(*scope);
+
+                    self.locals.remove(&ident);
+
+                    r
                 }
             },
             ParseTree::LazyEqu(ident, body, scope) => {
@@ -325,22 +332,27 @@
                         Err(Error::new(format!("attempt to override value of variable {ident}")))
                     } else {
                         let g = self.globals.clone();
-                        Executor::new()
-                            .locals(self.locals.clone())
-                            .add_local(ident, Arc::new(Mutex::new(Object::variable(*body, g, self.locals.to_owned()))))
-                            .exec(*scope)
+                        let r = self.add_local_mut(ident.clone(), Arc::new(Mutex::new(Object::variable(*body, g, self.locals.to_owned()))))
+                            .exec(*scope);
+
+                        self.locals.remove(&ident);
+
+                        r
                     }
                 },
                 ParseTree::FunctionDefinition(func, scope) => {
+                    let name = func.name().unwrap().to_string();
                     let g = self.globals.clone();
-                    Executor::new()
-                        .locals(self.locals.clone())
-                        .add_local(func.name().unwrap().to_string(),
+                    let r = self.add_local_mut(name.clone(),
                         Arc::new(Mutex::new(Object::function(
                             func
                                 .globals(g)
                                 .locals(self.locals.clone()), HashMap::new(), HashMap::new()))))
-                        .exec(*scope)
+                        .exec(*scope);
+
+                    self.locals.remove(&name);
+
+                    r
                 },
                 ParseTree::FunctionCall(ident, args) => {
                     let obj = self.get_object_mut(&ident)?;
src/lib.rs: 31 changed lines
@@ -196,7 +196,7 @@ impl<R: BufRead> CodeIter<R> {
             reader,
             code: String::new(),
             pos: 0,
-            line: 0,
+            line: 1,
             column: 0,
         }
     }
@@ -246,7 +246,7 @@ impl<R: BufRead> Iterator for CodeIter<R> {
                 self.column = 0;
                 self.pos += 1;
 
-                None
+                Some('\n')
             },
             c => {
                 self.column += 1;
@@ -305,27 +305,18 @@ impl<R: BufRead> Runtime<R> {
 
         self
     }
 
     pub fn add_globals<Globals: IntoIterator<Item = (String, Value)>>(self, globals: Globals) -> Self {
         globals.into_iter().fold(self, |acc, (key, value)| acc.add_global(&key, value))
     }
 
-impl<R: BufRead> Iterator for Runtime<R> {
-    type Item = Result<Value, Error>;
-
-    fn next(&mut self) -> Option<Self::Item> {
+    pub fn values(&self) -> impl Iterator<Item = Result<Value, Error>> + use<'_, R> {
         let tokenizer = Tokenizer::new(self.reader.clone());
+        let parser = Parser::new().add_globals(self.global_types.clone());
 
-        let tree = Parser::new()
-            .add_globals(self.global_types.clone())
-            .parse(&mut tokenizer.peekable());
-
-        let tree = match tree.map_err(|e| e
-            .code(self.code())
-            .file(self.filename.clone()))
-        {
-            Ok(Some(tree)) => tree,
-            Ok(None) => return None,
-            Err(e) => return Some(Err(e))
-        };
-
-        Some(Executor::new().add_globals(self.globals.clone()).exec(tree))
+        Executor::new()
+            .add_globals(self.globals.clone())
+            ._values(parser.trees(tokenizer.peekable()))
+            .map(|r| r.map_err(|e| e.code(self.code()).file(self.filename.clone())))
     }
 }
src/main.rs

@@ -3,7 +3,7 @@ use std::io::{self, BufReader};
 fn main() {
     let runtime = lamm::Runtime::new(BufReader::new(io::stdin()), "<stdin>");
 
-    for value in runtime {
+    for value in runtime.values() {
         match value {
             Ok(v) => println!("=> {v}"),
             Err(e) => eprintln!("error: {e}"),
src/parser.rs: 101 changed lines
@@ -1,4 +1,3 @@
-
 use super::{Value, Type, Function, FunctionType};
 use super::tokenizer::{Token, TokenType, Op};
 use super::error::Error;
@@ -67,20 +66,25 @@ impl Parser {
         items.into_iter().fold(self, |acc, (k, v)| acc.add_global(k, v))
     }
 
     pub(crate) fn locals(mut self, locals: HashMap<String, Type>) -> Self {
         self.locals = locals;
         self
     }
 
-    pub(crate) fn add_local(mut self, k: String, v: Type) -> Self {
+    pub(crate) fn _add_local(mut self, k: String, v: Type) -> Self {
         self.locals.insert(k, v);
         self
     }
 
-    pub(crate) fn _add_locals<Items: Iterator<Item = (String, Type)>>(mut self, items: Items) -> Self {
-        items.for_each(|(name, t)| {
+    pub(crate) fn _add_locals<Items: Iterator<Item = (String, Type)>>(self, items: Items) -> Self {
+        items.fold(self, |acc, (key, value)| acc._add_local(key, value))
+    }
+
+    fn add_local_mut(&mut self, k: String, v: Type) -> &mut Self {
+        self.locals.insert(k, v);
+        self
+    }
+
+    fn add_locals_mut<Items: IntoIterator<Item = (String, Type)>>(&mut self, items: Items) -> &mut Self {
+        for (name, t) in items {
             self.locals.insert(name, t);
-        });
+        }
 
         self
     }
@@ -229,9 +233,7 @@
                     .into_iter()
                     .peekable();
 
-                let trees: Vec<ParseTree> = Parser::new()
-                    .locals(self.locals.to_owned())
-                    .trees(array_tokens)
+                let trees: Vec<ParseTree> = self.clone().trees(array_tokens)
                     .collect::<Result<_, Error>>()?;
 
                 let tree = trees.into_iter().fold(
@@ -268,9 +270,7 @@
                     .into_iter()
                     .peekable();
 
-                let trees: Vec<ParseTree> = Parser::new()
-                    .locals(self.locals.to_owned())
-                    .trees(array_tokens)
+                let trees: Vec<ParseTree> = self.clone().trees(array_tokens)
                     .collect::<Result<_, Error>>()?;
 
                 let tree = trees.into_iter().fold(
@@ -287,21 +287,24 @@
                         .note("expected an identifier after this token".into()))??;
 
                 if let TokenType::Identifier(ident) = token.token() {
-                    let body = Box::new(self.parse(tokens)?.ok_or(Error::new(format!("the variable `{ident}` has no value"))
+                    let body = self.parse(tokens)?.ok_or(Error::new(format!("the variable `{ident}` has no value"))
                         .location(token.line, token.location.clone())
-                        .note("expected a value after this identifier".into()))?);
+                        .note("expected a value after this identifier".into()))?;
 
-                    let scope = Parser::new()
-                        .locals(self.locals.clone())
-                        .add_local(ident.clone(), Type::Any)
+                    let scope = self.add_local_mut(ident.clone(), Type::Any)
                         .parse(tokens)?
                         .ok_or(Error::new("variable declaration requires a scope defined after it".into())
                             .location(token.line, token.location)
                             .note(format!("this variable {ident} has no scope")))?;
 
+                    // temporary fix: just remove the identifier
+                    // ignore errors removing, in the case that the symbol was already exported, it won't be present in locals
+                    // this comes down to a basic architectural error. globals need to stick to the parser while locals need to be scoped.
+                    self.locals.remove(&ident);
+
                     Ok(Some(ParseTree::Equ(
                         ident.clone(),
-                        body,
+                        Box::new(body),
                         Box::new(scope))
                     ))
                 } else {
@@ -310,7 +313,7 @@
             },
             Op::LazyEqu => {
                 let token = tokens.next()
-                    .ok_or(Error::new("no identifier given for = expression".into())
+                    .ok_or(Error::new("no identifier given for . expression".into())
                         .location(token.line, token.location)
                         .note("expected an identifier after this token".into()))??;
@@ -319,14 +322,16 @@
                     .location(token.line, token.location.clone())
                     .note("expected a value after this identifier".into()))?);
 
-                let scope = Parser::new()
-                    .locals(self.locals.clone())
-                    .add_local(ident.clone(), Type::Any)
+                let scope = self.add_local_mut(ident.clone(), Type::Any)
                     .parse(tokens)?
                     .ok_or(Error::new("variable declaration requires a scope defined after it".into())
                         .location(token.line, token.location)
                         .note(format!("this variable {ident} has no scope")))?;
 
+                // temporary fix: just remove the identifier
+                // ignore errors removing, in the case that the symbol was already exported, it won't be present in locals
+                self.locals.remove(&ident);
+
                 Ok(Some(ParseTree::LazyEqu(
                     ident.clone(),
                     body,
@@ -339,14 +344,14 @@
             Op::FunctionDefine(arg_count) => {
                 let f = self.parse_function_definition(tokens, arg_count)?;
 
-                let scope = Parser::new()
-                    .locals(self.locals.clone())
-                    .add_local(f.name().unwrap().to_string(), Type::Function(f.get_type()))
+                let scope = self.add_local_mut(f.name().unwrap().to_string(), Type::Function(f.get_type()))
                     .parse(tokens)?
                     .ok_or(Error::new("function declaration requires a scope defined after it".into())
                         .location(token.line, token.location)
                         .note(format!("this function {} has no scope", f.name().unwrap())))?;
 
+                self.locals.remove(f.name().unwrap());
+
                 Ok(Some(ParseTree::FunctionDefinition( f.clone(), Box::new(scope))))
             },
             Op::LambdaDefine(arg_count) => Ok(Some(ParseTree::LambdaDefinition(self.parse_lambda_definition(tokens, arg_count)?))),
@@ -379,7 +384,37 @@
                 Ok(Some(ParseTree::IfElse(
                     Box::new(cond), Box::new(truebranch), Box::new(falsebranch))))
             },
-            Op::Export => todo!(),
+            Op::Export => {
+                let token = tokens.next()
+                    .ok_or(Error::new("export expects one argument of [String], but found nothing".into())
+                        .location(token.line, token.location.clone()))??;
+
+                let names = match token.token() {
+                    TokenType::Identifier(ident) => vec![ident],
+                    TokenType::Operator(Op::OpenStatement) => {
+                        tokens
+                            .take_while(|token| !matches!(token.clone().map(|token| token.token()), Ok(TokenType::Operator(Op::CloseStatement))))
+                            .map(|token| token.map(|token| match token.token() {
+                                TokenType::Identifier(ident) => Ok(ident),
+                                _ => Err(Error::new(format!("expected an identifier")).location(token.line, token.location))
+                            })?)
+                            .collect::<Result<_, Error>>()?
+                    }
+                    _ => return Err(Error::new("export expects one or more identifiers".into()).location(token.line, token.location)),
+                };
+
+                for name in &names {
+                    let (name, t) = self.locals.remove_entry(name)
+                        .ok_or(
+                            Error::new(format!("attempt to export {name}, which is not in local scope"))
+                                .location(token.line, token.location.clone())
+                        )?;
+
+                    self.globals.insert(name, t);
+                }
+
+                Ok(Some(ParseTree::Export(names)))
+            },
             op => self.parse_operator(tokens, op).map(|x| Some(x)),
         },
         _ => Err(Error::new(format!("the token {} was unexpected", token.lexeme)).location(token.line, token.location)),
@@ -396,8 +431,7 @@
         }
 
         Ok(Function::lambda(t, args, Box::new(
-            Parser::new()
-                .locals(locals).parse(tokens)?.ok_or(Error::new("lambda requires a body".into()))?)))
+            self.clone().add_locals_mut(locals).parse(tokens)?.ok_or(Error::new("lambda requires a body".into()))?)))
     }
 
     fn parse_function_definition<I: Iterator<Item = Result<Token, Error>>>(&mut self, tokens: &mut Peekable<I>, arg_count: usize) -> Result<Function, Error> {
@@ -413,8 +447,7 @@
             locals.insert(name.clone(), Type::Function(t.clone()));
 
         Ok(Function::named(&name, t, args, Box::new(
-            Parser::new()
-                .locals(locals).parse(tokens)?.ok_or(Error::new("function requires a body".into()))?)))
+            self.clone().add_locals_mut(locals).parse(tokens)?.ok_or(Error::new("function requires a body".into()))?)))
     }
 
     fn parse_function_declaration<I: Iterator<Item = Result<Token, Error>>>(
src/tokenizer.rs

@@ -1,4 +1,4 @@
-use std::collections::{VecDeque, HashMap};
+use std::collections::HashMap;
 use std::sync::{Arc, Mutex};
 
 use crate::{CodeIter, Type};
@@ -137,14 +137,12 @@
 #[derive(Clone)]
 pub(crate) struct Tokenizer<R: BufRead> {
     reader: Arc<Mutex<CodeIter<R>>>,
-    tokens: VecDeque<Token>,
 }
 
 impl<R: BufRead> Tokenizer<R> {
     pub fn new(reader: Arc<Mutex<CodeIter<R>>>) -> Self {
         Self {
             reader,
-            tokens: VecDeque::new(),
         }
     }
 
@@ -181,7 +179,7 @@
     }
 
     /// Tokenizes more input and adds them to the internal queue
-    fn tokenize(&mut self) -> Result<(), Error> {
+    fn tokenize(&mut self) -> Result<Option<Token>, Error> {
         let operators: HashMap<&'static str, Op> = HashMap::from([
             ("+", Op::Add),
             ("-", Op::Sub),
@@ -220,7 +218,7 @@
         let c = if let Some(c) = self.next_char() {
             c
         } else {
-            return Ok(());
+            return Ok(None);
         };
 
         if c.is_alphanumeric() {
@@ -232,9 +230,7 @@
 
             let (line, column) = self.getpos();
 
-            self.tokens.push_back(Token::new(TokenType::parse(&token)
-                .map_err(|e| e.location(line, column - token.len() + 1..column + 1))?, token.clone(), line, column - token.len() + 1));
-            self.tokenize()
+            Ok(Some(Token::new(TokenType::parse(&token).map_err(|e| e.location(line, column - token.len() + 1..column + 1))?, token.clone(), line, column - token.len() + 1)))
         } else if c == '#' {
             while self.next_char_if(|&c| c != '\n').is_some() {}
             self.tokenize()
@@ -244,10 +240,15 @@
 
             while let Some(c) = self.next_char() {
                 match c {
-                    '"' => break,
+                    '"' => {
+                        let (line, col) = self.getpos();
+
+                        return Ok(Some(Token::new(TokenType::Constant(
+                            Value::String(token.clone())), token, line, col)));
+                    }
                     '\n' => return Err(
                         Error::new("Unclosed string literal".into())
-                            .location(line, col..self.getpos().1)
+                            .location(line, col..col+token.len()+1)
                             .note("newlines are not allowed in string literals (try \\n)".into())),
                     '\\' => match self.next_char() {
                         Some('\\') => token.push('\\'),
@@ -258,20 +259,16 @@
                         Some(c) => token.push(c),
                         None => return Err(
                             Error::new("Unclosed string literal".into())
-                                .location(line, col..self.getpos().1)
+                                .location(line, col..token.len()+1)
                                 .note("end of file found before \"".into())),
                     }
                     _ => token.push(c),
                 }
             };
         }
 
         let (line, col) = self.getpos();
 
-            self.tokens.push_back(
-                Token::new(TokenType::Constant(
-                    Value::String(token.clone())), token, line, col));
-
-            self.tokenize()
+        Err(Error::new("Unclosed string literal".into())
+            .location(line, col..self.getpos().1+1)
+            .note("end of file found before \"".into()))
         } else if operators.keys().any(|x| x.starts_with(c)) {
             let mut token = String::from(c);
 
@@ -309,9 +306,7 @@
 
                     let token = Token::new(t, token, line, col);
 
-                    self.tokens.push_back(token);
-
-                    break;
+                    return Ok(Some(token));
                 } else {
                     let next = match self.next_char_if(is_expected) {
                         Some(c) => c,
@@ -345,8 +340,7 @@
 
                         let token = Token::new(t, token, line, col);
 
-                        self.tokens.push_back(token);
-                        break;
+                        return Ok(Some(token))
                     }
                 };
 
@@ -354,16 +348,13 @@
                 }
             }
         }
 
-            self.tokenize()
         } else if c.is_whitespace() {
             self.tokenize()
         } else {
             let (line, col) = self.getpos();
 
-            return Err(
-                Error::new(format!("an unidentified character {c} was found"))
-                    .location(line, col - 1..col));
+            Err(Error::new(format!("an unidentified character {c} was found"))
+                .location(line, col..col+1))
         }
     }
 }
@@ -372,32 +363,6 @@ impl<R: BufRead> Iterator for Tokenizer<R> {
     type Item = Result<Token, Error>;
 
     fn next(&mut self) -> Option<Self::Item> {
-        if let Some(token) = self.tokens.pop_front() {
-            return Some(Ok(token));
-        } else {
-            match self.tokenize() {
-                Ok(_) => (),
-                Err(e) => return Some(Err(e)),
-            };
-
-            self.next()
-        }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use std::io::Cursor;
-
-    #[test]
-    fn a() {
-        let program = ": f a * 12 a f 12";
-
-        let tokenizer = Tokenizer::new(Arc::new(Mutex::new(CodeIter::new(Cursor::new(program)))));
-
-        let t: Vec<_> = tokenizer.collect();
-
-        println!("{t:#?}");
+        self.tokenize().transpose()
     }
 }
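The new `Iterator::next` above leans on `Result::transpose` from the standard library to flip `tokenize()`'s `Result<Option<Token>, Error>` into the `Option<Result<Token, Error>>` an iterator must yield. A minimal standalone sketch of that conversion, using plain `i32`/`String` as stand-ins for the crate's `Token`/`Error` types:

```rust
// Illustration of Result::transpose, the call used by the new Tokenizer::next.
// The i32/String types below are stand-ins, not the crate's own Token/Error.
fn main() {
    let produced: Result<Option<i32>, String> = Ok(Some(42)); // a token was read
    let finished: Result<Option<i32>, String> = Ok(None);     // end of input
    let failed: Result<Option<i32>, String> = Err("unclosed string literal".into());

    assert_eq!(produced.transpose(), Some(Ok(42)));           // yielded to the caller
    assert_eq!(finished.transpose(), None);                   // iteration stops here
    assert_eq!(failed.transpose(), Some(Err("unclosed string literal".into())));
}
```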