diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 9e59f5e..a42507f 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -185,7 +185,7 @@ impl std::iter::Iterator for Tokenizer {
             Ok(0) => None,
             Err(e) => Some(Err(TokenizeError::IO(e))),
             _ => {
-                let re = regex::Regex::new(r#"(\-?[a-zA-Z0-9\.'_]+)|[`~!@#\$%\^&\*\(\)\+-=\[\]\{\}\\|;:,<\.>/\?]+|("[^"]+")"#).expect("This wont fail promise :3");
+                let re = regex::Regex::new(r#"[a-zA-Z0-9\.'_]+|[`~!@#\$%\^&\*\(\)\+-=\[\]\{\}\\|;:,<\.>/\?]+|("[^"]+")"#).expect("This wont fail promise :3");
                 for token in re.find_iter(input.as_str()).map(|mat| mat.as_str()).map(Token::parse) {
                     match token {
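The change drops the optional leading `-` (and the surrounding capture group) from the word-token alternative, so a minus sign in front of a word or number is no longer glued onto it; in the full regex it instead falls through to the punctuation alternative. Below is a minimal sketch (not part of the patch) that compares just the old and new word-token alternatives on an illustrative input; the sample string and printed results are assumptions for demonstration only.

```rust
// Sketch: compare the old vs. new word-token alternative in isolation.
// Requires the `regex` crate (the same dependency the tokenizer already uses).
fn main() {
    // Old first alternative: optional leading '-' is part of the token.
    let old_re = regex::Regex::new(r"-?[a-zA-Z0-9\.'_]+").unwrap();
    // New first alternative: the token starts at the first word character.
    let new_re = regex::Regex::new(r"[a-zA-Z0-9\.'_]+").unwrap();

    let input = "-5 apples";
    let old_tokens: Vec<&str> = old_re.find_iter(input).map(|m| m.as_str()).collect();
    let new_tokens: Vec<&str> = new_re.find_iter(input).map(|m| m.as_str()).collect();

    println!("old: {:?}", old_tokens); // ["-5", "apples"]
    println!("new: {:?}", new_tokens); // ["5", "apples"]
    // In the full tokenizer regex, the stray '-' would then be matched by the
    // punctuation alternative rather than being attached to the number.
}
```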