Skip to content

Commit

Permalink
Remove TokenVector type
Browse files Browse the repository at this point in the history
  • Loading branch information
probablykasper committed Apr 21, 2021
1 parent c7ee968 commit 1abe436
Show file tree
Hide file tree
Showing 3 changed files with 19 additions and 21 deletions.
9 changes: 5 additions & 4 deletions src/lexer.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use std::str::FromStr;
use decimal::d128;
use crate::{Token, TokenVector};
use crate::Token;
use crate::Operator::{Caret, Divide, LeftParen, Minus, Modulo, Multiply, Plus, RightParen};
use crate::UnaryOperator::{Percent, Factorial};
use crate::TextOperator::{Of, To};
Expand All @@ -18,10 +18,11 @@ pub const fn is_alphabetic_extended(input: &char) -> bool {
}
}

/// Lex an input string and return a [`TokenVector`]
pub fn lex(input: &str, allow_trailing_operators: bool, default_degree: Unit) -> Result<TokenVector, String> {
/// Lex an input string and return [`Token`]s
pub fn lex(input: &str, allow_trailing_operators: bool, default_degree: Unit) -> Result<Vec<Token>, String> {

let mut input = input.replace(",", ""); // ignore commas

input = input.to_lowercase();

if allow_trailing_operators {
Expand All @@ -34,7 +35,7 @@ pub fn lex(input: &str, allow_trailing_operators: bool, default_degree: Unit) ->
}

let mut chars = input.chars().peekable();
let mut tokens: TokenVector = vec![];
let mut tokens: Vec<Token> = vec![];
let max_word_length = 30;

let mut left_paren_count = 0;
Expand Down
9 changes: 3 additions & 6 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,9 @@ use crate::units::Unit;

/// Units, and functions you can use with them
pub mod units;
/// Turns a string into a [`TokenVector`]
/// Turns a string into [`Token`]s
pub mod lexer;
/// Turns a [`TokenVector`] into an [`AstNode`](parser::AstNode)
/// Turns [`Token`]s into an [`AstNode`](parser::AstNode)
pub mod parser;
/// Turns an [`AstNode`](parser::AstNode) into a [`Number`]
pub mod evaluator;
Expand Down Expand Up @@ -123,7 +123,7 @@ pub enum NamedNumber {
}

#[derive(Clone, Debug)]
/// A constants like [`Pi`](Constant::Pi) or [`E`](Constant::E).
/// A constant like [`Pi`](Constant::Pi) or [`E`](Constant::E).
pub enum Constant {
Pi,
E,
Expand Down Expand Up @@ -191,9 +191,6 @@ pub enum Token {
Unit(units::Unit),
}

/// A vector of [`Token`]
pub type TokenVector = Vec<Token>;

/// Evaluates a string into a resulting [`Number`].
///
/// Example:
Expand Down
22 changes: 11 additions & 11 deletions src/parser.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
use crate::{Token, TokenVector};
use crate::Token;
use crate::Operator::{Caret, Divide, LeftParen, Minus, Modulo, Multiply, Plus, RightParen};
use crate::UnaryOperator::{Percent, Factorial};
use crate::TextOperator::{To, Of};
Expand All @@ -22,8 +22,8 @@ impl AstNode {
}
}

/// Parse [`TokenVector`] into an Abstract Syntax Tree ([`AstNode`])
pub fn parse(tokens: &TokenVector) -> Result<AstNode, String> {
/// Parse [`Token`]s into an Abstract Syntax Tree ([`AstNode`])
pub fn parse(tokens: &Vec<Token>) -> Result<AstNode, String> {
parse_level_1(tokens, 0).and_then(|(ast, next_pos)| if next_pos == tokens.len() {
Ok(ast)
} else {
Expand All @@ -33,7 +33,7 @@ pub fn parse(tokens: &TokenVector) -> Result<AstNode, String> {

// level 1 precedence (lowest): to, of
/// Parse [`To`](crate::TextOperator::To) and [`Of`](crate::TextOperator::Of)
pub fn parse_level_1(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize), String> {
pub fn parse_level_1(tokens: &Vec<Token>, pos: usize) -> Result<(AstNode, usize), String> {
// do higher precedences first, then come back down
let (mut node, mut pos) = parse_level_2(tokens, pos)?;
// now we loop through the next tokens
Expand Down Expand Up @@ -61,7 +61,7 @@ pub fn parse_level_1(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize

// level 2 precedence: +, -
/// Parse [`Plus`](crate::Operator::Plus) and [`Minus`](crate::Operator::Minus)
pub fn parse_level_2(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize), String> {
pub fn parse_level_2(tokens: &Vec<Token>, pos: usize) -> Result<(AstNode, usize), String> {
let (mut node, mut pos) = parse_level_3(tokens, pos)?;
loop {
let token = tokens.get(pos);
Expand All @@ -83,7 +83,7 @@ pub fn parse_level_2(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize

// level 3 precedence: *, /, modulo, implicative multiplication, foot-inch 6'4"
/// Parse [`Multiply`](crate::Operator::Multiply), [`Divide`](crate::Operator::Divide), [`Modulo`](crate::Operator::Modulo) and implicative multiplication (for example `2pi`)
pub fn parse_level_3(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize), String> {
pub fn parse_level_3(tokens: &Vec<Token>, pos: usize) -> Result<(AstNode, usize), String> {

// parse foot-inch syntax 6'4"
let token0 = tokens.get(pos);
Expand Down Expand Up @@ -135,7 +135,7 @@ pub fn parse_level_3(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize
// such will only end up here if they were unable to be parsed as part of
// other operators.
// Note that this match statement matches an AstNode token, but the
// matches nested inside check the TokenVector. That's why we for example
// matches nested inside check the `Token`s. That's why we for example
// match a FunctionIdentifier, and inside that, a RightParen.

// pi2, )2
Expand Down Expand Up @@ -215,7 +215,7 @@ pub fn parse_level_3(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize

// level 4 precedence: ^
/// Parse [`Caret`](crate::Operator::Caret)
pub fn parse_level_4(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize), String> {
pub fn parse_level_4(tokens: &Vec<Token>, pos: usize) -> Result<(AstNode, usize), String> {
let (mut node, mut pos) = parse_level_5(tokens, pos)?;
loop {
let token = tokens.get(pos);
Expand All @@ -237,7 +237,7 @@ pub fn parse_level_4(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize

// level 5 precedence: - (as in -5, but not 4-5)
/// Parse [`Negative`](Token::Negative)
pub fn parse_level_5(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize), String> {
pub fn parse_level_5(tokens: &Vec<Token>, pos: usize) -> Result<(AstNode, usize), String> {
// Here we parse the negative unary operator. If the current token
// is a minus, we wrap the right_node inside a Negative AstNode.
//
Expand All @@ -264,7 +264,7 @@ pub fn parse_level_5(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize

// level 6 precedence: !, percent, units attached to values
/// Parse [`Factorial`](crate::UnaryOperator::Factorial) and [`Percent`](crate::UnaryOperator::Percent)
pub fn parse_level_6(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize), String> {
pub fn parse_level_6(tokens: &Vec<Token>, pos: usize) -> Result<(AstNode, usize), String> {
let (mut node, mut pos) = parse_level_7(tokens, pos)?;
loop {
let token = tokens.get(pos);
Expand Down Expand Up @@ -300,7 +300,7 @@ pub fn parse_level_6(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize
/// [`Constant`](Token::Constant),
/// [`FunctionIdentifier`](Token::FunctionIdentifier),
/// [`Paren`](Token::Paren)
pub fn parse_level_7(tokens: &TokenVector, pos: usize) -> Result<(AstNode, usize), String> {
pub fn parse_level_7(tokens: &Vec<Token>, pos: usize) -> Result<(AstNode, usize), String> {
let token: &Token = tokens.get(pos).ok_or(format!("Unexpected end of input at {}", pos))?;
match token {
&Token::Number(_number) => {
Expand Down

0 comments on commit 1abe436

Please sign in to comment.