UPDATE: Dump of initial files

This commit is contained in:
Nicolás Hatcher
2023-11-18 21:26:18 +01:00
commit c5b8efd83d
279 changed files with 42654 additions and 0 deletions

View File

@@ -0,0 +1,6 @@
// Lexer test modules, one per area under test (see the sibling files).
mod test_common;
mod test_language;
mod test_locale;
mod test_ranges;
mod test_tables;
mod test_util;

View File

@@ -0,0 +1,508 @@
#![allow(clippy::unwrap_used)]
use crate::language::get_language;
use crate::locale::get_locale;
use crate::expressions::{
lexer::{Lexer, LexerMode},
token::TokenType::*,
token::{Error, OpSum},
};
/// Builds a lexer for `formula` using the English locale and language.
/// `a1_mode` selects A1 reference style; otherwise R1C1 is used.
fn new_lexer(formula: &str, a1_mode: bool) -> Lexer {
    let mode = if a1_mode {
        LexerMode::A1
    } else {
        LexerMode::R1C1
    };
    Lexer::new(
        formula,
        mode,
        get_locale("en").unwrap(),
        get_language("en").unwrap(),
    )
}
// --- Number literal tokenization ---

#[test]
fn test_number_zero() {
    let mut lexer = new_lexer("0", true);
    assert_eq!(lexer.next_token(), Number(0.0));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_number_integer() {
    let mut lexer = new_lexer("42", true);
    assert_eq!(lexer.next_token(), Number(42.0));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_number_pi() {
    // FIX: the fixture previously read "3.415", which contradicted the test
    // name; use the actual leading digits of pi.
    let mut lexer = new_lexer("3.1415", true);
    assert_eq!(lexer.next_token(), Number(3.1415));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_number_less_than_one() {
    // A leading decimal point with no integer part is a valid number.
    let mut lexer = new_lexer(".1415", true);
    assert_eq!(lexer.next_token(), Number(0.1415));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_number_less_than_one_bis() {
    let mut lexer = new_lexer("0.1415", true);
    assert_eq!(lexer.next_token(), Number(0.1415));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_number_scientific() {
    let mut lexer = new_lexer("1.1415e12", true);
    assert_eq!(lexer.next_token(), Number(1.1415e12));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_number_scientific_1() {
    // Negative exponents are supported.
    let mut lexer = new_lexer("2.4e-12", true);
    assert_eq!(lexer.next_token(), Number(2.4e-12));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_number_scientific_1b() {
    // Upper-case exponent marker is accepted as well.
    let mut lexer = new_lexer("2.4E-12", true);
    assert_eq!(lexer.next_token(), Number(2.4e-12));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_not_a_number() {
    // Two consecutive dots are neither a number nor an identifier.
    let mut lexer = new_lexer("..", true);
    assert!(matches!(lexer.next_token(), Illegal(_)));
    assert_eq!(lexer.next_token(), EOF);
}
// --- String and boolean literal tokenization ---

#[test]
fn test_string() {
    let mut lexer = new_lexer("\"Hello World!\"", true);
    assert_eq!(lexer.next_token(), String("Hello World!".to_string()));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_string_unicode() {
    // Non-ASCII content inside a string literal is preserved verbatim.
    let mut lexer = new_lexer("\"你好,世界!\"", true);
    assert_eq!(lexer.next_token(), String("你好,世界!".to_string()));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_boolean() {
    let mut lexer = new_lexer("FALSE", true);
    assert_eq!(lexer.next_token(), Boolean(false));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_boolean_true() {
    // Mixed-case input is also recognized as a boolean literal.
    let mut lexer = new_lexer("True", true);
    assert_eq!(lexer.next_token(), Boolean(true));
    assert_eq!(lexer.next_token(), EOF);
}
// --- Cell reference tokenization (A1 mode) ---

#[test]
fn test_reference() {
    let mut lexer = new_lexer("A1", true);
    let expected = Reference {
        sheet: None,
        column: 1,
        row: 1,
        absolute_column: false,
        absolute_row: false,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_reference_absolute() {
    // '$' pins both the column and the row.
    let mut lexer = new_lexer("$A$1", true);
    let expected = Reference {
        sheet: None,
        column: 1,
        row: 1,
        absolute_column: true,
        absolute_row: true,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_reference_absolute_1() {
    // Only the row is pinned here.
    let mut lexer = new_lexer("AB$12", true);
    let expected = Reference {
        sheet: None,
        column: 28,
        row: 12,
        absolute_column: false,
        absolute_row: true,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_reference_absolute_2() {
    // Only the column is pinned here.
    let mut lexer = new_lexer("$CC234", true);
    let expected = Reference {
        sheet: None,
        column: 81,
        row: 234,
        absolute_column: true,
        absolute_row: false,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_reference_sheet() {
    let mut lexer = new_lexer("Sheet1!C34", true);
    let expected = Reference {
        sheet: Some("Sheet1".to_string()),
        column: 3,
        row: 34,
        absolute_column: false,
        absolute_row: false,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_reference_sheet_unicode() {
    // Note that this also tests a '!' inside the quoted sheet name.
    let mut lexer = new_lexer("'A € world!'!C34", true);
    let expected = Reference {
        sheet: Some("A € world!".to_string()),
        column: 3,
        row: 34,
        absolute_column: false,
        absolute_row: false,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_reference_sheet_unicode_absolute() {
    let mut lexer = new_lexer("'A €'!$C$34", true);
    let expected = Reference {
        sheet: Some("A €".to_string()),
        column: 3,
        row: 34,
        absolute_column: true,
        absolute_row: true,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_unmatched_quote() {
    // The sheet-name quote is never closed.
    let mut lexer = new_lexer("'A €!$C$34", true);
    assert!(matches!(lexer.next_token(), Illegal(_)));
    assert_eq!(lexer.next_token(), EOF);
}
// --- Addition over numbers and references ---

#[test]
fn test_sum() {
    let mut lexer = new_lexer("2.4+3.415", true);
    assert_eq!(lexer.next_token(), Number(2.4));
    assert_eq!(lexer.next_token(), Addition(OpSum::Add));
    assert_eq!(lexer.next_token(), Number(3.415));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_sum_1() {
    // Whitespace around '+' and a quoted sheet name on the right-hand side.
    let mut lexer = new_lexer("A2 + 'First Sheet'!$B$3", true);
    let left = Reference {
        sheet: None,
        column: 1,
        row: 2,
        absolute_column: false,
        absolute_row: false,
    };
    assert_eq!(lexer.next_token(), left);
    assert_eq!(lexer.next_token(), Addition(OpSum::Add));
    let right = Reference {
        sheet: Some("First Sheet".to_string()),
        column: 2,
        row: 3,
        absolute_column: true,
        absolute_row: true,
    };
    assert_eq!(lexer.next_token(), right);
    assert_eq!(lexer.next_token(), EOF);
}
// --- Error literal tokenization ---

#[test]
fn test_error_value() {
    let mut lexer = new_lexer("#VALUE!", true);
    assert_eq!(lexer.next_token(), Error(Error::VALUE));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_error_error() {
    let mut lexer = new_lexer("#ERROR!", true);
    assert_eq!(lexer.next_token(), Error(Error::ERROR));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_error_div() {
    let mut lexer = new_lexer("#DIV/0!", true);
    assert_eq!(lexer.next_token(), Error(Error::DIV));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_error_na() {
    let mut lexer = new_lexer("#N/A", true);
    assert_eq!(lexer.next_token(), Error(Error::NA));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_error_name() {
    let mut lexer = new_lexer("#NAME?", true);
    assert_eq!(lexer.next_token(), Error(Error::NAME));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_error_num() {
    let mut lexer = new_lexer("#NUM!", true);
    assert_eq!(lexer.next_token(), Error(Error::NUM));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_error_calc() {
    let mut lexer = new_lexer("#CALC!", true);
    assert_eq!(lexer.next_token(), Error(Error::CALC));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_error_null() {
    let mut lexer = new_lexer("#NULL!", true);
    assert_eq!(lexer.next_token(), Error(Error::NULL));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_error_spill() {
    let mut lexer = new_lexer("#SPILL!", true);
    assert_eq!(lexer.next_token(), Error(Error::SPILL));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_error_circ() {
    let mut lexer = new_lexer("#CIRC!", true);
    assert_eq!(lexer.next_token(), Error(Error::CIRC));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_error_invalid() {
    // A misspelled error literal is rejected.
    let mut lexer = new_lexer("#VALU!", true);
    assert!(matches!(lexer.next_token(), Illegal(_)));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_add_errors() {
    // Error literals can appear as operands of an expression.
    let mut lexer = new_lexer("#DIV/0!+#NUM!", true);
    assert_eq!(lexer.next_token(), Error(Error::DIV));
    assert_eq!(lexer.next_token(), Addition(OpSum::Add));
    assert_eq!(lexer.next_token(), Error(Error::NUM));
    assert_eq!(lexer.next_token(), EOF);
}
// --- Identifiers vs. references; R1C1 mode basics ---

#[test]
fn test_variable_name() {
    // A plain name that is not a valid cell reference lexes as an identifier.
    let mut lexer = new_lexer("MyVar", true);
    assert_eq!(lexer.next_token(), Ident("MyVar".to_string()));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_last_reference() {
    // XFD1048576 is the bottom-right cell of the grid.
    let mut lexer = new_lexer("XFD1048576", true);
    let expected = Reference {
        sheet: None,
        column: 16384,
        row: 1048576,
        absolute_column: false,
        absolute_row: false,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_not_a_reference() {
    // One column past XFD: no longer a reference, just an identifier.
    let mut lexer = new_lexer("XFE10", true);
    assert_eq!(lexer.next_token(), Ident("XFE10".to_string()));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_reference_r1c1() {
    let mut lexer = new_lexer("R1C1", false);
    let expected = Reference {
        sheet: None,
        column: 1,
        row: 1,
        absolute_column: true,
        absolute_row: true,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_reference_r1c1_true() {
    // NOTE: This is what google docs does.
    // Excel will not let you enter this formula.
    // Online Excel will let you and will mark the cell as in Error
    let mut lexer = new_lexer("R1C1", true);
    assert!(matches!(lexer.next_token(), Illegal(_)));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_name_r1c1p() {
    // A trailing letter makes it an identifier, not an R1C1 reference.
    let mut lexer = new_lexer("R1C1P", false);
    assert_eq!(lexer.next_token(), Ident("R1C1P".to_string()));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_name_wrong_ref() {
    // A sheet prefix must be followed by a valid reference.
    let mut lexer = new_lexer("Sheet1!2", false);
    assert!(matches!(lexer.next_token(), Illegal(_)));
    assert_eq!(lexer.next_token(), EOF);
}
// --- Sheet-qualified R1C1 references and identifier edge cases ---

#[test]
fn test_reference_1() {
    // Relative R1C1 offsets are written in square brackets.
    let mut lexer = new_lexer("Sheet1!R[1]C[2]", false);
    let expected = Reference {
        sheet: Some("Sheet1".to_string()),
        column: 2,
        row: 1,
        absolute_column: false,
        absolute_row: false,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_reference_quotes() {
    let mut lexer = new_lexer("'Sheet 1'!R[1]C[2]", false);
    let expected = Reference {
        sheet: Some("Sheet 1".to_string()),
        column: 2,
        row: 1,
        absolute_column: false,
        absolute_row: false,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_reference_escape_quotes() {
    // A doubled single quote inside a quoted sheet name escapes the quote.
    let mut lexer = new_lexer("'Sheet ''one'' 1'!R[1]C[2]", false);
    let expected = Reference {
        sheet: Some("Sheet 'one' 1".to_string()),
        column: 2,
        row: 1,
        absolute_column: false,
        absolute_row: false,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_reference_unfinished_quotes() {
    let mut lexer = new_lexer("'Sheet 1!R[1]C[2]", false);
    assert!(matches!(lexer.next_token(), Illegal(_)));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_round_function() {
    // Function names lex as plain identifiers.
    let mut lexer = new_lexer("ROUND", false);
    assert_eq!(lexer.next_token(), Ident("ROUND".to_string()));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_ident_with_underscore() {
    let mut lexer = new_lexer("_IDENT", false);
    assert_eq!(lexer.next_token(), Ident("_IDENT".to_string()));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_ident_with_period() {
    let mut lexer = new_lexer("IDENT.IFIER", false);
    assert_eq!(lexer.next_token(), Ident("IDENT.IFIER".to_string()));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_ident_cannot_start_with_period() {
    let mut lexer = new_lexer(".IFIER", false);
    assert!(matches!(lexer.next_token(), Illegal(_)));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_xlfn() {
    // The "_xlfn." prefix Excel uses for newer functions stays in the name.
    let mut lexer = new_lexer("_xlfn.MyVar", true);
    assert_eq!(lexer.next_token(), Ident("_xlfn.MyVar".to_string()));
    assert_eq!(lexer.next_token(), EOF);
}

View File

@@ -0,0 +1,101 @@
#![allow(clippy::unwrap_used)]
use crate::{
expressions::{
lexer::{Lexer, LexerMode},
token::{Error, TokenType},
},
language::get_language,
locale::get_locale,
};
/// Builds an A1-mode lexer with the English locale and the given language id.
fn new_language_lexer(formula: &str, language: &str) -> Lexer {
    Lexer::new(
        formula,
        LexerMode::A1,
        get_locale("en").unwrap(),
        get_language(language).unwrap(),
    )
}
// Spanish
#[test]
fn test_verdadero_falso() {
    // VERDADERO/FALSO lex as booleans under the "es" language.
    let mut lexer = new_language_lexer("IF(A1, VERDADERO, FALSO)", "es");
    assert_eq!(lexer.next_token(), TokenType::Ident("IF".to_string()));
    assert_eq!(lexer.next_token(), TokenType::LeftParenthesis);
    assert!(matches!(lexer.next_token(), TokenType::Reference { .. }));
    assert_eq!(lexer.next_token(), TokenType::Comma);
    assert_eq!(lexer.next_token(), TokenType::Boolean(true));
    assert_eq!(lexer.next_token(), TokenType::Comma);
    assert_eq!(lexer.next_token(), TokenType::Boolean(false));
    assert_eq!(lexer.next_token(), TokenType::RightParenthesis);
    assert_eq!(lexer.next_token(), TokenType::EOF);
}

#[test]
fn test_spanish_errors_ref() {
    let mut lexer = new_language_lexer("#¡REF!", "es");
    assert_eq!(lexer.next_token(), TokenType::Error(Error::REF));
    assert_eq!(lexer.next_token(), TokenType::EOF);
}

// German
#[test]
fn test_wahr_falsch() {
    // WAHR/FALSCH lex as booleans under the "de" language.
    let mut lexer = new_language_lexer("IF(A1, WAHR, FALSCH)", "de");
    assert_eq!(lexer.next_token(), TokenType::Ident("IF".to_string()));
    assert_eq!(lexer.next_token(), TokenType::LeftParenthesis);
    assert!(matches!(lexer.next_token(), TokenType::Reference { .. }));
    assert_eq!(lexer.next_token(), TokenType::Comma);
    assert_eq!(lexer.next_token(), TokenType::Boolean(true));
    assert_eq!(lexer.next_token(), TokenType::Comma);
    assert_eq!(lexer.next_token(), TokenType::Boolean(false));
    assert_eq!(lexer.next_token(), TokenType::RightParenthesis);
    assert_eq!(lexer.next_token(), TokenType::EOF);
}

#[test]
fn test_german_errors_ref() {
    let mut lexer = new_language_lexer("#BEZUG!", "de");
    assert_eq!(lexer.next_token(), TokenType::Error(Error::REF));
    assert_eq!(lexer.next_token(), TokenType::EOF);
}

// French
#[test]
fn test_vrai_faux() {
    // VRAI/FAUX lex as booleans under the "fr" language.
    let mut lexer = new_language_lexer("IF(A1, VRAI, FAUX)", "fr");
    assert_eq!(lexer.next_token(), TokenType::Ident("IF".to_string()));
    assert_eq!(lexer.next_token(), TokenType::LeftParenthesis);
    assert!(matches!(lexer.next_token(), TokenType::Reference { .. }));
    assert_eq!(lexer.next_token(), TokenType::Comma);
    assert_eq!(lexer.next_token(), TokenType::Boolean(true));
    assert_eq!(lexer.next_token(), TokenType::Comma);
    assert_eq!(lexer.next_token(), TokenType::Boolean(false));
    assert_eq!(lexer.next_token(), TokenType::RightParenthesis);
    assert_eq!(lexer.next_token(), TokenType::EOF);
}

#[test]
fn test_french_errors_ref() {
    let mut lexer = new_language_lexer("#REF!", "fr");
    assert_eq!(lexer.next_token(), TokenType::Error(Error::REF));
    assert_eq!(lexer.next_token(), TokenType::EOF);
}

// English with errors
#[test]
fn test_english_with_spanish_words() {
    // Under the English language, Spanish literals are plain identifiers.
    let mut lexer = new_language_lexer("IF(A1, VERDADERO, FALSO)", "en");
    assert_eq!(lexer.next_token(), TokenType::Ident("IF".to_string()));
    assert_eq!(lexer.next_token(), TokenType::LeftParenthesis);
    assert!(matches!(lexer.next_token(), TokenType::Reference { .. }));
    assert_eq!(lexer.next_token(), TokenType::Comma);
    assert_eq!(lexer.next_token(), TokenType::Ident("VERDADERO".to_string()));
    assert_eq!(lexer.next_token(), TokenType::Comma);
    assert_eq!(lexer.next_token(), TokenType::Ident("FALSO".to_string()));
    assert_eq!(lexer.next_token(), TokenType::RightParenthesis);
    assert_eq!(lexer.next_token(), TokenType::EOF);
}

View File

@@ -0,0 +1,48 @@
#![allow(clippy::unwrap_used)]
use crate::{
expressions::{
lexer::{Lexer, LexerMode},
token::TokenType,
},
language::get_language,
locale::get_locale_fix,
};
/// Builds an A1-mode lexer with an explicit locale and language (both by id).
// NOTE(review): this file calls `get_locale_fix` while every sibling test
// file uses `get_locale` — confirm the `_fix` variant is intentional and not
// a leftover from a local patch.
fn new_language_lexer(formula: &str, locale: &str, language: &str) -> Lexer {
    let locale = get_locale_fix(locale).unwrap();
    let language = get_language(language).unwrap();
    Lexer::new(formula, LexerMode::A1, locale, language)
}
#[test]
fn test_german_locale() {
    // With the German locale ',' acts as the decimal separator.
    let mut lexer = new_language_lexer("2,34e-3", "de", "en");
    assert_eq!(lexer.next_token(), TokenType::Number(2.34e-3));
    assert_eq!(lexer.next_token(), TokenType::EOF);
}

#[test]
fn test_german_locale_does_not_parse() {
    // Under the German locale the '.' terminates the number after "2".
    let mut lexer = new_language_lexer("2.34e-3", "de", "en");
    assert_eq!(lexer.next_token(), TokenType::Number(2.0));
    assert!(matches!(lexer.next_token(), TokenType::Illegal { .. }));
    assert_eq!(lexer.next_token(), TokenType::EOF);
}

#[test]
fn test_english_locale() {
    let mut lexer = new_language_lexer("2.34e-3", "en", "en");
    assert_eq!(lexer.next_token(), TokenType::Number(2.34e-3));
    assert_eq!(lexer.next_token(), TokenType::EOF);
}

#[test]
fn test_english_locale_does_not_parse() {
    // In the English locale a comma is an argument separator, so the input
    // lexes as two numbers split by a Comma token.
    let mut lexer = new_language_lexer("2,34e-3", "en", "en");
    assert_eq!(lexer.next_token(), TokenType::Number(2.0));
    assert_eq!(lexer.next_token(), TokenType::Comma);
    assert_eq!(lexer.next_token(), TokenType::Number(34e-3));
    assert_eq!(lexer.next_token(), TokenType::EOF);
}

View File

@@ -0,0 +1,487 @@
#![allow(clippy::unwrap_used)]
use crate::constants::{LAST_COLUMN, LAST_ROW};
use crate::expressions::lexer::LexerError;
use crate::expressions::{
lexer::{Lexer, LexerMode},
token::TokenType::*,
types::ParsedReference,
};
use crate::language::get_language;
use crate::locale::get_locale;
/// Builds an A1-mode lexer with the English locale and language.
fn new_lexer(formula: &str) -> Lexer {
    Lexer::new(
        formula,
        LexerMode::A1,
        get_locale("en").unwrap(),
        get_language("en").unwrap(),
    )
}
// --- Cell range tokenization ---

#[test]
fn test_range() {
    let mut lexer = new_lexer("C4:D4");
    let expected = Range {
        sheet: None,
        left: ParsedReference {
            column: 3,
            row: 4,
            absolute_column: false,
            absolute_row: false,
        },
        right: ParsedReference {
            column: 4,
            row: 4,
            absolute_column: false,
            absolute_row: false,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_range_absolute_column() {
    // Each side carries its own absolute flags.
    let mut lexer = new_lexer("$A1:B$4");
    let expected = Range {
        sheet: None,
        left: ParsedReference {
            column: 1,
            row: 1,
            absolute_column: true,
            absolute_row: false,
        },
        right: ParsedReference {
            column: 2,
            row: 4,
            absolute_column: false,
            absolute_row: true,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_range_with_sheet() {
    let mut lexer = new_lexer("Sheet1!A1:B4");
    let expected = Range {
        sheet: Some("Sheet1".to_string()),
        left: ParsedReference {
            column: 1,
            row: 1,
            absolute_column: false,
            absolute_row: false,
        },
        right: ParsedReference {
            column: 2,
            row: 4,
            absolute_column: false,
            absolute_row: false,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_range_with_sheet_with_space() {
    // Quoted sheet name with a space.
    let mut lexer = new_lexer("'New sheet'!$A$1:B44");
    let expected = Range {
        sheet: Some("New sheet".to_string()),
        left: ParsedReference {
            column: 1,
            row: 1,
            absolute_column: true,
            absolute_row: true,
        },
        right: ParsedReference {
            column: 2,
            row: 44,
            absolute_column: false,
            absolute_row: false,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}
// --- Full-column ranges (rows span 1..=LAST_ROW, rows implicitly absolute) ---

#[test]
fn test_range_column() {
    let mut lexer = new_lexer("C:D");
    let expected = Range {
        sheet: None,
        left: ParsedReference {
            column: 3,
            row: 1,
            absolute_column: false,
            absolute_row: true,
        },
        right: ParsedReference {
            column: 4,
            row: LAST_ROW,
            absolute_column: false,
            absolute_row: true,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_range_column_out_of_range() {
    // XFE is one column past the last valid one.
    let mut lexer = new_lexer("C:XFE");
    let expected = Illegal(LexerError {
        position: 5,
        message: "Column is not valid.".to_string(),
    });
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_range_column_absolute1() {
    let mut lexer = new_lexer("$C:D");
    let expected = Range {
        sheet: None,
        left: ParsedReference {
            column: 3,
            row: 1,
            absolute_column: true,
            absolute_row: true,
        },
        right: ParsedReference {
            column: 4,
            row: LAST_ROW,
            absolute_column: false,
            absolute_row: true,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_range_column_absolute2() {
    let mut lexer = new_lexer("$C:$AA");
    let expected = Range {
        sheet: None,
        left: ParsedReference {
            column: 3,
            row: 1,
            absolute_column: true,
            absolute_row: true,
        },
        right: ParsedReference {
            column: 27,
            row: LAST_ROW,
            absolute_column: true,
            absolute_row: true,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}
// --- Full-row ranges (columns span 1..=LAST_COLUMN, columns implicitly absolute) ---

#[test]
fn test_range_rows() {
    let mut lexer = new_lexer("3:5");
    let expected = Range {
        sheet: None,
        left: ParsedReference {
            column: 1,
            row: 3,
            absolute_column: true,
            absolute_row: false,
        },
        right: ParsedReference {
            column: LAST_COLUMN,
            row: 5,
            absolute_column: true,
            absolute_row: false,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_range_rows_absolute1() {
    let mut lexer = new_lexer("$3:5");
    let expected = Range {
        sheet: None,
        left: ParsedReference {
            column: 1,
            row: 3,
            absolute_column: true,
            absolute_row: true,
        },
        right: ParsedReference {
            column: LAST_COLUMN,
            row: 5,
            absolute_column: true,
            absolute_row: false,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_range_rows_absolute2() {
    let mut lexer = new_lexer("$3:$55");
    let expected = Range {
        sheet: None,
        left: ParsedReference {
            column: 1,
            row: 3,
            absolute_column: true,
            absolute_row: true,
        },
        right: ParsedReference {
            column: LAST_COLUMN,
            row: 55,
            absolute_column: true,
            absolute_row: true,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}
// --- Sheet-qualified column and row ranges ---

#[test]
fn test_range_column_sheet() {
    let mut lexer = new_lexer("Sheet1!C:D");
    let expected = Range {
        sheet: Some("Sheet1".to_string()),
        left: ParsedReference {
            column: 3,
            row: 1,
            absolute_column: false,
            absolute_row: true,
        },
        right: ParsedReference {
            column: 4,
            row: LAST_ROW,
            absolute_column: false,
            absolute_row: true,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_range_column_sheet_absolute() {
    let mut lexer = new_lexer("Sheet1!$C:$D");
    let expected = Range {
        sheet: Some("Sheet1".to_string()),
        left: ParsedReference {
            column: 3,
            row: 1,
            absolute_column: true,
            absolute_row: true,
        },
        right: ParsedReference {
            column: 4,
            row: LAST_ROW,
            absolute_column: true,
            absolute_row: true,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);

    // Same range behind a quoted sheet name.
    let mut lexer = new_lexer("'Woops ans'!$C:$D");
    let expected = Range {
        sheet: Some("Woops ans".to_string()),
        left: ParsedReference {
            column: 3,
            row: 1,
            absolute_column: true,
            absolute_row: true,
        },
        right: ParsedReference {
            column: 4,
            row: LAST_ROW,
            absolute_column: true,
            absolute_row: true,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_range_rows_sheet() {
    let mut lexer = new_lexer("'A new sheet'!3:5");
    let expected = Range {
        sheet: Some("A new sheet".to_string()),
        left: ParsedReference {
            column: 1,
            row: 3,
            absolute_column: true,
            absolute_row: false,
        },
        right: ParsedReference {
            column: LAST_COLUMN,
            row: 5,
            absolute_column: true,
            absolute_row: false,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);

    // Unquoted sheet name variant.
    let mut lexer = new_lexer("Sheet12!3:5");
    let expected = Range {
        sheet: Some("Sheet12".to_string()),
        left: ParsedReference {
            column: 1,
            row: 3,
            absolute_column: true,
            absolute_row: false,
        },
        right: ParsedReference {
            column: LAST_COLUMN,
            row: 5,
            absolute_column: true,
            absolute_row: false,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}
// Non ranges

#[test]
fn test_non_range_variable_name() {
    // Two letters with no row number: an identifier, not a reference.
    let mut lexer = new_lexer("AB");
    assert_eq!(lexer.next_token(), Ident("AB".to_string()));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_non_range_invalid_variable_name() {
    // '$' cannot prefix a bare name.
    let mut lexer = new_lexer("$AB");
    assert!(matches!(lexer.next_token(), Illegal(_)));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_non_range_invalid_variable_name_a03() {
    // A leading zero in the row number is accepted.
    let mut lexer = new_lexer("A03");
    let expected = Reference {
        sheet: None,
        row: 3,
        column: 1,
        absolute_column: false,
        absolute_row: false,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_non_range_invalid_variable_name_sheet1_a03() {
    let mut lexer = new_lexer("Sheet1!A03");
    let expected = Reference {
        sheet: Some("Sheet1".to_string()),
        row: 3,
        column: 1,
        absolute_column: false,
        absolute_row: false,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_range_rows_with_0() {
    // Leading zeros are accepted in row ranges too.
    let mut lexer = new_lexer("03:05");
    let expected = Range {
        sheet: None,
        left: ParsedReference {
            column: 1,
            row: 3,
            absolute_column: true,
            absolute_row: false,
        },
        right: ParsedReference {
            column: LAST_COLUMN,
            row: 5,
            absolute_column: true,
            absolute_row: false,
        },
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}
#[test]
fn test_range_incomplete_row() {
    // Mode is switched after construction; "R[" is missing the row offset.
    let mut lexer = new_lexer("R[");
    lexer.set_lexer_mode(LexerMode::R1C1);
    assert!(matches!(lexer.next_token(), Illegal(_)));
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn test_range_incomplete_column() {
    // The column bracket is opened but never closed.
    let mut lexer = new_lexer("R[3][");
    lexer.set_lexer_mode(LexerMode::R1C1);
    assert!(matches!(lexer.next_token(), Illegal(_)));
    assert_eq!(lexer.next_token(), EOF);
}
#[test]
fn range_operator() {
    // "A1:OFFSET(...)" lexes as a reference, a bare Colon, then the function
    // call tokens — the range operator is left for the parser to combine.
    // FIX: dropped the redundant `lx.set_lexer_mode(LexerMode::A1)` call;
    // `new_lexer` already constructs the lexer in A1 mode.
    let mut lx = new_lexer("A1:OFFSET(B1,1,2)");
    assert!(matches!(lx.next_token(), Reference { .. }));
    assert!(matches!(lx.next_token(), Colon));
    assert!(matches!(lx.next_token(), Ident(_)));
    assert!(matches!(lx.next_token(), LeftParenthesis));
    assert!(matches!(lx.next_token(), Reference { .. }));
    assert_eq!(lx.next_token(), Comma);
    assert!(matches!(lx.next_token(), Number(_)));
    assert_eq!(lx.next_token(), Comma);
    assert!(matches!(lx.next_token(), Number(_)));
    assert!(matches!(lx.next_token(), RightParenthesis));
    assert_eq!(lx.next_token(), EOF);
}

View File

@@ -0,0 +1,73 @@
#![allow(clippy::unwrap_used)]
use crate::expressions::{
lexer::{Lexer, LexerMode},
token::{TableReference, TableSpecifier, TokenType::*},
};
use crate::language::get_language;
use crate::locale::get_locale;
/// Builds an A1-mode lexer with the English locale and language.
fn new_lexer(formula: &str) -> Lexer {
    Lexer::new(
        formula,
        LexerMode::A1,
        get_locale("en").unwrap(),
        get_language("en").unwrap(),
    )
}
// --- Structured (table) reference tokenization ---

#[test]
fn table_this_row() {
    // [#This Row] specifier combined with a column range [Jan]:[Dec].
    let mut lexer = new_lexer("tbInfo[[#This Row], [Jan]:[Dec]]");
    let expected = StructuredReference {
        table_name: "tbInfo".to_string(),
        specifier: Some(TableSpecifier::ThisRow),
        table_reference: Some(TableReference::RangeReference((
            "Jan".to_string(),
            "Dec".to_string(),
        ))),
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn table_no_specifier() {
    let mut lexer = new_lexer("tbInfo[December]");
    let expected = StructuredReference {
        table_name: "tbInfo".to_string(),
        specifier: None,
        table_reference: Some(TableReference::ColumnReference("December".to_string())),
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn table_no_specifier_white_spaces() {
    // A column name containing a space needs the extra bracket pair.
    let mut lexer = new_lexer("tbInfo[[First Month]]");
    let expected = StructuredReference {
        table_name: "tbInfo".to_string(),
        specifier: None,
        table_reference: Some(TableReference::ColumnReference("First Month".to_string())),
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

#[test]
fn table_totals_no_reference() {
    // A specifier alone, with no column reference.
    let mut lexer = new_lexer("tbInfo[#Totals]");
    let expected = StructuredReference {
        table_name: "tbInfo".to_string(),
        specifier: Some(TableSpecifier::Totals),
        table_reference: None,
    };
    assert_eq!(lexer.next_token(), expected);
    assert_eq!(lexer.next_token(), EOF);
}

View File

@@ -0,0 +1,146 @@
use crate::expressions::{
lexer::util::get_tokens,
token::{OpCompare, OpSum, TokenType},
};
/// Tokenizes `formula` and keeps only the token types, dropping positions.
fn get_tokens_types(formula: &str) -> Vec<TokenType> {
    get_tokens(formula)
        .iter()
        .map(|marked| marked.token.clone())
        .collect()
}
#[test]
fn test_get_tokens() {
    // One marked token per lexeme.
    let tokens = get_tokens("1+1");
    assert_eq!(tokens.len(), 3);
    // The whole input is tokenized even with a trailing operator.
    let tokens = get_tokens("1 + AA23 +");
    assert_eq!(tokens.len(), 4);
    // The third marked token spans bytes 3..10 of the input.
    let marked = tokens.get(2).expect("expected token");
    assert_eq!(marked.start, 3);
    assert_eq!(marked.end, 10);
}
// One round-trip through `get_tokens_types` per token category.
#[test]
fn test_simple_tokens() {
    // Grouping delimiters.
    assert_eq!(
        get_tokens_types("()"),
        vec![TokenType::LeftParenthesis, TokenType::RightParenthesis]
    );
    assert_eq!(
        get_tokens_types("{}"),
        vec![TokenType::LeftBrace, TokenType::RightBrace]
    );
    assert_eq!(
        get_tokens_types("[]"),
        vec![TokenType::LeftBracket, TokenType::RightBracket]
    );
    // Concatenation and comparison operators.
    assert_eq!(get_tokens_types("&"), vec![TokenType::And]);
    assert_eq!(
        get_tokens_types("<"),
        vec![TokenType::Compare(OpCompare::LessThan)]
    );
    assert_eq!(
        get_tokens_types(">"),
        vec![TokenType::Compare(OpCompare::GreaterThan)]
    );
    assert_eq!(
        get_tokens_types("<="),
        vec![TokenType::Compare(OpCompare::LessOrEqualThan)]
    );
    assert_eq!(
        get_tokens_types(">="),
        vec![TokenType::Compare(OpCompare::GreaterOrEqualThan)]
    );
    // Identifiers and numbers.
    assert_eq!(
        get_tokens_types("IF"),
        vec![TokenType::Ident("IF".to_owned())]
    );
    assert_eq!(get_tokens_types("45"), vec![TokenType::Number(45.0)]);
    // The lexer parses this as two tokens
    assert_eq!(
        get_tokens_types("-45"),
        vec![TokenType::Addition(OpSum::Minus), TokenType::Number(45.0)]
    );
    assert_eq!(
        get_tokens_types("23.45e-2"),
        vec![TokenType::Number(23.45e-2)]
    );
    assert_eq!(
        get_tokens_types("4-3"),
        vec![
            TokenType::Number(4.0),
            TokenType::Addition(OpSum::Minus),
            TokenType::Number(3.0)
        ]
    );
    // Boolean literals.
    assert_eq!(get_tokens_types("True"), vec![TokenType::Boolean(true)]);
    assert_eq!(get_tokens_types("FALSE"), vec![TokenType::Boolean(false)]);
    // Argument separators (comma and semicolon).
    assert_eq!(
        get_tokens_types("2,3.5"),
        vec![
            TokenType::Number(2.0),
            TokenType::Comma,
            TokenType::Number(3.5)
        ]
    );
    assert_eq!(
        get_tokens_types("2.4;3.5"),
        vec![
            TokenType::Number(2.4),
            TokenType::Semicolon,
            TokenType::Number(3.5)
        ]
    );
    // Cell references with every combination of absolute flags.
    assert_eq!(
        get_tokens_types("AB34"),
        vec![TokenType::Reference {
            sheet: None,
            row: 34,
            column: 28,
            absolute_column: false,
            absolute_row: false
        }]
    );
    assert_eq!(
        get_tokens_types("$A3"),
        vec![TokenType::Reference {
            sheet: None,
            row: 3,
            column: 1,
            absolute_column: true,
            absolute_row: false
        }]
    );
    assert_eq!(
        get_tokens_types("AB$34"),
        vec![TokenType::Reference {
            sheet: None,
            row: 34,
            column: 28,
            absolute_column: false,
            absolute_row: true
        }]
    );
    assert_eq!(
        get_tokens_types("$AB$34"),
        vec![TokenType::Reference {
            sheet: None,
            row: 34,
            column: 28,
            absolute_column: true,
            absolute_row: true
        }]
    );
    // Sheet-qualified reference with a quoted sheet name.
    assert_eq!(
        get_tokens_types("'My House'!AB34"),
        vec![TokenType::Reference {
            sheet: Some("My House".to_string()),
            row: 34,
            column: 28,
            absolute_column: false,
            absolute_row: false
        }]
    );
}