diff --git a/base/src/expressions/lexer/mod.rs b/base/src/expressions/lexer/mod.rs
index f377542..e868f36 100644
--- a/base/src/expressions/lexer/mod.rs
+++ b/base/src/expressions/lexer/mod.rs
@@ -42,6 +42,8 @@
 //! assert!(matches!(lexer.next_token(), TokenType::Reference { .. }));
 //! ```
 
+use serde::{Deserialize, Serialize};
+
 use crate::expressions::token::{OpCompare, OpProduct, OpSum};
 use crate::language::Language;
 
@@ -59,7 +61,7 @@ mod test;
 mod ranges;
 mod structured_references;
 
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct LexerError {
     pub position: usize,
     pub message: String,
diff --git a/base/src/expressions/lexer/util.rs b/base/src/expressions/lexer/util.rs
index 9942936..273079b 100644
--- a/base/src/expressions/lexer/util.rs
+++ b/base/src/expressions/lexer/util.rs
@@ -1,5 +1,7 @@
 use std::fmt;
 
+use serde::{Deserialize, Serialize};
+
 use crate::expressions::token;
 use crate::language::get_language;
 use crate::locale::get_locale;
@@ -7,7 +9,7 @@ use crate::locale::get_locale;
 use super::{Lexer, LexerMode};
 
 /// A MarkedToken is a token together with its position on a formula
-#[derive(Debug, PartialEq)]
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
 pub struct MarkedToken {
     pub token: token::TokenType,
     pub start: i32,
diff --git a/base/src/expressions/mod.rs b/base/src/expressions/mod.rs
index 35f9756..83f10f4 100644
--- a/base/src/expressions/mod.rs
+++ b/base/src/expressions/mod.rs
@@ -1,6 +1,8 @@
 // public modules
 pub mod lexer;
 pub mod parser;
+#[cfg(test)]
+mod test_tokens_serde;
 pub mod token;
 pub mod types;
 pub mod utils;
diff --git a/base/src/expressions/test_tokens_serde.rs b/base/src/expressions/test_tokens_serde.rs
new file mode 100644
index 0000000..8f2e566
--- /dev/null
+++ b/base/src/expressions/test_tokens_serde.rs
@@ -0,0 +1,12 @@
+#![allow(clippy::unwrap_used)]
+
+use super::lexer::util::{get_tokens, MarkedToken};
+
+#[test]
+fn test_simple_formula() {
+    let formula = "123+23";
+    let tokens = get_tokens(formula);
+    let tokens_str = serde_json::to_string(&tokens).unwrap();
+    let tokens_json: Vec<MarkedToken> = serde_json::from_str(&tokens_str).unwrap();
+    assert_eq!(tokens_json.len(), 3);
+}
diff --git a/base/src/expressions/token.rs b/base/src/expressions/token.rs
index 8f47f36..d261fd2 100644
--- a/base/src/expressions/token.rs
+++ b/base/src/expressions/token.rs
@@ -1,12 +1,13 @@
 use std::fmt;
 
+use serde::{Deserialize, Serialize};
 use serde_repr::{Deserialize_repr, Serialize_repr};
 
 use crate::language::Language;
 
 use super::{lexer::LexerError, types::ParsedReference};
 
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
 pub enum OpCompare {
     LessThan,
     GreaterThan,
@@ -44,7 +45,7 @@ impl fmt::Display for OpUnary {
     }
 }
 
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
 pub enum OpSum {
     Add,
     Minus,
@@ -59,7 +60,7 @@ impl fmt::Display for OpSum {
     }
 }
 
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
 pub enum OpProduct {
     Times,
     Divide,
@@ -200,7 +201,7 @@ pub fn is_english_error_string(name: &str) -> bool {
     names.iter().any(|e| *e == name)
 }
 
-#[derive(Debug, PartialEq, Clone)]
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum TableSpecifier {
     All,
     Data,
@@ -209,13 +210,13 @@ pub enum TableSpecifier {
     Totals,
 }
 
-#[derive(Debug, PartialEq, Clone)]
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum TableReference {
     ColumnReference(String),
     RangeReference((String, String)),
 }
 
-#[derive(Debug, PartialEq, Clone)]
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum TokenType {
     Illegal(LexerError),
     EOF,
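For context, a minimal sketch of what the new derives enable: round-tripping a lexed token stream through JSON, mirroring the added test. This is illustrative only — the `roundtrip` helper is hypothetical, and it assumes `get_tokens` returns `Vec<MarkedToken>` and that `lexer::util` is visible from the calling module, as the new test suggests.

```rust
// Hypothetical in-crate usage of the derives added by this patch
// (module paths and visibility assumed from the test above).
use crate::expressions::lexer::util::{get_tokens, MarkedToken};

fn roundtrip(formula: &str) -> Result<Vec<MarkedToken>, serde_json::Error> {
    // `Serialize` on MarkedToken/TokenType: the token stream,
    // positions included, can now leave the lexer as JSON...
    let tokens = get_tokens(formula);
    let json = serde_json::to_string(&tokens)?;
    // ...and `Deserialize` brings it back without a hand-written parser.
    serde_json::from_str(&json)
}
```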