FIX: We need to serialize/deserialize tokens in wasm
@@ -42,6 +42,8 @@
 //! assert!(matches!(lexer.next_token(), TokenType::Reference { .. }));
 //! ```
 
+use serde::{Deserialize, Serialize};
+
 use crate::expressions::token::{OpCompare, OpProduct, OpSum};
 
 use crate::language::Language;
@@ -59,7 +61,7 @@ mod test;
 mod ranges;
 mod structured_references;
 
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub struct LexerError {
     pub position: usize,
     pub message: String,
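With Serialize and Deserialize derived, a LexerError can round-trip through JSON. A minimal sketch of what that buys, assuming serde_json as the wire format (the new test below uses it) and serde's default struct representation; the error values here are illustrative:

use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct LexerError {
    pub position: usize,
    pub message: String,
}

fn main() {
    let err = LexerError { position: 3, message: "Unexpected token".to_string() };
    // serde's default derive maps struct fields to JSON object keys:
    // {"position":3,"message":"Unexpected token"}
    let json = serde_json::to_string(&err).unwrap();
    let back: LexerError = serde_json::from_str(&json).unwrap();
    assert_eq!(err, back);
}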
@@ -1,5 +1,7 @@
 use std::fmt;
 
+use serde::{Deserialize, Serialize};
+
 use crate::expressions::token;
 use crate::language::get_language;
 use crate::locale::get_locale;
@@ -7,7 +9,7 @@ use crate::locale::get_locale;
 use super::{Lexer, LexerMode};
 
 /// A MarkedToken is a token together with its position on a formula
-#[derive(Debug, PartialEq)]
+#[derive(Debug, PartialEq, Serialize, Deserialize)]
 pub struct MarkedToken {
     pub token: token::TokenType,
     pub start: i32,
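MarkedToken nests a TokenType inside a struct, so its JSON shape depends on serde's default enum encoding. A trimmed-down sketch; the Number variant is hypothetical, and the real types (shown in the hunks below) carry more variants and fields than this:

use serde::{Deserialize, Serialize};

// Hypothetical stand-in for the real TokenType.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
enum TokenType {
    EOF,
    Number(f64),
}

// Trimmed-down MarkedToken; only the fields visible in this hunk are shown.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct MarkedToken {
    token: TokenType,
    start: i32,
}

fn main() {
    let t = MarkedToken { token: TokenType::Number(123.0), start: 0 };
    // serde's default (externally tagged) enum encoding nests the variant
    // name as an object key: {"token":{"Number":123.0},"start":0}
    println!("{}", serde_json::to_string(&t).unwrap());
}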
@@ -1,6 +1,8 @@
 // public modules
 pub mod lexer;
 pub mod parser;
+#[cfg(test)]
+mod test_tokens_serde;
 pub mod token;
 pub mod types;
 pub mod utils;
base/src/expressions/test_tokens_serde.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
+#![allow(clippy::unwrap_used)]
+
+use super::lexer::util::{get_tokens, MarkedToken};
+
+#[test]
+fn test_simple_formula() {
+    let formula = "123+23";
+    let tokens = get_tokens(formula);
+    let tokens_str = serde_json::to_string(&tokens).unwrap();
+    let tokens_json: Vec<MarkedToken> = serde_json::from_str(&tokens_str).unwrap();
+    assert_eq!(tokens_json.len(), 3);
+}
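The test lexes "123+23", which should yield three tokens (a number, the + operator, a second number), serializes the Vec<MarkedToken> to JSON, parses it back, and checks the count survives the round trip. The commit message points at wasm: with these derives in place, a wasm wrapper could hand tokens to JavaScript as JSON. A hypothetical sketch, not part of this commit; the function name and import path are assumptions:

// Hypothetical wasm-bindgen wrapper (not in this commit): exposes the
// lexer's marked tokens to JavaScript as a JSON string.
use wasm_bindgen::prelude::*;

// Assumed import path for the crate under base/src; adjust to the real crate name.
use crate::expressions::lexer::util::get_tokens;

#[wasm_bindgen]
pub fn get_formula_tokens(formula: &str) -> String {
    let tokens = get_tokens(formula); // Vec<MarkedToken>, now Serialize
    serde_json::to_string(&tokens).unwrap_or_else(|_| "[]".to_string())
}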
@@ -1,12 +1,13 @@
 use std::fmt;
 
+use serde::{Deserialize, Serialize};
 use serde_repr::{Deserialize_repr, Serialize_repr};
 
 use crate::language::Language;
 
 use super::{lexer::LexerError, types::ParsedReference};
 
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
 pub enum OpCompare {
     LessThan,
     GreaterThan,
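token.rs already pulled in serde_repr, which serializes fieldless #[repr] enums as their integer discriminants; the plain serde derives added here are for enums like OpCompare that should encode by variant name instead. A small contrast with illustrative enums (Direction is hypothetical):

use serde::Serialize;
use serde_repr::Serialize_repr;

// Plain serde derive: a unit variant serializes as its name, e.g. "Add".
#[derive(Serialize)]
#[allow(dead_code)]
enum OpSum {
    Add,
    Minus,
}

// serde_repr derive: a variant serializes as its integer value, e.g. 0.
// Requires an explicit #[repr]; this enum is hypothetical.
#[derive(Serialize_repr)]
#[repr(u8)]
#[allow(dead_code)]
enum Direction {
    Up = 0,
    Down = 1,
}

fn main() {
    assert_eq!(serde_json::to_string(&OpSum::Add).unwrap(), "\"Add\"");
    assert_eq!(serde_json::to_string(&Direction::Up).unwrap(), "0");
}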
@@ -44,7 +45,7 @@ impl fmt::Display for OpUnary {
     }
 }
 
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
 pub enum OpSum {
     Add,
     Minus,
@@ -59,7 +60,7 @@ impl fmt::Display for OpSum {
     }
 }
 
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
 pub enum OpProduct {
     Times,
     Divide,
@@ -200,7 +201,7 @@ pub fn is_english_error_string(name: &str) -> bool {
     names.iter().any(|e| *e == name)
 }
 
-#[derive(Debug, PartialEq, Clone)]
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum TableSpecifier {
     All,
     Data,
@@ -209,13 +210,13 @@ pub enum TableSpecifier {
     Totals,
 }
 
-#[derive(Debug, PartialEq, Clone)]
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum TableReference {
     ColumnReference(String),
     RangeReference((String, String)),
 }
 
-#[derive(Debug, PartialEq, Clone)]
+#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
 pub enum TokenType {
     Illegal(LexerError),
     EOF,
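Since no #[serde(...)] attributes accompany these derives, TokenType gets serde's default externally tagged representation, and its JSON shape varies by variant kind: unit variants like EOF become bare strings, newtype variants like Illegal become single-key objects, and struct variants like the Reference { .. } seen in the doc example at the top become nested objects. A trimmed-down sketch:

use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct LexerError {
    position: usize,
    message: String,
}

// Trimmed-down stand-in for the real TokenType.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
enum TokenType {
    Illegal(LexerError),
    EOF,
}

fn main() {
    // A unit variant serializes to a bare string...
    assert_eq!(serde_json::to_string(&TokenType::EOF).unwrap(), "\"EOF\"");
    // ...while a newtype variant becomes a single-key, externally tagged object.
    let illegal = TokenType::Illegal(LexerError { position: 0, message: "x".into() });
    assert_eq!(
        serde_json::to_string(&illegal).unwrap(),
        r#"{"Illegal":{"position":0,"message":"x"}}"#
    );
}

JavaScript consumers therefore need to handle both the string and object shapes when walking a deserialized token list.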