FIX: Support serializing/deserializing lexer tokens for the wasm bindings
This commit is contained in:
@@ -42,6 +42,8 @@
|
||||
//! assert!(matches!(lexer.next_token(), TokenType::Reference { .. }));
|
||||
//! ```
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::expressions::token::{OpCompare, OpProduct, OpSum};
|
||||
|
||||
use crate::language::Language;
|
||||
@@ -59,7 +61,7 @@ mod test;
|
||||
mod ranges;
|
||||
mod structured_references;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
|
||||
pub struct LexerError {
|
||||
pub position: usize,
|
||||
pub message: String,
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
use std::fmt;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::expressions::token;
|
||||
use crate::language::get_language;
|
||||
use crate::locale::get_locale;
|
||||
@@ -7,7 +9,7 @@ use crate::locale::get_locale;
|
||||
use super::{Lexer, LexerMode};
|
||||
|
||||
/// A MarkedToken is a token together with its position on a formula
|
||||
#[derive(Debug, PartialEq)]
|
||||
#[derive(Debug, PartialEq, Serialize, Deserialize)]
|
||||
pub struct MarkedToken {
|
||||
pub token: token::TokenType,
|
||||
pub start: i32,
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
// public modules
|
||||
pub mod lexer;
|
||||
pub mod parser;
|
||||
#[cfg(test)]
|
||||
mod test_tokens_serde;
|
||||
pub mod token;
|
||||
pub mod types;
|
||||
pub mod utils;
|
||||
|
||||
12
base/src/expressions/test_tokens_serde.rs
Normal file
12
base/src/expressions/test_tokens_serde.rs
Normal file
@@ -0,0 +1,12 @@
|
||||
#![allow(clippy::unwrap_used)]
|
||||
|
||||
use super::lexer::util::{get_tokens, MarkedToken};
|
||||
|
||||
#[test]
fn test_simple_formula() {
    // Lex a small formula and round-trip the marked tokens through JSON,
    // verifying that the serde derives on MarkedToken work end to end.
    let marked = get_tokens("123+23");
    let encoded = serde_json::to_string(&marked).unwrap();
    let decoded: Vec<MarkedToken> = serde_json::from_str(&encoded).unwrap();
    // "123+23" lexes into three tokens: the number 123, '+', and the number 23.
    assert_eq!(decoded.len(), 3);
}
|
||||
@@ -1,12 +1,13 @@
|
||||
use std::fmt;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_repr::{Deserialize_repr, Serialize_repr};
|
||||
|
||||
use crate::language::Language;
|
||||
|
||||
use super::{lexer::LexerError, types::ParsedReference};
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
|
||||
pub enum OpCompare {
|
||||
LessThan,
|
||||
GreaterThan,
|
||||
@@ -44,7 +45,7 @@ impl fmt::Display for OpUnary {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
|
||||
pub enum OpSum {
|
||||
Add,
|
||||
Minus,
|
||||
@@ -59,7 +60,7 @@ impl fmt::Display for OpSum {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
|
||||
pub enum OpProduct {
|
||||
Times,
|
||||
Divide,
|
||||
@@ -200,7 +201,7 @@ pub fn is_english_error_string(name: &str) -> bool {
|
||||
names.iter().any(|e| *e == name)
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
|
||||
pub enum TableSpecifier {
|
||||
All,
|
||||
Data,
|
||||
@@ -209,13 +210,13 @@ pub enum TableSpecifier {
|
||||
Totals,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
|
||||
pub enum TableReference {
|
||||
ColumnReference(String),
|
||||
RangeReference((String, String)),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize)]
|
||||
pub enum TokenType {
|
||||
Illegal(LexerError),
|
||||
EOF,
|
||||
|
||||
Reference in New Issue
Block a user