Add new keywords

next
Sayan Nandan 2 years ago
parent 7e5e2838cc
commit cac7bd4860
No known key found for this signature in database
GPG Key ID: 8BC07A0A4D41DD52

@@ -239,7 +239,7 @@ impl<'a> Compiler<'a> {
             Some(Token![create]) => self.create0(),
             Some(Token![drop]) => self.drop0(),
             Some(Token![alter]) => self.alter0(),
-            Some(Token![inspect]) => self.inspect0(),
+            Some(Token![describe]) => self.inspect0(),
             Some(Token![use]) => self.use0(),
             _ => Err(LangError::ExpectedStatement),
         }

@@ -63,6 +63,86 @@ fn process_entity(tok: &[Token], d: &mut MaybeInit<Entity>, i: &mut usize) -> bo
     is_full | is_single
 }
+/*
+    Contexts
+*/
+#[derive(Debug, PartialEq)]
+pub(super) struct RelationalExpr<'a> {
+    pub(super) lhs: RawSlice,
+    pub(super) rhs: &'a Lit,
+    pub(super) opc: u8,
+}
+impl<'a> RelationalExpr<'a> {
+    pub(super) const OP_EQ: u8 = 1;
+    pub(super) const OP_NE: u8 = 2;
+    pub(super) const OP_GT: u8 = 3;
+    pub(super) const OP_GE: u8 = 4;
+    pub(super) const OP_LT: u8 = 5;
+    pub(super) const OP_LE: u8 = 6;
+    #[inline(always)]
+    fn parse_operator(tok: &[Token], i: &mut usize, okay: &mut bool) -> u8 {
+        /*
+            FIXME(@ohsayan): This is relatively messy right now, but does the job. Will
+            re-implement later.
+        */
+        #[inline(always)]
+        fn u(b: bool) -> u8 {
+            b as _
+        }
+        let op_eq = u(tok[0] == Token![=]) * Self::OP_EQ;
+        let op_ne = u(tok[0] == Token![!] && tok[1] == Token![=]) * Self::OP_NE;
+        let op_ge = u(tok[0] == Token![>] && tok[1] == Token![=]) * Self::OP_GE;
+        let op_gt = u(tok[0] == Token![>] && op_ge == 0) * Self::OP_GT;
+        let op_le = u(tok[0] == Token![<] && tok[1] == Token![=]) * Self::OP_LE;
+        let op_lt = u(tok[0] == Token![<] && op_le == 0) * Self::OP_LT;
+        let opc = op_eq + op_ne + op_ge + op_gt + op_le + op_lt;
+        *okay = opc != 0;
+        *i += 1 + (opc & 1 == 0) as usize;
+        opc
+    }
+    #[inline(always)]
+    fn try_parse(tok: &'a [Token], cnt: &mut usize) -> Option<Self> {
+        /*
+            Minimum length of an expression:
+            [lhs] [operator] [rhs]
+        */
+        let mut okay = tok.len() >= 3;
+        let mut i = 0_usize;
+        if compiler::unlikely(!okay) {
+            return None;
+        }
+        okay &= tok[0].is_ident();
+        i += 1;
+        // let's get ourselves the operator
+        let operator = Self::parse_operator(&tok[1..], &mut i, &mut okay);
+        okay &= i < tok.len();
+        okay &= tok[tok.len() - 1].is_lit(); // LOL, I really like saving cycles
+        *cnt += i + okay as usize;
+        if compiler::likely(okay) {
+            Some(unsafe {
+                Self {
+                    lhs: extract!(tok[0], Token::Ident(ref id) => id.clone()),
+                    rhs: extract!(tok[tok.len() - 1], Token::Lit(ref l) => l),
+                    opc: operator,
+                }
+            })
+        } else {
+            compiler::cold_err(None)
+        }
+    }
+}
+#[cfg(test)]
+#[inline(always)]
+pub(super) fn parse_relexpr_full<'a>(tok: &'a [Token]) -> Option<RelationalExpr<'a>> {
+    let mut i = 0;
+    let okay = RelationalExpr::try_parse(tok, &mut i);
+    full_tt!(tok.len(), i);
+    okay
+}
 /*
     Impls for insert
 */
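
The operator parser added above is branchless: each comparison yields a 0/1 that is multiplied by its opcode, and since at most one test can succeed, summing the products gives the single matching opcode (or 0 on no match). The numbering also encodes token width: single-token operators (`=`, `>`, `<`) take odd opcodes and two-token operators (`!=`, `>=`, `<=`) take even ones, which is why `*i += 1 + (opc & 1 == 0) as usize` steps past exactly one or two tokens (on no match the caller's `okay` flag makes the cursor position irrelevant). A minimal standalone sketch of the same trick, with plain chars standing in for the lexer's tokens:

```rust
const OP_EQ: u8 = 1; // `=`  (odd  => one token)
const OP_NE: u8 = 2; // `!=` (even => two tokens)
const OP_GT: u8 = 3; // `>`
const OP_GE: u8 = 4; // `>=`
const OP_LT: u8 = 5; // `<`
const OP_LE: u8 = 6; // `<=`

/// Branchless operator scan; returns (opcode, tokens consumed).
fn parse_operator(t: &[char]) -> (u8, usize) {
    let u = |b: bool| b as u8;
    // at most one of these products is nonzero
    let op_ne = u(t[0] == '!' && t[1] == '=') * OP_NE;
    let op_ge = u(t[0] == '>' && t[1] == '=') * OP_GE;
    let op_gt = u(t[0] == '>' && op_ge == 0) * OP_GT;
    let op_le = u(t[0] == '<' && t[1] == '=') * OP_LE;
    let op_lt = u(t[0] == '<' && op_le == 0) * OP_LT;
    let op_eq = u(t[0] == '=') * OP_EQ;
    let opc = op_eq + op_ne + op_ge + op_gt + op_le + op_lt;
    // even opcode => two-character operator => consume one extra token
    (opc, 1 + (opc & 1 == 0) as usize)
}

fn main() {
    assert_eq!(parse_operator(&['>', '=']), (OP_GE, 2));
    assert_eq!(parse_operator(&['>', 'x']), (OP_GT, 1));
    assert_eq!(parse_operator(&['=', 'x']), (OP_EQ, 1));
    assert_eq!(parse_operator(&['?', 'x']), (0, 2)); // opcode 0: rejected via `okay`
}
```
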
@@ -672,79 +752,3 @@ pub(super) fn parse_delete_full<'a>(tok: &'a [Token]) -> LangResult<DeleteStatem
     full_tt!(i, tok.len());
     r
 }
-#[derive(Debug, PartialEq)]
-pub(super) struct RelationalExpr<'a> {
-    pub(super) lhs: RawSlice,
-    pub(super) rhs: &'a Lit,
-    pub(super) opc: u8,
-}
-impl<'a> RelationalExpr<'a> {
-    pub(super) const OP_EQ: u8 = 1;
-    pub(super) const OP_NE: u8 = 2;
-    pub(super) const OP_GT: u8 = 3;
-    pub(super) const OP_GE: u8 = 4;
-    pub(super) const OP_LT: u8 = 5;
-    pub(super) const OP_LE: u8 = 6;
-    #[inline(always)]
-    fn parse_operator(tok: &[Token], i: &mut usize, okay: &mut bool) -> u8 {
-        /*
-            FIXME(@ohsayan): This is relatively messy right now, but does the job. Will
-            re-implement later.
-        */
-        #[inline(always)]
-        fn u(b: bool) -> u8 {
-            b as _
-        }
-        let op_eq = u(tok[0] == Token![=]) * Self::OP_EQ;
-        let op_ne = u(tok[0] == Token![!] && tok[1] == Token![=]) * Self::OP_NE;
-        let op_ge = u(tok[0] == Token![>] && tok[1] == Token![=]) * Self::OP_GE;
-        let op_gt = u(tok[0] == Token![>] && op_ge == 0) * Self::OP_GT;
-        let op_le = u(tok[0] == Token![<] && tok[1] == Token![=]) * Self::OP_LE;
-        let op_lt = u(tok[0] == Token![<] && op_le == 0) * Self::OP_LT;
-        let opc = op_eq + op_ne + op_ge + op_gt + op_le + op_lt;
-        *okay = opc != 0;
-        *i += 1 + (opc & 1 == 0) as usize;
-        opc
-    }
-    #[inline(always)]
-    fn try_parse(tok: &'a [Token], cnt: &mut usize) -> Option<Self> {
-        /*
-            Minimum length of an expression:
-            [lhs] [operator] [rhs]
-        */
-        let mut okay = tok.len() >= 3;
-        let mut i = 0_usize;
-        if compiler::unlikely(!okay) {
-            return None;
-        }
-        okay &= tok[0].is_ident();
-        i += 1;
-        // let's get ourselves the operator
-        let operator = Self::parse_operator(&tok[1..], &mut i, &mut okay);
-        okay &= i < tok.len();
-        okay &= tok[tok.len() - 1].is_lit(); // LOL, I really like saving cycles
-        *cnt += i + okay as usize;
-        if compiler::likely(okay) {
-            Some(unsafe {
-                Self {
-                    lhs: extract!(tok[0], Token::Ident(ref id) => id.clone()),
-                    rhs: extract!(tok[tok.len() - 1], Token::Lit(ref l) => l),
-                    opc: operator,
-                }
-            })
-        } else {
-            compiler::cold_err(None)
-        }
-    }
-}
-#[cfg(test)]
-#[inline(always)]
-pub(super) fn parse_relexpr_full<'a>(tok: &'a [Token]) -> Option<RelationalExpr<'a>> {
-    let mut i = 0;
-    let okay = RelationalExpr::try_parse(tok, &mut i);
-    full_tt!(tok.len(), i);
-    okay
-}

@@ -57,7 +57,7 @@ impl PartialEq<Symbol> for Token {
 assertions! {
     size_of::<Token>() == 24, // FIXME(@ohsayan): Damn, what?
     size_of::<Symbol>() == 1,
-    size_of::<Keyword>() == 2,
+    size_of::<Keyword>() == 1,
     size_of::<Lit>() == 24, // FIXME(@ohsayan): Ouch
 }
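
The assertion drops from 2 bytes to 1 because `Keyword` goes from a nested enum-of-enums (one tag byte for the category plus one byte for the inner variant) to a single flat fieldless `#[repr(u8)]` enum. A quick sanity check of that layout intuition, with stand-in types:

```rust
use std::mem::size_of;

// Old shape: a #[repr(u8)] tag plus a one-byte payload => 2 bytes
#[repr(u8)]
#[allow(dead_code)]
enum Inner { A, B }
#[repr(u8)]
#[allow(dead_code)]
enum Nested { X(Inner), Y(Inner) }

// New shape: one flat fieldless #[repr(u8)] enum => 1 byte
#[repr(u8)]
#[allow(dead_code)]
enum Flat { X, Y, Z }

fn main() {
    assert_eq!(size_of::<Nested>(), 2);
    assert_eq!(size_of::<Flat>(), 1);
}
```
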
@@ -130,93 +130,69 @@ pub enum Symbol {
     SymAccent, // `
 }
-#[derive(Debug, Copy, Clone, PartialEq)]
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
 #[repr(u8)]
 pub enum Keyword {
-    Ddl(DdlKeyword),
-    DdlMisc(DdlMiscKeyword),
-    Dml(DmlKeyword),
-    DmlMisc(DmlMiscKeyword),
-    TypeId(Type),
-    Misc(MiscKeyword),
-}
-#[derive(Debug, Copy, Clone, PartialEq)]
-pub enum MiscKeyword {
-    Null,
-}
-enum_impls! {
-    Keyword => {
-        DdlKeyword as Ddl,
-        DdlMiscKeyword as DdlMisc,
-        DmlKeyword as Dml,
-        DmlMiscKeyword as DmlMisc,
-        Type as TypeId,
-        MiscKeyword as Misc,
-    }
-}
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-#[repr(u8)]
-pub enum DmlMiscKeyword {
-    Limit,
-    From,
-    Into,
-    Where,
-    If,
-    And,
-    As,
-    By,
-    Asc,
-    Desc,
-}
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-#[repr(u8)]
-pub enum DmlKeyword {
-    Insert,
-    Select,
-    Update,
-    Delete,
-    Exists,
-    Truncate,
-}
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-#[repr(u8)]
-pub enum DdlMiscKeyword {
-    With,
-    Add,
-    Remove,
-    Sort,
-    Type,
-}
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-#[repr(u8)]
-pub enum DdlKeyword {
-    Use,
-    Create,
-    Alter,
-    Drop,
-    Inspect,
-    Model,
-    Space,
-    Primary,
-}
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-#[repr(u8)]
-pub enum Type {
-    String,
-    Binary,
-    List,
-    Map,
-    Bool,
-    Int,
-    Double,
-    Float,
-}
+    Table,
+    Model,
+    Space,
+    Index,
+    Type,
+    Function,
+    Use,
+    Create,
+    Alter,
+    Drop,
+    Describe,
+    Truncate,
+    Rename,
+    Add,
+    Remove,
+    Transform,
+    Order,
+    By,
+    Primary,
+    Key,
+    Value,
+    With,
+    On,
+    Lock,
+    All,
+    Insert,
+    Select,
+    Exists,
+    Update,
+    Delete,
+    Into,
+    From,
+    As,
+    Return,
+    Sort,
+    Group,
+    Limit,
+    Asc,
+    Desc,
+    To,
+    Set,
+    Auto,
+    Default,
+    In,
+    Of,
+    Transaction,
+    Batch,
+    Read,
+    Write,
+    Begin,
+    End,
+    Where,
+    If,
+    And,
+    Or,
+    Not,
+    User,
+    Revoke,
+    Null,
+    Infinity,
+}
 /*
@@ -241,7 +217,7 @@ static SYM_GRAPH: [u8; 69] = [
     23, 7, 0, 27, 0, 4, 16, 11, 0, 0, 9,
 ];
-static SYM_DATA: [(u8, Symbol); 32] = [
+static SYM_LUT: [(u8, Symbol); 32] = [
     (b'+', Symbol::OpArithmeticAdd),
     (b'-', Symbol::OpArithmeticSub),
     (b'*', Symbol::OpArithmeticMul),
@@ -290,85 +266,107 @@ fn symph(k: u8) -> u8 {
 #[inline(always)]
 fn symof(sym: u8) -> Option<Symbol> {
     let hf = symph(sym);
-    if hf < SYM_DATA.len() as u8 && SYM_DATA[hf as usize].0 == sym {
-        Some(SYM_DATA[hf as usize].1)
+    if hf < SYM_LUT.len() as u8 && SYM_LUT[hf as usize].0 == sym {
+        Some(SYM_LUT[hf as usize].1)
     } else {
         None
     }
 }
-static KW_GRAPH: [u8; 40] = [
-    0, 2, 32, 18, 4, 37, 11, 27, 34, 35, 26, 33, 0, 0, 10, 2, 22, 8, 5, 7, 16, 9, 8, 39, 21, 5, 0,
-    22, 14, 19, 22, 31, 28, 38, 26, 21, 30, 24, 10, 18,
-];
-static KW_DATA: [(&str, Keyword); 38] = [
-    ("use", Keyword::Ddl(DdlKeyword::Use)),
-    ("create", Keyword::Ddl(DdlKeyword::Create)),
-    ("alter", Keyword::Ddl(DdlKeyword::Alter)),
-    ("drop", Keyword::Ddl(DdlKeyword::Drop)),
-    ("inspect", Keyword::Ddl(DdlKeyword::Inspect)),
-    ("model", Keyword::Ddl(DdlKeyword::Model)),
-    ("space", Keyword::Ddl(DdlKeyword::Space)),
-    ("primary", Keyword::Ddl(DdlKeyword::Primary)),
-    ("with", Keyword::DdlMisc(DdlMiscKeyword::With)),
-    ("add", Keyword::DdlMisc(DdlMiscKeyword::Add)),
-    ("remove", Keyword::DdlMisc(DdlMiscKeyword::Remove)),
-    ("sort", Keyword::DdlMisc(DdlMiscKeyword::Sort)),
-    ("type", Keyword::DdlMisc(DdlMiscKeyword::Type)),
-    ("insert", Keyword::Dml(DmlKeyword::Insert)),
-    ("select", Keyword::Dml(DmlKeyword::Select)),
-    ("update", Keyword::Dml(DmlKeyword::Update)),
-    ("delete", Keyword::Dml(DmlKeyword::Delete)),
-    ("exists", Keyword::Dml(DmlKeyword::Exists)),
-    ("truncate", Keyword::Dml(DmlKeyword::Truncate)),
-    ("limit", Keyword::DmlMisc(DmlMiscKeyword::Limit)),
-    ("from", Keyword::DmlMisc(DmlMiscKeyword::From)),
-    ("into", Keyword::DmlMisc(DmlMiscKeyword::Into)),
-    ("where", Keyword::DmlMisc(DmlMiscKeyword::Where)),
-    ("if", Keyword::DmlMisc(DmlMiscKeyword::If)),
-    ("and", Keyword::DmlMisc(DmlMiscKeyword::And)),
-    ("as", Keyword::DmlMisc(DmlMiscKeyword::As)),
-    ("by", Keyword::DmlMisc(DmlMiscKeyword::By)),
-    ("asc", Keyword::DmlMisc(DmlMiscKeyword::Asc)),
-    ("desc", Keyword::DmlMisc(DmlMiscKeyword::Desc)),
-    ("string", Keyword::TypeId(Type::String)),
-    ("binary", Keyword::TypeId(Type::Binary)),
-    ("list", Keyword::TypeId(Type::List)),
-    ("map", Keyword::TypeId(Type::Map)),
-    ("bool", Keyword::TypeId(Type::Bool)),
-    ("int", Keyword::TypeId(Type::Int)),
-    ("double", Keyword::TypeId(Type::Double)),
-    ("float", Keyword::TypeId(Type::Float)),
-    ("null", Keyword::Misc(MiscKeyword::Null)),
-];
-const KW_MAGIC_A: &[u8] = b"GSggb8qI";
-const KW_MAGIC_B: &[u8] = b"ZaljIeOx";
-const KW_MODULUS: usize = 8;
-#[inline(always)]
-fn kwfh(k: &[u8], magic: &[u8]) -> u32 {
+static KW_LUT: [(&[u8], Keyword); 60] = [
+    (b"table", Keyword::Table),
+    (b"model", Keyword::Model),
+    (b"space", Keyword::Space),
+    (b"index", Keyword::Index),
+    (b"type", Keyword::Type),
+    (b"function", Keyword::Function),
+    (b"use", Keyword::Use),
+    (b"create", Keyword::Create),
+    (b"alter", Keyword::Alter),
+    (b"drop", Keyword::Drop),
+    (b"describe", Keyword::Describe),
+    (b"truncate", Keyword::Truncate),
+    (b"rename", Keyword::Rename),
+    (b"add", Keyword::Add),
+    (b"remove", Keyword::Remove),
+    (b"transform", Keyword::Transform),
+    (b"order", Keyword::Order),
+    (b"by", Keyword::By),
+    (b"primary", Keyword::Primary),
+    (b"key", Keyword::Key),
+    (b"value", Keyword::Value),
+    (b"with", Keyword::With),
+    (b"on", Keyword::On),
+    (b"lock", Keyword::Lock),
+    (b"all", Keyword::All),
+    (b"insert", Keyword::Insert),
+    (b"select", Keyword::Select),
+    (b"exists", Keyword::Exists),
+    (b"update", Keyword::Update),
+    (b"delete", Keyword::Delete),
+    (b"into", Keyword::Into),
+    (b"from", Keyword::From),
+    (b"as", Keyword::As),
+    (b"return", Keyword::Return),
+    (b"sort", Keyword::Sort),
+    (b"group", Keyword::Group),
+    (b"limit", Keyword::Limit),
+    (b"asc", Keyword::Asc),
+    (b"desc", Keyword::Desc),
+    (b"to", Keyword::To),
+    (b"set", Keyword::Set),
+    (b"auto", Keyword::Auto),
+    (b"default", Keyword::Default),
+    (b"in", Keyword::In),
+    (b"of", Keyword::Of),
+    (b"transaction", Keyword::Transaction),
+    (b"batch", Keyword::Batch),
+    (b"read", Keyword::Read),
+    (b"write", Keyword::Write),
+    (b"begin", Keyword::Begin),
+    (b"end", Keyword::End),
+    (b"where", Keyword::Where),
+    (b"if", Keyword::If),
+    (b"and", Keyword::And),
+    (b"or", Keyword::Or),
+    (b"not", Keyword::Not),
+    (b"user", Keyword::User),
+    (b"revoke", Keyword::Revoke),
+    (b"null", Keyword::Null),
+    (b"infinity", Keyword::Infinity),
+];
+static KWG: [u8; 64] = [
+    0, 55, 32, 25, 4, 21, 51, 43, 28, 59, 34, 1, 9, 39, 5, 49, 0, 16, 29, 0, 48, 0, 17, 60, 19, 21,
+    26, 18, 0, 41, 55, 10, 48, 62, 55, 35, 56, 18, 29, 41, 5, 46, 25, 52, 32, 26, 27, 17, 61, 60,
+    61, 59, 24, 12, 17, 30, 53, 4, 17, 0, 6, 2, 45, 56,
+];
+const KWMG_1: [u8; 11] = *b"nJEcjrLflKX";
+const KWMG_2: [u8; 11] = *b"KWHPUPK3Fh3";
+const KWMG_S: usize = KWMG_1.len();
+fn kwhf(k: &[u8], mg: &[u8]) -> u32 {
     let mut i = 0;
     let mut s = 0;
     while i < k.len() {
-        s += magic[(i % KW_MODULUS) as usize] as u32 * k[i] as u32;
+        s += mg[(i % KWMG_S) as usize] as u32 * k[i] as u32;
         i += 1;
     }
-    s % KW_GRAPH.len() as u32
+    s % KWG.len() as u32
 }
 #[inline(always)]
 fn kwph(k: &[u8]) -> u8 {
-    (KW_GRAPH[kwfh(k, KW_MAGIC_A) as usize] + KW_GRAPH[kwfh(k, KW_MAGIC_B) as usize])
-        % KW_GRAPH.len() as u8
+    (KWG[kwhf(k, &KWMG_1) as usize] + KWG[kwhf(k, &KWMG_2) as usize]) % KWG.len() as u8
 }
 #[inline(always)]
 fn kwof(key: &str) -> Option<Keyword> {
-    let ph = kwph(key.as_bytes());
-    if ph < KW_DATA.len() as u8 && KW_DATA[ph as usize].0 == key {
-        Some(KW_DATA[ph as usize].1)
+    let key = key.as_bytes();
+    let ph = kwph(key);
+    if ph < KW_LUT.len() as u8 && KW_LUT[ph as usize].0 == key {
+        Some(KW_LUT[ph as usize].1)
     } else {
         None
     }
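
Keyword lookup keeps the same two-level perfect-hash scheme as the symbol table, just regenerated for the flat enum: `kwhf` mixes the identifier bytes with each seed string, the two results index the displacement graph `KWG`, their sum modulo the graph size picks a slot, and `kwof` confirms the hit with a final byte-compare so non-keywords fall through to identifiers (slots at or beyond `KW_LUT.len()` fail the bounds check outright). A runnable toy of the lookup shape; the two-entry table, graph, and seeds below are hand-made for this illustration and are not the generated `KW_LUT`/`KWG`/`KWMG_*` values above:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Kw { Use, Drop }

// Toy table: slot i holds (spelling, keyword); the real KW_LUT has 60 entries.
static LUT: [(&[u8], Kw); 2] = [(b"use", Kw::Use), (b"drop", Kw::Drop)];
// Toy displacement graph and seeds, hand-picked so these two keys work;
// the generated KWG / KWMG_1 / KWMG_2 play the same roles.
static G: [u8; 4] = [0, 1, 0, 0];
const MG_1: [u8; 2] = [1, 2];
const MG_2: [u8; 2] = [5, 3];

// Seeded mixing hash: weighted byte sum modulo the graph size (cf. kwhf).
fn hf(k: &[u8], mg: &[u8]) -> u32 {
    let mut s = 0u32;
    for (i, &b) in k.iter().enumerate() {
        s += mg[i % mg.len()] as u32 * b as u32;
    }
    s % G.len() as u32
}

// Slot = (g[h1] + g[h2]) mod |G| (cf. kwph), then verify the spelling (cf. kwof).
fn kw_of(key: &[u8]) -> Option<Kw> {
    let slot = (G[hf(key, &MG_1) as usize] + G[hf(key, &MG_2) as usize]) % G.len() as u8;
    if (slot as usize) < LUT.len() && LUT[slot as usize].0 == key {
        Some(LUT[slot as usize].1)
    } else {
        None
    }
}

fn main() {
    assert_eq!(kw_of(b"use"), Some(Kw::Use));
    assert_eq!(kw_of(b"drop"), Some(Kw::Drop));
    assert_eq!(kw_of(b"user"), None); // falls through to identifier
}
```
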
@@ -700,10 +698,6 @@ impl Token {
         matches!(self, Token::Ident(_))
     }
     #[inline(always)]
-    pub(crate) const fn is_typeid(&self) -> bool {
-        matches!(self, Token::Keyword(Keyword::TypeId(_)))
-    }
-    #[inline(always)]
     pub(crate) fn as_ident_eq_ignore_case(&self, arg: &[u8]) -> bool {
         self.is_ident()
             && unsafe {

@@ -37,51 +37,9 @@ macro_rules! __sym_token {
     };
 }
-macro_rules! __ddl_token {
-    ($ident:ident) => {
-        $crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::Ddl(
-            $crate::engine::ql::lexer::DdlKeyword::$ident,
-        ))
-    };
-}
-macro_rules! __ddl_misc_token {
-    ($ident:ident) => {
-        $crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::DdlMisc(
-            $crate::engine::ql::lexer::DdlMiscKeyword::$ident,
-        ))
-    };
-}
-macro_rules! __dml_token {
-    ($ident:ident) => {
-        $crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::Dml(
-            $crate::engine::ql::lexer::DmlKeyword::$ident,
-        ))
-    };
-}
-macro_rules! __dml_misc_token {
-    ($ident:ident) => {
-        $crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::DmlMisc(
-            $crate::engine::ql::lexer::DmlMiscKeyword::$ident,
-        ))
-    };
-}
-macro_rules! __type_token {
-    ($ident:ident) => {
-        $crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::TypeId(
-            $crate::engine::ql::lexer::Type::$ident,
-        ))
-    };
-}
-macro_rules! __misc_token {
-    ($ident:ident) => {
-        $crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::Misc(
-            $crate::engine::ql::lexer::MiscKeyword::$ident,
-        ))
-    };
-}
+macro_rules! __kw {
+    ($ident:ident) => {
+        $crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::$ident)
+    };
+}
@@ -163,119 +121,119 @@ macro_rules! Token {
     };
     // ddl keywords
     (use) => {
-        __ddl_token!(Use)
+        __kw!(Use)
     };
     (create) => {
-        __ddl_token!(Create)
+        __kw!(Create)
     };
     (alter) => {
-        __ddl_token!(Alter)
+        __kw!(Alter)
     };
     (drop) => {
-        __ddl_token!(Drop)
+        __kw!(Drop)
     };
-    (inspect) => {
-        __ddl_token!(Inspect)
+    (describe) => {
+        __kw!(Describe)
     };
     (model) => {
-        __ddl_token!(Model)
+        __kw!(Model)
     };
     (space) => {
-        __ddl_token!(Space)
+        __kw!(Space)
     };
     (primary) => {
-        __ddl_token!(Primary)
+        __kw!(Primary)
     };
     // ddl misc
     (with) => {
-        __ddl_misc_token!(With)
+        __kw!(With)
     };
     (add) => {
-        __ddl_misc_token!(Add)
+        __kw!(Add)
     };
     (remove) => {
-        __ddl_misc_token!(Remove)
+        __kw!(Remove)
     };
     (sort) => {
-        __ddl_misc_token!(Sort)
+        __kw!(Sort)
     };
     (type) => {
-        __ddl_misc_token!(Type)
+        __kw!(Type)
     };
     // dml
     (insert) => {
-        __dml_token!(Insert)
+        __kw!(Insert)
     };
     (select) => {
-        __dml_token!(Select)
+        __kw!(Select)
     };
     (update) => {
-        __dml_token!(Update)
+        __kw!(Update)
     };
     (delete) => {
-        __dml_token!(Delete)
+        __kw!(Delete)
     };
     (exists) => {
-        __dml_token!(Exists)
+        __kw!(Exists)
     };
     (truncate) => {
-        __dml_token!(Truncate)
+        __kw!(Truncate)
     };
     // dml misc
     (limit) => {
-        __dml_misc_token!(Limit)
+        __kw!(Limit)
     };
     (from) => {
-        __dml_misc_token!(From)
+        __kw!(From)
     };
     (into) => {
-        __dml_misc_token!(Into)
+        __kw!(Into)
     };
     (where) => {
-        __dml_misc_token!(Where)
+        __kw!(Where)
     };
     (if) => {
-        __dml_misc_token!(If)
+        __kw!(If)
     };
     (and) => {
-        __dml_misc_token!(And)
+        __kw!(And)
     };
     (as) => {
-        __dml_misc_token!(As)
+        __kw!(As)
     };
     (by) => {
-        __dml_misc_token!(By)
+        __kw!(By)
     };
     (asc) => {
-        __dml_misc_token!(Asc)
+        __kw!(Asc)
     };
     (desc) => {
-        __dml_misc_token!(Desc)
+        __kw!(Desc)
     };
     // types
     (string) => {
-        __type_token!(String)
+        __kw!(String)
     };
     (binary) => {
-        __type_token!(Binary)
+        __kw!(Binary)
     };
     (list) => {
-        __type_token!(List)
+        __kw!(List)
    };
     (map) => {
-        __type_token!(Map)
+        __kw!(Map)
     };
     (bool) => {
-        __type_token!(Bool)
+        __kw!(Bool)
     };
     (int) => {
-        __type_token!(Int)
+        __kw!(Int)
     };
     (double) => {
-        __type_token!(Double)
+        __kw!(Double)
     };
     (float) => {
-        __type_token!(Float)
+        __kw!(Float)
     };
     // tt
     (open {}) => {
@@ -298,7 +256,7 @@ macro_rules! Token {
     };
     // misc
     (null) => {
-        __misc_token!(Null)
+        __kw!(Null)
     };
 }
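
With every keyword in one flat enum, the six per-category helper macros collapse into `__kw!` and the `Token![...]` arms become uniform; keyword tokens can then be used both as values and as match patterns, as in the `Compiler` dispatch and the alter/schema parsers in this commit. A compressed sketch of the expansion shape (paths and names shortened here; the real macros spell out `$crate::engine::ql::lexer::...`):

```rust
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
#[allow(dead_code)]
enum Keyword { Describe, With, Null }

#[derive(Debug, PartialEq)]
enum Token { Keyword(Keyword) }

// cf. __kw!: one macro now covers every keyword category
macro_rules! kw {
    ($ident:ident) => {
        Token::Keyword(Keyword::$ident)
    };
}

// cf. Token![describe], Token![with], Token![null]
macro_rules! tok {
    (describe) => { kw!(Describe) };
    (with) => { kw!(With) };
    (null) => { kw!(Null) };
}

fn main() {
    let t = Token::Keyword(Keyword::Describe);
    // works in expression position (comparison) ...
    assert!(t == tok![describe]);
    // ... and in pattern position, like the parser's match arms
    match t {
        tok![describe] => {}
        _ => unreachable!("only describe is constructed here"),
    }
}
```
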

@@ -46,9 +46,7 @@
 use {
     super::{
-        lexer::{
-            DdlKeyword, DdlMiscKeyword, DmlKeyword, Keyword, Lit, MiscKeyword, Symbol, Token, Type,
-        },
+        lexer::{Lit, Symbol, Token},
         LangError, LangResult, RawSlice,
     },
     crate::util::MaybeInit,
@@ -110,22 +108,22 @@ pub type Dict = HashMap<String, Option<DictEntry>>;
 #[derive(Debug, PartialEq)]
 /// A layer contains a type and corresponding metadata
 pub struct Layer {
-    ty: Type,
+    ty: RawSlice,
     props: Dict,
     reset: bool,
 }
 impl Layer {
     //// Create a new layer
-    pub(super) const fn new(ty: Type, props: Dict, reset: bool) -> Self {
+    pub(super) const fn new(ty: RawSlice, props: Dict, reset: bool) -> Self {
         Self { ty, props, reset }
     }
     /// Create a new layer that doesn't have any reset
-    pub(super) const fn new_noreset(ty: Type, props: Dict) -> Self {
+    pub(super) const fn new_noreset(ty: RawSlice, props: Dict) -> Self {
         Self::new(ty, props, false)
     }
     /// Create a new layer that adds a reset
-    pub(super) const fn new_reset(ty: Type, props: Dict) -> Self {
+    pub(super) const fn new_reset(ty: RawSlice, props: Dict) -> Self {
         Self::new(ty, props, true)
     }
 }
@@ -430,10 +428,7 @@ pub(super) fn rfold_tymeta<const ALLOW_RESET: bool>(
     let mut tmp = MaybeInit::uninit();
     while r.pos() < l && r.is_okay() {
         match (&tok[r.pos()], state) {
-            (
-                Token::Keyword(Keyword::DdlMisc(DdlMiscKeyword::Type)),
-                TyMetaFoldState::IDENT_OR_CB,
-            ) => {
+            (Token![type], TyMetaFoldState::IDENT_OR_CB) => {
                 // we were expecting an ident but found the type keyword! increase depth
                 r.incr();
                 r.set_has_more();
@@ -573,10 +568,10 @@ pub(super) fn rfold_layers<const ALLOW_RESET: bool>(
     let mut dict = Dict::new();
     while i < l && okay {
         match (&tok[i], state) {
-            (Token::Keyword(Keyword::TypeId(ty)), LayerFoldState::TY) => {
+            (Token::Ident(ty), LayerFoldState::TY) => {
                 i += 1;
                 // expecting type, and found type. next is either end or an open brace or some arbitrary token
-                tmp = MaybeInit::new(ty);
+                tmp = MaybeInit::new(ty.clone());
                 state = LayerFoldState::END_OR_OB;
             }
             (Token::Symbol(Symbol::TtOpenBrace), LayerFoldState::END_OR_OB) => {
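
Because the closed `Type` enum is gone, the layer folder now accepts any identifier in type position and stores the lexed slice, deferring validation; that is also why the tests below compare against `Token::Ident("string".into())` rather than `Keyword::TypeId(Type::String)`. A reduced sketch of just that state step, with `Option` standing in for `MaybeInit` and illustrative names:

```rust
#[derive(Debug, Clone, PartialEq)]
enum Token {
    Ident(String),
    OpenBrace,
}

// Previously the type position demanded Token::Keyword(Keyword::TypeId(_));
// now any identifier is accepted and resolved later.
fn fold_layer_ty(tok: &[Token]) -> Option<String> {
    match tok.first()? {
        Token::Ident(ty) => Some(ty.clone()),
        _ => None,
    }
}

fn main() {
    let toks = [Token::Ident("string".into()), Token::OpenBrace];
    assert_eq!(fold_layer_ty(&toks), Some("string".to_string()));
    assert_eq!(fold_layer_ty(&[Token::OpenBrace]), None);
}
```
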
@@ -670,12 +665,8 @@ pub(super) fn collect_field_properties(tok: &[Token]) -> (FieldProperties, u64)
     let mut okay = true;
     while i < tok.len() {
         match &tok[i] {
-            Token::Keyword(Keyword::Ddl(DdlKeyword::Primary)) => {
-                okay &= props.properties.insert(FieldProperties::PRIMARY)
-            }
-            Token::Keyword(Keyword::Misc(MiscKeyword::Null)) => {
-                okay &= props.properties.insert(FieldProperties::NULL)
-            }
+            Token![primary] => okay &= props.properties.insert(FieldProperties::PRIMARY),
+            Token![null] => okay &= props.properties.insert(FieldProperties::NULL),
             Token::Ident(_) => break,
             _ => {
                 // we could pass this over to the caller, but it's better if we do it since we're doing
@@ -768,9 +759,7 @@ pub(super) fn parse_schema_from_tokens(tok: &[Token]) -> LangResult<(Model, usiz
                 state = SchemaParseState::FIELD;
             }
             (
-                Token::Keyword(Keyword::Ddl(DdlKeyword::Primary))
-                | Token::Keyword(Keyword::Misc(MiscKeyword::Null))
-                | Token::Ident(_),
+                Token![primary] | Token![null] | Token::Ident(_),
                 SchemaParseState::FIELD | SchemaParseState::END_OR_FIELD,
             ) => {
                 // fine, we found a field. let's see what we've got
@@ -809,7 +798,7 @@ pub(super) fn parse_schema_from_tokens(tok: &[Token]) -> LangResult<(Model, usiz
         extract!(tok[0], Token::Ident(ref model_name) => model_name.clone())
     };
-    if l > i && tok[i] == (Token::Keyword(Keyword::DdlMisc(DdlMiscKeyword::With))) {
+    if l > i && tok[i] == (Token![with]) {
         // we have some more input, and it should be a dict of properties
         i += 1; // +WITH
@@ -1063,15 +1052,13 @@ pub(super) fn parse_alter_kind_from_tokens(
     *current += 2;
     let model_name = unsafe { extract!(tok[0], Token::Ident(ref l) => l.clone()) };
     match tok[1] {
-        Token::Keyword(Keyword::DdlMisc(DdlMiscKeyword::Add)) => alter_add(&tok[1..], current)
+        Token![add] => alter_add(&tok[1..], current)
             .map(AlterKind::Add)
             .map(|kind| Alter::new(model_name, kind)),
-        Token::Keyword(Keyword::DdlMisc(DdlMiscKeyword::Remove)) => {
-            alter_remove(&tok[1..], current)
-                .map(AlterKind::Remove)
-                .map(|kind| Alter::new(model_name, kind))
-        }
-        Token::Keyword(Keyword::Dml(DmlKeyword::Update)) => alter_update(&tok[1..], current)
+        Token![remove] => alter_remove(&tok[1..], current)
+            .map(AlterKind::Remove)
+            .map(|kind| Alter::new(model_name, kind)),
+        Token![update] => alter_update(&tok[1..], current)
             .map(AlterKind::Update)
             .map(|kind| Alter::new(model_name, kind)),
         _ => return Err(LangError::ExpectedStatement),

@@ -364,15 +364,15 @@ mod schema_tests {
     }
     #[test]
     fn inspect_model() {
-        let tok = lex(b"inspect model user").unwrap();
+        let tok = lex(b"inspect model users").unwrap();
         assert_eq!(
             ddl::parse_inspect_full(&tok[1..]).unwrap(),
-            Statement::InspectModel(Entity::Single("user".into()))
+            Statement::InspectModel(Entity::Single("users".into()))
         );
-        let tok = lex(b"inspect model tweeter.user").unwrap();
+        let tok = lex(b"inspect model tweeter.users").unwrap();
         assert_eq!(
             ddl::parse_inspect_full(&tok[1..]).unwrap(),
-            Statement::InspectModel(("tweeter", "user").into())
+            Statement::InspectModel(("tweeter", "users").into())
         );
     }
     #[test]
@@ -616,7 +616,6 @@ mod schema_tests {
     }
     mod tymeta {
         use super::*;
-        use crate::engine::ql::lexer::{Keyword, Type};
         #[test]
         fn tymeta_mini() {
             let tok = lex(b"}").unwrap();
@@ -705,7 +704,7 @@ mod schema_tests {
         }
         #[test]
         fn fuzz_tymeta_normal() {
-            // { maxlen: 10, unique: true, user: "sayan" }
+            // { maxlen: 10, unique: true, users: "sayan" }
             //   ^start
             let tok = lex(b"
                 maxlen: 10,
@@ -713,7 +712,7 @@ mod schema_tests {
                 auth: {
                     maybe: true\x01
                 },
-                user: \"sayan\"\x01
+                users: \"sayan\"\x01
             }
         ")
             .unwrap();
@@ -723,7 +722,7 @@ mod schema_tests {
                 "auth" => nullable_dict! {
                     "maybe" => Lit::Bool(true),
                 },
-                "user" => Lit::Str("sayan".into())
+                "users" => Lit::Str("sayan".into())
             };
             fuzz_tokens(&tok, |should_pass, new_src| {
                 let (ret, dict) = schema::fold_tymeta(&new_src);
@@ -742,7 +741,7 @@ mod schema_tests {
         }
         #[test]
         fn fuzz_tymeta_with_ty() {
-            // list { maxlen: 10, unique: true, type string, user: "sayan" }
+            // list { maxlen: 10, unique: true, type string, users: "sayan" }
             //   ^start
             let tok = lex(b"
                 maxlen: 10,
@@ -751,7 +750,7 @@ mod schema_tests {
                     maybe: true\x01
                 },
                 type string,
-                user: \"sayan\"\x01
+                users: \"sayan\"\x01
             }
         ")
             .unwrap();
@@ -767,7 +766,7 @@ mod schema_tests {
                 if should_pass {
                     assert!(ret.is_okay());
                     assert!(ret.has_more());
-                    assert!(new_src[ret.pos()] == Token::Keyword(Keyword::TypeId(Type::String)));
+                    assert!(new_src[ret.pos()] == Token::Ident("string".into()));
                     assert_eq!(dict, expected);
                 } else if ret.is_okay() {
                     panic!("Expected failure but passed for token stream: `{:?}`", tok);
@@ -777,7 +776,7 @@ mod schema_tests {
     }
     mod layer {
         use super::*;
-        use crate::engine::ql::{lexer::Type, schema::Layer};
+        use crate::engine::ql::schema::Layer;
         #[test]
         fn layer_mini() {
             let tok = lex(b"string)").unwrap();
@@ -786,7 +785,7 @@ mod schema_tests {
             assert!(okay);
             assert_eq!(
                 layers,
-                vec![Layer::new_noreset(Type::String, nullable_dict! {})]
+                vec![Layer::new_noreset("string".into(), nullable_dict! {})]
             );
         }
         #[test]
@@ -798,7 +797,7 @@ mod schema_tests {
             assert_eq!(
                 layers,
                 vec![Layer::new_noreset(
-                    Type::String,
+                    "string".into(),
                     nullable_dict! {
                         "maxlen" => Lit::UnsignedInt(100)
                     }
@@ -814,8 +813,8 @@ mod schema_tests {
             assert_eq!(
                 layers,
                 vec![
-                    Layer::new_noreset(Type::String, nullable_dict! {}),
-                    Layer::new_noreset(Type::List, nullable_dict! {})
+                    Layer::new_noreset("string".into(), nullable_dict! {}),
+                    Layer::new_noreset("list".into(), nullable_dict! {})
                 ]
             );
         }
@@ -828,9 +827,9 @@ mod schema_tests {
             assert_eq!(
                 layers,
                 vec![
-                    Layer::new_noreset(Type::String, nullable_dict! {}),
+                    Layer::new_noreset("string".into(), nullable_dict! {}),
                     Layer::new_noreset(
-                        Type::List,
+                        "list".into(),
                         nullable_dict! {
                             "unique" => Lit::Bool(true),
                             "maxlen" => Lit::UnsignedInt(10),
@@ -852,14 +851,14 @@ mod schema_tests {
                 layers,
                 vec![
                     Layer::new_noreset(
-                        Type::String,
+                        "string".into(),
                         nullable_dict! {
                             "ascii_only" => Lit::Bool(true),
                             "maxlen" => Lit::UnsignedInt(255)
                         }
                     ),
                     Layer::new_noreset(
-                        Type::List,
+                        "list".into(),
                         nullable_dict! {
                             "unique" => Lit::Bool(true),
                             "maxlen" => Lit::UnsignedInt(10),
@@ -882,14 +881,14 @@ mod schema_tests {
         ")
             .unwrap();
             let expected = vec![
-                Layer::new_noreset(Type::String, nullable_dict!()),
+                Layer::new_noreset("string".into(), nullable_dict!()),
                 Layer::new_noreset(
-                    Type::List,
+                    "list".into(),
                     nullable_dict! {
                         "maxlen" => Lit::UnsignedInt(100),
                     },
                 ),
-                Layer::new_noreset(Type::List, nullable_dict!("unique" => Lit::Bool(true))),
+                Layer::new_noreset("list".into(), nullable_dict!("unique" => Lit::Bool(true))),
             ];
             fuzz_tokens(&tok, |should_pass, new_tok| {
                 let (layers, c, okay) = schema::fold_layers(&new_tok);
@@ -935,10 +934,7 @@ mod schema_tests {
     mod fields {
         use {
             super::*,
-            crate::engine::ql::{
-                lexer::Type,
-                schema::{Field, Layer},
-            },
+            crate::engine::ql::schema::{Field, Layer},
         };
         #[test]
         fn field_mini() {
@@ -952,7 +948,7 @@ mod schema_tests {
                 f,
                 Field {
                     field_name: "username".into(),
-                    layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
+                    layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
                     props: set![],
                 }
             )
@@ -969,7 +965,7 @@ mod schema_tests {
                 f,
                 Field {
                     field_name: "username".into(),
-                    layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
+                    layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
                     props: set!["primary"],
                 }
             )
@@ -990,7 +986,7 @@ mod schema_tests {
                 Field {
                     field_name: "username".into(),
                     layers: [Layer::new_noreset(
-                        Type::String,
+                        "string".into(),
                         nullable_dict! {
                             "maxlen" => Lit::UnsignedInt(10),
                             "ascii_only" => Lit::Bool(true),
@@ -1021,14 +1017,14 @@ mod schema_tests {
                     field_name: "notes".into(),
                     layers: [
                         Layer::new_noreset(
-                            Type::String,
+                            "string".into(),
                             nullable_dict! {
                                 "maxlen" => Lit::UnsignedInt(255),
                                 "ascii_only" => Lit::Bool(true),
                             }
                         ),
                         Layer::new_noreset(
-                            Type::List,
+                            "list".into(),
                            nullable_dict! {
                                "unique" => Lit::Bool(true)
                            }
@@ -1041,10 +1037,7 @@ mod schema_tests {
         }
     }
     mod schemas {
-        use crate::engine::ql::{
-            lexer::Type,
-            schema::{Field, Layer, Model},
-        };
+        use crate::engine::ql::schema::{Field, Layer, Model};
         use super::*;
         #[test]
@@ -1068,12 +1061,12 @@ mod schema_tests {
                 fields: vec![
                     Field {
                         field_name: "username".into(),
-                        layers: vec![Layer::new_noreset(Type::String, nullable_dict! {})],
+                        layers: vec![Layer::new_noreset("string".into(), nullable_dict! {})],
                         props: set!["primary"]
                     },
                     Field {
                         field_name: "password".into(),
-                        layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
+                        layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
                         props: set![]
                     }
                 ],
@@ -1103,17 +1096,17 @@ mod schema_tests {
                 fields: vec![
                     Field {
                         field_name: "username".into(),
-                        layers: vec![Layer::new_noreset(Type::String, nullable_dict! {})],
+                        layers: vec![Layer::new_noreset("string".into(), nullable_dict! {})],
                         props: set!["primary"]
                     },
                     Field {
                         field_name: "password".into(),
-                        layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
+                        layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
                         props: set![]
                     },
                     Field {
                         field_name: "profile_pic".into(),
-                        layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
+                        layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
                         props: set!["null"]
                     }
                 ],
@@ -1148,25 +1141,25 @@ mod schema_tests {
                 fields: vec![
                     Field {
                         field_name: "username".into(),
-                        layers: vec![Layer::new_noreset(Type::String, nullable_dict! {})],
+                        layers: vec![Layer::new_noreset("string".into(), nullable_dict! {})],
                         props: set!["primary"]
                     },
                     Field {
                         field_name: "password".into(),
-                        layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
+                        layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
                         props: set![]
                     },
                     Field {
                         field_name: "profile_pic".into(),
-                        layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
+                        layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
                         props: set!["null"]
                     },
                     Field {
                         field_name: "notes".into(),
                         layers: vec![
-                            Layer::new_noreset(Type::String, nullable_dict! {}),
+                            Layer::new_noreset("string".into(), nullable_dict! {}),
                             Layer::new_noreset(
-                                Type::List,
+                                "list".into(),
                                 nullable_dict! {
                                     "unique" => Lit::Bool(true)
                                 }
@@ -1211,25 +1204,25 @@ mod schema_tests {
                 fields: vec![
                     Field {
                         field_name: "username".into(),
-                        layers: vec![Layer::new_noreset(Type::String, nullable_dict! {})],
+                        layers: vec![Layer::new_noreset("string".into(), nullable_dict! {})],
                         props: set!["primary"]
                     },
                     Field {
                         field_name: "password".into(),
-                        layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
+                        layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
                         props: set![]
                     },
                     Field {
                         field_name: "profile_pic".into(),
-                        layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
+                        layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
                         props: set!["null"]
                     },
                     Field {
                         field_name: "notes".into(),
                         layers: vec![
-                            Layer::new_noreset(Type::String, nullable_dict! {}),
+                            Layer::new_noreset("string".into(), nullable_dict! {}),
                             Layer::new_noreset(
-                                Type::List,
+                                "list".into(),
                                 nullable_dict! {
                                     "unique" => Lit::Bool(true)
                                 }
@@ -1250,10 +1243,7 @@ mod schema_tests {
     }
     mod dict_field_syntax {
         use super::*;
-        use crate::engine::ql::{
-            lexer::Type,
-            schema::{ExpandedField, Layer},
-        };
+        use crate::engine::ql::schema::{ExpandedField, Layer};
         #[test]
         fn field_syn_mini() {
             let tok = lex(b"username { type string }").unwrap();
@@ -1263,7 +1253,7 @@ mod schema_tests {
                 ef,
                 ExpandedField {
                     field_name: "username".into(),
-                    layers: vec![Layer::new_noreset(Type::String, nullable_dict! {})],
+                    layers: vec![Layer::new_noreset("string".into(), nullable_dict! {})],
                     props: nullable_dict! {},
                     reset: false
                 }
@@ -1287,7 +1277,7 @@ mod schema_tests {
                     props: nullable_dict! {
                         "nullable" => Lit::Bool(false),
                     },
-                    layers: vec![Layer::new_noreset(Type::String, nullable_dict! {})],
+                    layers: vec![Layer::new_noreset("string".into(), nullable_dict! {})],
                     reset: false
                 }
             );
@@ -1316,7 +1306,7 @@ mod schema_tests {
                         "jingle_bells" => Lit::Str("snow".into()),
                     },
                     layers: vec![Layer::new_noreset(
-                        Type::String,
+                        "string".into(),
                         nullable_dict! {
                             "minlen" => Lit::UnsignedInt(6),
                             "maxlen" => Lit::UnsignedInt(255),
@@ -1353,13 +1343,13 @@ mod schema_tests {
                     },
                     layers: vec![
                         Layer::new_noreset(
-                            Type::String,
+                            "string".into(),
                             nullable_dict! {
                                 "ascii_only" => Lit::Bool(true),
                             }
                         ),
                         Layer::new_noreset(
-                            Type::List,
+                            "list".into(),
                             nullable_dict! {
                                 "unique" => Lit::Bool(true),
                             }
@@ -1410,10 +1400,7 @@ mod schema_tests {
     }
     mod alter_model_add {
         use super::*;
-        use crate::engine::ql::{
-            lexer::Type,
-            schema::{ExpandedField, Layer},
-        };
+        use crate::engine::ql::schema::{ExpandedField, Layer};
         #[test]
         fn add_mini() {
             let tok = lex(b"
@@ -1428,7 +1415,7 @@ mod schema_tests {
                 [ExpandedField {
                     field_name: "myfield".into(),
                     props: nullable_dict! {},
-                    layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
+                    layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
                     reset: false
                 }]
             );
@@ -1449,7 +1436,7 @@ mod schema_tests {
                     props: nullable_dict! {
                         "nullable" => Lit::Bool(true)
                     },
-                    layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
+                    layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
                     reset: false
                 }]
            );
@@ -1470,7 +1457,7 @@ mod schema_tests {
                     props: nullable_dict! {
                         "nullable" => Lit::Bool(true)
                     },
-                    layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
+                    layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
                     reset: false
                 }]
            );
@@ -1506,7 +1493,7 @@ mod schema_tests {
                     props: nullable_dict! {
                         "nullable" => Lit::Bool(true)
                     },
-                    layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
+                    layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
                     reset: false
                 },
                 ExpandedField {
@@ -1516,13 +1503,13 @@ mod schema_tests {
                     },
                     layers: [
                         Layer::new_noreset(
-                            Type::String,
+                            "string".into(),
                             nullable_dict! {
                                 "maxlen" => Lit::UnsignedInt(255)
                             }
                         ),
                         Layer::new_noreset(
-                            Type::List,
+                            "list".into(),
                             nullable_dict! {
                                 "unique" => Lit::Bool(true)
                             },
@@ -1536,12 +1523,9 @@ mod schema_tests {
         }
     }
     mod alter_model_update {
-        use crate::engine::ql::{
-            lexer::Type,
-            schema::{ExpandedField, Layer},
-        };
         use super::*;
+        use crate::engine::ql::schema::{ExpandedField, Layer};
         #[test]
         fn alter_mini() {
             let tok = lex(b"
@@ -1556,7 +1540,7 @@ mod schema_tests {
                 [ExpandedField {
                     field_name: "myfield".into(),
                     props: nullable_dict! {},
-                    layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
+                    layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
                     reset: true
                 }]
             );
@@ -1575,7 +1559,7 @@ mod schema_tests {
                 [ExpandedField {
                     field_name: "myfield".into(),
                     props: nullable_dict! {},
-                    layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
+                    layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
                     reset: true
                 }]
            );
@@ -1602,7 +1586,7 @@ mod schema_tests {
                     props: nullable_dict! {
                         "nullable" => Lit::Bool(true)
                     },
-                    layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
+                    layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
                     reset: true
                 }]
            );
@@ -1634,13 +1618,13 @@ mod schema_tests {
                     props: nullable_dict! {
                         "nullable" => Lit::Bool(true)
                     },
-                    layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
+                    layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
                     reset: true
                 },
                 ExpandedField {
                     field_name: "myfield2".into(),
                     props: nullable_dict! {},
-                    layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
+                    layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
                     reset: true
                 }
             ]
@@ -1676,14 +1660,14 @@ mod schema_tests {
                     props: nullable_dict! {
                         "nullable" => Lit::Bool(true)
                     },
-                    layers: [Layer::new_reset(Type::String, nullable_dict! {})].into(),
+                    layers: [Layer::new_reset("string".into(), nullable_dict! {})].into(),
                     reset: true
                 },
                 ExpandedField {
                     field_name: "myfield2".into(),
                     props: nullable_dict! {},
                     layers: [Layer::new_reset(
-                        Type::String,
+                        "string".into(),
                         nullable_dict! {"maxlen" => Lit::UnsignedInt(255)}
                     )]
                     .into(),
@@ -1959,13 +1943,13 @@ mod dml_tests {
         #[test]
         fn insert_tuple_mini() {
             let x = lex(br#"
-                insert twitter.user:"sayan" ()
+                insert twitter.users:"sayan" ()
             "#)
             .unwrap();
             let r = dml::parse_insert_full(&x[1..]).unwrap();
             let e = InsertStatement {
                 primary_key: &("sayan".to_string().into()),
-                entity: Entity::Full("twitter".into(), "user".into()),
+                entity: Entity::Full("twitter".into(), "users".into()),
                 data: vec![].into(),
             };
             assert_eq!(e, r);
@@ -2111,13 +2095,13 @@ mod dml_tests {
         #[test]
         fn select_mini() {
             let tok = lex(br#"
-                select * from user:"sayan"
+                select * from users:"sayan"
             "#)
             .unwrap();
             let r = dml::parse_select_full(&tok[1..]).unwrap();
             let e = SelectStatement {
                 primary_key: &Lit::Str("sayan".into()),
-                entity: Entity::Single("user".into()),
+                entity: Entity::Single("users".into()),
                 fields: [].to_vec(),
                 wildcard: true,
             };
@@ -2126,13 +2110,13 @@ mod dml_tests {
         #[test]
         fn select() {
             let tok = lex(br#"
-                select field1 from user:"sayan"
+                select field1 from users:"sayan"
             "#)
             .unwrap();
             let r = dml::parse_select_full(&tok[1..]).unwrap();
             let e = SelectStatement {
                 primary_key: &Lit::Str("sayan".into()),
-                entity: Entity::Single("user".into()),
+                entity: Entity::Single("users".into()),
                 fields: ["field1".into()].to_vec(),
                 wildcard: false,
             };
@@ -2141,13 +2125,13 @@ mod dml_tests {
         #[test]
         fn select_pro() {
             let tok = lex(br#"
-                select field1 from twitter.user:"sayan"
+                select field1 from twitter.users:"sayan"
             "#)
             .unwrap();
             let r = dml::parse_select_full(&tok[1..]).unwrap();
             let e = SelectStatement {
                 primary_key: &Lit::Str("sayan".into()),
-                entity: Entity::Full("twitter".into(), "user".into()),
+                entity: Entity::Full("twitter".into(), "users".into()),
                 fields: ["field1".into()].to_vec(),
                 wildcard: false,
             };
@@ -2156,13 +2140,13 @@ mod dml_tests {
         #[test]
         fn select_pro_max() {
             let tok = lex(br#"
-                select field1, field2 from twitter.user:"sayan"
+                select field1, field2 from twitter.users:"sayan"
             "#)
             .unwrap();
             let r = dml::parse_select_full(&tok[1..]).unwrap();
             let e = SelectStatement {
                 primary_key: &Lit::Str("sayan".into()),
-                entity: Entity::Full("twitter".into(), "user".into()),
+                entity: Entity::Full("twitter".into(), "users".into()),
                 fields: ["field1".into(), "field2".into()].to_vec(),
                 wildcard: false,
             };
@@ -2305,22 +2289,22 @@ mod dml_tests {
         #[test]
         fn delete_mini() {
            let tok = lex(br#"
-                delete user:"sayan"
+                delete users:"sayan"
            "#)
            .unwrap();
            let primary_key = "sayan".into();
-            let e = DeleteStatement::new(&primary_key, Entity::Single("user".into()));
+            let e = DeleteStatement::new(&primary_key, Entity::Single("users".into()));
            let r = dml::parse_delete_full(&tok[1..]).unwrap();
            assert_eq!(r, e);
        }
        #[test]
        fn delete() {
            let tok = lex(br#"
-                delete twitter.user:"sayan"
+                delete twitter.users:"sayan"
            "#)
            .unwrap();
            let primary_key = "sayan".into();
-            let e = DeleteStatement::new(&primary_key, ("twitter", "user").into());
+            let e = DeleteStatement::new(&primary_key, ("twitter", "users").into());
            let r = dml::parse_delete_full(&tok[1..]).unwrap();
            assert_eq!(r, e);
        }
