Add new keywords

next
Sayan Nandan 2 years ago
parent 7e5e2838cc
commit cac7bd4860
No known key found for this signature in database
GPG Key ID: 8BC07A0A4D41DD52

@@ -239,7 +239,7 @@ impl<'a> Compiler<'a> {
Some(Token![create]) => self.create0(),
Some(Token![drop]) => self.drop0(),
Some(Token![alter]) => self.alter0(),
Some(Token![inspect]) => self.inspect0(),
Some(Token![describe]) => self.inspect0(),
Some(Token![use]) => self.use0(),
_ => Err(LangError::ExpectedStatement),
}

@@ -63,6 +63,86 @@ fn process_entity(tok: &[Token], d: &mut MaybeInit<Entity>, i: &mut usize) -> bo
is_full | is_single
}
/*
Contexts
*/
#[derive(Debug, PartialEq)]
pub(super) struct RelationalExpr<'a> {
pub(super) lhs: RawSlice,
pub(super) rhs: &'a Lit,
pub(super) opc: u8,
}
impl<'a> RelationalExpr<'a> {
pub(super) const OP_EQ: u8 = 1;
pub(super) const OP_NE: u8 = 2;
pub(super) const OP_GT: u8 = 3;
pub(super) const OP_GE: u8 = 4;
pub(super) const OP_LT: u8 = 5;
pub(super) const OP_LE: u8 = 6;
#[inline(always)]
fn parse_operator(tok: &[Token], i: &mut usize, okay: &mut bool) -> u8 {
/*
FIXME(@ohsayan): This is relatively messy right now, but does the job. Will
re-implement later.
*/
#[inline(always)]
fn u(b: bool) -> u8 {
b as _
}
let op_eq = u(tok[0] == Token![=]) * Self::OP_EQ;
let op_ne = u(tok[0] == Token![!] && tok[1] == Token![=]) * Self::OP_NE;
let op_ge = u(tok[0] == Token![>] && tok[1] == Token![=]) * Self::OP_GE;
let op_gt = u(tok[0] == Token![>] && op_ge == 0) * Self::OP_GT;
let op_le = u(tok[0] == Token![<] && tok[1] == Token![=]) * Self::OP_LE;
let op_lt = u(tok[0] == Token![<] && op_le == 0) * Self::OP_LT;
let opc = op_eq + op_ne + op_ge + op_gt + op_le + op_lt;
*okay = opc != 0;
*i += 1 + (opc & 1 == 0) as usize;
opc
}
#[inline(always)]
fn try_parse(tok: &'a [Token], cnt: &mut usize) -> Option<Self> {
/*
Minimum length of an expression:
[lhs] [operator] [rhs]
*/
let mut okay = tok.len() >= 3;
let mut i = 0_usize;
if compiler::unlikely(!okay) {
return None;
}
okay &= tok[0].is_ident();
i += 1;
// let's get ourselves the operator
let operator = Self::parse_operator(&tok[1..], &mut i, &mut okay);
okay &= i < tok.len();
okay &= tok[tok.len() - 1].is_lit(); // LOL, I really like saving cycles
*cnt += i + okay as usize;
if compiler::likely(okay) {
Some(unsafe {
Self {
lhs: extract!(tok[0], Token::Ident(ref id) => id.clone()),
rhs: extract!(tok[tok.len() - 1], Token::Lit(ref l) => l),
opc: operator,
}
})
} else {
compiler::cold_err(None)
}
}
}
#[cfg(test)]
#[inline(always)]
pub(super) fn parse_relexpr_full<'a>(tok: &'a [Token]) -> Option<RelationalExpr<'a>> {
let mut i = 0;
let okay = RelationalExpr::try_parse(tok, &mut i);
full_tt!(tok.len(), i);
okay
}
/*
Impls for insert
*/
@@ -672,79 +752,3 @@ pub(super) fn parse_delete_full<'a>(tok: &'a [Token]) -> LangResult<DeleteStatem
full_tt!(i, tok.len());
r
}
#[derive(Debug, PartialEq)]
pub(super) struct RelationalExpr<'a> {
pub(super) lhs: RawSlice,
pub(super) rhs: &'a Lit,
pub(super) opc: u8,
}
impl<'a> RelationalExpr<'a> {
pub(super) const OP_EQ: u8 = 1;
pub(super) const OP_NE: u8 = 2;
pub(super) const OP_GT: u8 = 3;
pub(super) const OP_GE: u8 = 4;
pub(super) const OP_LT: u8 = 5;
pub(super) const OP_LE: u8 = 6;
#[inline(always)]
fn parse_operator(tok: &[Token], i: &mut usize, okay: &mut bool) -> u8 {
/*
FIXME(@ohsayan): This is relatively messy right now, but does the job. Will
re-implement later.
*/
#[inline(always)]
fn u(b: bool) -> u8 {
b as _
}
let op_eq = u(tok[0] == Token![=]) * Self::OP_EQ;
let op_ne = u(tok[0] == Token![!] && tok[1] == Token![=]) * Self::OP_NE;
let op_ge = u(tok[0] == Token![>] && tok[1] == Token![=]) * Self::OP_GE;
let op_gt = u(tok[0] == Token![>] && op_ge == 0) * Self::OP_GT;
let op_le = u(tok[0] == Token![<] && tok[1] == Token![=]) * Self::OP_LE;
let op_lt = u(tok[0] == Token![<] && op_le == 0) * Self::OP_LT;
let opc = op_eq + op_ne + op_ge + op_gt + op_le + op_lt;
*okay = opc != 0;
*i += 1 + (opc & 1 == 0) as usize;
opc
}
#[inline(always)]
fn try_parse(tok: &'a [Token], cnt: &mut usize) -> Option<Self> {
/*
Minimum length of an expression:
[lhs] [operator] [rhs]
*/
let mut okay = tok.len() >= 3;
let mut i = 0_usize;
if compiler::unlikely(!okay) {
return None;
}
okay &= tok[0].is_ident();
i += 1;
// let's get ourselves the operator
let operator = Self::parse_operator(&tok[1..], &mut i, &mut okay);
okay &= i < tok.len();
okay &= tok[tok.len() - 1].is_lit(); // LOL, I really like saving cycles
*cnt += i + okay as usize;
if compiler::likely(okay) {
Some(unsafe {
Self {
lhs: extract!(tok[0], Token::Ident(ref id) => id.clone()),
rhs: extract!(tok[tok.len() - 1], Token::Lit(ref l) => l),
opc: operator,
}
})
} else {
compiler::cold_err(None)
}
}
}
#[cfg(test)]
#[inline(always)]
pub(super) fn parse_relexpr_full<'a>(tok: &'a [Token]) -> Option<RelationalExpr<'a>> {
let mut i = 0;
let okay = RelationalExpr::try_parse(tok, &mut i);
full_tt!(tok.len(), i);
okay
}

@@ -57,7 +57,7 @@ impl PartialEq<Symbol> for Token {
assertions! {
size_of::<Token>() == 24, // FIXME(@ohsayan): Damn, what?
size_of::<Symbol>() == 1,
size_of::<Keyword>() == 2,
size_of::<Keyword>() == 1,
size_of::<Lit>() == 24, // FIXME(@ohsayan): Ouch
}
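Editor's note: the assertion change records why the flattening pays off. A minimal sketch (toy enums, observed layout on current rustc; not the crate's actual types) of why size_of::<Keyword>() drops from 2 to 1: nesting costs an outer tag byte on top of the inner enum's tag, while a single flat #[repr(u8)] enum is one discriminant byte.

use std::mem::size_of;

#[allow(dead_code)]
enum DdlKw { Create, Drop }
#[allow(dead_code)]
enum DmlKw { Insert, Select }

// the old shape: category wrapper around per-category enums
#[allow(dead_code)]
enum Nested {
    Ddl(DdlKw),
    Dml(DmlKw),
}

// the new shape: every keyword in one flat enum
#[allow(dead_code)]
#[repr(u8)]
enum Flat { Create, Drop, Insert, Select }

fn main() {
    assert_eq!(size_of::<Nested>(), 2); // outer tag + inner tag
    assert_eq!(size_of::<Flat>(), 1); // one discriminant byte
}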
@@ -130,93 +130,69 @@ pub enum Symbol {
SymAccent, // `
}
#[derive(Debug, Copy, Clone, PartialEq)]
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
#[repr(u8)]
pub enum Keyword {
Ddl(DdlKeyword),
DdlMisc(DdlMiscKeyword),
Dml(DmlKeyword),
DmlMisc(DmlMiscKeyword),
TypeId(Type),
Misc(MiscKeyword),
}
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum MiscKeyword {
Null,
}
enum_impls! {
Keyword => {
DdlKeyword as Ddl,
DdlMiscKeyword as DdlMisc,
DmlKeyword as Dml,
DmlMiscKeyword as DmlMisc,
Type as TypeId,
MiscKeyword as Misc,
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum DmlMiscKeyword {
Limit,
From,
Into,
Where,
If,
And,
As,
By,
Asc,
Desc,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum DmlKeyword {
Insert,
Select,
Update,
Delete,
Exists,
Truncate,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum DdlMiscKeyword {
With,
Add,
Remove,
Sort,
Table,
Model,
Space,
Index,
Type,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum DdlKeyword {
Function,
Use,
Create,
Alter,
Drop,
Inspect,
Model,
Space,
Describe,
Truncate,
Rename,
Add,
Remove,
Transform,
Order,
By,
Primary,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(u8)]
pub enum Type {
String,
Binary,
List,
Map,
Bool,
Int,
Double,
Float,
Key,
Value,
With,
On,
Lock,
All,
Insert,
Select,
Exists,
Update,
Delete,
Into,
From,
As,
Return,
Sort,
Group,
Limit,
Asc,
Desc,
To,
Set,
Auto,
Default,
In,
Of,
Transaction,
Batch,
Read,
Write,
Begin,
End,
Where,
If,
And,
Or,
Not,
User,
Revoke,
Null,
Infinity,
}
/*
@@ -241,7 +217,7 @@ static SYM_GRAPH: [u8; 69] = [
23, 7, 0, 27, 0, 4, 16, 11, 0, 0, 9,
];
static SYM_DATA: [(u8, Symbol); 32] = [
static SYM_LUT: [(u8, Symbol); 32] = [
(b'+', Symbol::OpArithmeticAdd),
(b'-', Symbol::OpArithmeticSub),
(b'*', Symbol::OpArithmeticMul),
@@ -290,85 +266,107 @@ fn symph(k: u8) -> u8 {
#[inline(always)]
fn symof(sym: u8) -> Option<Symbol> {
let hf = symph(sym);
if hf < SYM_DATA.len() as u8 && SYM_DATA[hf as usize].0 == sym {
Some(SYM_DATA[hf as usize].1)
if hf < SYM_LUT.len() as u8 && SYM_LUT[hf as usize].0 == sym {
Some(SYM_LUT[hf as usize].1)
} else {
None
}
}
static KW_GRAPH: [u8; 40] = [
0, 2, 32, 18, 4, 37, 11, 27, 34, 35, 26, 33, 0, 0, 10, 2, 22, 8, 5, 7, 16, 9, 8, 39, 21, 5, 0,
22, 14, 19, 22, 31, 28, 38, 26, 21, 30, 24, 10, 18,
static KW_LUT: [(&[u8], Keyword); 60] = [
(b"table", Keyword::Table),
(b"model", Keyword::Model),
(b"space", Keyword::Space),
(b"index", Keyword::Index),
(b"type", Keyword::Type),
(b"function", Keyword::Function),
(b"use", Keyword::Use),
(b"create", Keyword::Create),
(b"alter", Keyword::Alter),
(b"drop", Keyword::Drop),
(b"describe", Keyword::Describe),
(b"truncate", Keyword::Truncate),
(b"rename", Keyword::Rename),
(b"add", Keyword::Add),
(b"remove", Keyword::Remove),
(b"transform", Keyword::Transform),
(b"order", Keyword::Order),
(b"by", Keyword::By),
(b"primary", Keyword::Primary),
(b"key", Keyword::Key),
(b"value", Keyword::Value),
(b"with", Keyword::With),
(b"on", Keyword::On),
(b"lock", Keyword::Lock),
(b"all", Keyword::All),
(b"insert", Keyword::Insert),
(b"select", Keyword::Select),
(b"exists", Keyword::Exists),
(b"update", Keyword::Update),
(b"delere", Keyword::Delere),
(b"into", Keyword::Into),
(b"from", Keyword::From),
(b"as", Keyword::As),
(b"return", Keyword::Return),
(b"sort", Keyword::Sort),
(b"group", Keyword::Group),
(b"limit", Keyword::Limit),
(b"asc", Keyword::Asc),
(b"desc", Keyword::Desc),
(b"to", Keyword::To),
(b"set", Keyword::Set),
(b"auto", Keyword::Auto),
(b"default", Keyword::Default),
(b"in", Keyword::In),
(b"of", Keyword::Of),
(b"transaction", Keyword::Transaction),
(b"batch", Keyword::Batch),
(b"read", Keyword::Read),
(b"write", Keyword::Write),
(b"begin", Keyword::Begin),
(b"end", Keyword::End),
(b"where", Keyword::Where),
(b"if", Keyword::If),
(b"and", Keyword::And),
(b"or", Keyword::Or),
(b"not", Keyword::Not),
(b"user", Keyword::User),
(b"revoke", Keyword::Revoke),
(b"null", Keyword::Null),
(b"infinity", Keyword::Infinity),
];
static KW_DATA: [(&str, Keyword); 38] = [
("use", Keyword::Ddl(DdlKeyword::Use)),
("create", Keyword::Ddl(DdlKeyword::Create)),
("alter", Keyword::Ddl(DdlKeyword::Alter)),
("drop", Keyword::Ddl(DdlKeyword::Drop)),
("inspect", Keyword::Ddl(DdlKeyword::Inspect)),
("model", Keyword::Ddl(DdlKeyword::Model)),
("space", Keyword::Ddl(DdlKeyword::Space)),
("primary", Keyword::Ddl(DdlKeyword::Primary)),
("with", Keyword::DdlMisc(DdlMiscKeyword::With)),
("add", Keyword::DdlMisc(DdlMiscKeyword::Add)),
("remove", Keyword::DdlMisc(DdlMiscKeyword::Remove)),
("sort", Keyword::DdlMisc(DdlMiscKeyword::Sort)),
("type", Keyword::DdlMisc(DdlMiscKeyword::Type)),
("insert", Keyword::Dml(DmlKeyword::Insert)),
("select", Keyword::Dml(DmlKeyword::Select)),
("update", Keyword::Dml(DmlKeyword::Update)),
("delete", Keyword::Dml(DmlKeyword::Delete)),
("exists", Keyword::Dml(DmlKeyword::Exists)),
("truncate", Keyword::Dml(DmlKeyword::Truncate)),
("limit", Keyword::DmlMisc(DmlMiscKeyword::Limit)),
("from", Keyword::DmlMisc(DmlMiscKeyword::From)),
("into", Keyword::DmlMisc(DmlMiscKeyword::Into)),
("where", Keyword::DmlMisc(DmlMiscKeyword::Where)),
("if", Keyword::DmlMisc(DmlMiscKeyword::If)),
("and", Keyword::DmlMisc(DmlMiscKeyword::And)),
("as", Keyword::DmlMisc(DmlMiscKeyword::As)),
("by", Keyword::DmlMisc(DmlMiscKeyword::By)),
("asc", Keyword::DmlMisc(DmlMiscKeyword::Asc)),
("desc", Keyword::DmlMisc(DmlMiscKeyword::Desc)),
("string", Keyword::TypeId(Type::String)),
("binary", Keyword::TypeId(Type::Binary)),
("list", Keyword::TypeId(Type::List)),
("map", Keyword::TypeId(Type::Map)),
("bool", Keyword::TypeId(Type::Bool)),
("int", Keyword::TypeId(Type::Int)),
("double", Keyword::TypeId(Type::Double)),
("float", Keyword::TypeId(Type::Float)),
("null", Keyword::Misc(MiscKeyword::Null)),
static KWG: [u8; 64] = [
0, 55, 32, 25, 4, 21, 51, 43, 28, 59, 34, 1, 9, 39, 5, 49, 0, 16, 29, 0, 48, 0, 17, 60, 19, 21,
26, 18, 0, 41, 55, 10, 48, 62, 55, 35, 56, 18, 29, 41, 5, 46, 25, 52, 32, 26, 27, 17, 61, 60,
61, 59, 24, 12, 17, 30, 53, 4, 17, 0, 6, 2, 45, 56,
];
const KW_MAGIC_A: &[u8] = b"GSggb8qI";
const KW_MAGIC_B: &[u8] = b"ZaljIeOx";
const KW_MODULUS: usize = 8;
const KWMG_1: [u8; 11] = *b"nJEcjrLflKX";
const KWMG_2: [u8; 11] = *b"KWHPUPK3Fh3";
const KWMG_S: usize = KWMG_1.len();
#[inline(always)]
fn kwfh(k: &[u8], magic: &[u8]) -> u32 {
fn kwhf(k: &[u8], mg: &[u8]) -> u32 {
let mut i = 0;
let mut s = 0;
while i < k.len() {
s += magic[(i % KW_MODULUS) as usize] as u32 * k[i] as u32;
s += mg[(i % KWMG_S) as usize] as u32 * k[i] as u32;
i += 1;
}
s % KW_GRAPH.len() as u32
s % KWG.len() as u32
}
#[inline(always)]
fn kwph(k: &[u8]) -> u8 {
(KW_GRAPH[kwfh(k, KW_MAGIC_A) as usize] + KW_GRAPH[kwfh(k, KW_MAGIC_B) as usize])
% KW_GRAPH.len() as u8
(KWG[kwhf(k, &KWMG_1) as usize] + KWG[kwhf(k, &KWMG_2) as usize]) % KWG.len() as u8
}
#[inline(always)]
fn kwof(key: &str) -> Option<Keyword> {
let ph = kwph(key.as_bytes());
if ph < KW_DATA.len() as u8 && KW_DATA[ph as usize].0 == key {
Some(KW_DATA[ph as usize].1)
let key = key.as_bytes();
let ph = kwph(key);
if ph < KW_LUT.len() as u8 && KW_LUT[ph as usize].0 == key {
Some(KW_LUT[ph as usize].1)
} else {
None
}
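Editor's note: a working miniature of the two-probe displacement hash above, for four keys (toy MG_1/MG_2 mixing strings and a hand-solved G table; the commit's real KWG and KWMG_1/KWMG_2 cover 60 keywords and are generated offline). The key is hashed twice, the two displacements from G are summed modulo the table length to get the slot, and a final byte comparison rejects non-keywords that land on an occupied slot.

#[derive(Debug, PartialEq, Clone, Copy)]
enum Kw { Use, Drop, Create, Alter }

const MG_1: &[u8] = b"key"; // toy mixing strings
const MG_2: &[u8] = b"xyz";
const G: [u8; 8] = [1, 0, 2, 0, 0, 0, 3, 0]; // displacements, solved by hand
const LUT: [(&[u8], Kw); 4] = [
    (b"use", Kw::Use),
    (b"drop", Kw::Drop),
    (b"create", Kw::Create),
    (b"alter", Kw::Alter),
];

// same shape as kwhf: a rolling weighted byte sum reduced mod the graph size
fn hf(k: &[u8], mg: &[u8]) -> u32 {
    let mut s = 0u32;
    for (i, b) in k.iter().enumerate() {
        s += mg[i % mg.len()] as u32 * *b as u32;
    }
    s % G.len() as u32
}

// same shape as kwph: sum the two displacements, reduce mod the graph size
fn ph(k: &[u8]) -> u8 {
    (G[hf(k, MG_1) as usize] + G[hf(k, MG_2) as usize]) % G.len() as u8
}

// same shape as kwof: bounds check the slot, then verify the bytes
fn kwof(key: &[u8]) -> Option<Kw> {
    let slot = ph(key) as usize;
    if slot < LUT.len() && LUT[slot].0 == key {
        Some(LUT[slot].1)
    } else {
        None
    }
}

fn main() {
    assert_eq!(kwof(b"drop"), Some(Kw::Drop));
    assert_eq!(kwof(b"create"), Some(Kw::Create));
    assert_eq!(kwof(b"table"), None); // lands on "alter"'s slot; byte check rejects it
}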
@@ -700,10 +698,6 @@ impl Token {
matches!(self, Token::Ident(_))
}
#[inline(always)]
pub(crate) const fn is_typeid(&self) -> bool {
matches!(self, Token::Keyword(Keyword::TypeId(_)))
}
#[inline(always)]
pub(crate) fn as_ident_eq_ignore_case(&self, arg: &[u8]) -> bool {
self.is_ident()
&& unsafe {

@@ -37,51 +37,9 @@ macro_rules! __sym_token {
};
}
macro_rules! __ddl_token {
macro_rules! __kw {
($ident:ident) => {
$crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::Ddl(
$crate::engine::ql::lexer::DdlKeyword::$ident,
))
};
}
macro_rules! __ddl_misc_token {
($ident:ident) => {
$crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::DdlMisc(
$crate::engine::ql::lexer::DdlMiscKeyword::$ident,
))
};
}
macro_rules! __dml_token {
($ident:ident) => {
$crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::Dml(
$crate::engine::ql::lexer::DmlKeyword::$ident,
))
};
}
macro_rules! __dml_misc_token {
($ident:ident) => {
$crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::DmlMisc(
$crate::engine::ql::lexer::DmlMiscKeyword::$ident,
))
};
}
macro_rules! __type_token {
($ident:ident) => {
$crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::TypeId(
$crate::engine::ql::lexer::Type::$ident,
))
};
}
macro_rules! __misc_token {
($ident:ident) => {
$crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::Misc(
$crate::engine::ql::lexer::MiscKeyword::$ident,
))
$crate::engine::ql::lexer::Token::Keyword($crate::engine::ql::lexer::Keyword::$ident)
};
}
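Editor's note: with every keyword in one flat enum, the six per-category helper macros collapse into one. A condensed, self-contained sketch of the same pattern (local stand-ins for the crate's Token/Keyword types, and Tok! standing in for the crate's Token! macro):

#[derive(Debug, PartialEq)]
enum Keyword { Create, Null }
#[derive(Debug, PartialEq)]
enum Token { Keyword(Keyword) }

// one helper suffices once there is a single flat keyword enum
macro_rules! __kw {
    ($ident:ident) => {
        Token::Keyword(Keyword::$ident)
    };
}

macro_rules! Tok {
    (create) => { __kw!(Create) };
    (null) => { __kw!(Null) };
}

fn main() {
    // formerly __ddl_token!(Create) and __misc_token!(Null); both now route
    // through the single __kw! helper
    assert_eq!(Tok![create], Token::Keyword(Keyword::Create));
    assert_eq!(Tok![null], Token::Keyword(Keyword::Null));
}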
@@ -163,119 +121,119 @@ macro_rules! Token {
};
// ddl keywords
(use) => {
__ddl_token!(Use)
__kw!(Use)
};
(create) => {
__ddl_token!(Create)
__kw!(Create)
};
(alter) => {
__ddl_token!(Alter)
__kw!(Alter)
};
(drop) => {
__ddl_token!(Drop)
__kw!(Drop)
};
(inspect) => {
__ddl_token!(Inspect)
(describe) => {
__kw!(Describe)
};
(model) => {
__ddl_token!(Model)
__kw!(Model)
};
(space) => {
__ddl_token!(Space)
__kw!(Space)
};
(primary) => {
__ddl_token!(Primary)
__kw!(Primary)
};
// ddl misc
(with) => {
__ddl_misc_token!(With)
__kw!(With)
};
(add) => {
__ddl_misc_token!(Add)
__kw!(Add)
};
(remove) => {
__ddl_misc_token!(Remove)
__kw!(Remove)
};
(sort) => {
__ddl_misc_token!(Sort)
__kw!(Sort)
};
(type) => {
__ddl_misc_token!(Type)
__kw!(Type)
};
// dml
(insert) => {
__dml_token!(Insert)
__kw!(Insert)
};
(select) => {
__dml_token!(Select)
__kw!(Select)
};
(update) => {
__dml_token!(Update)
__kw!(Update)
};
(delete) => {
__dml_token!(Delete)
__kw!(Delete)
};
(exists) => {
__dml_token!(Exists)
__kw!(Exists)
};
(truncate) => {
__dml_token!(Truncate)
__kw!(Truncate)
};
// dml misc
(limit) => {
__dml_misc_token!(Limit)
__kw!(Limit)
};
(from) => {
__dml_misc_token!(From)
__kw!(From)
};
(into) => {
__dml_misc_token!(Into)
__kw!(Into)
};
(where) => {
__dml_misc_token!(Where)
__kw!(Where)
};
(if) => {
__dml_misc_token!(If)
__kw!(If)
};
(and) => {
__dml_misc_token!(And)
__kw!(And)
};
(as) => {
__dml_misc_token!(As)
__kw!(As)
};
(by) => {
__dml_misc_token!(By)
__kw!(By)
};
(asc) => {
__dml_misc_token!(Asc)
__kw!(Asc)
};
(desc) => {
__dml_misc_token!(Desc)
__kw!(Desc)
};
// types
(string) => {
__type_token!(String)
__kw!(String)
};
(binary) => {
__type_token!(Binary)
__kw!(Binary)
};
(list) => {
__type_token!(List)
__kw!(List)
};
(map) => {
__type_token!(Map)
__kw!(Map)
};
(bool) => {
__type_token!(Bool)
__kw!(Bool)
};
(int) => {
__type_token!(Int)
__kw!(Int)
};
(double) => {
__type_token!(Double)
__kw!(Double)
};
(float) => {
__type_token!(Float)
__kw!(Float)
};
// tt
(open {}) => {
@@ -298,7 +256,7 @@ macro_rules! Token {
};
// misc
(null) => {
__misc_token!(Null)
__kw!(Null)
};
}

@@ -46,9 +46,7 @@
use {
super::{
lexer::{
DdlKeyword, DdlMiscKeyword, DmlKeyword, Keyword, Lit, MiscKeyword, Symbol, Token, Type,
},
lexer::{Lit, Symbol, Token},
LangError, LangResult, RawSlice,
},
crate::util::MaybeInit,
@@ -110,22 +108,22 @@ pub type Dict = HashMap<String, Option<DictEntry>>;
#[derive(Debug, PartialEq)]
/// A layer contains a type and corresponding metadata
pub struct Layer {
ty: Type,
ty: RawSlice,
props: Dict,
reset: bool,
}
impl Layer {
/// Create a new layer
pub(super) const fn new(ty: Type, props: Dict, reset: bool) -> Self {
pub(super) const fn new(ty: RawSlice, props: Dict, reset: bool) -> Self {
Self { ty, props, reset }
}
/// Create a new layer that doesn't have any reset
pub(super) const fn new_noreset(ty: Type, props: Dict) -> Self {
pub(super) const fn new_noreset(ty: RawSlice, props: Dict) -> Self {
Self::new(ty, props, false)
}
/// Create a new layer that adds a reset
pub(super) const fn new_reset(ty: Type, props: Dict) -> Self {
pub(super) const fn new_reset(ty: RawSlice, props: Dict) -> Self {
Self::new(ty, props, true)
}
}
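Editor's note: the Layer rework decouples the schema layer from the lexer's closed Type enum by carrying the type name as an opaque slice. A minimal sketch of that design (String standing in for RawSlice, Dict simplified to a plain map; not the crate's real definitions):

use std::collections::HashMap;

type Dict = HashMap<String, Option<String>>; // simplified stand-in

#[derive(Debug, PartialEq)]
struct Layer {
    ty: String, // stand-in for RawSlice: just the identifier's bytes
    props: Dict,
    reset: bool,
}

impl Layer {
    fn new_noreset(ty: String, props: Dict) -> Self {
        Self { ty, props, reset: false }
    }
}

fn main() {
    // was: Layer::new_noreset(Type::String, ...); the type is now just a name,
    // so adding a new type no longer requires touching the lexer
    let layer = Layer::new_noreset("string".into(), Dict::new());
    assert_eq!(layer.ty, "string");
}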
@@ -430,10 +428,7 @@ pub(super) fn rfold_tymeta<const ALLOW_RESET: bool>(
let mut tmp = MaybeInit::uninit();
while r.pos() < l && r.is_okay() {
match (&tok[r.pos()], state) {
(
Token::Keyword(Keyword::DdlMisc(DdlMiscKeyword::Type)),
TyMetaFoldState::IDENT_OR_CB,
) => {
(Token![type], TyMetaFoldState::IDENT_OR_CB) => {
// we were expecting an ident but found the type keyword! increase depth
r.incr();
r.set_has_more();
@@ -573,10 +568,10 @@ pub(super) fn rfold_layers<const ALLOW_RESET: bool>(
let mut dict = Dict::new();
while i < l && okay {
match (&tok[i], state) {
(Token::Keyword(Keyword::TypeId(ty)), LayerFoldState::TY) => {
(Token::Ident(ty), LayerFoldState::TY) => {
i += 1;
// expecting type, and found type. next is either end or an open brace or some arbitrary token
tmp = MaybeInit::new(ty);
tmp = MaybeInit::new(ty.clone());
state = LayerFoldState::END_OR_OB;
}
(Token::Symbol(Symbol::TtOpenBrace), LayerFoldState::END_OR_OB) => {
@@ -670,12 +665,8 @@ pub(super) fn collect_field_properties(tok: &[Token]) -> (FieldProperties, u64)
let mut okay = true;
while i < tok.len() {
match &tok[i] {
Token::Keyword(Keyword::Ddl(DdlKeyword::Primary)) => {
okay &= props.properties.insert(FieldProperties::PRIMARY)
}
Token::Keyword(Keyword::Misc(MiscKeyword::Null)) => {
okay &= props.properties.insert(FieldProperties::NULL)
}
Token![primary] => okay &= props.properties.insert(FieldProperties::PRIMARY),
Token![null] => okay &= props.properties.insert(FieldProperties::NULL),
Token::Ident(_) => break,
_ => {
// we could pass this over to the caller, but it's better if we do it since we're doing
@@ -768,9 +759,7 @@ pub(super) fn parse_schema_from_tokens(tok: &[Token]) -> LangResult<(Model, usiz
state = SchemaParseState::FIELD;
}
(
Token::Keyword(Keyword::Ddl(DdlKeyword::Primary))
| Token::Keyword(Keyword::Misc(MiscKeyword::Null))
| Token::Ident(_),
Token![primary] | Token![null] | Token::Ident(_),
SchemaParseState::FIELD | SchemaParseState::END_OR_FIELD,
) => {
// fine, we found a field. let's see what we've got
@@ -809,7 +798,7 @@ pub(super) fn parse_schema_from_tokens(tok: &[Token]) -> LangResult<(Model, usiz
extract!(tok[0], Token::Ident(ref model_name) => model_name.clone())
};
if l > i && tok[i] == (Token::Keyword(Keyword::DdlMisc(DdlMiscKeyword::With))) {
if l > i && tok[i] == (Token![with]) {
// we have some more input, and it should be a dict of properties
i += 1; // +WITH
@@ -1063,15 +1052,13 @@ pub(super) fn parse_alter_kind_from_tokens(
*current += 2;
let model_name = unsafe { extract!(tok[0], Token::Ident(ref l) => l.clone()) };
match tok[1] {
Token::Keyword(Keyword::DdlMisc(DdlMiscKeyword::Add)) => alter_add(&tok[1..], current)
Token![add] => alter_add(&tok[1..], current)
.map(AlterKind::Add)
.map(|kind| Alter::new(model_name, kind)),
Token::Keyword(Keyword::DdlMisc(DdlMiscKeyword::Remove)) => {
alter_remove(&tok[1..], current)
.map(AlterKind::Remove)
.map(|kind| Alter::new(model_name, kind))
}
Token::Keyword(Keyword::Dml(DmlKeyword::Update)) => alter_update(&tok[1..], current)
Token![remove] => alter_remove(&tok[1..], current)
.map(AlterKind::Remove)
.map(|kind| Alter::new(model_name, kind)),
Token![update] => alter_update(&tok[1..], current)
.map(AlterKind::Update)
.map(|kind| Alter::new(model_name, kind)),
_ => return Err(LangError::ExpectedStatement),

@@ -364,15 +364,15 @@ mod schema_tests {
}
#[test]
fn inspect_model() {
let tok = lex(b"inspect model user").unwrap();
let tok = lex(b"inspect model users").unwrap();
assert_eq!(
ddl::parse_inspect_full(&tok[1..]).unwrap(),
Statement::InspectModel(Entity::Single("user".into()))
Statement::InspectModel(Entity::Single("users".into()))
);
let tok = lex(b"inspect model tweeter.user").unwrap();
let tok = lex(b"inspect model tweeter.users").unwrap();
assert_eq!(
ddl::parse_inspect_full(&tok[1..]).unwrap(),
Statement::InspectModel(("tweeter", "user").into())
Statement::InspectModel(("tweeter", "users").into())
);
}
#[test]
@@ -616,7 +616,6 @@ mod schema_tests {
}
mod tymeta {
use super::*;
use crate::engine::ql::lexer::{Keyword, Type};
#[test]
fn tymeta_mini() {
let tok = lex(b"}").unwrap();
@@ -705,7 +704,7 @@ mod schema_tests {
}
#[test]
fn fuzz_tymeta_normal() {
// { maxlen: 10, unique: true, user: "sayan" }
// { maxlen: 10, unique: true, users: "sayan" }
// ^start
let tok = lex(b"
maxlen: 10,
@@ -713,7 +712,7 @@ mod schema_tests {
auth: {
maybe: true\x01
},
user: \"sayan\"\x01
users: \"sayan\"\x01
}
")
.unwrap();
@@ -723,7 +722,7 @@ mod schema_tests {
"auth" => nullable_dict! {
"maybe" => Lit::Bool(true),
},
"user" => Lit::Str("sayan".into())
"users" => Lit::Str("sayan".into())
};
fuzz_tokens(&tok, |should_pass, new_src| {
let (ret, dict) = schema::fold_tymeta(&new_src);
@@ -742,7 +741,7 @@ mod schema_tests {
}
#[test]
fn fuzz_tymeta_with_ty() {
// list { maxlen: 10, unique: true, type string, user: "sayan" }
// list { maxlen: 10, unique: true, type string, users: "sayan" }
// ^start
let tok = lex(b"
maxlen: 10,
@@ -751,7 +750,7 @@ mod schema_tests {
maybe: true\x01
},
type string,
user: \"sayan\"\x01
users: \"sayan\"\x01
}
")
.unwrap();
@@ -767,7 +766,7 @@ mod schema_tests {
if should_pass {
assert!(ret.is_okay());
assert!(ret.has_more());
assert!(new_src[ret.pos()] == Token::Keyword(Keyword::TypeId(Type::String)));
assert!(new_src[ret.pos()] == Token::Ident("string".into()));
assert_eq!(dict, expected);
} else if ret.is_okay() {
panic!("Expected failure but passed for token stream: `{:?}`", tok);
@@ -777,7 +776,7 @@ mod schema_tests {
}
mod layer {
use super::*;
use crate::engine::ql::{lexer::Type, schema::Layer};
use crate::engine::ql::schema::Layer;
#[test]
fn layer_mini() {
let tok = lex(b"string)").unwrap();
@@ -786,7 +785,7 @@ mod schema_tests {
assert!(okay);
assert_eq!(
layers,
vec![Layer::new_noreset(Type::String, nullable_dict! {})]
vec![Layer::new_noreset("string".into(), nullable_dict! {})]
);
}
#[test]
@@ -798,7 +797,7 @@ mod schema_tests {
assert_eq!(
layers,
vec![Layer::new_noreset(
Type::String,
"string".into(),
nullable_dict! {
"maxlen" => Lit::UnsignedInt(100)
}
@@ -814,8 +813,8 @@ mod schema_tests {
assert_eq!(
layers,
vec![
Layer::new_noreset(Type::String, nullable_dict! {}),
Layer::new_noreset(Type::List, nullable_dict! {})
Layer::new_noreset("string".into(), nullable_dict! {}),
Layer::new_noreset("list".into(), nullable_dict! {})
]
);
}
@@ -828,9 +827,9 @@ mod schema_tests {
assert_eq!(
layers,
vec![
Layer::new_noreset(Type::String, nullable_dict! {}),
Layer::new_noreset("string".into(), nullable_dict! {}),
Layer::new_noreset(
Type::List,
"list".into(),
nullable_dict! {
"unique" => Lit::Bool(true),
"maxlen" => Lit::UnsignedInt(10),
@@ -852,14 +851,14 @@ mod schema_tests {
layers,
vec![
Layer::new_noreset(
Type::String,
"string".into(),
nullable_dict! {
"ascii_only" => Lit::Bool(true),
"maxlen" => Lit::UnsignedInt(255)
}
),
Layer::new_noreset(
Type::List,
"list".into(),
nullable_dict! {
"unique" => Lit::Bool(true),
"maxlen" => Lit::UnsignedInt(10),
@@ -882,14 +881,14 @@ mod schema_tests {
")
.unwrap();
let expected = vec![
Layer::new_noreset(Type::String, nullable_dict!()),
Layer::new_noreset("string".into(), nullable_dict!()),
Layer::new_noreset(
Type::List,
"list".into(),
nullable_dict! {
"maxlen" => Lit::UnsignedInt(100),
},
),
Layer::new_noreset(Type::List, nullable_dict!("unique" => Lit::Bool(true))),
Layer::new_noreset("list".into(), nullable_dict!("unique" => Lit::Bool(true))),
];
fuzz_tokens(&tok, |should_pass, new_tok| {
let (layers, c, okay) = schema::fold_layers(&new_tok);
@@ -935,10 +934,7 @@ mod schema_tests {
mod fields {
use {
super::*,
crate::engine::ql::{
lexer::Type,
schema::{Field, Layer},
},
crate::engine::ql::schema::{Field, Layer},
};
#[test]
fn field_mini() {
@@ -952,7 +948,7 @@ mod schema_tests {
f,
Field {
field_name: "username".into(),
layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
props: set![],
}
)
@@ -969,7 +965,7 @@ mod schema_tests {
f,
Field {
field_name: "username".into(),
layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
props: set!["primary"],
}
)
@@ -990,7 +986,7 @@ mod schema_tests {
Field {
field_name: "username".into(),
layers: [Layer::new_noreset(
Type::String,
"string".into(),
nullable_dict! {
"maxlen" => Lit::UnsignedInt(10),
"ascii_only" => Lit::Bool(true),
@@ -1021,14 +1017,14 @@ mod schema_tests {
field_name: "notes".into(),
layers: [
Layer::new_noreset(
Type::String,
"string".into(),
nullable_dict! {
"maxlen" => Lit::UnsignedInt(255),
"ascii_only" => Lit::Bool(true),
}
),
Layer::new_noreset(
Type::List,
"list".into(),
nullable_dict! {
"unique" => Lit::Bool(true)
}
@@ -1041,10 +1037,7 @@ mod schema_tests {
}
}
mod schemas {
use crate::engine::ql::{
lexer::Type,
schema::{Field, Layer, Model},
};
use crate::engine::ql::schema::{Field, Layer, Model};
use super::*;
#[test]
@@ -1068,12 +1061,12 @@ mod schema_tests {
fields: vec![
Field {
field_name: "username".into(),
layers: vec![Layer::new_noreset(Type::String, nullable_dict! {})],
layers: vec![Layer::new_noreset("string".into(), nullable_dict! {})],
props: set!["primary"]
},
Field {
field_name: "password".into(),
layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
props: set![]
}
],
@@ -1103,17 +1096,17 @@ mod schema_tests {
fields: vec![
Field {
field_name: "username".into(),
layers: vec![Layer::new_noreset(Type::String, nullable_dict! {})],
layers: vec![Layer::new_noreset("string".into(), nullable_dict! {})],
props: set!["primary"]
},
Field {
field_name: "password".into(),
layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
props: set![]
},
Field {
field_name: "profile_pic".into(),
layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
props: set!["null"]
}
],
@@ -1148,25 +1141,25 @@ mod schema_tests {
fields: vec![
Field {
field_name: "username".into(),
layers: vec![Layer::new_noreset(Type::String, nullable_dict! {})],
layers: vec![Layer::new_noreset("string".into(), nullable_dict! {})],
props: set!["primary"]
},
Field {
field_name: "password".into(),
layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
props: set![]
},
Field {
field_name: "profile_pic".into(),
layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
props: set!["null"]
},
Field {
field_name: "notes".into(),
layers: vec![
Layer::new_noreset(Type::String, nullable_dict! {}),
Layer::new_noreset("string".into(), nullable_dict! {}),
Layer::new_noreset(
Type::List,
"list".into(),
nullable_dict! {
"unique" => Lit::Bool(true)
}
@@ -1211,25 +1204,25 @@ mod schema_tests {
fields: vec![
Field {
field_name: "username".into(),
layers: vec![Layer::new_noreset(Type::String, nullable_dict! {})],
layers: vec![Layer::new_noreset("string".into(), nullable_dict! {})],
props: set!["primary"]
},
Field {
field_name: "password".into(),
layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
props: set![]
},
Field {
field_name: "profile_pic".into(),
layers: vec![Layer::new_noreset(Type::Binary, nullable_dict! {})],
layers: vec![Layer::new_noreset("binary".into(), nullable_dict! {})],
props: set!["null"]
},
Field {
field_name: "notes".into(),
layers: vec![
Layer::new_noreset(Type::String, nullable_dict! {}),
Layer::new_noreset("string".into(), nullable_dict! {}),
Layer::new_noreset(
Type::List,
"list".into(),
nullable_dict! {
"unique" => Lit::Bool(true)
}
@@ -1250,10 +1243,7 @@ mod schema_tests {
}
mod dict_field_syntax {
use super::*;
use crate::engine::ql::{
lexer::Type,
schema::{ExpandedField, Layer},
};
use crate::engine::ql::schema::{ExpandedField, Layer};
#[test]
fn field_syn_mini() {
let tok = lex(b"username { type string }").unwrap();
@@ -1263,7 +1253,7 @@ mod schema_tests {
ef,
ExpandedField {
field_name: "username".into(),
layers: vec![Layer::new_noreset(Type::String, nullable_dict! {})],
layers: vec![Layer::new_noreset("string".into(), nullable_dict! {})],
props: nullable_dict! {},
reset: false
}
@@ -1287,7 +1277,7 @@ mod schema_tests {
props: nullable_dict! {
"nullable" => Lit::Bool(false),
},
layers: vec![Layer::new_noreset(Type::String, nullable_dict! {})],
layers: vec![Layer::new_noreset("string".into(), nullable_dict! {})],
reset: false
}
);
@@ -1316,7 +1306,7 @@ mod schema_tests {
"jingle_bells" => Lit::Str("snow".into()),
},
layers: vec![Layer::new_noreset(
Type::String,
"string".into(),
nullable_dict! {
"minlen" => Lit::UnsignedInt(6),
"maxlen" => Lit::UnsignedInt(255),
@@ -1353,13 +1343,13 @@ mod schema_tests {
},
layers: vec![
Layer::new_noreset(
Type::String,
"string".into(),
nullable_dict! {
"ascii_only" => Lit::Bool(true),
}
),
Layer::new_noreset(
Type::List,
"list".into(),
nullable_dict! {
"unique" => Lit::Bool(true),
}
@@ -1410,10 +1400,7 @@ mod schema_tests {
}
mod alter_model_add {
use super::*;
use crate::engine::ql::{
lexer::Type,
schema::{ExpandedField, Layer},
};
use crate::engine::ql::schema::{ExpandedField, Layer};
#[test]
fn add_mini() {
let tok = lex(b"
@@ -1428,7 +1415,7 @@ mod schema_tests {
[ExpandedField {
field_name: "myfield".into(),
props: nullable_dict! {},
layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
reset: false
}]
);
@@ -1449,7 +1436,7 @@ mod schema_tests {
props: nullable_dict! {
"nullable" => Lit::Bool(true)
},
layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
reset: false
}]
);
@@ -1470,7 +1457,7 @@ mod schema_tests {
props: nullable_dict! {
"nullable" => Lit::Bool(true)
},
layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
reset: false
}]
);
@@ -1506,7 +1493,7 @@ mod schema_tests {
props: nullable_dict! {
"nullable" => Lit::Bool(true)
},
layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
reset: false
},
ExpandedField {
@@ -1516,13 +1503,13 @@ mod schema_tests {
},
layers: [
Layer::new_noreset(
Type::String,
"string".into(),
nullable_dict! {
"maxlen" => Lit::UnsignedInt(255)
}
),
Layer::new_noreset(
Type::List,
"list".into(),
nullable_dict! {
"unique" => Lit::Bool(true)
},
@@ -1536,12 +1523,9 @@ mod schema_tests {
}
}
mod alter_model_update {
use crate::engine::ql::{
lexer::Type,
schema::{ExpandedField, Layer},
};
use super::*;
use crate::engine::ql::schema::{ExpandedField, Layer};
#[test]
fn alter_mini() {
let tok = lex(b"
@@ -1556,7 +1540,7 @@ mod schema_tests {
[ExpandedField {
field_name: "myfield".into(),
props: nullable_dict! {},
layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
reset: true
}]
);
@@ -1575,7 +1559,7 @@ mod schema_tests {
[ExpandedField {
field_name: "myfield".into(),
props: nullable_dict! {},
layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
reset: true
}]
);
@@ -1602,7 +1586,7 @@ mod schema_tests {
props: nullable_dict! {
"nullable" => Lit::Bool(true)
},
layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
reset: true
}]
);
@@ -1634,13 +1618,13 @@ mod schema_tests {
props: nullable_dict! {
"nullable" => Lit::Bool(true)
},
layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
reset: true
},
ExpandedField {
field_name: "myfield2".into(),
props: nullable_dict! {},
layers: [Layer::new_noreset(Type::String, nullable_dict! {})].into(),
layers: [Layer::new_noreset("string".into(), nullable_dict! {})].into(),
reset: true
}
]
@@ -1676,14 +1660,14 @@ mod schema_tests {
props: nullable_dict! {
"nullable" => Lit::Bool(true)
},
layers: [Layer::new_reset(Type::String, nullable_dict! {})].into(),
layers: [Layer::new_reset("string".into(), nullable_dict! {})].into(),
reset: true
},
ExpandedField {
field_name: "myfield2".into(),
props: nullable_dict! {},
layers: [Layer::new_reset(
Type::String,
"string".into(),
nullable_dict! {"maxlen" => Lit::UnsignedInt(255)}
)]
.into(),
@@ -1959,13 +1943,13 @@ mod dml_tests {
#[test]
fn insert_tuple_mini() {
let x = lex(br#"
insert twitter.user:"sayan" ()
insert twitter.users:"sayan" ()
"#)
.unwrap();
let r = dml::parse_insert_full(&x[1..]).unwrap();
let e = InsertStatement {
primary_key: &("sayan".to_string().into()),
entity: Entity::Full("twitter".into(), "user".into()),
entity: Entity::Full("twitter".into(), "users".into()),
data: vec![].into(),
};
assert_eq!(e, r);
@@ -2111,13 +2095,13 @@ mod dml_tests {
#[test]
fn select_mini() {
let tok = lex(br#"
select * from user:"sayan"
select * from users:"sayan"
"#)
.unwrap();
let r = dml::parse_select_full(&tok[1..]).unwrap();
let e = SelectStatement {
primary_key: &Lit::Str("sayan".into()),
entity: Entity::Single("user".into()),
entity: Entity::Single("users".into()),
fields: [].to_vec(),
wildcard: true,
};
@@ -2126,13 +2110,13 @@ mod dml_tests {
#[test]
fn select() {
let tok = lex(br#"
select field1 from user:"sayan"
select field1 from users:"sayan"
"#)
.unwrap();
let r = dml::parse_select_full(&tok[1..]).unwrap();
let e = SelectStatement {
primary_key: &Lit::Str("sayan".into()),
entity: Entity::Single("user".into()),
entity: Entity::Single("users".into()),
fields: ["field1".into()].to_vec(),
wildcard: false,
};
@@ -2141,13 +2125,13 @@ mod dml_tests {
#[test]
fn select_pro() {
let tok = lex(br#"
select field1 from twitter.user:"sayan"
select field1 from twitter.users:"sayan"
"#)
.unwrap();
let r = dml::parse_select_full(&tok[1..]).unwrap();
let e = SelectStatement {
primary_key: &Lit::Str("sayan".into()),
entity: Entity::Full("twitter".into(), "user".into()),
entity: Entity::Full("twitter".into(), "users".into()),
fields: ["field1".into()].to_vec(),
wildcard: false,
};
@@ -2156,13 +2140,13 @@ mod dml_tests {
#[test]
fn select_pro_max() {
let tok = lex(br#"
select field1, field2 from twitter.user:"sayan"
select field1, field2 from twitter.users:"sayan"
"#)
.unwrap();
let r = dml::parse_select_full(&tok[1..]).unwrap();
let e = SelectStatement {
primary_key: &Lit::Str("sayan".into()),
entity: Entity::Full("twitter".into(), "user".into()),
entity: Entity::Full("twitter".into(), "users".into()),
fields: ["field1".into(), "field2".into()].to_vec(),
wildcard: false,
};
@@ -2305,22 +2289,22 @@ mod dml_tests {
#[test]
fn delete_mini() {
let tok = lex(br#"
delete user:"sayan"
delete users:"sayan"
"#)
.unwrap();
let primary_key = "sayan".into();
let e = DeleteStatement::new(&primary_key, Entity::Single("user".into()));
let e = DeleteStatement::new(&primary_key, Entity::Single("users".into()));
let r = dml::parse_delete_full(&tok[1..]).unwrap();
assert_eq!(r, e);
}
#[test]
fn delete() {
let tok = lex(br#"
delete twitter.user:"sayan"
delete twitter.users:"sayan"
"#)
.unwrap();
let primary_key = "sayan".into();
let e = DeleteStatement::new(&primary_key, ("twitter", "user").into());
let e = DeleteStatement::new(&primary_key, ("twitter", "users").into());
let r = dml::parse_delete_full(&tok[1..]).unwrap();
assert_eq!(r, e);
}
