Use `where` clauses by default for DML queries

Along with this, here's a summary of other changes:
- `RawSlice` now uses `NonNull` to help the compiler with size opts
- The lexer now allows case-insensitive keyword usage (I still DO NOT
particularly approve of it, because enforcing case is important)
next
Sayan Nandan 2 years ago
parent 2dec28d989
commit bd0c06652c
No known key found for this signature in database
GPG Key ID: 8BC07A0A4D41DD52

@ -82,7 +82,10 @@ pub(super) fn parse_drop(tok: &[Token], counter: &mut usize) -> LangResult<State
// either `force` or nothing
if tok.len() == i {
return Ok(Statement::DropSpace(DropSpace::new(
unsafe { extract!(tok[1], Token::Ident(ref space) => space.clone()) },
unsafe {
// UNSAFE(@ohsayan): Safe because the match predicate ensures that tok[1] is indeed an ident
extract!(tok[1], Token::Ident(ref space) => space.clone())
},
force,
)));
}
@ -96,7 +99,7 @@ pub(super) fn parse_drop(tok: &[Token], counter: &mut usize) -> LangResult<State
pub(super) fn parse_drop_full(tok: &[Token]) -> LangResult<Statement> {
let mut i = 0;
let r = self::parse_drop(tok, &mut i);
full_tt!(i, tok.len());
assert_full_tt!(i, tok.len());
r
}
@ -114,11 +117,15 @@ pub(super) fn parse_inspect(tok: &[Token], c: &mut usize) -> LangResult<Statemen
Some(Token![space]) if tok.len() == 2 && tok[1].is_ident() => {
*c += 1;
Ok(Statement::InspectSpace(unsafe {
// UNSAFE(@ohsayan): Safe because of the match predicate
extract!(tok[1], Token::Ident(ref space) => space.clone())
}))
}
Some(Token::Ident(id))
if unsafe { id.as_slice().eq_ignore_ascii_case(b"spaces") } && tok.len() == 1 =>
if unsafe {
// UNSAFE(@ohsayan): Token lifetime ensures validity of slice
id.as_slice().eq_ignore_ascii_case(b"spaces")
} && tok.len() == 1 =>
{
Ok(Statement::InspectSpaces)
}
@ -130,6 +137,6 @@ pub(super) fn parse_inspect(tok: &[Token], c: &mut usize) -> LangResult<Statemen
pub(super) fn parse_inspect_full(tok: &[Token]) -> LangResult<Statement> {
let mut i = 0;
let r = self::parse_inspect(tok, &mut i);
full_tt!(i, tok.len());
assert_full_tt!(i, tok.len());
r
}

@ -56,10 +56,16 @@ fn process_entity(tok: &[Token], d: &mut MaybeInit<Entity>, i: &mut usize) -> bo
let is_single = Entity::tokens_with_single(tok);
if is_full {
*i += 3;
*d = MaybeInit::new(unsafe { Entity::full_entity_from_slice(tok) })
*d = MaybeInit::new(unsafe {
// UNSAFE(@ohsayan): Predicate ensures validity
Entity::full_entity_from_slice(tok)
})
} else if is_single {
*i += 1;
*d = MaybeInit::new(unsafe { Entity::single_entity_from_slice(tok) });
*d = MaybeInit::new(unsafe {
// UNSAFE(@ohsayan): Predicate ensures validity
Entity::single_entity_from_slice(tok)
});
}
is_full | is_single
}
@ -69,13 +75,17 @@ fn process_entity(tok: &[Token], d: &mut MaybeInit<Entity>, i: &mut usize) -> bo
*/
#[derive(Debug, PartialEq)]
pub(super) struct RelationalExpr<'a> {
pub struct RelationalExpr<'a> {
pub(super) lhs: &'a [u8],
pub(super) rhs: &'a Lit,
pub(super) opc: u8,
}
impl<'a> RelationalExpr<'a> {
#[inline(always)]
pub(super) fn new(lhs: &'a [u8], rhs: &'a Lit, opc: u8) -> Self {
Self { lhs, rhs, opc }
}
pub(super) const OP_EQ: u8 = 1;
pub(super) const OP_NE: u8 = 2;
pub(super) const OP_GT: u8 = 3;
@ -102,7 +112,7 @@ impl<'a> RelationalExpr<'a> {
let op_le = u(tok[0] == Token![<] && tok[1] == Token![=]) * Self::OP_LE;
let op_lt = u(tok[0] == Token![<] && op_le == 0) * Self::OP_LT;
let opc = op_eq + op_ne + op_ge + op_gt + op_le + op_lt;
*okay = opc != 0;
*okay &= opc != 0;
*i += 1 + (opc & 1 == 0) as usize;
opc
}
@ -127,6 +137,7 @@ impl<'a> RelationalExpr<'a> {
*cnt += i + okay as usize;
if compiler::likely(okay) {
Some(unsafe {
// UNSAFE(@ohsayan): tok[0] is checked for being an ident, tok[lit_idx] also checked to be a lit
Self {
lhs: extract!(tok[0], Token::Ident(ref id) => id.as_slice()),
rhs: extract!(tok[lit_idx], Token::Lit(ref l) => l),
@ -140,7 +151,7 @@ impl<'a> RelationalExpr<'a> {
}
#[derive(Debug, PartialEq)]
pub(super) struct WhereClause<'a> {
pub struct WhereClause<'a> {
c: HashMap<&'a [u8], RelationalExpr<'a>>,
}
@ -150,11 +161,19 @@ impl<'a> WhereClause<'a> {
Self { c }
}
#[inline(always)]
pub(super) fn parse_where(tok: &'a [Token], flag: &mut bool, cnt: &mut usize) -> Self {
/// Parse the expressions in a `where` context, appending it to the given map
///
/// Notes:
/// - Deny duplicate clauses
/// - No enforcement on minimum number of clauses
fn parse_where_and_append_to(
tok: &'a [Token],
cnt: &mut usize,
c: &mut HashMap<&'a [u8], RelationalExpr<'a>>,
) -> bool {
let l = tok.len();
let mut okay = true;
let mut i = 0;
let mut c = HashMap::with_capacity(2);
let mut has_more = true;
while okay && i < l && has_more {
okay &= RelationalExpr::try_parse(&tok[i..], &mut i)
@ -163,8 +182,18 @@ impl<'a> WhereClause<'a> {
has_more = tok[cmp::min(i, l - 1)] == Token![and] && i < l;
i += has_more as usize;
}
*flag &= okay;
*cnt += i;
okay
}
#[inline(always)]
/// Parse a where context
///
/// Notes:
/// - Enforce a minimum of 1 clause
pub(super) fn parse_where(tok: &'a [Token], flag: &mut bool, cnt: &mut usize) -> Self {
let mut c = HashMap::with_capacity(2);
*flag &= Self::parse_where_and_append_to(tok, cnt, &mut c);
*flag &= !c.is_empty();
Self { c }
}
}
@ -174,7 +203,7 @@ pub(super) fn parse_where_clause_full<'a>(tok: &'a [Token]) -> Option<WhereClaus
let mut flag = true;
let mut i = 0;
let ret = WhereClause::parse_where(tok, &mut flag, &mut i);
full_tt!(tok.len(), i);
assert_full_tt!(tok.len(), i);
flag.then_some(ret)
}
@ -183,7 +212,7 @@ pub(super) fn parse_where_clause_full<'a>(tok: &'a [Token]) -> Option<WhereClaus
pub(super) fn parse_relexpr_full<'a>(tok: &'a [Token]) -> Option<RelationalExpr<'a>> {
let mut i = 0;
let okay = RelationalExpr::try_parse(tok, &mut i);
full_tt!(tok.len(), i);
assert_full_tt!(tok.len(), i);
okay
}
@ -339,7 +368,15 @@ pub(super) fn parse_data_map_syntax<'a>(
Lit::UnsafeLit(l) => DataType::AnonymousTypeNeedsEval(l.clone()),
Lit::SignedInt(int) => DataType::SignedInt(*int),
};
okay &= data.insert(unsafe { id.as_slice() }, Some(dt)).is_none();
okay &= data
.insert(
unsafe {
// UNSAFE(@ohsayan): Token lifetime ensures slice validity
id.as_slice()
},
Some(dt),
)
.is_none();
}
(Token::Ident(id), Token::Symbol(Symbol::TtOpenSqBracket)) => {
// ooh a list
@ -348,11 +385,25 @@ pub(super) fn parse_data_map_syntax<'a>(
okay &= lst_ok;
i += lst_i;
okay &= data
.insert(unsafe { id.as_slice() }, Some(l.into()))
.insert(
unsafe {
// UNSAFE(@ohsayan): Token lifetime ensures validity
id.as_slice()
},
Some(l.into()),
)
.is_none();
}
(Token::Ident(id), Token![null]) => {
okay &= data.insert(unsafe { id.as_slice() }, None).is_none();
okay &= data
.insert(
unsafe {
// UNSAFE(@ohsayan): Token lifetime ensures validity
id.as_slice()
},
None,
)
.is_none();
}
_ => {
okay = false;
@ -411,65 +462,77 @@ impl<'a> From<HashMap<&'static [u8], Option<DataType>>> for InsertData<'a> {
#[derive(Debug, PartialEq)]
pub struct InsertStatement<'a> {
pub(super) primary_key: &'a Lit,
pub(super) entity: Entity,
pub(super) data: InsertData<'a>,
}
#[inline(always)]
/// Try to parse an entity reference at the head of `tok`, storing it in `entity` and
/// advancing `i` by the number of tokens consumed: 3 for the full `space.model` form,
/// 1 for the bare `model` form. Returns true iff either form matched.
///
/// NOTE(review): indexes tok[0..=2] unconditionally — presumably every caller has already
/// verified that at least 3 tokens remain; confirm against call sites
fn parse_entity(tok: &[Token], entity: &mut MaybeInit<Entity>, i: &mut usize) -> bool {
// full form: ident '.' ident (e.g. `space.model`)
let is_full = tok[0].is_ident() && tok[1] == Token![.] && tok[2].is_ident();
// half form: a single ident (e.g. `model`)
let is_half = tok[0].is_ident();
unsafe {
// UNSAFE(@ohsayan): The branch predicates assert their correctness
if is_full {
*i += 3;
*entity = MaybeInit::new(Entity::full_entity_from_slice(&tok));
} else if is_half {
// note: checked after is_full, so a full match never falls through to here
*i += 1;
*entity = MaybeInit::new(Entity::single_entity_from_slice(&tok));
}
}
is_full | is_half
}
pub(super) fn parse_insert<'a>(
src: &'a [Token],
tok: &'a [Token],
counter: &mut usize,
) -> LangResult<InsertStatement<'a>> {
/*
smallest:
insert space:primary_key ()
^1 ^2 ^3^4 ^^5,6
insert into model (primarykey)
^1 ^2 ^3 ^4 ^5
*/
let l = src.len();
let is_full = Entity::tokens_with_full(src);
let is_half = Entity::tokens_with_single(src);
let mut okay = is_full | is_half;
let mut i = 0;
let l = tok.len();
if compiler::unlikely(l < 5) {
return compiler::cold_err(Err(LangError::UnexpectedEndofStatement));
}
let mut okay = tok[0] == Token![into];
let mut i = okay as usize;
let mut entity = MaybeInit::uninit();
okay &= process_entity(&src[i..], &mut entity, &mut i);
// primary key is a lit; atleast lit + (<oparen><cparen>) | (<obrace><cbrace>)
okay &= l >= (i + 4);
// colon, lit
okay &= src[i] == Token![:] && src[i + 1].is_lit();
// check data
let is_map = okay && src[i + 2] == Token![open {}];
let is_tuple = okay && src[i + 2] == Token![() open];
okay &= is_map | is_tuple;
if !okay {
return Err(LangError::UnexpectedToken);
okay &= parse_entity(&tok[i..], &mut entity, &mut i);
let mut data = None;
if !(i < l) {
unsafe {
// UNSAFE(@ohsayan): ALWAYS true because 1 + 3 for entity; early exit if smaller
impossible!();
}
let primary_key = unsafe { extract!(&src[i+1], Token::Lit(l) => l) };
i += 3; // skip col, lit + op/ob
let data;
if is_tuple {
let (ord, cnt, ok) = parse_data_tuple_syntax(&src[i..]);
}
match tok[i] {
Token![() open] => {
let (this_data, incr, ok) = parse_data_tuple_syntax(&tok[i + 1..]);
okay &= ok;
i += cnt;
data = InsertData::Ordered(ord);
} else {
let (map, cnt, ok) = parse_data_map_syntax(&src[i..]);
i += incr + 1;
data = Some(InsertData::Ordered(this_data));
}
Token![open {}] => {
let (this_data, incr, ok) = parse_data_map_syntax(&tok[i + 1..]);
okay &= ok;
i += cnt;
data = InsertData::Map(map);
i += incr + 1;
data = Some(InsertData::Map(this_data));
}
_ => okay = false,
}
*counter += i;
if okay {
let data = unsafe {
// UNSAFE(@ohsayan): Will be safe because of `okay` since it ensures that entity has been initialized
data.unwrap_unchecked()
};
Ok(InsertStatement {
primary_key,
entity: unsafe { entity.assume_init() },
entity: unsafe {
// UNSAFE(@ohsayan): Will be safe because of `okay` since it ensures that entity has been initialized
entity.assume_init()
},
data,
})
} else {
@ -491,14 +554,39 @@ pub(super) fn parse_insert_full<'a>(tok: &'a [Token]) -> Option<InsertStatement<
#[derive(Debug, PartialEq)]
pub(super) struct SelectStatement<'a> {
/// the primary key
pub(super) primary_key: &'a Lit,
/// the entity
pub(super) entity: Entity,
/// fields in order of querying. will be zero when wildcard is set
pub(super) fields: Vec<RawSlice>,
/// whether a wildcard was passed
pub(super) wildcard: bool,
/// where clause
pub(super) clause: WhereClause<'a>,
}
impl<'a> SelectStatement<'a> {
/// Test-only visibility shim over [`Self::new`]: accepts the raw clause map and
/// forwards it unchanged.
#[inline(always)]
pub(crate) fn new_test(
entity: Entity,
fields: Vec<RawSlice>,
wildcard: bool,
clauses: HashMap<&'a [u8], RelationalExpr<'a>>,
) -> SelectStatement<'a> {
Self::new(entity, fields, wildcard, clauses)
}
/// Build a `SelectStatement` from parsed parts, wrapping the raw relational-expression
/// map into a [`WhereClause`].
#[inline(always)]
fn new(
entity: Entity,
fields: Vec<RawSlice>,
wildcard: bool,
clauses: HashMap<&'a [u8], RelationalExpr<'a>>,
) -> SelectStatement<'a> {
Self {
entity,
fields,
wildcard,
clause: WhereClause::new(clauses),
}
}
}
/// Parse a `select` query. The cursor should have already passed the `select` token when this
@ -507,47 +595,62 @@ pub(super) fn parse_select<'a>(
tok: &'a [Token],
counter: &mut usize,
) -> LangResult<SelectStatement<'a>> {
/*
Smallest query:
select * from model
^ ^ ^
1 2 3
*/
let l = tok.len();
let mut i = 0_usize;
let mut okay = l > 4;
let mut fields = Vec::new();
let is_wildcard = i < l && tok[i] == Token![*];
if compiler::unlikely(l < 3) {
return compiler::cold_err(Err(LangError::UnexpectedEndofStatement));
}
let mut i = 0;
let mut okay = true;
let mut select_fields = Vec::new();
let is_wildcard = tok[0] == Token![*];
i += is_wildcard as usize;
while okay && i < l && tok[i].is_ident() && !is_wildcard {
unsafe {
fields.push(extract!(&tok[i], Token::Ident(id) => id.clone()));
while i < l && okay && !is_wildcard {
match tok[i] {
Token::Ident(ref id) => select_fields.push(id.clone()),
_ => {
break;
}
}
i += 1;
// skip comma
let nx_comma = i < l && tok[i] == Token![,];
let nx_from = i < l && tok[i] == Token![from];
let nx_idx = cmp::min(i, l);
let nx_comma = tok[nx_idx] == Token![,] && i < l;
let nx_from = tok[nx_idx] == Token![from];
okay &= nx_comma | nx_from;
i += nx_comma as usize;
}
okay &= i < l && tok[i] == Token![from];
okay &= is_wildcard | !select_fields.is_empty();
okay &= (i + 2) <= l;
if compiler::unlikely(!okay) {
return compiler::cold_err(Err(LangError::UnexpectedToken));
}
okay &= tok[i] == Token![from];
i += okay as usize;
// parsed upto select a, b, c from ...; now parse entity and select
// now process entity
let mut entity = MaybeInit::uninit();
okay &= process_entity(&tok[i..], &mut entity, &mut i);
// now primary key
okay &= i < l && tok[i] == Token![:];
i += okay as usize;
okay &= i < l && tok[i].is_lit();
*counter += i + okay as usize;
let has_where = tok[cmp::min(i, l)] == Token![where];
i += has_where as usize;
let mut clauses = <_ as Default>::default();
if has_where {
okay &= WhereClause::parse_where_and_append_to(&tok[i..], &mut i, &mut clauses);
okay &= !clauses.is_empty(); // append doesn't enforce clause arity
}
*counter += i;
if okay {
let primary_key = unsafe { extract!(tok[i], Token::Lit(ref l) => l) };
Ok(SelectStatement {
primary_key,
entity: unsafe { entity.assume_init() },
fields,
entity: unsafe {
// UNSAFE(@ohsayan): `process_entity` and `okay` assert correctness
entity.assume_init()
},
fields: select_fields,
wildcard: is_wildcard,
clause: WhereClause::new(clauses),
})
} else {
Err(LangError::UnexpectedToken)
@ -559,7 +662,7 @@ pub(super) fn parse_select<'a>(
pub(super) fn parse_select_full<'a>(tok: &'a [Token]) -> Option<SelectStatement<'a>> {
let mut i = 0;
let r = self::parse_select(tok, &mut i);
assert!(i == tok.len(), "didn't use full length");
assert_full_tt!(i, tok.len());
r.ok()
}
@ -645,6 +748,10 @@ impl<'a> AssignmentExpression<'a> {
if okay {
let expression = unsafe {
/*
UNSAFE(@ohsayan): tok[0] is checked for being an ident early on; second, tok[i]
is also checked for being a lit and then `okay` ensures correctness
*/
AssignmentExpression {
lhs: extract!(tok[0], Token::Ident(ref r) => r.clone()),
rhs: extract!(tok[i], Token::Lit(ref l) => l),
@ -663,7 +770,7 @@ pub(super) fn parse_expression_full<'a>(tok: &'a [Token]) -> Option<AssignmentEx
let mut i = 0;
let mut exprs = Vec::new();
if AssignmentExpression::parse_and_append_expression(tok, &mut exprs, &mut i) {
full_tt!(i, tok.len());
assert_full_tt!(i, tok.len());
Some(exprs.remove(0))
} else {
None
@ -676,52 +783,86 @@ pub(super) fn parse_expression_full<'a>(tok: &'a [Token]) -> Option<AssignmentEx
#[derive(Debug, PartialEq)]
pub struct UpdateStatement<'a> {
pub(super) primary_key: &'a Lit,
pub(super) entity: Entity,
pub(super) expressions: Vec<AssignmentExpression<'a>>,
pub(super) wc: WhereClause<'a>,
}
impl<'a> UpdateStatement<'a> {
#[inline(always)]
#[cfg(test)]
/// Test-only constructor: takes the raw relational-expression map, wraps it in a
/// `WhereClause`, and delegates to `new`.
pub fn new_test(
entity: Entity,
expressions: Vec<AssignmentExpression<'a>>,
wc: HashMap<&'a [u8], RelationalExpr<'a>>,
) -> Self {
Self::new(entity, expressions, WhereClause::new(wc))
}
#[inline(always)]
/// Construct an `UpdateStatement` from its already-parsed components (entity,
/// assignment expressions, and where clause).
pub fn new(
entity: Entity,
expressions: Vec<AssignmentExpression<'a>>,
wc: WhereClause<'a>,
) -> Self {
Self {
entity,
expressions,
wc,
}
}
#[inline(always)]
pub(super) fn parse_update(tok: &'a [Token], counter: &mut usize) -> LangResult<Self> {
/*
TODO(@ohsayan): Allow volcanoes
smallest tt:
update model SET x = 1 where x = 1
^1 ^2 ^3 ^4 ^5^6 ^7^8^9
*/
let l = tok.len();
// TODO(@ohsayan): This would become 8 when we add `SET`. It isn't exactly needed but is for purely aesthetic purposes
let mut okay = l > 6;
let mut i = 0_usize;
// parse entity
if compiler::unlikely(l < 9) {
return compiler::cold_err(Err(LangError::UnexpectedEndofStatement));
}
let mut i = 0;
let mut entity = MaybeInit::uninit();
okay &= process_entity(tok, &mut entity, &mut i);
// check if we have our primary key
okay &= i < l && tok[i] == Token![:];
i += okay as usize;
okay &= i < l && tok[i].is_lit();
let primary_key_location = i;
i += okay as usize;
// now parse expressions that we have to update
let mut okay = parse_entity(&tok[i..], &mut entity, &mut i);
if !((i + 6) <= l) {
unsafe {
// UNSAFE(@ohsayan): Obvious, just a hint; entity can fw by 3 max
impossible!();
}
}
okay &= tok[i] == Token![set];
i += 1; // ignore whatever we have here, even if it's broken
let mut nx_where = false;
let mut expressions = Vec::new();
while i < l && okay {
while i < l && okay && !nx_where {
okay &= AssignmentExpression::parse_and_append_expression(
&tok[i..],
&mut expressions,
&mut i,
);
let nx_comma = i < l && tok[i] == Token![,];
// TODO(@ohsayan): Define the need for a semicolon; remember, no SQL unsafety!
let nx_over = i == l;
okay &= nx_comma | nx_over;
let nx_idx = cmp::min(i, l);
let nx_comma = tok[nx_idx] == Token![,] && i < l;
// NOTE: volcano
nx_where = tok[nx_idx] == Token![where] && i < l;
okay &= nx_comma | nx_where; // NOTE: volcano
i += nx_comma as usize;
}
okay &= nx_where;
i += okay as usize;
// now process expressions
let mut clauses = <_ as Default>::default();
okay &= WhereClause::parse_where_and_append_to(&tok[i..], &mut i, &mut clauses);
okay &= !clauses.is_empty(); // NOTE: volcano
*counter += i;
if okay {
let primary_key =
unsafe { extract!(tok[primary_key_location], Token::Lit(ref pk) => pk) };
Ok(Self {
primary_key,
entity: unsafe { entity.assume_init() },
entity: unsafe {
// UNSAFE(@ohsayan): This is safe because of `parse_entity` and `okay`
entity.assume_init()
},
expressions,
wc: WhereClause::new(clauses),
})
} else {
Err(LangError::UnexpectedToken)
@ -733,7 +874,7 @@ impl<'a> UpdateStatement<'a> {
pub(super) fn parse_update_full<'a>(tok: &'a [Token]) -> LangResult<UpdateStatement<'a>> {
let mut i = 0;
let r = UpdateStatement::parse_update(tok, &mut i);
full_tt!(i, tok.len());
assert_full_tt!(i, tok.len());
r
}
@ -746,43 +887,56 @@ pub(super) fn parse_update_full<'a>(tok: &'a [Token]) -> LangResult<UpdateStatem
#[derive(Debug, PartialEq)]
pub(super) struct DeleteStatement<'a> {
pub(super) primary_key: &'a Lit,
pub(super) entity: Entity,
pub(super) wc: WhereClause<'a>,
}
impl<'a> DeleteStatement<'a> {
#[inline(always)]
pub(super) fn new(primary_key: &'a Lit, entity: Entity) -> Self {
Self {
primary_key,
entity,
pub(super) fn new(entity: Entity, wc: WhereClause<'a>) -> Self {
Self { entity, wc }
}
#[inline(always)]
#[cfg(test)]
/// Test-only constructor: wraps the raw clause map in a `WhereClause` and delegates
/// to `new`.
pub(super) fn new_test(entity: Entity, wc: HashMap<&'a [u8], RelationalExpr<'a>>) -> Self {
Self::new(entity, WhereClause::new(wc))
}
pub(super) fn parse_delete(tok: &'a [Token], counter: &mut usize) -> LangResult<Self> {
/*
TODO(@ohsayan): Volcano
smallest tt:
delete from model where x = 1
^1 ^2 ^3 ^4 ^5
*/
let l = tok.len();
let mut okay = l > 2;
let mut i = 0_usize;
// parse entity
if compiler::unlikely(l < 5) {
return compiler::cold_err(Err(LangError::UnexpectedEndofStatement));
}
let mut i = 0;
let mut okay = tok[i] == Token![from];
i += 1; // skip even if incorrect
let mut entity = MaybeInit::uninit();
okay &= process_entity(tok, &mut entity, &mut i);
// find primary key
okay &= i < l && tok[i] == Token![:];
i += okay as usize;
okay &= i < l && tok[i].is_lit();
let primary_key_idx = i;
i += okay as usize;
okay &= parse_entity(&tok[i..], &mut entity, &mut i);
if !(i < l) {
unsafe {
// UNSAFE(@ohsayan): Obvious, we have atleast 5, used max 4
impossible!();
}
}
okay &= tok[i] == Token![where]; // NOTE: volcano
i += 1; // skip even if incorrect
let mut clauses = <_ as Default>::default();
okay &= WhereClause::parse_where_and_append_to(&tok[i..], &mut i, &mut clauses);
okay &= !clauses.is_empty();
*counter += i;
if okay {
unsafe {
Ok(Self {
primary_key: extract!(tok[primary_key_idx], Token::Lit(ref l) => l),
entity: entity.assume_init(),
entity: unsafe {
// UNSAFE(@ohsayan): obvious due to `okay` and `parse_entity`
entity.assume_init()
},
wc: WhereClause::new(clauses),
})
}
} else {
Err(LangError::UnexpectedToken)
}
@ -793,6 +947,6 @@ impl<'a> DeleteStatement<'a> {
pub(super) fn parse_delete_full<'a>(tok: &'a [Token]) -> LangResult<DeleteStatement<'a>> {
let mut i = 0_usize;
let r = DeleteStatement::parse_delete(tok, &mut i);
full_tt!(i, tok.len());
assert_full_tt!(i, tok.len());
r
}

@ -363,8 +363,7 @@ fn kwph(k: &[u8]) -> u8 {
}
#[inline(always)]
fn kwof(key: &str) -> Option<Keyword> {
let key = key.as_bytes();
fn kwof(key: &[u8]) -> Option<Keyword> {
let ph = kwph(key);
if ph < KW_LUT.len() as u8 && KW_LUT[ph as usize].0 == key {
Some(KW_LUT[ph as usize].1)
@ -498,11 +497,11 @@ impl<'a, const OPERATING_MODE: u8> Lexer<'a, OPERATING_MODE> {
fn scan_ident_or_keyword(&mut self) {
let s = self.scan_ident();
let st = unsafe { s.as_str() };
match kwof(st) {
let st = unsafe { s.as_slice() }.to_ascii_lowercase();
match kwof(&st) {
Some(kw) => self.tokens.push(kw.into()),
// FIXME(@ohsayan): Uh, mind fixing this? The only advantage is that I can keep the graph *memory* footprint small
None if st == "true" || st == "false" => self.push_token(Lit::Bool(st == "true")),
None if st == b"true" || st == b"false" => self.push_token(Lit::Bool(st == b"true")),
None => self.tokens.push(Token::Ident(s)),
}
}

@ -25,7 +25,7 @@
*/
#[cfg(test)]
macro_rules! full_tt {
macro_rules! assert_full_tt {
($a:expr, $b:expr) => {
assert_eq!($a, $b, "full token stream not utilized")
};
@ -180,6 +180,9 @@ macro_rules! Token {
__kw!(Truncate)
};
// dml misc
(set) => {
__kw!(Set)
};
(limit) => {
__kw!(Limit)
};

@ -39,7 +39,7 @@ mod tests;
#[cfg(test)]
use core::{fmt, ops::Deref};
use core::{mem, slice, str};
use core::{mem, ptr::NonNull, slice, str};
/*
Lang errors
@ -78,7 +78,7 @@ pub enum LangError {
#[cfg_attr(not(test), derive(Debug))]
#[derive(Clone)]
pub struct RawSlice {
ptr: *const u8,
ptr: NonNull<u8>,
len: usize,
}
@ -90,13 +90,16 @@ impl RawSlice {
const _EALIGN: () = assert!(mem::align_of::<Self>() == mem::align_of::<&[u8]>());
const FAKE_SLICE: Self = unsafe { Self::new_from_str("") };
const unsafe fn new(ptr: *const u8, len: usize) -> Self {
Self { ptr, len }
Self {
ptr: NonNull::new_unchecked(ptr.cast_mut()),
len,
}
}
/// Create a `RawSlice` viewing the bytes of `s`.
///
/// ## Safety
/// The returned slice carries no lifetime, so the caller must guarantee that `s`
/// outlives every use of the returned `RawSlice`.
const unsafe fn new_from_str(s: &str) -> Self {
Self::new(s.as_bytes().as_ptr(), s.as_bytes().len())
}
unsafe fn as_slice(&self) -> &[u8] {
slice::from_raw_parts(self.ptr, self.len)
slice::from_raw_parts(self.ptr.as_ptr(), self.len)
}
unsafe fn as_str(&self) -> &str {
str::from_utf8_unchecked(self.as_slice())

@ -2034,21 +2034,21 @@ mod dml_tests {
#[test]
fn insert_tuple_mini() {
let x = lex(br#"
insert twitter.users:"sayan" ()
insert into twitter.users ("sayan")
"#)
.unwrap();
let r = dml::parse_insert_full(&x[1..]).unwrap();
let e = InsertStatement {
primary_key: &("sayan".to_string().into()),
entity: Entity::Full("twitter".into(), "users".into()),
data: vec![].into(),
data: into_array_nullable!["sayan"].to_vec().into(),
};
assert_eq!(e, r);
}
#[test]
fn insert_tuple() {
let x = lex(br#"
insert twitter.users:"sayan" (
insert into twitter.users (
"sayan",
"Sayan",
"sayan@example.com",
true,
@ -2059,9 +2059,15 @@ mod dml_tests {
.unwrap();
let r = dml::parse_insert_full(&x[1..]).unwrap();
let e = InsertStatement {
primary_key: &("sayan".to_string().into()),
entity: Entity::Full("twitter".into(), "users".into()),
data: into_array_nullable!["Sayan", "sayan@example.com", true, 12345, 67890]
data: into_array_nullable![
"sayan",
"Sayan",
"sayan@example.com",
true,
12345,
67890
]
.to_vec()
.into(),
};
@ -2070,7 +2076,8 @@ mod dml_tests {
#[test]
fn insert_tuple_pro() {
let x = lex(br#"
insert twitter.users:"sayan" (
insert into twitter.users (
"sayan",
"Sayan",
"sayan@example.com",
true,
@ -2084,9 +2091,9 @@ mod dml_tests {
.unwrap();
let r = dml::parse_insert_full(&x[1..]).unwrap();
let e = InsertStatement {
primary_key: &("sayan".to_string().into()),
entity: Entity::Full("twitter".into(), "users".into()),
data: into_array_nullable![
"sayan",
"Sayan",
"sayan@example.com",
true,
@ -2103,19 +2110,25 @@ mod dml_tests {
}
#[test]
fn insert_map_mini() {
let tok = lex(br#"insert jotsy.app:"sayan" {}"#).unwrap();
let tok = lex(br#"
insert into jotsy.app { username: "sayan" }
"#)
.unwrap();
let r = dml::parse_insert_full(&tok[1..]).unwrap();
let e = InsertStatement {
primary_key: &("sayan".to_string().into()),
entity: Entity::Full("jotsy".into(), "app".into()),
data: nullable_dict! {}.into(),
data: dict_nullable! {
"username".as_bytes() => "sayan"
}
.into(),
};
assert_eq!(e, r);
}
#[test]
fn insert_map() {
let tok = lex(br#"
insert jotsy.app:"sayan" {
insert into jotsy.app {
username: "sayan",
name: "Sayan",
email: "sayan@example.com",
verified: true,
@ -2126,9 +2139,9 @@ mod dml_tests {
.unwrap();
let r = dml::parse_insert_full(&tok[1..]).unwrap();
let e = InsertStatement {
primary_key: &("sayan".to_string().into()),
entity: Entity::Full("jotsy".into(), "app".into()),
data: dict_nullable! {
"username".as_bytes() => "sayan",
"name".as_bytes() => "Sayan",
"email".as_bytes() => "sayan@example.com",
"verified".as_bytes() => true,
@ -2142,7 +2155,8 @@ mod dml_tests {
#[test]
fn insert_map_pro() {
let tok = lex(br#"
insert jotsy.app:"sayan" {
insert into jotsy.app {
username: "sayan",
password: "pass123",
email: "sayan@example.com",
verified: true,
@ -2156,9 +2170,9 @@ mod dml_tests {
.unwrap();
let r = dml::parse_insert_full(&tok[1..]).unwrap();
let e = InsertStatement {
primary_key: &("sayan".to_string()).into(),
entity: Entity::Full("jotsy".into(), "app".into()),
data: dict_nullable! {
"username".as_bytes() => "sayan",
"password".as_bytes() => "pass123",
"email".as_bytes() => "sayan@example.com",
"verified".as_bytes() => true,
@ -2175,72 +2189,93 @@ mod dml_tests {
}
mod stmt_select {
use crate::engine::ql::dml::RelationalExpr;
use {
super::*,
crate::engine::ql::{
ast::Entity,
dml::{self, SelectStatement},
lexer::Lit,
},
};
#[test]
fn select_mini() {
let tok = lex(br#"
select * from users:"sayan"
select * from users where username = "sayan"
"#)
.unwrap();
let r = dml::parse_select_full(&tok[1..]).unwrap();
let e = SelectStatement {
primary_key: &Lit::Str("sayan".into()),
entity: Entity::Single("users".into()),
fields: [].to_vec(),
wildcard: true,
};
let username_where = "sayan".into();
let e = SelectStatement::new_test(
Entity::Single("users".into()),
[].to_vec(),
true,
dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(), &username_where, RelationalExpr::OP_EQ
),
},
);
assert_eq!(r, e);
}
#[test]
fn select() {
let tok = lex(br#"
select field1 from users:"sayan"
select field1 from users where username = "sayan"
"#)
.unwrap();
let r = dml::parse_select_full(&tok[1..]).unwrap();
let e = SelectStatement {
primary_key: &Lit::Str("sayan".into()),
entity: Entity::Single("users".into()),
fields: ["field1".into()].to_vec(),
wildcard: false,
};
let username_where = "sayan".into();
let e = SelectStatement::new_test(
Entity::Single("users".into()),
["field1".into()].to_vec(),
false,
dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(), &username_where, RelationalExpr::OP_EQ
),
},
);
assert_eq!(r, e);
}
#[test]
fn select_pro() {
let tok = lex(br#"
select field1 from twitter.users:"sayan"
select field1 from twitter.users where username = "sayan"
"#)
.unwrap();
let r = dml::parse_select_full(&tok[1..]).unwrap();
let e = SelectStatement {
primary_key: &Lit::Str("sayan".into()),
entity: Entity::Full("twitter".into(), "users".into()),
fields: ["field1".into()].to_vec(),
wildcard: false,
};
let username_where = "sayan".into();
let e = SelectStatement::new_test(
Entity::Full("twitter".into(), "users".into()),
["field1".into()].to_vec(),
false,
dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(), &username_where, RelationalExpr::OP_EQ
),
},
);
assert_eq!(r, e);
}
#[test]
fn select_pro_max() {
let tok = lex(br#"
select field1, field2 from twitter.users:"sayan"
select field1, field2 from twitter.users where username = "sayan"
"#)
.unwrap();
let r = dml::parse_select_full(&tok[1..]).unwrap();
let e = SelectStatement {
primary_key: &Lit::Str("sayan".into()),
entity: Entity::Full("twitter".into(), "users".into()),
fields: ["field1".into(), "field2".into()].to_vec(),
wildcard: false,
};
let username_where = "sayan".into();
let e = SelectStatement::new_test(
Entity::Full("twitter".into(), "users".into()),
["field1".into(), "field2".into()].to_vec(),
false,
dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(), &username_where, RelationalExpr::OP_EQ
),
},
);
assert_eq!(r, e);
}
}
@ -2323,46 +2358,67 @@ mod dml_tests {
super::*,
crate::engine::ql::{
ast::Entity,
dml::{self, AssignmentExpression, Operator, UpdateStatement},
dml::{
self, AssignmentExpression, Operator, RelationalExpr, UpdateStatement,
WhereClause,
},
},
};
#[test]
fn update_mini() {
let tok = lex(br#"
update app:"sayan" notes += "this is my new note"
update app SET notes += "this is my new note" where username = "sayan"
"#)
.unwrap();
let where_username = "sayan".into();
let note = "this is my new note".to_string().into();
let r = dml::parse_update_full(&tok[1..]).unwrap();
let e = UpdateStatement {
primary_key: &("sayan".to_owned().into()),
entity: Entity::Single("app".into()),
expressions: vec![AssignmentExpression {
lhs: "notes".into(),
rhs: &note,
operator_fn: Operator::AddAssign,
}],
wc: WhereClause::new(dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(),
&where_username,
RelationalExpr::OP_EQ
)
}),
};
assert_eq!(r, e);
}
#[test]
fn update() {
let tok = lex(br#"
update jotsy.app:"sayan" notes += "this is my new note", email = "sayan@example.com"
update
jotsy.app
SET
notes += "this is my new note",
email = "sayan@example.com"
WHERE
username = "sayan"
"#)
.unwrap();
let r = dml::parse_update_full(&tok[1..]).unwrap();
let where_username = "sayan".into();
let field_note = "this is my new note".into();
let field_email = "sayan@example.com".into();
let primary_key = "sayan".into();
let e = UpdateStatement {
primary_key: &primary_key,
entity: ("jotsy", "app").into(),
expressions: vec![
AssignmentExpression::new("notes".into(), &field_note, Operator::AddAssign),
AssignmentExpression::new("email".into(), &field_email, Operator::Assign),
],
wc: WhereClause::new(dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(),
&where_username,
RelationalExpr::OP_EQ
)
}),
};
assert_eq!(r, e);
@ -2373,29 +2429,47 @@ mod dml_tests {
super::*,
crate::engine::ql::{
ast::Entity,
dml::{self, DeleteStatement},
dml::{self, DeleteStatement, RelationalExpr},
},
};
#[test]
fn delete_mini() {
let tok = lex(br#"
delete users:"sayan"
delete from users where username = "sayan"
"#)
.unwrap();
let primary_key = "sayan".into();
let e = DeleteStatement::new(&primary_key, Entity::Single("users".into()));
let e = DeleteStatement::new_test(
Entity::Single("users".into()),
dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(),
&primary_key,
RelationalExpr::OP_EQ
)
},
);
let r = dml::parse_delete_full(&tok[1..]).unwrap();
assert_eq!(r, e);
}
#[test]
fn delete() {
let tok = lex(br#"
delete twitter.users:"sayan"
delete from twitter.users where username = "sayan"
"#)
.unwrap();
let primary_key = "sayan".into();
let e = DeleteStatement::new(&primary_key, ("twitter", "users").into());
let e = DeleteStatement::new_test(
("twitter", "users").into(),
dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(),
&primary_key,
RelationalExpr::OP_EQ
)
},
);
let r = dml::parse_delete_full(&tok[1..]).unwrap();
assert_eq!(r, e);
}
@ -2528,5 +2602,13 @@ mod dml_tests {
});
assert_eq!(expected, dml::parse_where_clause_full(&tok).unwrap());
}
#[test]
fn where_duplicate_condition() {
// two clauses on the same column (`userid`) must be rejected as duplicates,
// so parsing the full where clause yields None
let tok = lex(br#"
userid = 100 and userid > 200
"#)
.unwrap();
assert!(dml::parse_where_clause_full(&tok).is_none());
}
}
}

@ -230,6 +230,7 @@ pub struct MaybeInit<T> {
impl<T> MaybeInit<T> {
/// Initialize a new uninitialized variant
#[inline(always)]
pub const fn uninit() -> Self {
Self {
#[cfg(test)]
@ -238,6 +239,7 @@ impl<T> MaybeInit<T> {
}
}
/// Initialize with a value
#[inline(always)]
pub const fn new(val: T) -> Self {
Self {
#[cfg(test)]
@ -250,6 +252,7 @@ impl<T> MaybeInit<T> {
/// ## Safety
///
/// Caller needs to ensure that the data is actually initialized
#[inline(always)]
pub const unsafe fn assume_init(self) -> T {
#[cfg(test)]
{
@ -264,6 +267,7 @@ impl<T> MaybeInit<T> {
/// ## Safety
///
/// Caller needs to ensure that the data is actually initialized
#[inline(always)]
pub const unsafe fn assume_init_ref(&self) -> &T {
#[cfg(test)]
{

Loading…
Cancel
Save