Use `where` clauses by default for DML queries

Along with this, here's a summary of other changes:
- `RawSlice` now uses `NonNull` to help the compiler with size optimizations
- The lexer now allows case-insensitive keyword usage (I still DO NOT
particularly approve of it, because enforcing a consistent case is important)
next
Sayan Nandan 2 years ago
parent 2dec28d989
commit bd0c06652c
No known key found for this signature in database
GPG Key ID: 8BC07A0A4D41DD52

@ -82,7 +82,10 @@ pub(super) fn parse_drop(tok: &[Token], counter: &mut usize) -> LangResult<State
// either `force` or nothing // either `force` or nothing
if tok.len() == i { if tok.len() == i {
return Ok(Statement::DropSpace(DropSpace::new( return Ok(Statement::DropSpace(DropSpace::new(
unsafe { extract!(tok[1], Token::Ident(ref space) => space.clone()) }, unsafe {
// UNSAFE(@ohsayan): Safe because the match predicate ensures that tok[1] is indeed an ident
extract!(tok[1], Token::Ident(ref space) => space.clone())
},
force, force,
))); )));
} }
@ -96,7 +99,7 @@ pub(super) fn parse_drop(tok: &[Token], counter: &mut usize) -> LangResult<State
pub(super) fn parse_drop_full(tok: &[Token]) -> LangResult<Statement> { pub(super) fn parse_drop_full(tok: &[Token]) -> LangResult<Statement> {
let mut i = 0; let mut i = 0;
let r = self::parse_drop(tok, &mut i); let r = self::parse_drop(tok, &mut i);
full_tt!(i, tok.len()); assert_full_tt!(i, tok.len());
r r
} }
@ -114,11 +117,15 @@ pub(super) fn parse_inspect(tok: &[Token], c: &mut usize) -> LangResult<Statemen
Some(Token![space]) if tok.len() == 2 && tok[1].is_ident() => { Some(Token![space]) if tok.len() == 2 && tok[1].is_ident() => {
*c += 1; *c += 1;
Ok(Statement::InspectSpace(unsafe { Ok(Statement::InspectSpace(unsafe {
// UNSAFE(@ohsayan): Safe because of the match predicate
extract!(tok[1], Token::Ident(ref space) => space.clone()) extract!(tok[1], Token::Ident(ref space) => space.clone())
})) }))
} }
Some(Token::Ident(id)) Some(Token::Ident(id))
if unsafe { id.as_slice().eq_ignore_ascii_case(b"spaces") } && tok.len() == 1 => if unsafe {
// UNSAFE(@ohsayan): Token lifetime ensures validity of slice
id.as_slice().eq_ignore_ascii_case(b"spaces")
} && tok.len() == 1 =>
{ {
Ok(Statement::InspectSpaces) Ok(Statement::InspectSpaces)
} }
@ -130,6 +137,6 @@ pub(super) fn parse_inspect(tok: &[Token], c: &mut usize) -> LangResult<Statemen
pub(super) fn parse_inspect_full(tok: &[Token]) -> LangResult<Statement> { pub(super) fn parse_inspect_full(tok: &[Token]) -> LangResult<Statement> {
let mut i = 0; let mut i = 0;
let r = self::parse_inspect(tok, &mut i); let r = self::parse_inspect(tok, &mut i);
full_tt!(i, tok.len()); assert_full_tt!(i, tok.len());
r r
} }

@ -56,10 +56,16 @@ fn process_entity(tok: &[Token], d: &mut MaybeInit<Entity>, i: &mut usize) -> bo
let is_single = Entity::tokens_with_single(tok); let is_single = Entity::tokens_with_single(tok);
if is_full { if is_full {
*i += 3; *i += 3;
*d = MaybeInit::new(unsafe { Entity::full_entity_from_slice(tok) }) *d = MaybeInit::new(unsafe {
// UNSAFE(@ohsayan): Predicate ensures validity
Entity::full_entity_from_slice(tok)
})
} else if is_single { } else if is_single {
*i += 1; *i += 1;
*d = MaybeInit::new(unsafe { Entity::single_entity_from_slice(tok) }); *d = MaybeInit::new(unsafe {
// UNSAFE(@ohsayan): Predicate ensures validity
Entity::single_entity_from_slice(tok)
});
} }
is_full | is_single is_full | is_single
} }
@ -69,13 +75,17 @@ fn process_entity(tok: &[Token], d: &mut MaybeInit<Entity>, i: &mut usize) -> bo
*/ */
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub(super) struct RelationalExpr<'a> { pub struct RelationalExpr<'a> {
pub(super) lhs: &'a [u8], pub(super) lhs: &'a [u8],
pub(super) rhs: &'a Lit, pub(super) rhs: &'a Lit,
pub(super) opc: u8, pub(super) opc: u8,
} }
impl<'a> RelationalExpr<'a> { impl<'a> RelationalExpr<'a> {
#[inline(always)]
pub(super) fn new(lhs: &'a [u8], rhs: &'a Lit, opc: u8) -> Self {
Self { lhs, rhs, opc }
}
pub(super) const OP_EQ: u8 = 1; pub(super) const OP_EQ: u8 = 1;
pub(super) const OP_NE: u8 = 2; pub(super) const OP_NE: u8 = 2;
pub(super) const OP_GT: u8 = 3; pub(super) const OP_GT: u8 = 3;
@ -102,7 +112,7 @@ impl<'a> RelationalExpr<'a> {
let op_le = u(tok[0] == Token![<] && tok[1] == Token![=]) * Self::OP_LE; let op_le = u(tok[0] == Token![<] && tok[1] == Token![=]) * Self::OP_LE;
let op_lt = u(tok[0] == Token![<] && op_le == 0) * Self::OP_LT; let op_lt = u(tok[0] == Token![<] && op_le == 0) * Self::OP_LT;
let opc = op_eq + op_ne + op_ge + op_gt + op_le + op_lt; let opc = op_eq + op_ne + op_ge + op_gt + op_le + op_lt;
*okay = opc != 0; *okay &= opc != 0;
*i += 1 + (opc & 1 == 0) as usize; *i += 1 + (opc & 1 == 0) as usize;
opc opc
} }
@ -127,6 +137,7 @@ impl<'a> RelationalExpr<'a> {
*cnt += i + okay as usize; *cnt += i + okay as usize;
if compiler::likely(okay) { if compiler::likely(okay) {
Some(unsafe { Some(unsafe {
// UNSAFE(@ohsayan): tok[0] is checked for being an ident, tok[lit_idx] also checked to be a lit
Self { Self {
lhs: extract!(tok[0], Token::Ident(ref id) => id.as_slice()), lhs: extract!(tok[0], Token::Ident(ref id) => id.as_slice()),
rhs: extract!(tok[lit_idx], Token::Lit(ref l) => l), rhs: extract!(tok[lit_idx], Token::Lit(ref l) => l),
@ -140,7 +151,7 @@ impl<'a> RelationalExpr<'a> {
} }
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub(super) struct WhereClause<'a> { pub struct WhereClause<'a> {
c: HashMap<&'a [u8], RelationalExpr<'a>>, c: HashMap<&'a [u8], RelationalExpr<'a>>,
} }
@ -150,11 +161,19 @@ impl<'a> WhereClause<'a> {
Self { c } Self { c }
} }
#[inline(always)] #[inline(always)]
pub(super) fn parse_where(tok: &'a [Token], flag: &mut bool, cnt: &mut usize) -> Self { /// Parse the expressions in a `where` context, appending it to the given map
///
/// Notes:
/// - Deny duplicate clauses
/// - No enforcement on minimum number of clauses
fn parse_where_and_append_to(
tok: &'a [Token],
cnt: &mut usize,
c: &mut HashMap<&'a [u8], RelationalExpr<'a>>,
) -> bool {
let l = tok.len(); let l = tok.len();
let mut okay = true; let mut okay = true;
let mut i = 0; let mut i = 0;
let mut c = HashMap::with_capacity(2);
let mut has_more = true; let mut has_more = true;
while okay && i < l && has_more { while okay && i < l && has_more {
okay &= RelationalExpr::try_parse(&tok[i..], &mut i) okay &= RelationalExpr::try_parse(&tok[i..], &mut i)
@ -163,8 +182,18 @@ impl<'a> WhereClause<'a> {
has_more = tok[cmp::min(i, l - 1)] == Token![and] && i < l; has_more = tok[cmp::min(i, l - 1)] == Token![and] && i < l;
i += has_more as usize; i += has_more as usize;
} }
*flag &= okay;
*cnt += i; *cnt += i;
okay
}
#[inline(always)]
/// Parse a where context
///
/// Notes:
/// - Enforce a minimum of 1 clause
pub(super) fn parse_where(tok: &'a [Token], flag: &mut bool, cnt: &mut usize) -> Self {
let mut c = HashMap::with_capacity(2);
*flag &= Self::parse_where_and_append_to(tok, cnt, &mut c);
*flag &= !c.is_empty();
Self { c } Self { c }
} }
} }
@ -174,7 +203,7 @@ pub(super) fn parse_where_clause_full<'a>(tok: &'a [Token]) -> Option<WhereClaus
let mut flag = true; let mut flag = true;
let mut i = 0; let mut i = 0;
let ret = WhereClause::parse_where(tok, &mut flag, &mut i); let ret = WhereClause::parse_where(tok, &mut flag, &mut i);
full_tt!(tok.len(), i); assert_full_tt!(tok.len(), i);
flag.then_some(ret) flag.then_some(ret)
} }
@ -183,7 +212,7 @@ pub(super) fn parse_where_clause_full<'a>(tok: &'a [Token]) -> Option<WhereClaus
pub(super) fn parse_relexpr_full<'a>(tok: &'a [Token]) -> Option<RelationalExpr<'a>> { pub(super) fn parse_relexpr_full<'a>(tok: &'a [Token]) -> Option<RelationalExpr<'a>> {
let mut i = 0; let mut i = 0;
let okay = RelationalExpr::try_parse(tok, &mut i); let okay = RelationalExpr::try_parse(tok, &mut i);
full_tt!(tok.len(), i); assert_full_tt!(tok.len(), i);
okay okay
} }
@ -339,7 +368,15 @@ pub(super) fn parse_data_map_syntax<'a>(
Lit::UnsafeLit(l) => DataType::AnonymousTypeNeedsEval(l.clone()), Lit::UnsafeLit(l) => DataType::AnonymousTypeNeedsEval(l.clone()),
Lit::SignedInt(int) => DataType::SignedInt(*int), Lit::SignedInt(int) => DataType::SignedInt(*int),
}; };
okay &= data.insert(unsafe { id.as_slice() }, Some(dt)).is_none(); okay &= data
.insert(
unsafe {
// UNSAFE(@ohsayan): Token lifetime ensures slice validity
id.as_slice()
},
Some(dt),
)
.is_none();
} }
(Token::Ident(id), Token::Symbol(Symbol::TtOpenSqBracket)) => { (Token::Ident(id), Token::Symbol(Symbol::TtOpenSqBracket)) => {
// ooh a list // ooh a list
@ -348,11 +385,25 @@ pub(super) fn parse_data_map_syntax<'a>(
okay &= lst_ok; okay &= lst_ok;
i += lst_i; i += lst_i;
okay &= data okay &= data
.insert(unsafe { id.as_slice() }, Some(l.into())) .insert(
unsafe {
// UNSAFE(@ohsayan): Token lifetime ensures validity
id.as_slice()
},
Some(l.into()),
)
.is_none(); .is_none();
} }
(Token::Ident(id), Token![null]) => { (Token::Ident(id), Token![null]) => {
okay &= data.insert(unsafe { id.as_slice() }, None).is_none(); okay &= data
.insert(
unsafe {
// UNSAFE(@ohsayan): Token lifetime ensures validity
id.as_slice()
},
None,
)
.is_none();
} }
_ => { _ => {
okay = false; okay = false;
@ -411,65 +462,77 @@ impl<'a> From<HashMap<&'static [u8], Option<DataType>>> for InsertData<'a> {
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub struct InsertStatement<'a> { pub struct InsertStatement<'a> {
pub(super) primary_key: &'a Lit,
pub(super) entity: Entity, pub(super) entity: Entity,
pub(super) data: InsertData<'a>, pub(super) data: InsertData<'a>,
} }
#[inline(always)]
fn parse_entity(tok: &[Token], entity: &mut MaybeInit<Entity>, i: &mut usize) -> bool {
let is_full = tok[0].is_ident() && tok[1] == Token![.] && tok[2].is_ident();
let is_half = tok[0].is_ident();
unsafe {
// UNSAFE(@ohsayan): The branch predicates assert their correctness
if is_full {
*i += 3;
*entity = MaybeInit::new(Entity::full_entity_from_slice(&tok));
} else if is_half {
*i += 1;
*entity = MaybeInit::new(Entity::single_entity_from_slice(&tok));
}
}
is_full | is_half
}
pub(super) fn parse_insert<'a>( pub(super) fn parse_insert<'a>(
src: &'a [Token], tok: &'a [Token],
counter: &mut usize, counter: &mut usize,
) -> LangResult<InsertStatement<'a>> { ) -> LangResult<InsertStatement<'a>> {
/* /*
smallest: smallest:
insert space:primary_key () insert into model (primarykey)
^1 ^2 ^3^4 ^^5,6 ^1 ^2 ^3 ^4 ^5
*/ */
let l = src.len(); let l = tok.len();
let is_full = Entity::tokens_with_full(src); if compiler::unlikely(l < 5) {
let is_half = Entity::tokens_with_single(src); return compiler::cold_err(Err(LangError::UnexpectedEndofStatement));
}
let mut okay = is_full | is_half; let mut okay = tok[0] == Token![into];
let mut i = 0; let mut i = okay as usize;
let mut entity = MaybeInit::uninit(); let mut entity = MaybeInit::uninit();
okay &= parse_entity(&tok[i..], &mut entity, &mut i);
okay &= process_entity(&src[i..], &mut entity, &mut i); let mut data = None;
if !(i < l) {
// primary key is a lit; atleast lit + (<oparen><cparen>) | (<obrace><cbrace>) unsafe {
okay &= l >= (i + 4); // UNSAFE(@ohsayan): ALWAYS true because 1 + 3 for entity; early exit if smaller
// colon, lit impossible!();
okay &= src[i] == Token![:] && src[i + 1].is_lit();
// check data
let is_map = okay && src[i + 2] == Token![open {}];
let is_tuple = okay && src[i + 2] == Token![() open];
okay &= is_map | is_tuple;
if !okay {
return Err(LangError::UnexpectedToken);
} }
}
let primary_key = unsafe { extract!(&src[i+1], Token::Lit(l) => l) }; match tok[i] {
i += 3; // skip col, lit + op/ob Token![() open] => {
let (this_data, incr, ok) = parse_data_tuple_syntax(&tok[i + 1..]);
let data;
if is_tuple {
let (ord, cnt, ok) = parse_data_tuple_syntax(&src[i..]);
okay &= ok; okay &= ok;
i += cnt; i += incr + 1;
data = InsertData::Ordered(ord); data = Some(InsertData::Ordered(this_data));
} else { }
let (map, cnt, ok) = parse_data_map_syntax(&src[i..]); Token![open {}] => {
let (this_data, incr, ok) = parse_data_map_syntax(&tok[i + 1..]);
okay &= ok; okay &= ok;
i += cnt; i += incr + 1;
data = InsertData::Map(map); data = Some(InsertData::Map(this_data));
}
_ => okay = false,
} }
*counter += i; *counter += i;
if okay { if okay {
let data = unsafe {
// UNSAFE(@ohsayan): Will be safe because of `okay` since it ensures that entity has been initialized
data.unwrap_unchecked()
};
Ok(InsertStatement { Ok(InsertStatement {
primary_key, entity: unsafe {
entity: unsafe { entity.assume_init() }, // UNSAFE(@ohsayan): Will be safe because of `okay` since it ensures that entity has been initialized
entity.assume_init()
},
data, data,
}) })
} else { } else {
@ -491,14 +554,39 @@ pub(super) fn parse_insert_full<'a>(tok: &'a [Token]) -> Option<InsertStatement<
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub(super) struct SelectStatement<'a> { pub(super) struct SelectStatement<'a> {
/// the primary key
pub(super) primary_key: &'a Lit,
/// the entity /// the entity
pub(super) entity: Entity, pub(super) entity: Entity,
/// fields in order of querying. will be zero when wildcard is set /// fields in order of querying. will be zero when wildcard is set
pub(super) fields: Vec<RawSlice>, pub(super) fields: Vec<RawSlice>,
/// whether a wildcard was passed /// whether a wildcard was passed
pub(super) wildcard: bool, pub(super) wildcard: bool,
/// where clause
pub(super) clause: WhereClause<'a>,
}
impl<'a> SelectStatement<'a> {
#[inline(always)]
pub(crate) fn new_test(
entity: Entity,
fields: Vec<RawSlice>,
wildcard: bool,
clauses: HashMap<&'a [u8], RelationalExpr<'a>>,
) -> SelectStatement<'a> {
Self::new(entity, fields, wildcard, clauses)
}
#[inline(always)]
fn new(
entity: Entity,
fields: Vec<RawSlice>,
wildcard: bool,
clauses: HashMap<&'a [u8], RelationalExpr<'a>>,
) -> SelectStatement<'a> {
Self {
entity,
fields,
wildcard,
clause: WhereClause::new(clauses),
}
}
} }
/// Parse a `select` query. The cursor should have already passed the `select` token when this /// Parse a `select` query. The cursor should have already passed the `select` token when this
@ -507,47 +595,62 @@ pub(super) fn parse_select<'a>(
tok: &'a [Token], tok: &'a [Token],
counter: &mut usize, counter: &mut usize,
) -> LangResult<SelectStatement<'a>> { ) -> LangResult<SelectStatement<'a>> {
/*
Smallest query:
select * from model
^ ^ ^
1 2 3
*/
let l = tok.len(); let l = tok.len();
if compiler::unlikely(l < 3) {
let mut i = 0_usize; return compiler::cold_err(Err(LangError::UnexpectedEndofStatement));
let mut okay = l > 4; }
let mut fields = Vec::new(); let mut i = 0;
let is_wildcard = i < l && tok[i] == Token![*]; let mut okay = true;
let mut select_fields = Vec::new();
let is_wildcard = tok[0] == Token![*];
i += is_wildcard as usize; i += is_wildcard as usize;
while i < l && okay && !is_wildcard {
while okay && i < l && tok[i].is_ident() && !is_wildcard { match tok[i] {
unsafe { Token::Ident(ref id) => select_fields.push(id.clone()),
fields.push(extract!(&tok[i], Token::Ident(id) => id.clone())); _ => {
break;
}
} }
i += 1; i += 1;
// skip comma let nx_idx = cmp::min(i, l);
let nx_comma = i < l && tok[i] == Token![,]; let nx_comma = tok[nx_idx] == Token![,] && i < l;
let nx_from = i < l && tok[i] == Token![from]; let nx_from = tok[nx_idx] == Token![from];
okay &= nx_comma | nx_from; okay &= nx_comma | nx_from;
i += nx_comma as usize; i += nx_comma as usize;
} }
okay &= is_wildcard | !select_fields.is_empty();
okay &= i < l && tok[i] == Token![from]; okay &= (i + 2) <= l;
if compiler::unlikely(!okay) {
return compiler::cold_err(Err(LangError::UnexpectedToken));
}
okay &= tok[i] == Token![from];
i += okay as usize; i += okay as usize;
// now process entity
// parsed upto select a, b, c from ...; now parse entity and select
let mut entity = MaybeInit::uninit(); let mut entity = MaybeInit::uninit();
okay &= process_entity(&tok[i..], &mut entity, &mut i); okay &= process_entity(&tok[i..], &mut entity, &mut i);
let has_where = tok[cmp::min(i, l)] == Token![where];
// now primary key i += has_where as usize;
okay &= i < l && tok[i] == Token![:]; let mut clauses = <_ as Default>::default();
i += okay as usize; if has_where {
okay &= i < l && tok[i].is_lit(); okay &= WhereClause::parse_where_and_append_to(&tok[i..], &mut i, &mut clauses);
okay &= !clauses.is_empty(); // append doesn't enforce clause arity
*counter += i + okay as usize; }
*counter += i;
if okay { if okay {
let primary_key = unsafe { extract!(tok[i], Token::Lit(ref l) => l) };
Ok(SelectStatement { Ok(SelectStatement {
primary_key, entity: unsafe {
entity: unsafe { entity.assume_init() }, // UNSAFE(@ohsayan): `process_entity` and `okay` assert correctness
fields, entity.assume_init()
},
fields: select_fields,
wildcard: is_wildcard, wildcard: is_wildcard,
clause: WhereClause::new(clauses),
}) })
} else { } else {
Err(LangError::UnexpectedToken) Err(LangError::UnexpectedToken)
@ -559,7 +662,7 @@ pub(super) fn parse_select<'a>(
pub(super) fn parse_select_full<'a>(tok: &'a [Token]) -> Option<SelectStatement<'a>> { pub(super) fn parse_select_full<'a>(tok: &'a [Token]) -> Option<SelectStatement<'a>> {
let mut i = 0; let mut i = 0;
let r = self::parse_select(tok, &mut i); let r = self::parse_select(tok, &mut i);
assert!(i == tok.len(), "didn't use full length"); assert_full_tt!(i, tok.len());
r.ok() r.ok()
} }
@ -645,6 +748,10 @@ impl<'a> AssignmentExpression<'a> {
if okay { if okay {
let expression = unsafe { let expression = unsafe {
/*
UNSAFE(@ohsayan): tok[0] is checked for being an ident early on; second, tok[i]
is also checked for being a lit and then `okay` ensures correctness
*/
AssignmentExpression { AssignmentExpression {
lhs: extract!(tok[0], Token::Ident(ref r) => r.clone()), lhs: extract!(tok[0], Token::Ident(ref r) => r.clone()),
rhs: extract!(tok[i], Token::Lit(ref l) => l), rhs: extract!(tok[i], Token::Lit(ref l) => l),
@ -663,7 +770,7 @@ pub(super) fn parse_expression_full<'a>(tok: &'a [Token]) -> Option<AssignmentEx
let mut i = 0; let mut i = 0;
let mut exprs = Vec::new(); let mut exprs = Vec::new();
if AssignmentExpression::parse_and_append_expression(tok, &mut exprs, &mut i) { if AssignmentExpression::parse_and_append_expression(tok, &mut exprs, &mut i) {
full_tt!(i, tok.len()); assert_full_tt!(i, tok.len());
Some(exprs.remove(0)) Some(exprs.remove(0))
} else { } else {
None None
@ -676,52 +783,86 @@ pub(super) fn parse_expression_full<'a>(tok: &'a [Token]) -> Option<AssignmentEx
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub struct UpdateStatement<'a> { pub struct UpdateStatement<'a> {
pub(super) primary_key: &'a Lit,
pub(super) entity: Entity, pub(super) entity: Entity,
pub(super) expressions: Vec<AssignmentExpression<'a>>, pub(super) expressions: Vec<AssignmentExpression<'a>>,
pub(super) wc: WhereClause<'a>,
} }
impl<'a> UpdateStatement<'a> { impl<'a> UpdateStatement<'a> {
#[inline(always)]
#[cfg(test)]
pub fn new_test(
entity: Entity,
expressions: Vec<AssignmentExpression<'a>>,
wc: HashMap<&'a [u8], RelationalExpr<'a>>,
) -> Self {
Self::new(entity, expressions, WhereClause::new(wc))
}
#[inline(always)]
pub fn new(
entity: Entity,
expressions: Vec<AssignmentExpression<'a>>,
wc: WhereClause<'a>,
) -> Self {
Self {
entity,
expressions,
wc,
}
}
#[inline(always)]
pub(super) fn parse_update(tok: &'a [Token], counter: &mut usize) -> LangResult<Self> { pub(super) fn parse_update(tok: &'a [Token], counter: &mut usize) -> LangResult<Self> {
/*
TODO(@ohsayan): Allow volcanoes
smallest tt:
update model SET x = 1 where x = 1
^1 ^2 ^3 ^4 ^5^6 ^7^8^9
*/
let l = tok.len(); let l = tok.len();
// TODO(@ohsayan): This would become 8 when we add `SET`. It isn't exactly needed but is for purely aesthetic purposes if compiler::unlikely(l < 9) {
let mut okay = l > 6; return compiler::cold_err(Err(LangError::UnexpectedEndofStatement));
let mut i = 0_usize; }
let mut i = 0;
// parse entity
let mut entity = MaybeInit::uninit(); let mut entity = MaybeInit::uninit();
okay &= process_entity(tok, &mut entity, &mut i); let mut okay = parse_entity(&tok[i..], &mut entity, &mut i);
if !((i + 6) <= l) {
// check if we have our primary key unsafe {
okay &= i < l && tok[i] == Token![:]; // UNSAFE(@ohsayan): Obvious, just a hint; entity can fw by 3 max
i += okay as usize; impossible!();
okay &= i < l && tok[i].is_lit(); }
let primary_key_location = i; }
i += okay as usize; okay &= tok[i] == Token![set];
i += 1; // ignore whatever we have here, even if it's broken
// now parse expressions that we have to update let mut nx_where = false;
let mut expressions = Vec::new(); let mut expressions = Vec::new();
while i < l && okay { while i < l && okay && !nx_where {
okay &= AssignmentExpression::parse_and_append_expression( okay &= AssignmentExpression::parse_and_append_expression(
&tok[i..], &tok[i..],
&mut expressions, &mut expressions,
&mut i, &mut i,
); );
let nx_comma = i < l && tok[i] == Token![,]; let nx_idx = cmp::min(i, l);
// TODO(@ohsayan): Define the need for a semicolon; remember, no SQL unsafety! let nx_comma = tok[nx_idx] == Token![,] && i < l;
let nx_over = i == l; // NOTE: volcano
okay &= nx_comma | nx_over; nx_where = tok[nx_idx] == Token![where] && i < l;
okay &= nx_comma | nx_where; // NOTE: volcano
i += nx_comma as usize; i += nx_comma as usize;
} }
okay &= nx_where;
i += okay as usize;
// now process expressions
let mut clauses = <_ as Default>::default();
okay &= WhereClause::parse_where_and_append_to(&tok[i..], &mut i, &mut clauses);
okay &= !clauses.is_empty(); // NOTE: volcano
*counter += i; *counter += i;
if okay { if okay {
let primary_key =
unsafe { extract!(tok[primary_key_location], Token::Lit(ref pk) => pk) };
Ok(Self { Ok(Self {
primary_key, entity: unsafe {
entity: unsafe { entity.assume_init() }, // UNSAFE(@ohsayan): This is safe because of `parse_entity` and `okay`
entity.assume_init()
},
expressions, expressions,
wc: WhereClause::new(clauses),
}) })
} else { } else {
Err(LangError::UnexpectedToken) Err(LangError::UnexpectedToken)
@ -733,7 +874,7 @@ impl<'a> UpdateStatement<'a> {
pub(super) fn parse_update_full<'a>(tok: &'a [Token]) -> LangResult<UpdateStatement<'a>> { pub(super) fn parse_update_full<'a>(tok: &'a [Token]) -> LangResult<UpdateStatement<'a>> {
let mut i = 0; let mut i = 0;
let r = UpdateStatement::parse_update(tok, &mut i); let r = UpdateStatement::parse_update(tok, &mut i);
full_tt!(i, tok.len()); assert_full_tt!(i, tok.len());
r r
} }
@ -746,43 +887,56 @@ pub(super) fn parse_update_full<'a>(tok: &'a [Token]) -> LangResult<UpdateStatem
#[derive(Debug, PartialEq)] #[derive(Debug, PartialEq)]
pub(super) struct DeleteStatement<'a> { pub(super) struct DeleteStatement<'a> {
pub(super) primary_key: &'a Lit,
pub(super) entity: Entity, pub(super) entity: Entity,
pub(super) wc: WhereClause<'a>,
} }
impl<'a> DeleteStatement<'a> { impl<'a> DeleteStatement<'a> {
#[inline(always)] #[inline(always)]
pub(super) fn new(primary_key: &'a Lit, entity: Entity) -> Self { pub(super) fn new(entity: Entity, wc: WhereClause<'a>) -> Self {
Self { Self { entity, wc }
primary_key,
entity,
} }
#[inline(always)]
#[cfg(test)]
pub(super) fn new_test(entity: Entity, wc: HashMap<&'a [u8], RelationalExpr<'a>>) -> Self {
Self::new(entity, WhereClause::new(wc))
} }
pub(super) fn parse_delete(tok: &'a [Token], counter: &mut usize) -> LangResult<Self> { pub(super) fn parse_delete(tok: &'a [Token], counter: &mut usize) -> LangResult<Self> {
/*
TODO(@ohsayan): Volcano
smallest tt:
delete from model where x = 1
^1 ^2 ^3 ^4 ^5
*/
let l = tok.len(); let l = tok.len();
let mut okay = l > 2; if compiler::unlikely(l < 5) {
let mut i = 0_usize; return compiler::cold_err(Err(LangError::UnexpectedEndofStatement));
}
// parse entity let mut i = 0;
let mut okay = tok[i] == Token![from];
i += 1; // skip even if incorrect
let mut entity = MaybeInit::uninit(); let mut entity = MaybeInit::uninit();
okay &= process_entity(tok, &mut entity, &mut i); okay &= parse_entity(&tok[i..], &mut entity, &mut i);
if !(i < l) {
// find primary key unsafe {
okay &= i < l && tok[i] == Token![:]; // UNSAFE(@ohsayan): Obvious, we have atleast 5, used max 4
i += okay as usize; impossible!();
okay &= i < l && tok[i].is_lit(); }
let primary_key_idx = i; }
i += okay as usize; okay &= tok[i] == Token![where]; // NOTE: volcano
i += 1; // skip even if incorrect
let mut clauses = <_ as Default>::default();
okay &= WhereClause::parse_where_and_append_to(&tok[i..], &mut i, &mut clauses);
okay &= !clauses.is_empty();
*counter += i; *counter += i;
if okay { if okay {
unsafe {
Ok(Self { Ok(Self {
primary_key: extract!(tok[primary_key_idx], Token::Lit(ref l) => l), entity: unsafe {
entity: entity.assume_init(), // UNSAFE(@ohsayan): obvious due to `okay` and `parse_entity`
entity.assume_init()
},
wc: WhereClause::new(clauses),
}) })
}
} else { } else {
Err(LangError::UnexpectedToken) Err(LangError::UnexpectedToken)
} }
@ -793,6 +947,6 @@ impl<'a> DeleteStatement<'a> {
pub(super) fn parse_delete_full<'a>(tok: &'a [Token]) -> LangResult<DeleteStatement<'a>> { pub(super) fn parse_delete_full<'a>(tok: &'a [Token]) -> LangResult<DeleteStatement<'a>> {
let mut i = 0_usize; let mut i = 0_usize;
let r = DeleteStatement::parse_delete(tok, &mut i); let r = DeleteStatement::parse_delete(tok, &mut i);
full_tt!(i, tok.len()); assert_full_tt!(i, tok.len());
r r
} }

@ -363,8 +363,7 @@ fn kwph(k: &[u8]) -> u8 {
} }
#[inline(always)] #[inline(always)]
fn kwof(key: &str) -> Option<Keyword> { fn kwof(key: &[u8]) -> Option<Keyword> {
let key = key.as_bytes();
let ph = kwph(key); let ph = kwph(key);
if ph < KW_LUT.len() as u8 && KW_LUT[ph as usize].0 == key { if ph < KW_LUT.len() as u8 && KW_LUT[ph as usize].0 == key {
Some(KW_LUT[ph as usize].1) Some(KW_LUT[ph as usize].1)
@ -498,11 +497,11 @@ impl<'a, const OPERATING_MODE: u8> Lexer<'a, OPERATING_MODE> {
fn scan_ident_or_keyword(&mut self) { fn scan_ident_or_keyword(&mut self) {
let s = self.scan_ident(); let s = self.scan_ident();
let st = unsafe { s.as_str() }; let st = unsafe { s.as_slice() }.to_ascii_lowercase();
match kwof(st) { match kwof(&st) {
Some(kw) => self.tokens.push(kw.into()), Some(kw) => self.tokens.push(kw.into()),
// FIXME(@ohsayan): Uh, mind fixing this? The only advantage is that I can keep the graph *memory* footprint small // FIXME(@ohsayan): Uh, mind fixing this? The only advantage is that I can keep the graph *memory* footprint small
None if st == "true" || st == "false" => self.push_token(Lit::Bool(st == "true")), None if st == b"true" || st == b"false" => self.push_token(Lit::Bool(st == b"true")),
None => self.tokens.push(Token::Ident(s)), None => self.tokens.push(Token::Ident(s)),
} }
} }

@ -25,7 +25,7 @@
*/ */
#[cfg(test)] #[cfg(test)]
macro_rules! full_tt { macro_rules! assert_full_tt {
($a:expr, $b:expr) => { ($a:expr, $b:expr) => {
assert_eq!($a, $b, "full token stream not utilized") assert_eq!($a, $b, "full token stream not utilized")
}; };
@ -180,6 +180,9 @@ macro_rules! Token {
__kw!(Truncate) __kw!(Truncate)
}; };
// dml misc // dml misc
(set) => {
__kw!(Set)
};
(limit) => { (limit) => {
__kw!(Limit) __kw!(Limit)
}; };

@ -39,7 +39,7 @@ mod tests;
#[cfg(test)] #[cfg(test)]
use core::{fmt, ops::Deref}; use core::{fmt, ops::Deref};
use core::{mem, slice, str}; use core::{mem, ptr::NonNull, slice, str};
/* /*
Lang errors Lang errors
@ -78,7 +78,7 @@ pub enum LangError {
#[cfg_attr(not(test), derive(Debug))] #[cfg_attr(not(test), derive(Debug))]
#[derive(Clone)] #[derive(Clone)]
pub struct RawSlice { pub struct RawSlice {
ptr: *const u8, ptr: NonNull<u8>,
len: usize, len: usize,
} }
@ -90,13 +90,16 @@ impl RawSlice {
const _EALIGN: () = assert!(mem::align_of::<Self>() == mem::align_of::<&[u8]>()); const _EALIGN: () = assert!(mem::align_of::<Self>() == mem::align_of::<&[u8]>());
const FAKE_SLICE: Self = unsafe { Self::new_from_str("") }; const FAKE_SLICE: Self = unsafe { Self::new_from_str("") };
const unsafe fn new(ptr: *const u8, len: usize) -> Self { const unsafe fn new(ptr: *const u8, len: usize) -> Self {
Self { ptr, len } Self {
ptr: NonNull::new_unchecked(ptr.cast_mut()),
len,
}
} }
const unsafe fn new_from_str(s: &str) -> Self { const unsafe fn new_from_str(s: &str) -> Self {
Self::new(s.as_bytes().as_ptr(), s.as_bytes().len()) Self::new(s.as_bytes().as_ptr(), s.as_bytes().len())
} }
unsafe fn as_slice(&self) -> &[u8] { unsafe fn as_slice(&self) -> &[u8] {
slice::from_raw_parts(self.ptr, self.len) slice::from_raw_parts(self.ptr.as_ptr(), self.len)
} }
unsafe fn as_str(&self) -> &str { unsafe fn as_str(&self) -> &str {
str::from_utf8_unchecked(self.as_slice()) str::from_utf8_unchecked(self.as_slice())

@ -2034,21 +2034,21 @@ mod dml_tests {
#[test] #[test]
fn insert_tuple_mini() { fn insert_tuple_mini() {
let x = lex(br#" let x = lex(br#"
insert twitter.users:"sayan" () insert into twitter.users ("sayan")
"#) "#)
.unwrap(); .unwrap();
let r = dml::parse_insert_full(&x[1..]).unwrap(); let r = dml::parse_insert_full(&x[1..]).unwrap();
let e = InsertStatement { let e = InsertStatement {
primary_key: &("sayan".to_string().into()),
entity: Entity::Full("twitter".into(), "users".into()), entity: Entity::Full("twitter".into(), "users".into()),
data: vec![].into(), data: into_array_nullable!["sayan"].to_vec().into(),
}; };
assert_eq!(e, r); assert_eq!(e, r);
} }
#[test] #[test]
fn insert_tuple() { fn insert_tuple() {
let x = lex(br#" let x = lex(br#"
insert twitter.users:"sayan" ( insert into twitter.users (
"sayan",
"Sayan", "Sayan",
"sayan@example.com", "sayan@example.com",
true, true,
@ -2059,9 +2059,15 @@ mod dml_tests {
.unwrap(); .unwrap();
let r = dml::parse_insert_full(&x[1..]).unwrap(); let r = dml::parse_insert_full(&x[1..]).unwrap();
let e = InsertStatement { let e = InsertStatement {
primary_key: &("sayan".to_string().into()),
entity: Entity::Full("twitter".into(), "users".into()), entity: Entity::Full("twitter".into(), "users".into()),
data: into_array_nullable!["Sayan", "sayan@example.com", true, 12345, 67890] data: into_array_nullable![
"sayan",
"Sayan",
"sayan@example.com",
true,
12345,
67890
]
.to_vec() .to_vec()
.into(), .into(),
}; };
@ -2070,7 +2076,8 @@ mod dml_tests {
#[test] #[test]
fn insert_tuple_pro() { fn insert_tuple_pro() {
let x = lex(br#" let x = lex(br#"
insert twitter.users:"sayan" ( insert into twitter.users (
"sayan",
"Sayan", "Sayan",
"sayan@example.com", "sayan@example.com",
true, true,
@ -2084,9 +2091,9 @@ mod dml_tests {
.unwrap(); .unwrap();
let r = dml::parse_insert_full(&x[1..]).unwrap(); let r = dml::parse_insert_full(&x[1..]).unwrap();
let e = InsertStatement { let e = InsertStatement {
primary_key: &("sayan".to_string().into()),
entity: Entity::Full("twitter".into(), "users".into()), entity: Entity::Full("twitter".into(), "users".into()),
data: into_array_nullable![ data: into_array_nullable![
"sayan",
"Sayan", "Sayan",
"sayan@example.com", "sayan@example.com",
true, true,
@ -2103,19 +2110,25 @@ mod dml_tests {
} }
#[test] #[test]
fn insert_map_mini() { fn insert_map_mini() {
let tok = lex(br#"insert jotsy.app:"sayan" {}"#).unwrap(); let tok = lex(br#"
insert into jotsy.app { username: "sayan" }
"#)
.unwrap();
let r = dml::parse_insert_full(&tok[1..]).unwrap(); let r = dml::parse_insert_full(&tok[1..]).unwrap();
let e = InsertStatement { let e = InsertStatement {
primary_key: &("sayan".to_string().into()),
entity: Entity::Full("jotsy".into(), "app".into()), entity: Entity::Full("jotsy".into(), "app".into()),
data: nullable_dict! {}.into(), data: dict_nullable! {
"username".as_bytes() => "sayan"
}
.into(),
}; };
assert_eq!(e, r); assert_eq!(e, r);
} }
#[test] #[test]
fn insert_map() { fn insert_map() {
let tok = lex(br#" let tok = lex(br#"
insert jotsy.app:"sayan" { insert into jotsy.app {
username: "sayan",
name: "Sayan", name: "Sayan",
email: "sayan@example.com", email: "sayan@example.com",
verified: true, verified: true,
@ -2126,9 +2139,9 @@ mod dml_tests {
.unwrap(); .unwrap();
let r = dml::parse_insert_full(&tok[1..]).unwrap(); let r = dml::parse_insert_full(&tok[1..]).unwrap();
let e = InsertStatement { let e = InsertStatement {
primary_key: &("sayan".to_string().into()),
entity: Entity::Full("jotsy".into(), "app".into()), entity: Entity::Full("jotsy".into(), "app".into()),
data: dict_nullable! { data: dict_nullable! {
"username".as_bytes() => "sayan",
"name".as_bytes() => "Sayan", "name".as_bytes() => "Sayan",
"email".as_bytes() => "sayan@example.com", "email".as_bytes() => "sayan@example.com",
"verified".as_bytes() => true, "verified".as_bytes() => true,
@ -2142,7 +2155,8 @@ mod dml_tests {
#[test] #[test]
fn insert_map_pro() { fn insert_map_pro() {
let tok = lex(br#" let tok = lex(br#"
insert jotsy.app:"sayan" { insert into jotsy.app {
username: "sayan",
password: "pass123", password: "pass123",
email: "sayan@example.com", email: "sayan@example.com",
verified: true, verified: true,
@ -2156,9 +2170,9 @@ mod dml_tests {
.unwrap(); .unwrap();
let r = dml::parse_insert_full(&tok[1..]).unwrap(); let r = dml::parse_insert_full(&tok[1..]).unwrap();
let e = InsertStatement { let e = InsertStatement {
primary_key: &("sayan".to_string()).into(),
entity: Entity::Full("jotsy".into(), "app".into()), entity: Entity::Full("jotsy".into(), "app".into()),
data: dict_nullable! { data: dict_nullable! {
"username".as_bytes() => "sayan",
"password".as_bytes() => "pass123", "password".as_bytes() => "pass123",
"email".as_bytes() => "sayan@example.com", "email".as_bytes() => "sayan@example.com",
"verified".as_bytes() => true, "verified".as_bytes() => true,
@ -2175,72 +2189,93 @@ mod dml_tests {
} }
mod stmt_select { mod stmt_select {
use crate::engine::ql::dml::RelationalExpr;
use { use {
super::*, super::*,
crate::engine::ql::{ crate::engine::ql::{
ast::Entity, ast::Entity,
dml::{self, SelectStatement}, dml::{self, SelectStatement},
lexer::Lit,
}, },
}; };
#[test] #[test]
fn select_mini() { fn select_mini() {
let tok = lex(br#" let tok = lex(br#"
select * from users:"sayan" select * from users where username = "sayan"
"#) "#)
.unwrap(); .unwrap();
let r = dml::parse_select_full(&tok[1..]).unwrap(); let r = dml::parse_select_full(&tok[1..]).unwrap();
let e = SelectStatement { let username_where = "sayan".into();
primary_key: &Lit::Str("sayan".into()), let e = SelectStatement::new_test(
entity: Entity::Single("users".into()), Entity::Single("users".into()),
fields: [].to_vec(), [].to_vec(),
wildcard: true, true,
}; dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(), &username_where, RelationalExpr::OP_EQ
),
},
);
assert_eq!(r, e); assert_eq!(r, e);
} }
#[test] #[test]
fn select() { fn select() {
let tok = lex(br#" let tok = lex(br#"
select field1 from users:"sayan" select field1 from users where username = "sayan"
"#) "#)
.unwrap(); .unwrap();
let r = dml::parse_select_full(&tok[1..]).unwrap(); let r = dml::parse_select_full(&tok[1..]).unwrap();
let e = SelectStatement { let username_where = "sayan".into();
primary_key: &Lit::Str("sayan".into()), let e = SelectStatement::new_test(
entity: Entity::Single("users".into()), Entity::Single("users".into()),
fields: ["field1".into()].to_vec(), ["field1".into()].to_vec(),
wildcard: false, false,
}; dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(), &username_where, RelationalExpr::OP_EQ
),
},
);
assert_eq!(r, e); assert_eq!(r, e);
} }
#[test] #[test]
fn select_pro() { fn select_pro() {
let tok = lex(br#" let tok = lex(br#"
select field1 from twitter.users:"sayan" select field1 from twitter.users where username = "sayan"
"#) "#)
.unwrap(); .unwrap();
let r = dml::parse_select_full(&tok[1..]).unwrap(); let r = dml::parse_select_full(&tok[1..]).unwrap();
let e = SelectStatement { let username_where = "sayan".into();
primary_key: &Lit::Str("sayan".into()), let e = SelectStatement::new_test(
entity: Entity::Full("twitter".into(), "users".into()), Entity::Full("twitter".into(), "users".into()),
fields: ["field1".into()].to_vec(), ["field1".into()].to_vec(),
wildcard: false, false,
}; dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(), &username_where, RelationalExpr::OP_EQ
),
},
);
assert_eq!(r, e); assert_eq!(r, e);
} }
#[test] #[test]
fn select_pro_max() { fn select_pro_max() {
let tok = lex(br#" let tok = lex(br#"
select field1, field2 from twitter.users:"sayan" select field1, field2 from twitter.users where username = "sayan"
"#) "#)
.unwrap(); .unwrap();
let r = dml::parse_select_full(&tok[1..]).unwrap(); let r = dml::parse_select_full(&tok[1..]).unwrap();
let e = SelectStatement { let username_where = "sayan".into();
primary_key: &Lit::Str("sayan".into()), let e = SelectStatement::new_test(
entity: Entity::Full("twitter".into(), "users".into()), Entity::Full("twitter".into(), "users".into()),
fields: ["field1".into(), "field2".into()].to_vec(), ["field1".into(), "field2".into()].to_vec(),
wildcard: false, false,
}; dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(), &username_where, RelationalExpr::OP_EQ
),
},
);
assert_eq!(r, e); assert_eq!(r, e);
} }
} }
@ -2323,46 +2358,67 @@ mod dml_tests {
super::*, super::*,
crate::engine::ql::{ crate::engine::ql::{
ast::Entity, ast::Entity,
dml::{self, AssignmentExpression, Operator, UpdateStatement}, dml::{
self, AssignmentExpression, Operator, RelationalExpr, UpdateStatement,
WhereClause,
},
}, },
}; };
#[test] #[test]
fn update_mini() { fn update_mini() {
let tok = lex(br#" let tok = lex(br#"
update app:"sayan" notes += "this is my new note" update app SET notes += "this is my new note" where username = "sayan"
"#) "#)
.unwrap(); .unwrap();
let where_username = "sayan".into();
let note = "this is my new note".to_string().into(); let note = "this is my new note".to_string().into();
let r = dml::parse_update_full(&tok[1..]).unwrap(); let r = dml::parse_update_full(&tok[1..]).unwrap();
let e = UpdateStatement { let e = UpdateStatement {
primary_key: &("sayan".to_owned().into()),
entity: Entity::Single("app".into()), entity: Entity::Single("app".into()),
expressions: vec![AssignmentExpression { expressions: vec![AssignmentExpression {
lhs: "notes".into(), lhs: "notes".into(),
rhs: &note, rhs: &note,
operator_fn: Operator::AddAssign, operator_fn: Operator::AddAssign,
}], }],
wc: WhereClause::new(dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(),
&where_username,
RelationalExpr::OP_EQ
)
}),
}; };
assert_eq!(r, e); assert_eq!(r, e);
} }
#[test] #[test]
fn update() { fn update() {
let tok = lex(br#" let tok = lex(br#"
update jotsy.app:"sayan" notes += "this is my new note", email = "sayan@example.com" update
jotsy.app
SET
notes += "this is my new note",
email = "sayan@example.com"
WHERE
username = "sayan"
"#) "#)
.unwrap(); .unwrap();
let r = dml::parse_update_full(&tok[1..]).unwrap(); let r = dml::parse_update_full(&tok[1..]).unwrap();
let where_username = "sayan".into();
let field_note = "this is my new note".into(); let field_note = "this is my new note".into();
let field_email = "sayan@example.com".into(); let field_email = "sayan@example.com".into();
let primary_key = "sayan".into();
let e = UpdateStatement { let e = UpdateStatement {
primary_key: &primary_key,
entity: ("jotsy", "app").into(), entity: ("jotsy", "app").into(),
expressions: vec![ expressions: vec![
AssignmentExpression::new("notes".into(), &field_note, Operator::AddAssign), AssignmentExpression::new("notes".into(), &field_note, Operator::AddAssign),
AssignmentExpression::new("email".into(), &field_email, Operator::Assign), AssignmentExpression::new("email".into(), &field_email, Operator::Assign),
], ],
wc: WhereClause::new(dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(),
&where_username,
RelationalExpr::OP_EQ
)
}),
}; };
assert_eq!(r, e); assert_eq!(r, e);
@ -2373,29 +2429,47 @@ mod dml_tests {
super::*, super::*,
crate::engine::ql::{ crate::engine::ql::{
ast::Entity, ast::Entity,
dml::{self, DeleteStatement}, dml::{self, DeleteStatement, RelationalExpr},
}, },
}; };
#[test] #[test]
fn delete_mini() { fn delete_mini() {
let tok = lex(br#" let tok = lex(br#"
delete users:"sayan" delete from users where username = "sayan"
"#) "#)
.unwrap(); .unwrap();
let primary_key = "sayan".into(); let primary_key = "sayan".into();
let e = DeleteStatement::new(&primary_key, Entity::Single("users".into())); let e = DeleteStatement::new_test(
Entity::Single("users".into()),
dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(),
&primary_key,
RelationalExpr::OP_EQ
)
},
);
let r = dml::parse_delete_full(&tok[1..]).unwrap(); let r = dml::parse_delete_full(&tok[1..]).unwrap();
assert_eq!(r, e); assert_eq!(r, e);
} }
#[test] #[test]
fn delete() { fn delete() {
let tok = lex(br#" let tok = lex(br#"
delete twitter.users:"sayan" delete from twitter.users where username = "sayan"
"#) "#)
.unwrap(); .unwrap();
let primary_key = "sayan".into(); let primary_key = "sayan".into();
let e = DeleteStatement::new(&primary_key, ("twitter", "users").into()); let e = DeleteStatement::new_test(
("twitter", "users").into(),
dict! {
"username".as_bytes() => RelationalExpr::new(
"username".as_bytes(),
&primary_key,
RelationalExpr::OP_EQ
)
},
);
let r = dml::parse_delete_full(&tok[1..]).unwrap(); let r = dml::parse_delete_full(&tok[1..]).unwrap();
assert_eq!(r, e); assert_eq!(r, e);
} }
@ -2528,5 +2602,13 @@ mod dml_tests {
}); });
assert_eq!(expected, dml::parse_where_clause_full(&tok).unwrap()); assert_eq!(expected, dml::parse_where_clause_full(&tok).unwrap());
} }
#[test]
fn where_duplicate_condition() {
let tok = lex(br#"
userid = 100 and userid > 200
"#)
.unwrap();
assert!(dml::parse_where_clause_full(&tok).is_none());
}
} }
} }

@ -230,6 +230,7 @@ pub struct MaybeInit<T> {
impl<T> MaybeInit<T> { impl<T> MaybeInit<T> {
/// Initialize a new uninitialized variant /// Initialize a new uninitialized variant
#[inline(always)]
pub const fn uninit() -> Self { pub const fn uninit() -> Self {
Self { Self {
#[cfg(test)] #[cfg(test)]
@ -238,6 +239,7 @@ impl<T> MaybeInit<T> {
} }
} }
/// Initialize with a value /// Initialize with a value
#[inline(always)]
pub const fn new(val: T) -> Self { pub const fn new(val: T) -> Self {
Self { Self {
#[cfg(test)] #[cfg(test)]
@ -250,6 +252,7 @@ impl<T> MaybeInit<T> {
/// ## Safety /// ## Safety
/// ///
/// Caller needs to ensure that the data is actually initialized /// Caller needs to ensure that the data is actually initialized
#[inline(always)]
pub const unsafe fn assume_init(self) -> T { pub const unsafe fn assume_init(self) -> T {
#[cfg(test)] #[cfg(test)]
{ {
@ -264,6 +267,7 @@ impl<T> MaybeInit<T> {
/// ## Safety /// ## Safety
/// ///
/// Caller needs to ensure that the data is actually initialized /// Caller needs to ensure that the data is actually initialized
#[inline(always)]
pub const unsafe fn assume_init_ref(&self) -> &T { pub const unsafe fn assume_init_ref(&self) -> &T {
#[cfg(test)] #[cfg(test)]
{ {

Loading…
Cancel
Save