Upgrade all interfaces to use the new protocol
parent
a018b76f40
commit
879e20f6ca
@ -1,128 +0,0 @@
|
|||||||
/*
|
|
||||||
* Created on Tue May 11 2021
|
|
||||||
*
|
|
||||||
* This file is a part of Skytable
|
|
||||||
* Skytable (formerly known as TerrabaseDB or Skybase) is a free and open-source
|
|
||||||
* NoSQL database written by Sayan Nandan ("the Author") with the
|
|
||||||
* vision to provide flexibility in data modelling without compromising
|
|
||||||
* on performance, queryability or scalability.
|
|
||||||
*
|
|
||||||
* Copyright (c) 2021, Sayan Nandan <ohsayan@outlook.com>
|
|
||||||
*
|
|
||||||
* This program is free software: you can redistribute it and/or modify
|
|
||||||
* it under the terms of the GNU Affero General Public License as published by
|
|
||||||
* the Free Software Foundation, either version 3 of the License, or
|
|
||||||
* (at your option) any later version.
|
|
||||||
*
|
|
||||||
* This program is distributed in the hope that it will be useful,
|
|
||||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
* GNU Affero General Public License for more details.
|
|
||||||
*
|
|
||||||
* You should have received a copy of the GNU Affero General Public License
|
|
||||||
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
|
|
||||||
use super::UnsafeSlice;
|
|
||||||
#[cfg(test)]
|
|
||||||
use bytes::Bytes;
|
|
||||||
|
|
||||||
#[non_exhaustive]
|
|
||||||
#[derive(Debug, PartialEq)]
|
|
||||||
/// # Unsafe elements
|
|
||||||
/// This enum represents the data types as **unsafe** elements, supported by the Skyhash Protocol
|
|
||||||
///
|
|
||||||
/// ## Safety
|
|
||||||
///
|
|
||||||
/// The instantiator must ensure that the [`UnsafeSlice`]s are valid. See its own safety contracts
|
|
||||||
/// for more information
|
|
||||||
pub enum UnsafeElement {
|
|
||||||
/// Arrays can be nested! Their `<tsymbol>` is `&`
|
|
||||||
Array(Box<[UnsafeElement]>),
|
|
||||||
/// A String value; `<tsymbol>` is `+`
|
|
||||||
String(UnsafeSlice),
|
|
||||||
/// An unsigned integer value; `<tsymbol>` is `:`
|
|
||||||
UnsignedInt(u64),
|
|
||||||
/// A non-recursive String array; tsymbol: `_`
|
|
||||||
FlatArray(Box<[UnsafeFlatElement]>),
|
|
||||||
/// A type-less non-recursive array
|
|
||||||
AnyArray(Box<[UnsafeSlice]>),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, PartialEq)]
|
|
||||||
/// An **unsafe** flat element, present in a flat array
|
|
||||||
pub enum UnsafeFlatElement {
|
|
||||||
String(UnsafeSlice),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl UnsafeElement {
|
|
||||||
pub const fn is_any_array(&self) -> bool {
|
|
||||||
matches!(self, Self::AnyArray(_))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// test impls are for our tests
|
|
||||||
#[cfg(test)]
|
|
||||||
impl UnsafeElement {
|
|
||||||
pub unsafe fn to_owned_flat_array(inner: &[UnsafeFlatElement]) -> Vec<FlatElement> {
|
|
||||||
inner
|
|
||||||
.iter()
|
|
||||||
.map(|v| match v {
|
|
||||||
UnsafeFlatElement::String(st) => {
|
|
||||||
FlatElement::String(Bytes::copy_from_slice(st.as_slice()))
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
pub unsafe fn to_owned_any_array(inner: &[UnsafeSlice]) -> Vec<Bytes> {
|
|
||||||
inner
|
|
||||||
.iter()
|
|
||||||
.map(|v| Bytes::copy_from_slice(v.as_slice()))
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
pub unsafe fn to_owned_array(inner: &[Self]) -> Vec<OwnedElement> {
|
|
||||||
inner
|
|
||||||
.iter()
|
|
||||||
.map(|v| match &*v {
|
|
||||||
UnsafeElement::String(st) => {
|
|
||||||
OwnedElement::String(Bytes::copy_from_slice(st.as_slice()))
|
|
||||||
}
|
|
||||||
UnsafeElement::UnsignedInt(int) => OwnedElement::UnsignedInt(*int),
|
|
||||||
UnsafeElement::AnyArray(arr) => {
|
|
||||||
OwnedElement::AnyArray(Self::to_owned_any_array(arr))
|
|
||||||
}
|
|
||||||
UnsafeElement::Array(arr) => OwnedElement::Array(Self::to_owned_array(arr)),
|
|
||||||
UnsafeElement::FlatArray(frr) => {
|
|
||||||
OwnedElement::FlatArray(Self::to_owned_flat_array(frr))
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
pub unsafe fn as_owned_element(&self) -> OwnedElement {
|
|
||||||
match self {
|
|
||||||
Self::AnyArray(arr) => OwnedElement::AnyArray(Self::to_owned_any_array(arr)),
|
|
||||||
Self::FlatArray(frr) => OwnedElement::FlatArray(Self::to_owned_flat_array(frr)),
|
|
||||||
Self::Array(arr) => OwnedElement::Array(Self::to_owned_array(arr)),
|
|
||||||
Self::String(st) => OwnedElement::String(Bytes::copy_from_slice(st.as_slice())),
|
|
||||||
Self::UnsignedInt(int) => OwnedElement::UnsignedInt(*int),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// owned variants to simplify equality in tests
|
|
||||||
#[derive(Debug, PartialEq)]
|
|
||||||
#[cfg(test)]
|
|
||||||
pub enum OwnedElement {
|
|
||||||
Array(Vec<OwnedElement>),
|
|
||||||
String(Bytes),
|
|
||||||
UnsignedInt(u64),
|
|
||||||
FlatArray(Vec<FlatElement>),
|
|
||||||
AnyArray(Vec<Bytes>),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
#[derive(Debug, PartialEq)]
|
|
||||||
pub enum FlatElement {
|
|
||||||
String(Bytes),
|
|
||||||
}
|
|
File diff suppressed because it is too large
Load Diff
@ -1,355 +0,0 @@
|
|||||||
/*
|
|
||||||
* Created on Tue Apr 12 2022
|
|
||||||
*
|
|
||||||
* This file is a part of Skytable
|
|
||||||
* Skytable (formerly known as TerrabaseDB or Skybase) is a free and open-source
|
|
||||||
* NoSQL database written by Sayan Nandan ("the Author") with the
|
|
||||||
* vision to provide flexibility in data modelling without compromising
|
|
||||||
* on performance, queryability or scalability.
|
|
||||||
*
|
|
||||||
* Copyright (c) 2022, Sayan Nandan <ohsayan@outlook.com>
|
|
||||||
*
|
|
||||||
* This program is free software: you can redistribute it and/or modify
|
|
||||||
* it under the terms of the GNU Affero General Public License as published by
|
|
||||||
* the Free Software Foundation, either version 3 of the License, or
|
|
||||||
* (at your option) any later version.
|
|
||||||
*
|
|
||||||
* This program is distributed in the hope that it will be useful,
|
|
||||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
* GNU Affero General Public License for more details.
|
|
||||||
*
|
|
||||||
* You should have received a copy of the GNU Affero General Public License
|
|
||||||
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
|
|
||||||
#![allow(unused)] // TODO(@ohsayan): Remove this once we're done
|
|
||||||
|
|
||||||
use crate::corestore::heap_array::HeapArray;
|
|
||||||
use crate::protocol::{ParseError, ParseResult, UnsafeSlice};
|
|
||||||
use core::{marker::PhantomData, mem::transmute};
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests;
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct Query {
|
|
||||||
forward: usize,
|
|
||||||
data: QueryType,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Query {
|
|
||||||
const fn new(forward: usize, data: QueryType) -> Self {
|
|
||||||
Self { forward, data }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum QueryType {
|
|
||||||
Simple(SimpleQuery),
|
|
||||||
Pipelined(PipelinedQuery),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct SimpleQuery {
|
|
||||||
data: HeapArray<UnsafeSlice>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl SimpleQuery {
|
|
||||||
#[cfg(test)]
|
|
||||||
fn into_owned(self) -> OwnedSimpleQuery {
|
|
||||||
OwnedSimpleQuery {
|
|
||||||
data: self
|
|
||||||
.data
|
|
||||||
.iter()
|
|
||||||
.map(|v| unsafe { v.as_slice().to_owned() })
|
|
||||||
.collect(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
struct OwnedSimpleQuery {
|
|
||||||
data: Vec<Vec<u8>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub struct PipelinedQuery {
|
|
||||||
data: HeapArray<HeapArray<UnsafeSlice>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl PipelinedQuery {
|
|
||||||
#[cfg(test)]
|
|
||||||
fn into_owned(self) -> OwnedPipelinedQuery {
|
|
||||||
OwnedPipelinedQuery {
|
|
||||||
data: self
|
|
||||||
.data
|
|
||||||
.iter()
|
|
||||||
.map(|v| {
|
|
||||||
v.iter()
|
|
||||||
.map(|v| unsafe { v.as_slice().to_owned() })
|
|
||||||
.collect()
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
struct OwnedPipelinedQuery {
|
|
||||||
data: Vec<Vec<Vec<u8>>>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// A parser for Skyhash 2.0
|
|
||||||
pub struct Parser<'a> {
|
|
||||||
end: *const u8,
|
|
||||||
cursor: *const u8,
|
|
||||||
_lt: PhantomData<&'a ()>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Parser<'a> {
|
|
||||||
/// Initialize a new parser
|
|
||||||
pub fn new(slice: &[u8]) -> Self {
|
|
||||||
unsafe {
|
|
||||||
Self {
|
|
||||||
end: slice.as_ptr().add(slice.len()),
|
|
||||||
cursor: slice.as_ptr(),
|
|
||||||
_lt: PhantomData,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// basic methods
|
|
||||||
impl<'a> Parser<'a> {
|
|
||||||
/// Returns a ptr one byte past the allocation of the buffer
|
|
||||||
const fn data_end_ptr(&self) -> *const u8 {
|
|
||||||
self.end
|
|
||||||
}
|
|
||||||
/// Returns the position of the cursor
|
|
||||||
/// WARNING: Deref might led to a segfault
|
|
||||||
const fn cursor_ptr(&self) -> *const u8 {
|
|
||||||
self.cursor
|
|
||||||
}
|
|
||||||
/// Check how many bytes we have left
|
|
||||||
fn remaining(&self) -> usize {
|
|
||||||
self.data_end_ptr() as usize - self.cursor_ptr() as usize
|
|
||||||
}
|
|
||||||
/// Check if we have `size` bytes remaining
|
|
||||||
fn has_remaining(&self, size: usize) -> bool {
|
|
||||||
self.remaining() >= size
|
|
||||||
}
|
|
||||||
/// Check if we have exhausted the buffer
|
|
||||||
fn exhausted(&self) -> bool {
|
|
||||||
self.cursor_ptr() >= self.data_end_ptr()
|
|
||||||
}
|
|
||||||
/// Check if the buffer is not exhausted
|
|
||||||
fn not_exhausted(&self) -> bool {
|
|
||||||
self.cursor_ptr() < self.data_end_ptr()
|
|
||||||
}
|
|
||||||
/// Attempts to return the byte pointed at by the cursor.
|
|
||||||
/// WARNING: The same segfault warning
|
|
||||||
const unsafe fn get_byte_at_cursor(&self) -> u8 {
|
|
||||||
*self.cursor_ptr()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// mutable refs
|
|
||||||
impl<'a> Parser<'a> {
|
|
||||||
/// Increment the cursor by `by` positions
|
|
||||||
unsafe fn incr_cursor_by(&mut self, by: usize) {
|
|
||||||
self.cursor = self.cursor.add(by);
|
|
||||||
}
|
|
||||||
/// Increment the position of the cursor by one position
|
|
||||||
unsafe fn incr_cursor(&mut self) {
|
|
||||||
self.incr_cursor_by(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// higher level abstractions
|
|
||||||
impl<'a> Parser<'a> {
|
|
||||||
/// Attempt to read `len` bytes
|
|
||||||
fn read_until(&mut self, len: usize) -> ParseResult<UnsafeSlice> {
|
|
||||||
if self.has_remaining(len) {
|
|
||||||
unsafe {
|
|
||||||
// UNSAFE(@ohsayan): Already verified lengths
|
|
||||||
let slice = UnsafeSlice::new(self.cursor_ptr(), len);
|
|
||||||
self.incr_cursor_by(len);
|
|
||||||
Ok(slice)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Err(ParseError::NotEnough)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/// Attempt to read a byte slice terminated by an LF
|
|
||||||
fn read_line(&mut self) -> ParseResult<UnsafeSlice> {
|
|
||||||
let start_ptr = self.cursor_ptr();
|
|
||||||
unsafe {
|
|
||||||
while self.not_exhausted() && self.get_byte_at_cursor() != b'\n' {
|
|
||||||
self.incr_cursor();
|
|
||||||
}
|
|
||||||
if self.not_exhausted() && self.get_byte_at_cursor() == b'\n' {
|
|
||||||
let len = self.cursor_ptr() as usize - start_ptr as usize;
|
|
||||||
self.incr_cursor(); // skip LF
|
|
||||||
Ok(UnsafeSlice::new(start_ptr, len))
|
|
||||||
} else {
|
|
||||||
Err(ParseError::NotEnough)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/// Attempt to read a line, **rejecting an empty payload**
|
|
||||||
fn read_line_pedantic(&mut self) -> ParseResult<UnsafeSlice> {
|
|
||||||
let start_ptr = self.cursor_ptr();
|
|
||||||
unsafe {
|
|
||||||
while self.not_exhausted() && self.get_byte_at_cursor() != b'\n' {
|
|
||||||
self.incr_cursor();
|
|
||||||
}
|
|
||||||
let len = self.cursor_ptr() as usize - start_ptr as usize;
|
|
||||||
let has_lf = self.not_exhausted() && self.get_byte_at_cursor() == b'\n';
|
|
||||||
if has_lf && len != 0 {
|
|
||||||
self.incr_cursor(); // skip LF
|
|
||||||
Ok(UnsafeSlice::new(start_ptr, len))
|
|
||||||
} else {
|
|
||||||
// just some silly hackery
|
|
||||||
Err(transmute(has_lf))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/// Attempt to read an `usize` from the buffer
|
|
||||||
fn read_usize(&mut self) -> ParseResult<usize> {
|
|
||||||
let line = self.read_line_pedantic()?;
|
|
||||||
let bytes = unsafe {
|
|
||||||
// UNSAFE(@ohsayan): We just extracted the slice
|
|
||||||
line.as_slice()
|
|
||||||
};
|
|
||||||
let mut ret = 0usize;
|
|
||||||
for byte in bytes {
|
|
||||||
if byte.is_ascii_digit() {
|
|
||||||
ret = match ret.checked_mul(10) {
|
|
||||||
Some(r) => r,
|
|
||||||
None => return Err(ParseError::DatatypeParseFailure),
|
|
||||||
};
|
|
||||||
ret = match ret.checked_add((byte & 0x0F) as _) {
|
|
||||||
Some(r) => r,
|
|
||||||
None => return Err(ParseError::DatatypeParseFailure),
|
|
||||||
};
|
|
||||||
} else {
|
|
||||||
return Err(ParseError::DatatypeParseFailure);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(ret)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// query impls
|
|
||||||
impl<'a> Parser<'a> {
|
|
||||||
/// Parse the next simple query. This should have passed the `*` tsymbol
|
|
||||||
///
|
|
||||||
/// Simple query structure (tokenized line-by-line):
|
|
||||||
/// ```text
|
|
||||||
/// * -> Simple Query Header
|
|
||||||
/// <n>\n -> Count of elements in the simple query
|
|
||||||
/// <l0>\n -> Length of element 1
|
|
||||||
/// <e0> -> element 1 itself
|
|
||||||
/// <l1>\n -> Length of element 2
|
|
||||||
/// <e1> -> element 2 itself
|
|
||||||
/// ...
|
|
||||||
/// ```
|
|
||||||
fn _next_simple_query(&mut self) -> ParseResult<HeapArray<UnsafeSlice>> {
|
|
||||||
let element_count = self.read_usize()?;
|
|
||||||
unsafe {
|
|
||||||
let mut data = HeapArray::new_writer(element_count);
|
|
||||||
for i in 0..element_count {
|
|
||||||
let element_size = self.read_usize()?;
|
|
||||||
let element = self.read_until(element_size)?;
|
|
||||||
data.write_to_index(i, element);
|
|
||||||
}
|
|
||||||
Ok(data.finish())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/// Parse a simple query
|
|
||||||
fn next_simple_query(&mut self) -> ParseResult<SimpleQuery> {
|
|
||||||
Ok(SimpleQuery {
|
|
||||||
data: self._next_simple_query()?,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
/// Parse a pipelined query. This should have passed the `$` tsymbol
|
|
||||||
///
|
|
||||||
/// Pipelined query structure (tokenized line-by-line):
|
|
||||||
/// ```text
|
|
||||||
/// $ -> Pipeline
|
|
||||||
/// <n>\n -> Pipeline has n queries
|
|
||||||
/// <lq0>\n -> Query 1 has 3 elements
|
|
||||||
/// <lq0e0>\n -> Q1E1 has 3 bytes
|
|
||||||
/// <q0e0> -> Q1E1 itself
|
|
||||||
/// <lq0e1>\n -> Q1E2 has 1 byte
|
|
||||||
/// <q0e1> -> Q1E2 itself
|
|
||||||
/// <lq0e2>\n -> Q1E3 has 3 bytes
|
|
||||||
/// <q0e2> -> Q1E3 itself
|
|
||||||
/// <lq1>\n -> Query 2 has 2 elements
|
|
||||||
/// <lq1e0>\n -> Q2E1 has 3 bytes
|
|
||||||
/// <q1e0> -> Q2E1 itself
|
|
||||||
/// <lq1e1>\n -> Q2E2 has 1 byte
|
|
||||||
/// <q1e1> -> Q2E2 itself
|
|
||||||
/// ...
|
|
||||||
/// ```
|
|
||||||
///
|
|
||||||
/// Example:
|
|
||||||
/// ```text
|
|
||||||
/// $ -> Pipeline
|
|
||||||
/// 2\n -> Pipeline has 2 queries
|
|
||||||
/// 3\n -> Query 1 has 3 elements
|
|
||||||
/// 3\n -> Q1E1 has 3 bytes
|
|
||||||
/// SET -> Q1E1 itself
|
|
||||||
/// 1\n -> Q1E2 has 1 byte
|
|
||||||
/// x -> Q1E2 itself
|
|
||||||
/// 3\n -> Q1E3 has 3 bytes
|
|
||||||
/// 100 -> Q1E3 itself
|
|
||||||
/// 2\n -> Query 2 has 2 elements
|
|
||||||
/// 3\n -> Q2E1 has 3 bytes
|
|
||||||
/// GET -> Q2E1 itself
|
|
||||||
/// 1\n -> Q2E2 has 1 byte
|
|
||||||
/// x -> Q2E2 itself
|
|
||||||
/// ```
|
|
||||||
fn next_pipeline(&mut self) -> ParseResult<PipelinedQuery> {
|
|
||||||
let query_count = self.read_usize()?;
|
|
||||||
unsafe {
|
|
||||||
let mut queries = HeapArray::new_writer(query_count);
|
|
||||||
for i in 0..query_count {
|
|
||||||
let sq = self._next_simple_query()?;
|
|
||||||
queries.write_to_index(i, sq);
|
|
||||||
}
|
|
||||||
Ok(PipelinedQuery {
|
|
||||||
data: queries.finish(),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
fn _parse(&mut self) -> ParseResult<QueryType> {
|
|
||||||
if self.not_exhausted() {
|
|
||||||
unsafe {
|
|
||||||
let first_byte = self.get_byte_at_cursor();
|
|
||||||
self.incr_cursor();
|
|
||||||
let data = match first_byte {
|
|
||||||
b'*' => {
|
|
||||||
// a simple query
|
|
||||||
QueryType::Simple(self.next_simple_query()?)
|
|
||||||
}
|
|
||||||
b'$' => {
|
|
||||||
// a pipelined query
|
|
||||||
QueryType::Pipelined(self.next_pipeline()?)
|
|
||||||
}
|
|
||||||
_ => return Err(ParseError::UnexpectedByte),
|
|
||||||
};
|
|
||||||
Ok(data)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
Err(ParseError::NotEnough)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pub fn parse(buf: &[u8]) -> ParseResult<Query> {
|
|
||||||
let mut slf = Self::new(buf);
|
|
||||||
let body = slf._parse()?;
|
|
||||||
let consumed = slf.cursor_ptr() as usize - buf.as_ptr() as usize;
|
|
||||||
Ok(Query::new(consumed, body))
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,643 +0,0 @@
|
|||||||
/*
|
|
||||||
* Created on Tue Apr 12 2022
|
|
||||||
*
|
|
||||||
* This file is a part of Skytable
|
|
||||||
* Skytable (formerly known as TerrabaseDB or Skybase) is a free and open-source
|
|
||||||
* NoSQL database written by Sayan Nandan ("the Author") with the
|
|
||||||
* vision to provide flexibility in data modelling without compromising
|
|
||||||
* on performance, queryability or scalability.
|
|
||||||
*
|
|
||||||
* Copyright (c) 2022, Sayan Nandan <ohsayan@outlook.com>
|
|
||||||
*
|
|
||||||
* This program is free software: you can redistribute it and/or modify
|
|
||||||
* it under the terms of the GNU Affero General Public License as published by
|
|
||||||
* the Free Software Foundation, either version 3 of the License, or
|
|
||||||
* (at your option) any later version.
|
|
||||||
*
|
|
||||||
* This program is distributed in the hope that it will be useful,
|
|
||||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
* GNU Affero General Public License for more details.
|
|
||||||
*
|
|
||||||
* You should have received a copy of the GNU Affero General Public License
|
|
||||||
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
|
|
||||||
use super::{Parser, PipelinedQuery, Query, QueryType, SimpleQuery};
|
|
||||||
use crate::protocol::ParseError;
|
|
||||||
use std::iter::Map;
|
|
||||||
use std::vec::IntoIter as VecIntoIter;
|
|
||||||
|
|
||||||
type IterPacketWithLen = Map<VecIntoIter<Vec<u8>>, fn(Vec<u8>) -> (usize, Vec<u8>)>;
|
|
||||||
type Packets = Vec<Vec<u8>>;
|
|
||||||
|
|
||||||
macro_rules! v {
|
|
||||||
() => {
|
|
||||||
vec![]
|
|
||||||
};
|
|
||||||
($literal:literal) => {
|
|
||||||
$literal.to_vec()
|
|
||||||
};
|
|
||||||
($($lit:literal),*) => {
|
|
||||||
vec![$(
|
|
||||||
$lit.as_bytes().to_owned()
|
|
||||||
),*]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn ensure_exhausted(p: &Parser) {
|
|
||||||
assert!(!p.not_exhausted());
|
|
||||||
assert!(p.exhausted());
|
|
||||||
}
|
|
||||||
|
|
||||||
fn ensure_remaining(p: &Parser, r: usize) {
|
|
||||||
assert_eq!(p.remaining(), r);
|
|
||||||
assert!(p.has_remaining(r));
|
|
||||||
}
|
|
||||||
|
|
||||||
fn ensure_not_exhausted(p: &Parser) {
|
|
||||||
assert!(p.not_exhausted());
|
|
||||||
assert!(!p.exhausted());
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_slices(slices: &[&[u8]]) -> Packets {
|
|
||||||
slices.iter().map(|slc| slc.to_vec()).collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn ensure_zero_reads(parser: &mut Parser) {
|
|
||||||
let r = parser.read_until(0).unwrap();
|
|
||||||
unsafe {
|
|
||||||
let slice = r.as_slice();
|
|
||||||
assert_eq!(slice, b"");
|
|
||||||
assert!(slice.is_empty());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// We do this intentionally for "heap simulation"
|
|
||||||
fn slices() -> Packets {
|
|
||||||
const SLICE_COLLECTION: &[&[u8]] = &[
|
|
||||||
b"",
|
|
||||||
b"a",
|
|
||||||
b"ab",
|
|
||||||
b"abc",
|
|
||||||
b"abcd",
|
|
||||||
b"abcde",
|
|
||||||
b"abcdef",
|
|
||||||
b"abcdefg",
|
|
||||||
b"abcdefgh",
|
|
||||||
b"abcdefghi",
|
|
||||||
b"abcdefghij",
|
|
||||||
b"abcdefghijk",
|
|
||||||
b"abcdefghijkl",
|
|
||||||
b"abcdefghijklm",
|
|
||||||
];
|
|
||||||
get_slices(SLICE_COLLECTION)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_slices_with_len(slices: Packets) -> IterPacketWithLen {
|
|
||||||
slices.into_iter().map(|slc| (slc.len(), slc))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn slices_with_len() -> IterPacketWithLen {
|
|
||||||
get_slices_with_len(slices())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn slices_lf() -> Packets {
|
|
||||||
const SLICE_COLLECTION: &[&[u8]] = &[
|
|
||||||
b"",
|
|
||||||
b"a\n",
|
|
||||||
b"ab\n",
|
|
||||||
b"abc\n",
|
|
||||||
b"abcd\n",
|
|
||||||
b"abcde\n",
|
|
||||||
b"abcdef\n",
|
|
||||||
b"abcdefg\n",
|
|
||||||
b"abcdefgh\n",
|
|
||||||
b"abcdefghi\n",
|
|
||||||
b"abcdefghij\n",
|
|
||||||
b"abcdefghijk\n",
|
|
||||||
b"abcdefghijkl\n",
|
|
||||||
b"abcdefghijklm\n",
|
|
||||||
];
|
|
||||||
get_slices(SLICE_COLLECTION)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn slices_lf_with_len() -> IterPacketWithLen {
|
|
||||||
get_slices_with_len(slices_lf())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn simple_query(query: Query) -> SimpleQuery {
|
|
||||||
if let QueryType::Simple(sq) = query.data {
|
|
||||||
sq
|
|
||||||
} else {
|
|
||||||
panic!("Got pipeline instead of simple!");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn pipelined_query(query: Query) -> PipelinedQuery {
|
|
||||||
if let QueryType::Pipelined(pq) = query.data {
|
|
||||||
pq
|
|
||||||
} else {
|
|
||||||
panic!("Got simple instead of pipeline!");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// "actual" tests
|
|
||||||
// data_end_ptr
|
|
||||||
#[test]
|
|
||||||
fn data_end_ptr() {
|
|
||||||
for (len, src) in slices_with_len() {
|
|
||||||
let parser = Parser::new(&src);
|
|
||||||
unsafe {
|
|
||||||
assert_eq!(parser.data_end_ptr(), src.as_ptr().add(len));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// cursor_ptr
|
|
||||||
#[test]
|
|
||||||
fn cursor_ptr() {
|
|
||||||
for src in slices() {
|
|
||||||
let parser = Parser::new(&src);
|
|
||||||
assert_eq!(parser.cursor_ptr(), src.as_ptr())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#[test]
|
|
||||||
fn cursor_ptr_with_incr() {
|
|
||||||
for src in slices() {
|
|
||||||
let mut parser = Parser::new(&src);
|
|
||||||
unsafe {
|
|
||||||
parser.incr_cursor_by(src.len());
|
|
||||||
assert_eq!(parser.cursor_ptr(), src.as_ptr().add(src.len()));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// remaining
|
|
||||||
#[test]
|
|
||||||
fn remaining() {
|
|
||||||
for (len, src) in slices_with_len() {
|
|
||||||
let parser = Parser::new(&src);
|
|
||||||
assert_eq!(parser.remaining(), len);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#[test]
|
|
||||||
fn remaining_with_incr() {
|
|
||||||
for (len, src) in slices_with_len() {
|
|
||||||
let mut parser = Parser::new(&src);
|
|
||||||
unsafe {
|
|
||||||
// no change
|
|
||||||
parser.incr_cursor_by(0);
|
|
||||||
assert_eq!(parser.remaining(), len);
|
|
||||||
if len != 0 {
|
|
||||||
// move one byte ahead. should reach EOA or len - 1
|
|
||||||
parser.incr_cursor();
|
|
||||||
assert_eq!(parser.remaining(), len - 1);
|
|
||||||
// move the cursor to the end; should reach EOA
|
|
||||||
parser.incr_cursor_by(len - 1);
|
|
||||||
assert_eq!(parser.remaining(), 0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// has_remaining
|
|
||||||
#[test]
|
|
||||||
fn has_remaining() {
|
|
||||||
for (len, src) in slices_with_len() {
|
|
||||||
let parser = Parser::new(&src);
|
|
||||||
assert!(parser.has_remaining(len), "should have {len} remaining")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#[test]
|
|
||||||
fn has_remaining_with_incr() {
|
|
||||||
for (len, src) in slices_with_len() {
|
|
||||||
let mut parser = Parser::new(&src);
|
|
||||||
unsafe {
|
|
||||||
// no change
|
|
||||||
parser.incr_cursor_by(0);
|
|
||||||
assert!(parser.has_remaining(len));
|
|
||||||
if len != 0 {
|
|
||||||
// move one byte ahead. should reach EOA or len - 1
|
|
||||||
parser.incr_cursor();
|
|
||||||
assert!(parser.has_remaining(len - 1));
|
|
||||||
// move the cursor to the end; should reach EOA
|
|
||||||
parser.incr_cursor_by(len - 1);
|
|
||||||
assert!(!parser.has_remaining(1));
|
|
||||||
// should always be true
|
|
||||||
assert!(parser.has_remaining(0));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// exhausted
|
|
||||||
#[test]
|
|
||||||
fn exhausted() {
|
|
||||||
for src in slices() {
|
|
||||||
let parser = Parser::new(&src);
|
|
||||||
if src.is_empty() {
|
|
||||||
assert!(parser.exhausted());
|
|
||||||
} else {
|
|
||||||
assert!(!parser.exhausted())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#[test]
|
|
||||||
fn exhausted_with_incr() {
|
|
||||||
for (len, src) in slices_with_len() {
|
|
||||||
let mut parser = Parser::new(&src);
|
|
||||||
if len == 0 {
|
|
||||||
assert!(parser.exhausted());
|
|
||||||
} else {
|
|
||||||
assert!(!parser.exhausted());
|
|
||||||
unsafe {
|
|
||||||
parser.incr_cursor();
|
|
||||||
if len == 1 {
|
|
||||||
assert!(parser.exhausted());
|
|
||||||
} else {
|
|
||||||
assert!(!parser.exhausted());
|
|
||||||
parser.incr_cursor_by(len - 1);
|
|
||||||
assert!(parser.exhausted());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// not_exhausted
|
|
||||||
#[test]
|
|
||||||
fn not_exhausted() {
|
|
||||||
for src in slices() {
|
|
||||||
let parser = Parser::new(&src);
|
|
||||||
if src.is_empty() {
|
|
||||||
assert!(!parser.not_exhausted());
|
|
||||||
} else {
|
|
||||||
assert!(parser.not_exhausted())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#[test]
|
|
||||||
fn not_exhausted_with_incr() {
|
|
||||||
for (len, src) in slices_with_len() {
|
|
||||||
let mut parser = Parser::new(&src);
|
|
||||||
if len == 0 {
|
|
||||||
assert!(!parser.not_exhausted());
|
|
||||||
} else {
|
|
||||||
assert!(parser.not_exhausted());
|
|
||||||
unsafe {
|
|
||||||
parser.incr_cursor();
|
|
||||||
if len == 1 {
|
|
||||||
assert!(!parser.not_exhausted());
|
|
||||||
} else {
|
|
||||||
assert!(parser.not_exhausted());
|
|
||||||
parser.incr_cursor_by(len - 1);
|
|
||||||
assert!(!parser.not_exhausted());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// read_until
|
|
||||||
#[test]
|
|
||||||
fn read_until_empty() {
|
|
||||||
let b = v!(b"");
|
|
||||||
let mut parser = Parser::new(&b);
|
|
||||||
ensure_zero_reads(&mut parser);
|
|
||||||
assert_eq!(parser.read_until(1).unwrap_err(), ParseError::NotEnough);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn read_until_nonempty() {
|
|
||||||
for (len, src) in slices_with_len() {
|
|
||||||
let mut parser = Parser::new(&src);
|
|
||||||
// should always work
|
|
||||||
ensure_zero_reads(&mut parser);
|
|
||||||
// now read the entire length; should always work
|
|
||||||
let r = parser.read_until(len).unwrap();
|
|
||||||
unsafe {
|
|
||||||
let slice = r.as_slice();
|
|
||||||
assert_eq!(slice, src.as_slice());
|
|
||||||
assert_eq!(slice.len(), len);
|
|
||||||
}
|
|
||||||
// even after the buffer is exhausted, `0` should always work
|
|
||||||
ensure_zero_reads(&mut parser);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn read_until_not_enough() {
|
|
||||||
for (len, src) in slices_with_len() {
|
|
||||||
let mut parser = Parser::new(&src);
|
|
||||||
ensure_zero_reads(&mut parser);
|
|
||||||
// try to read more than the amount of data bufferred
|
|
||||||
assert_eq!(
|
|
||||||
parser.read_until(len + 1).unwrap_err(),
|
|
||||||
ParseError::NotEnough
|
|
||||||
);
|
|
||||||
// should the above fail, zero reads should still work
|
|
||||||
ensure_zero_reads(&mut parser);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn read_until_more_bytes() {
|
|
||||||
let sample1 = v!(b"abcd1");
|
|
||||||
let mut p1 = Parser::new(&sample1);
|
|
||||||
unsafe {
|
|
||||||
assert_eq!(
|
|
||||||
p1.read_until(&sample1.len() - 1).unwrap().as_slice(),
|
|
||||||
&sample1[..&sample1.len() - 1]
|
|
||||||
);
|
|
||||||
// ensure we have not exhasuted
|
|
||||||
ensure_not_exhausted(&p1);
|
|
||||||
ensure_remaining(&p1, 1);
|
|
||||||
}
|
|
||||||
let sample2 = v!(b"abcd1234567890!@#$");
|
|
||||||
let mut p2 = Parser::new(&sample2);
|
|
||||||
unsafe {
|
|
||||||
assert_eq!(p2.read_until(4).unwrap().as_slice(), &sample2[..4]);
|
|
||||||
// ensure we have not exhasuted
|
|
||||||
ensure_not_exhausted(&p2);
|
|
||||||
ensure_remaining(&p2, sample2.len() - 4);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// read_line
|
|
||||||
#[test]
|
|
||||||
fn read_line_special_case_only_lf() {
|
|
||||||
let b = v!(b"\n");
|
|
||||||
let mut parser = Parser::new(&b);
|
|
||||||
unsafe {
|
|
||||||
let r = parser.read_line().unwrap();
|
|
||||||
let slice = r.as_slice();
|
|
||||||
assert_eq!(slice, b"");
|
|
||||||
assert!(slice.is_empty());
|
|
||||||
};
|
|
||||||
// ensure it is exhausted
|
|
||||||
ensure_exhausted(&parser);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn read_line() {
|
|
||||||
for (len, src) in slices_lf_with_len() {
|
|
||||||
let mut parser = Parser::new(&src);
|
|
||||||
if len == 0 {
|
|
||||||
// should be empty, so NotEnough
|
|
||||||
assert_eq!(parser.read_line().unwrap_err(), ParseError::NotEnough);
|
|
||||||
} else {
|
|
||||||
// should work
|
|
||||||
unsafe {
|
|
||||||
assert_eq!(
|
|
||||||
parser.read_line().unwrap().as_slice(),
|
|
||||||
&src.as_slice()[..len - 1]
|
|
||||||
);
|
|
||||||
}
|
|
||||||
// now, we attempt to read which should work
|
|
||||||
ensure_zero_reads(&mut parser);
|
|
||||||
}
|
|
||||||
// ensure it is exhausted
|
|
||||||
ensure_exhausted(&parser);
|
|
||||||
// now, we attempt to read another line which should fail
|
|
||||||
assert_eq!(parser.read_line().unwrap_err(), ParseError::NotEnough);
|
|
||||||
// ensure that cursor is at end
|
|
||||||
unsafe {
|
|
||||||
assert_eq!(parser.cursor_ptr(), src.as_ptr().add(len));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn read_line_more_bytes() {
|
|
||||||
let sample1 = v!(b"abcd\n1");
|
|
||||||
let mut p1 = Parser::new(&sample1);
|
|
||||||
let line = p1.read_line().unwrap();
|
|
||||||
unsafe {
|
|
||||||
assert_eq!(line.as_slice(), b"abcd");
|
|
||||||
}
|
|
||||||
// we should still have one remaining
|
|
||||||
ensure_not_exhausted(&p1);
|
|
||||||
ensure_remaining(&p1, 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
fn read_line_subsequent_lf() {
    // two LF-terminated lines back to back
    let payload = v!(b"abcd\n1\n");
    let mut parser = Parser::new(&payload);
    // reading the first line consumes up to (and including) the first LF
    let first = parser.read_line().unwrap();
    unsafe {
        assert_eq!(first.as_slice(), b"abcd");
    }
    // the second line ("1\n") is still pending: exactly two octets
    ensure_not_exhausted(&parser);
    ensure_remaining(&parser, 2);
    // the second line must also be readable
    let second = parser.read_line().unwrap();
    unsafe {
        assert_eq!(second.as_slice(), b"1");
    }
    // nothing is left after the second LF
    ensure_exhausted(&parser);
}
|
|
||||||
|
|
||||||
#[test]
fn read_line_pedantic_okay() {
    // exercise the pedantic line reader over generated LF-terminated
    // slices of increasing length (len includes the trailing LF)
    for (len, src) in slices_lf_with_len() {
        let mut parser = Parser::new(&src);
        if len == 0 {
            // empty buffer: nothing to read, so NotEnough
            assert_eq!(
                parser.read_line_pedantic().unwrap_err(),
                ParseError::NotEnough
            );
        } else {
            // non-empty buffer: the line is everything before the LF
            unsafe {
                assert_eq!(
                    parser.read_line_pedantic().unwrap().as_slice(),
                    &src.as_slice()[..len - 1]
                );
            }
            // zero-length reads must succeed without advancing the cursor
            ensure_zero_reads(&mut parser);
        }
        // ensure it is exhausted
        ensure_exhausted(&parser);
        // now, we attempt to read another line which should fail
        assert_eq!(
            parser.read_line_pedantic().unwrap_err(),
            ParseError::NotEnough
        );
        // ensure that cursor is at end (one past the last octet)
        unsafe {
            assert_eq!(parser.cursor_ptr(), src.as_ptr().add(len));
        }
    }
}
|
|
||||||
|
|
||||||
#[test]
fn read_line_pedantic_fail_empty() {
    // with no data at all the pedantic reader reports NotEnough
    let empty = v!(b"");
    let mut parser = Parser::new(&empty);
    assert_eq!(
        parser.read_line_pedantic().unwrap_err(),
        ParseError::NotEnough
    );
}
|
|
||||||
|
|
||||||
#[test]
fn read_line_pedantic_fail_only_lf() {
    // a lone LF encodes a zero-length line, which pedantic mode rejects
    let lone_lf = v!(b"\n");
    let mut parser = Parser::new(&lone_lf);
    assert_eq!(
        parser.read_line_pedantic().unwrap_err(),
        ParseError::BadPacket
    );
}
|
|
||||||
|
|
||||||
#[test]
fn read_line_pedantic_fail_only_lf_extra_data() {
    // even with trailing data, a zero-length line is still a bad packet
    let payload = v!(b"\n1");
    let mut parser = Parser::new(&payload);
    assert_eq!(
        parser.read_line_pedantic().unwrap_err(),
        ParseError::BadPacket
    );
}
|
|
||||||
|
|
||||||
#[test]
fn read_usize_fail_empty() {
    // no octets at all: the integer cannot even begin, so NotEnough
    let empty = v!(b"");
    assert_eq!(
        Parser::new(&empty).read_usize().unwrap_err(),
        ParseError::NotEnough
    );
    // a lone LF is a zero-length integer: structurally invalid
    let lone_lf = v!(b"\n");
    assert_eq!(
        Parser::new(&lone_lf).read_usize().unwrap_err(),
        ParseError::BadPacket
    );
}
|
|
||||||
|
|
||||||
#[test]
fn read_usize_fail_no_lf() {
    // a digit with no LF terminator is an incomplete integer
    let unterminated = v!(b"1");
    assert_eq!(
        Parser::new(&unterminated).read_usize().unwrap_err(),
        ParseError::NotEnough
    );
}
|
|
||||||
|
|
||||||
#[test]
fn read_usize_okay() {
    // a single LF-terminated digit
    let single_digit = v!(b"1\n");
    assert_eq!(Parser::new(&single_digit).read_usize().unwrap(), 1);
    // multiple digits before the LF
    let multi_digit = v!(b"1234\n");
    assert_eq!(Parser::new(&multi_digit).read_usize().unwrap(), 1234);
}
|
|
||||||
|
|
||||||
#[test]
fn read_usize_fail() {
    // every payload below is LF-terminated but contains at least one
    // non-digit octet, so the integer parse itself must fail
    let bad_reprs = vec![
        v!(b"a\n"),
        v!(b"1a\n"),
        v!(b"a1\n"),
        v!(b"aa\n"),
        v!(b"12345abcde\n"),
    ];
    for payload in bad_reprs {
        assert_eq!(
            Parser::new(&payload).read_usize().unwrap_err(),
            ParseError::DatatypeParseFailure
        );
    }
}
|
|
||||||
|
|
||||||
#[test]
fn parse_fail_because_unknown_query_scheme() {
    // `?` is not a recognized query tsymbol, so parsing must bail
    // on the very first byte
    let body = v!(b"?3\n3\nSET1\nx3\n100");
    let err = Parser::parse(&body).unwrap_err();
    assert_eq!(err, ParseError::UnexpectedByte)
}
|
|
||||||
|
|
||||||
#[test]
fn simple_query_okay() {
    // a well-formed 3-element simple query
    let body = v!(b"*3\n3\nSET1\nx3\n100");
    let result = Parser::parse(&body).unwrap();
    // the entire packet must have been consumed
    assert_eq!(result.forward, body.len());
    // and it must decode to the expected elements
    let query = simple_query(result);
    assert_eq!(query.into_owned().data, v!["SET", "x", "100"]);
}
|
|
||||||
|
|
||||||
#[test]
fn simple_query_okay_empty_elements() {
    // zero-length elements (the `0\n` entries) are legal in a simple query
    let body = v!(b"*3\n3\nSET0\n0\n");
    let result = Parser::parse(&body).unwrap();
    // the entire packet must have been consumed
    assert_eq!(result.forward, body.len());
    // the empty elements decode to empty strings
    let query = simple_query(result);
    assert_eq!(query.into_owned().data, v!["SET", "", ""]);
}
|
|
||||||
|
|
||||||
#[test]
fn parse_fail_because_not_enough() {
    let full_payload = b"*3\n3\nSET1\nx3\n100";
    // every truncated prefix of a valid packet must be reported as
    // incomplete, never as malformed
    for prefix_len in 0..full_payload.len() - 1 {
        let body: Vec<u8> = full_payload[..prefix_len].to_vec();
        assert_eq!(
            Parser::parse(&body).unwrap_err(),
            ParseError::NotEnough,
            "Failed with body len: {}",
            body.len()
        )
    }
}
|
|
||||||
|
|
||||||
#[test]
fn pipelined_query_okay() {
    // a pipeline of two queries: SET x 100, then GET x
    let body = v!(b"$2\n3\n3\nSET1\nx3\n1002\n3\nGET1\nx");
    let result = Parser::parse(&body).unwrap();
    // the entire packet must have been consumed
    assert_eq!(result.forward, body.len());
    // both queries must decode in order
    let query = pipelined_query(result);
    let expected = vec![v!["SET", "x", "100"], v!["GET", "x"]];
    assert_eq!(query.into_owned().data, expected)
}
|
|
||||||
|
|
||||||
#[test]
fn pipelined_query_okay_empty_elements() {
    // zero-length elements (the `0\n` entries) are legal inside a pipeline
    let body = v!(b"$2\n3\n3\nSET0\n3\n1002\n3\nGET0\n");
    let result = Parser::parse(&body).unwrap();
    // the entire packet must have been consumed
    assert_eq!(result.forward, body.len());
    // empty elements decode to empty strings
    let query = pipelined_query(result);
    let expected = vec![v!["SET", "", "100"], v!["GET", ""]];
    assert_eq!(query.into_owned().data, expected)
}
|
|
||||||
|
|
||||||
#[test]
fn pipelined_query_fail_because_not_enough() {
    let full_payload = v!(b"$2\n3\n3\nSET1\nx3\n1002\n3\nGET1\nx");
    // every truncated prefix of the pipelined packet must be reported
    // as incomplete
    for prefix_len in 0..full_payload.len() - 1 {
        let body: Vec<u8> = full_payload[..prefix_len].to_vec();
        assert_eq!(Parser::parse(&body).unwrap_err(), ParseError::NotEnough)
    }
}
|
|
Loading…
Reference in New Issue