transactions are hard

main
Ziyang Hu 2 years ago
parent 2fb5a006f8
commit af8b328d13

@@ -23,9 +23,16 @@
     <cargoProject FILE="$PROJECT_DIR$/Cargo.toml" />
   </component>
   <component name="ChangeListManager">
-    <list default="true" id="fb7002fa-47b1-45d9-bc6d-711b16e752b3" name="Changes" comment="reintroduce bloom filter settings">
+    <list default="true" id="fb7002fa-47b1-45d9-bc6d-711b16e752b3" name="Changes" comment="manifest file">
       <change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/cozorocks/src/bridge/db.rs" beforeDir="false" afterPath="$PROJECT_DIR$/cozorocks/src/bridge/db.rs" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/cozoscript.pest" beforeDir="false" afterPath="$PROJECT_DIR$/src/cozoscript.pest" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/parse/expr.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/parse/expr.rs" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/parse/tx.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/parse/tx.rs" afterDir="false" />
       <change beforePath="$PROJECT_DIR$/src/runtime/db.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/runtime/db.rs" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/src/transact/triple.rs" beforeDir="false" afterPath="$PROJECT_DIR$/src/transact/triple.rs" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/tests/air_routes.rs" beforeDir="false" afterPath="$PROJECT_DIR$/tests/air_routes.rs" afterDir="false" />
+      <change beforePath="$PROJECT_DIR$/tests/simple.rs" beforeDir="false" afterPath="$PROJECT_DIR$/tests/simple.rs" afterDir="false" />
     </list>
     <option name="SHOW_DIALOG" value="false" />
     <option name="HIGHLIGHT_CONFLICTS" value="true" />
@@ -131,7 +138,7 @@
       <option name="number" value="Default" />
       <option name="presentableId" value="Default" />
       <updated>1663161524540</updated>
-      <workItem from="1663161527741" duration="9360000" />
+      <workItem from="1663161527741" duration="19713000" />
     </task>
     <task id="LOCAL-00001" summary="regenerate idea files">
       <created>1663161616722</created>
@@ -161,7 +168,14 @@
       <option name="project" value="LOCAL" />
       <updated>1663170510922</updated>
     </task>
-    <option name="localTasksCounter" value="5" />
+    <task id="LOCAL-00005" summary="manifest file">
+      <created>1663220906043</created>
+      <option name="number" value="00005" />
+      <option name="presentableId" value="LOCAL-00005" />
+      <option name="project" value="LOCAL" />
+      <updated>1663220906043</updated>
+    </task>
+    <option name="localTasksCounter" value="6" />
     <servers />
   </component>
   <component name="TypeScriptGeneratedFilesManager">
@@ -183,6 +197,7 @@
     <MESSAGE value="validity parsing" />
     <MESSAGE value="enforce local file security" />
     <MESSAGE value="reintroduce bloom filter settings" />
-    <option name="LAST_COMMIT_MESSAGE" value="reintroduce bloom filter settings" />
+    <MESSAGE value="manifest file" />
+    <option name="LAST_COMMIT_MESSAGE" value="manifest file" />
   </component>
 </project>

@@ -137,10 +137,10 @@ impl<'a> DbBuilder<'a> {
         self.opts.snd_comparator_different_bytes_can_be_equal = different_bytes_can_be_equal;
         self
     }
-    pub fn destroy_on_exit(mut self, destroy: bool) -> Self {
-        self.opts.destroy_on_exit = destroy;
-        self
-    }
+    // pub fn destroy_on_exit(mut self, destroy: bool) -> Self {
+    //     self.opts.destroy_on_exit = destroy;
+    //     self
+    // }
 
     pub fn build(self) -> Result<RocksDb, RocksDbStatus> {
         let mut status = RocksDbStatus::default();
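
With `destroy_on_exit` commented out of `DbBuilder`, callers that want a throwaway database now have to remove its directory themselves, which is what the updated `tests/simple.rs` further down does with `fs::remove_dir_all`. A minimal sketch of that pattern; the `_scratch_db` path is invented for illustration:

    use std::fs;

    use cozo::Db;
    use cozorocks::DbBuilder;

    fn with_scratch_db() {
        // Build without any automatic cleanup, mirroring the test helpers below.
        let builder = DbBuilder::default()
            .path("_scratch_db")
            .create_if_missing(true);
        let db = Db::build(builder).unwrap();
        // ... exercise `db` ...
        drop(db);
        // Explicit cleanup now that destroy_on_exit is gone.
        fs::remove_dir_all("_scratch_db").unwrap();
    }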

@@ -178,9 +178,10 @@ nested_schema_clause = {compound_ident ~ ":" ~ ident+}
 // tx
-tx_clause = { (tx_put | tx_retract )? ~ tx_map ~ ";"? }
+tx_clause = { (tx_put | tx_retract_all | tx_retract )? ~ tx_map ~ ";"? }
 tx_put = {"put" ~ ("@" ~ expr)?}
 tx_retract = {"retract" ~ ("@" ~ expr)?}
+tx_retract_all = {"retract_all" ~ ("@" ~ expr)?}
 tx_map = {"{" ~ (tx_pair ~ ",")* ~ tx_pair? ~ "}"}
 tx_pair = {tx_ident ~ ":" ~ (tx_list | tx_map | expr )}
 tx_list = {"[" ~ ((expr | tx_map) ~ ",")* ~ (expr | tx_map)? ~ "]"}
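
The grammar lists `tx_retract_all` ahead of `tx_retract` in the alternation so the longer keyword is matched first, and like `put`/`retract` it takes an optional `@ expr` validity before the transaction map. A hedged sketch of a script this grammar would now accept; the attribute name, entity id, and key spelling are invented for illustration and not taken from this commit:

    // Sketch only: passed to Db::run_script like the transaction scripts in the tests.
    let retract_all_script = r#"
        retract_all {
            _id: 10000001,
            person.email: ""
        }
    "#;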

@@ -2,12 +2,12 @@ use std::collections::BTreeMap;
 
 use itertools::Itertools;
 use lazy_static::lazy_static;
-use miette::{bail, ensure, Diagnostic, Result};
+use miette::{bail, Diagnostic, ensure, Result};
 use pest::prec_climber::{Operator, PrecClimber};
 use smartstring::{LazyCompact, SmartString};
 use thiserror::Error;
 
-use crate::data::expr::{get_op, Expr};
+use crate::data::expr::{Expr, get_op};
 use crate::data::functions::{
     OP_ADD, OP_AND, OP_CONCAT, OP_DIV, OP_EQ, OP_GE, OP_GT, OP_LE, OP_LIST, OP_LT, OP_MINUS,
     OP_MOD, OP_MUL, OP_NEGATE, OP_NEQ, OP_OR, OP_POW, OP_SUB,
@@ -38,9 +38,14 @@ lazy_static! {
     };
 }
 
+#[derive(Debug, Error, Diagnostic)]
+#[error("Invalid expression encountered")]
+#[diagnostic(code(parser::invalid_expression))]
+pub(crate) struct InvalidExpression(#[label] pub(crate) SourceSpan);
+
 pub(crate) fn build_expr(pair: Pair<'_>, param_pool: &BTreeMap<String, DataValue>) -> Result<Expr> {
-    // TODO remove this when we are sure
-    assert_eq!(pair.as_rule(), Rule::expr);
+    ensure!(pair.as_rule() == Rule::expr, InvalidExpression(pair.extract_span()));
     PREC_CLIMBER.climb(
         pair.into_inner(),
         |v| build_unary(v, param_pool),
@@ -187,7 +192,7 @@ fn build_unary(pair: Pair<'_>, param_pool: &BTreeMap<String, DataValue>) -> Resu
                     span,
                 }
             }
-            Rule::list => {
+            Rule::list | Rule::tx_list => {
                 let mut collected = vec![];
                 for p in p.into_inner() {
                     collected.push(build_expr(p, param_pool)?)
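
The `assert_eq!` guard becomes an `ensure!` backed by a miette `Diagnostic` carrying a labeled `SourceSpan`, so a malformed rule surfaces as a reportable parse error instead of a panic. A stand-alone sketch of that pattern, assuming only the `miette` (with derive support) and `thiserror` crates; the error type and message here are stand-ins, not Cozo's:

    use miette::{ensure, Diagnostic, Result, SourceSpan};
    use thiserror::Error;

    // Stand-in diagnostic shaped like InvalidExpression above.
    #[derive(Debug, Error, Diagnostic)]
    #[error("Invalid expression encountered")]
    #[diagnostic(code(parser::invalid_expression))]
    struct BadExpression(#[label("not an expression")] SourceSpan);

    fn check_is_expr(rule_name: &str, span: SourceSpan) -> Result<()> {
        // ensure! returns an Err wrapping the diagnostic instead of aborting.
        ensure!(rule_name == "expr", BadExpression(span));
        Ok(())
    }

    fn main() {
        // Span covering bytes 3..7 of some hypothetical source text.
        let err = check_is_expr("tx_map", (3, 4).into()).unwrap_err();
        eprintln!("{err:?}");
    }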

@@ -1,23 +1,28 @@
 use std::collections::BTreeMap;
 use std::fmt::{Display, Formatter};
 
-use miette::{bail, ensure, Diagnostic, Result};
+use itertools::Itertools;
+use log::debug;
+use miette::{bail, Diagnostic, ensure, Result};
 use smartstring::{LazyCompact, SmartString};
 use thiserror::Error;
 
+use crate::data::expr::Expr;
+use crate::data::functions::OP_LIST;
 use crate::data::id::{EntityId, Validity};
 use crate::data::program::InputProgram;
 use crate::data::symb::Symbol;
 use crate::data::value::{DataValue, LARGEST_UTF_CHAR};
-use crate::parse::expr::{build_expr, parse_string};
-use crate::parse::query::parse_query;
 use crate::parse::{ExtractSpan, Pair, Pairs, ParseError, Rule, SourceSpan};
+use crate::parse::expr::{build_expr, InvalidExpression, parse_string};
+use crate::parse::query::parse_query;
 
 #[repr(u8)]
 #[derive(Debug, Eq, PartialEq, Copy, Clone)]
 pub(crate) enum TxAction {
     Put,
     Retract,
+    RetractAll
 }
 
 impl Display for TxAction {
@@ -86,6 +91,7 @@ pub(crate) fn parse_tx(
             _ => unreachable!(),
         }
     }
+    debug!("Quintuples {:?}", quintuples);
     Ok(TripleTx {
         quintuples,
         before,
@@ -130,6 +136,19 @@ fn parse_tx_clause(
                 _ => unreachable!(),
             }
         }
+        Rule::tx_retract_all => {
+            op = TxAction::RetractAll;
+            let n = src.next().unwrap();
+            match n.as_rule() {
+                Rule::expr => {
+                    let vld_expr = build_expr(n, param_pool)?;
+                    vld = Some(Validity::try_from(vld_expr)?);
+                    src.next().unwrap()
+                }
+                Rule::tx_map => n,
+                _ => unreachable!(),
+            }
+        }
         _ => unreachable!(),
     };
@@ -159,7 +178,7 @@ fn parse_tx_map(
     match fst.as_rule() {
         Rule::tx_ident_id => {
             ensure!(identifier.is_none(), DupKeySpecError(whole_span));
-            let expr = build_expr(src.next().unwrap(), param_pool)?;
+            let expr = parse_tx_val_inline(src.next().unwrap(), param_pool)?;
             let eid = expr.build_perm_eid()?;
             identifier = Some(EntityRep::Id(eid))
         }
@@ -171,7 +190,7 @@ fn parse_tx_map(
             struct BadTempId(DataValue, #[label] SourceSpan);
             ensure!(identifier.is_none(), DupKeySpecError(whole_span));
-            let expr = build_expr(src.next().unwrap(), param_pool)?;
+            let expr = parse_tx_val_inline(src.next().unwrap(), param_pool)?;
             let span = expr.span();
             let c = expr.eval_to_const()?;
             let c = c.get_string().ok_or_else(|| BadTempId(c.clone(), span))?;
@@ -181,7 +200,7 @@ fn parse_tx_map(
             ensure!(identifier.is_none(), DupKeySpecError(whole_span));
             let expr_p = src.next().unwrap();
             let span = expr_p.extract_span();
-            let expr = build_expr(expr_p, param_pool)?;
+            let expr = parse_tx_val_inline(expr_p, param_pool)?;
             let c = expr.eval_to_const()?;
             let c = match c {
                 DataValue::List(l) => l,
@@ -351,3 +370,30 @@ fn parse_tx_val(
     }
     Ok(())
 }
+
+fn parse_tx_val_inline(
+    pair: Pair<'_>,
+    param_pool: &BTreeMap<String, DataValue>,
+) -> Result<Expr> {
+    Ok(match pair.as_rule() {
+        Rule::expr => {
+            let mut expr = build_expr(pair, param_pool)?;
+            expr.partial_eval()?;
+            expr
+        }
+        Rule::tx_map => {
+            bail!(InvalidExpression(pair.extract_span()))
+        }
+        Rule::tx_list => {
+            let span = pair.extract_span();
+            let list_coll = pair.into_inner().map(|p| parse_tx_val_inline(p, param_pool)).try_collect()?;
+            Expr::Apply {
+                op: &OP_LIST,
+                args: list_coll,
+                span,
+            }
+        }
+        _ => unreachable!(),
+    })
+}
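
The new `parse_tx_val_inline` partially evaluates scalar expressions and folds `tx_list` nodes into an `OP_LIST` application, using itertools' `try_collect` to stop at the first element that fails. A small stand-alone sketch of that error-propagating collection step, assuming only the `itertools` crate:

    use itertools::Itertools;

    // Parse every item or return the first error, the same shape as
    // .map(|p| parse_tx_val_inline(p, param_pool)).try_collect()? above.
    fn parse_all(items: &[&str]) -> Result<Vec<i64>, std::num::ParseIntError> {
        let parsed: Vec<i64> = items.iter().map(|s| s.parse::<i64>()).try_collect()?;
        Ok(parsed)
    }

    fn main() {
        assert_eq!(parse_all(&["1", "2", "3"]).unwrap(), vec![1, 2, 3]);
        assert!(parse_all(&["1", "oops"]).is_err());
    }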

@@ -228,20 +228,33 @@ impl Db {
             self.run_query(&mut tx, before_prog)
                 .wrap_err("Triple store transaction failed as a pre-condition failed")?;
         }
-        let res: JsonValue = tx
-            .tx_triples(payloads.quintuples)?
-            .iter()
-            .map(|(eid, size)| json!([eid.0, size]))
-            .collect();
+        let mut counter: BTreeMap<EntityId, (isize, isize)> = BTreeMap::new();
+        let res = tx
+            .tx_triples(payloads.quintuples)?;
+        for (key, change) in res {
+            let (asserts, retracts) = counter.entry(key).or_default();
+            if change > 0 {
+                *asserts += change;
+            } else {
+                *retracts -= change;
+            }
+        }
         for after_prog in payloads.after {
             self.run_query(&mut tx, after_prog)
                 .wrap_err("Triple store transaction failed as a post-condition failed")?;
         }
         let tx_id = tx.get_write_tx_id()?;
         tx.commit_tx("", false)?;
+        let counted_res: JsonValue = counter.into_iter().map(|(k, (v1, v2))|
+            json!([k.0, v1, v2])
+        ).collect();
         Ok(json!({
             "tx_id": tx_id,
-            "results": res
+            "headers": ["entity_id", "asserts", "retracts"],
+            "rows": counted_res
         }))
     }
     fn transact_attributes(&self, attrs: Vec<AttrTxItem>) -> Result<JsonValue> {
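
Instead of echoing raw per-triple results, the transaction outcome is now aggregated per entity into assert and retract counts and returned in a headers/rows layout alongside the transaction id. Roughly, a response could look like the sketch below; the ids and counts are invented for illustration:

    use serde_json::json;

    fn main() {
        // Hypothetical result of a transaction touching two entities.
        let response = json!({
            "tx_id": 42,
            "headers": ["entity_id", "asserts", "retracts"],
            "rows": [
                [10000001u64, 5, 0],
                [10000002u64, 0, 2]
            ]
        });
        println!("{}", serde_json::to_string_pretty(&response).unwrap());
    }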

@@ -1,12 +1,14 @@
 use std::collections::BTreeMap;
 use std::sync::atomic::Ordering;
 
-use miette::{bail, ensure, Diagnostic, Result};
+use either::{Left, Right};
+use log::debug;
+use miette::{bail, Diagnostic, ensure, Result};
 use smartstring::{LazyCompact, SmartString};
 use thiserror::Error;
 
+use cozorocks::CfHandle::Pri;
 use cozorocks::{DbIter, IterBuilder};
-use cozorocks::CfHandle::Pri;
 
 use crate::data::attr::Attribute;
 use crate::data::compare::compare_key;
@@ -34,6 +36,13 @@ pub(crate) struct EntityNotFound(pub(crate) String);
 #[diagnostic(help("This occurs when transacting against the attribute {0}"))]
 pub(crate) struct ExpectEntityId(String, DataValue);
 
+#[derive(Debug, Error, Diagnostic)]
+#[error("Unique constraint violated for attribute {0} and value {1:?}")]
+#[diagnostic(code(eval::unique_constraint_violated))]
+#[diagnostic(help("The existing one has entity ID {2:?}"))]
+struct UniqueConstraintViolated(String, DataValue, u64);
+
 impl SessionTx {
     pub(crate) fn tx_triples(
         &mut self,
@@ -103,7 +112,7 @@ impl SessionTx {
                     }
                 }
             }
-            TxAction::Retract => {
+            TxAction::Retract | TxAction::RetractAll => {
                 let attr = self.attr_by_name(&payload.attr_name.name)?.unwrap();
                 let eid = match payload.entity {
                     EntityRep::Id(id) => id,
@@ -112,7 +121,7 @@ impl SessionTx {
                         #[error("Attempting to retract with temp ID {0}")]
                         #[diagnostic(code(eval::retract_with_temp_id))]
                         #[diagnostic(help(
-                        "This occurs when transacting against the attribute {1}"
+                            "This occurs when transacting against the attribute {1}"
                         ))]
                         struct RetractWithTempId(String, String);
                         bail!(RetractWithTempId(id.to_string(), attr.name.to_string()))
@@ -130,15 +139,37 @@ impl SessionTx {
                         eid
                     }
                 };
-                ret.push((
-                    self.retract_triple(
-                        eid,
-                        &attr,
-                        &payload.value,
-                        payload.validity.unwrap_or(default_vld),
-                    )?,
-                    -1,
-                ));
+                if payload.action == TxAction::Retract {
+                    ret.push((
+                        self.retract_triple(
+                            eid,
+                            &attr,
+                            &payload.value,
+                            payload.validity.unwrap_or(default_vld),
+                        )?,
+                        -1,
+                    ));
+                } else if payload.action == TxAction::RetractAll {
+                    let it = if attr.with_history {
+                        Left(self.triple_ae_scan(attr.id, eid))
+                    } else {
+                        Right(self.triple_ae_before_scan(attr.id, eid, payload.validity.unwrap_or(default_vld)))
+                    };
+                    for tuple in it {
+                        let (_, _, value) = tuple?;
+                        ret.push((
+                            self.retract_triple(
+                                eid,
+                                &attr,
+                                &value,
+                                payload.validity.unwrap_or(default_vld),
+                            )?,
+                            -1,
+                        ));
+                    }
+                } else {
+                    unreachable!()
+                }
             }
         }
     }
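
The `RetractAll` branch drives one loop over two differently typed scans by wrapping them in `either`'s `Left`/`Right`; `Either` implements `Iterator` whenever both sides do, so a single `for` loop works over whichever scan was chosen. A minimal stand-alone sketch of that trick, assuming only the `either` crate:

    use either::{Left, Right};

    // Choose between two concrete iterator types at runtime behind one binding,
    // as done above for the with-history and before-validity scans.
    fn digits(all: bool) -> impl Iterator<Item = u32> {
        if all {
            Left(0..10u32)
        } else {
            Right([2u32, 4, 6, 8].into_iter())
        }
    }

    fn main() {
        assert_eq!(digits(true).count(), 10);
        assert_eq!(digits(false).sum::<u32>(), 20);
    }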
@@ -171,6 +202,7 @@ impl SessionTx {
         let val_encoded = v_in_val.encode_with_op_and_tx(op, tx_id);
         let aev_encoded = encode_aev_key(attr.id, eid, v_in_key, vld_in_key);
+        debug!("aev encoded {:?}, {:?}, {:?}", aev_encoded, v_in_val, op);
         if real_delete {
             self.tx.del(&aev_encoded, Pri)?;
         } else {
@@ -214,12 +246,6 @@ impl SessionTx {
             Validity::NO_HISTORY
         };
-        #[derive(Debug, Error, Diagnostic)]
-        #[error("Unique constraint violated for attribute {0} and value {1:?}")]
-        #[diagnostic(code(eval::unique_constraint_violated))]
-        #[diagnostic(help("The existing one has entity ID {2:?}"))]
-        struct UniqueConstraintViolated(String, DataValue, u64);
-
         if attr.with_history {
             // back scan in time
             for item in self.triple_av_before_scan(attr.id, v, vld_in_key) {
@@ -296,7 +322,7 @@ impl SessionTx {
             AmendingTripleByTempIdError(attr.name.to_string(), eid)
         );
         // checking that the eid actually exists should be done in the preprocessing step
-        self.write_triple(eid, attr, v, vld, StoreOp::Retract)
+        self.write_triple(eid, attr, v, vld, StoreOp::Assert)
     }
 
     pub(crate) fn retract_triple(
@@ -353,7 +379,7 @@
         &self,
         aid: AttrId,
         eid: EntityId,
-    ) -> impl Iterator<Item = Result<(AttrId, EntityId, DataValue)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, EntityId, DataValue)>> {
         let lower = encode_aev_key(aid, eid, &DataValue::Null, Validity::MAX);
         let upper = encode_aev_key(aid, eid, &DataValue::Bot, Validity::MIN);
         TripleAttrEntityIter::new(self.tx.iterator(Pri), lower, upper)
@@ -364,7 +390,7 @@
         eid: EntityId,
         v_lower: DataValue,
         v_upper: DataValue,
-    ) -> impl Iterator<Item = Result<(AttrId, EntityId, DataValue)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, EntityId, DataValue)>> {
         let lower = encode_aev_key(aid, eid, &v_lower, Validity::MAX);
         let upper = encode_aev_key(aid, eid, &DataValue::Bot, Validity::MIN);
         TripleAttrEntityRangeIter::new(self.tx.iterator(Pri), lower, upper, v_upper)
@@ -374,7 +400,7 @@
         aid: AttrId,
         eid: EntityId,
         before: Validity,
-    ) -> impl Iterator<Item = Result<(AttrId, EntityId, DataValue)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, EntityId, DataValue)>> {
         let lower = encode_aev_key(aid, eid, &DataValue::Null, Validity::MAX);
         let upper = encode_aev_key(aid, eid, &DataValue::Bot, Validity::MIN);
         TripleAttrEntityBeforeIter::new(self.tx.iterator(Pri), lower, upper, before)
@@ -386,7 +412,7 @@
         v_lower: DataValue,
         v_upper: DataValue,
         before: Validity,
-    ) -> impl Iterator<Item = Result<(AttrId, EntityId, DataValue)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, EntityId, DataValue)>> {
         let lower = encode_aev_key(aid, eid, &v_lower, Validity::MAX);
         let upper = encode_aev_key(aid, eid, &DataValue::Bot, Validity::MIN);
         TripleAttrEntityRangeBeforeIter::new(self.tx.iterator(Pri), lower, upper, v_upper, before)
@@ -411,7 +437,7 @@
         aid: AttrId,
         lower: &DataValue,
         upper_inc: &DataValue,
-    ) -> impl Iterator<Item = Result<(AttrId, DataValue, EntityId)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, DataValue, EntityId)>> {
         let lower = encode_ave_key(aid, lower, EntityId::ZERO, Validity::MAX);
         let upper = encode_ave_key(aid, &DataValue::Bot, EntityId::MAX_PERM, Validity::MIN);
         TripleAttrValueRangeIter::new(self.tx.iterator(Pri), lower, upper, upper_inc.clone())
@@ -420,7 +446,7 @@
         &self,
         aid: AttrId,
         v: &DataValue,
-    ) -> impl Iterator<Item = Result<(AttrId, DataValue, EntityId)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, DataValue, EntityId)>> {
         let lower = encode_ave_key(aid, v, EntityId::ZERO, Validity::MAX);
         let upper = encode_ave_key(aid, v, EntityId::MAX_PERM, Validity::MIN);
         TripleAttrValueIter::new(self.tx.iterator(Pri), lower, upper)
@@ -431,7 +457,7 @@
         lower: &DataValue,
         upper_inc: &DataValue,
         before: Validity,
-    ) -> impl Iterator<Item = Result<(AttrId, DataValue, EntityId)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, DataValue, EntityId)>> {
         let lower = encode_ave_key(aid, lower, EntityId::ZERO, Validity::MAX);
         let upper = encode_ave_key(aid, &DataValue::Bot, EntityId::MAX_PERM, Validity::MIN);
         TripleAttrValueRangeBeforeIter::new(
@@ -447,7 +473,7 @@
         aid: AttrId,
         v: &DataValue,
         before: Validity,
-    ) -> impl Iterator<Item = Result<(AttrId, DataValue, EntityId)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, DataValue, EntityId)>> {
         let lower = encode_ave_key(aid, v, EntityId::ZERO, Validity::MAX);
         let upper = encode_ave_key(aid, v, EntityId::MAX_PERM, Validity::MIN);
         TripleAttrValueBeforeIter::new(self.tx.iterator(Pri), lower, upper, before)
@@ -457,7 +483,7 @@
         aid: AttrId,
         v: &DataValue,
         after: Validity,
-    ) -> impl Iterator<Item = Result<(AttrId, DataValue, EntityId)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, DataValue, EntityId)>> {
         let lower = encode_ave_key(aid, v, EntityId::ZERO, Validity::MAX);
         let upper = encode_ave_key(aid, v, EntityId::MAX_PERM, Validity::MIN);
         TripleAttrValueAfterIter::new(self.tx.iterator(Pri), lower, upper, after)
@@ -466,7 +492,7 @@
         &self,
         aid: AttrId,
         v_eid: EntityId,
-    ) -> impl Iterator<Item = Result<(AttrId, EntityId, EntityId)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, EntityId, EntityId)>> {
         let lower = encode_ave_ref_key(aid, v_eid, EntityId::ZERO, Validity::MAX);
         let upper = encode_ave_ref_key(aid, v_eid, EntityId::MAX_PERM, Validity::MIN);
         TripleValueRefAttrIter::new(self.tx.iterator(Pri), lower, upper)
@@ -476,7 +502,7 @@
         aid: AttrId,
         v_eid: EntityId,
         before: Validity,
-    ) -> impl Iterator<Item = Result<(AttrId, EntityId, EntityId)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, EntityId, EntityId)>> {
         let lower = encode_ave_ref_key(aid, v_eid, EntityId::ZERO, Validity::MAX);
         let upper = encode_ave_ref_key(aid, v_eid, EntityId::MAX_PERM, Validity::MIN);
         TripleValueRefAttrBeforeIter::new(self.tx.iterator(Pri), lower, upper, before)
@@ -484,7 +510,7 @@
     pub(crate) fn triple_a_scan(
         &self,
         aid: AttrId,
-    ) -> impl Iterator<Item = Result<(AttrId, EntityId, DataValue)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, EntityId, DataValue)>> {
         let lower = encode_aev_key(aid, EntityId::ZERO, &DataValue::Null, Validity::MAX);
         let upper = encode_aev_key(aid, EntityId::MAX_PERM, &DataValue::Bot, Validity::MIN);
         TripleAttrEntityIter::new(self.tx.iterator(Pri), lower, upper)
@@ -493,7 +519,7 @@
         &self,
         aid: AttrId,
         before: Validity,
-    ) -> impl Iterator<Item = Result<(AttrId, EntityId, DataValue)>> {
+    ) -> impl Iterator<Item=Result<(AttrId, EntityId, DataValue)>> {
         let lower = encode_aev_key(aid, EntityId::ZERO, &DataValue::Null, Validity::MAX);
         let upper = encode_aev_key(aid, EntityId::MAX_PERM, &DataValue::Bot, Validity::MIN);
         TripleAttrEntityBeforeIter::new(self.tx.iterator(Pri), lower, upper, before)

@@ -10,11 +10,10 @@ use serde_json::json;
 use cozo::Db;
 use cozorocks::DbBuilder;
 
-fn create_db(name: &str, destroy_on_exit: bool) -> Db {
+fn create_db(name: &str) -> Db {
     let builder = DbBuilder::default()
         .path(name)
-        .create_if_missing(true)
-        .destroy_on_exit(destroy_on_exit);
+        .create_if_missing(true);
     Db::build(builder).unwrap()
 }
@@ -25,7 +24,7 @@ fn init_logger() {
 #[test]
 fn air_routes() -> Result<()> {
     init_logger();
-    let db = create_db("_test_air_routes", false);
+    let db = create_db("_test_air_routes");
     let params: BTreeMap<String, serde_json::Value> = Default::default();
     let attr_res = db.run_script(
         r#"

@@ -1,4 +1,5 @@
 use std::collections::BTreeMap;
+use std::fs;
 
 use log::info;
 use serde_json::to_string_pretty;
@@ -6,11 +7,10 @@ use serde_json::to_string_pretty;
 use cozo::Db;
 use cozorocks::DbBuilder;
 
-fn create_db(name: &str, destroy_on_exit: bool) -> Db {
+fn create_db(name: &str) -> Db {
     let builder = DbBuilder::default()
         .path(name)
-        .create_if_missing(true)
-        .destroy_on_exit(destroy_on_exit);
+        .create_if_missing(true);
     Db::build(builder).unwrap()
 }
@@ -23,7 +23,7 @@ fn test_send_sync<T: Send + Sync>(_: &T) {}
 #[test]
 fn simple() {
     init_logger();
-    let db = create_db("_test_db", true);
+    let db = create_db("_test_db");
     test_send_sync(&db);
     let params: BTreeMap<String, serde_json::Value> = Default::default();
     db.run_script(
@@ -123,4 +123,5 @@ fn simple() {
     let ret = db.run_script(query, &params, false).unwrap();
     let res = to_string_pretty(&ret).unwrap();
     println!("{}", res);
+    fs::remove_dir_all("_test_db").unwrap();
 }
