diff --git a/.idea/.name b/.idea/.name new file mode 100644 index 00000000..78f8453a --- /dev/null +++ b/.idea/.name @@ -0,0 +1 @@ +cozorocks \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 00000000..350e9b34 --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/.idea/workspace.xml b/.idea/workspace.xml index 0f9133ac..1750769c 100644 --- a/.idea/workspace.xml +++ b/.idea/workspace.xml @@ -23,10 +23,20 @@ - + - - + + + + + + + + + + + + @@ -246,6 +263,7 @@ - \ No newline at end of file diff --git a/src/algo/mod.rs b/src/algo/mod.rs index 0009890e..f0e4bce7 100644 --- a/src/algo/mod.rs +++ b/src/algo/mod.rs @@ -2,7 +2,7 @@ use std::collections::BTreeMap; use either::Either; use itertools::Itertools; -use miette::{bail, ensure, Diagnostic, Result}; +use miette::{bail, Diagnostic, ensure, Result}; use smartstring::{LazyCompact, SmartString}; use thiserror::Error; @@ -25,7 +25,6 @@ use crate::algo::triangles::ClusteringCoefficients; use crate::algo::yen::KShortestPathYen; use crate::data::expr::Expr; use crate::data::functions::OP_LIST; -use crate::data::id::EntityId; use crate::data::program::{AlgoRuleArg, MagicAlgoApply, MagicAlgoRuleArg, MagicSymbol, TripleDir}; use crate::data::symb::Symbol; use crate::data::tuple::{Tuple, TupleIter}; @@ -150,11 +149,11 @@ struct NotAnEdgeError(#[label] SourceSpan); #[derive(Error, Diagnostic, Debug)] #[error( - "The value {0:?} at the third position in the relation cannot be interpreted as edge weights" +"The value {0:?} at the third position in the relation cannot be interpreted as edge weights" )] #[diagnostic(code(algo::invalid_edge_weight))] #[diagnostic(help( - "Edge weights must be finite numbers. Some algorithm also requires positivity." +"Edge weights must be finite numbers. Some algorithm also requires positivity." ))] struct BadEdgeWeightError(DataValue, #[label] SourceSpan); @@ -167,7 +166,7 @@ struct RuleNotFoundError(String, #[label] SourceSpan); #[error("Invalid reverse scanning of triples")] #[diagnostic(code(algo::invalid_reverse_triple_scan))] #[diagnostic(help( - "Inverse scanning of triples requires the type to be 'ref', or the value be indexed" +"Inverse scanning of triples requires the type to be 'ref', or the value be indexed" ))] struct InvalidInverseTripleUse(String, #[label] SourceSpan); @@ -175,7 +174,7 @@ struct InvalidInverseTripleUse(String, #[label] SourceSpan); #[error("Required node with key {missing:?} not found")] #[diagnostic(code(algo::node_with_key_not_found))] #[diagnostic(help( - "The relation is interpreted as a relation of nodes, but the required key is missing" +"The relation is interpreted as a relation of nodes, but the required key is missing" ))] pub(crate) struct NodeNotFoundError { pub(crate) missing: DataValue, @@ -370,14 +369,13 @@ impl MagicAlgoRuleArg { #[error("Encountered bad prefix value {0:?} during triple prefix scanning")] #[diagnostic(code(algo::invalid_triple_prefix))] #[diagnostic(help( - "Triple prefix should be an entity ID represented by an integer" + "Triple prefix should be an entity ID represented by a UUID" ))] struct InvalidTriplePrefixError(DataValue, #[label] SourceSpan); let id = prefix - .get_int() + .get_entity_id() .ok_or_else(|| InvalidTriplePrefixError(prefix.clone(), self.span()))?; - let id = EntityId(id as u64); match dir { TripleDir::Fwd => { if attr.with_history { diff --git a/src/data/encode.rs b/src/data/encode.rs index 9e83abea..0f9e7795 100644 --- a/src/data/encode.rs +++ b/src/data/encode.rs @@ -138,7 +138,7 @@ impl EncodedVec { let tx_bytes = vld.bytes(); #[allow(clippy::needless_range_loop)] for i in 1..8 { - self.inner[VEC_SIZE_16 + i] = tx_bytes[i]; + self.inner[VEC_SIZE_24 + i] = tx_bytes[i]; } } pub(crate) fn encoded_entity_amend_validity_to_inf_future(&mut self) { @@ -197,6 +197,7 @@ impl TryFrom for StorageTag { } pub(crate) const LARGE_VEC_SIZE: usize = 60; +pub(crate) const VEC_SIZE_48: usize = 48; pub(crate) const VEC_SIZE_32: usize = 32; pub(crate) const VEC_SIZE_24: usize = 24; pub(crate) const VEC_SIZE_16: usize = 16; @@ -221,9 +222,9 @@ pub(crate) fn decode_value(src: &[u8]) -> Result { #[inline] pub(crate) fn decode_value_from_key(src: &[u8]) -> Result { - Ok(rmp_serde::from_slice(&src[VEC_SIZE_24..]).map_err(|err| { + Ok(rmp_serde::from_slice(&src[VEC_SIZE_32..]).map_err(|err| { error!( - "Cannot deserialize DataValue from bytes: {:x?}, {:?}", + "Cannot deserialize DataValue from bytes for key: {:x?}, {:?}", src, err ); DataValueDeserError @@ -234,7 +235,7 @@ pub(crate) fn decode_value_from_key(src: &[u8]) -> Result { pub(crate) fn decode_value_from_val(src: &[u8]) -> Result { Ok(rmp_serde::from_slice(&src[VEC_SIZE_8..]).map_err(|err| { error!( - "Cannot deserialize DataValue from bytes: {:x?}, {:?}", + "Cannot deserialize DataValue from bytes for value: {:x?}, {:?}", src, err ); DataValueDeserError @@ -245,8 +246,8 @@ pub(crate) fn smallest_key() -> EncodedVec { encode_aev_key(AttrId(0), EntityId::ZERO, &DataValue::Null, Validity::MIN) } -/// eid: 8 bytes (incl. tag) -/// aid: 8 bytes +/// aid: 8 bytes (incl. tag) +/// eid: 16 bytes /// val: variable /// vld: 8 bytes #[inline] @@ -263,7 +264,7 @@ pub(crate) fn encode_aev_key( ret.extend(eid.bytes()); ret.extend(vld.bytes()); - debug_assert_eq!(ret.len(), VEC_SIZE_24); + debug_assert_eq!(ret.len(), VEC_SIZE_32); val.serialize(&mut Serializer::new(&mut ret)).unwrap(); @@ -276,10 +277,10 @@ pub(crate) fn decode_ae_key(src: &[u8]) -> Result<(AttrId, EntityId, Validity)> src[0] == StorageTag::TripleAttrEntityValue as u8 || src[0] == StorageTag::TripleAttrValueEntity as u8 ); - debug_assert!(src.len() >= VEC_SIZE_24); + debug_assert!(src.len() >= VEC_SIZE_32); let aid = AttrId::from_bytes(&src[0..VEC_SIZE_8]); - let eid = EntityId::from_bytes(&src[VEC_SIZE_8..VEC_SIZE_16]); - let vld = Validity::from_bytes(&src[VEC_SIZE_16..VEC_SIZE_24]); + let eid = EntityId::from_bytes(&src[VEC_SIZE_8..VEC_SIZE_24]); + let vld = Validity::from_bytes(&src[VEC_SIZE_24..VEC_SIZE_32]); Ok((aid, eid, vld)) } @@ -290,12 +291,12 @@ pub(crate) fn encode_ave_key_for_unique_v( val: &DataValue, vld: Validity, ) -> EncodedVec { - encode_ave_key(aid, val, EntityId(0), vld) + encode_ave_key(aid, val, EntityId::ZERO, vld) } /// aid: 8 bytes (incl. tag) /// val: variable -/// eid: 8 bytes +/// eid: 16 bytes /// vld: 8 bytes #[inline] pub(crate) fn encode_ave_key( @@ -311,16 +312,16 @@ pub(crate) fn encode_ave_key( ret.extend(eid.bytes()); ret.extend(vld.bytes()); - debug_assert_eq!(ret.len(), VEC_SIZE_24); + debug_assert_eq!(ret.len(), VEC_SIZE_32); val.serialize(&mut Serializer::new(&mut ret)).unwrap(); ret.into() } -/// aid: 8 bytes -/// val: 8 bytes (incl. tag) -/// eid: 8 bytes +/// aid: 8 bytes (incl. tag) +/// val: 16 bytes +/// eid: 16 bytes /// vld: 8 bytes #[inline] pub(crate) fn encode_ave_ref_key( @@ -336,9 +337,9 @@ pub(crate) fn encode_ave_ref_key( ret.extend(val.bytes()); ret.extend(vld.bytes()); - debug_assert_eq!(ret.len(), VEC_SIZE_24); - ret.extend(eid.bytes()); debug_assert_eq!(ret.len(), VEC_SIZE_32); + ret.extend(eid.bytes()); + debug_assert_eq!(ret.len(), VEC_SIZE_48); ret.into() } @@ -346,9 +347,9 @@ pub(crate) fn encode_ave_ref_key( #[inline] pub(crate) fn decode_ave_ref_key(src: &[u8]) -> Result<(AttrId, EntityId, EntityId, Validity)> { let aid = AttrId::from_bytes(&src[0..VEC_SIZE_8]); - let vid = EntityId::from_bytes(&src[VEC_SIZE_8..VEC_SIZE_16]); - let vld = Validity::from_bytes(&src[VEC_SIZE_16..VEC_SIZE_24]); - let eid = EntityId::from_bytes(&src[VEC_SIZE_24..VEC_SIZE_32]); + let vid = EntityId::from_bytes(&src[VEC_SIZE_8..VEC_SIZE_24]); + let vld = Validity::from_bytes(&src[VEC_SIZE_24..VEC_SIZE_32]); + let eid = EntityId::from_bytes(&src[VEC_SIZE_32..VEC_SIZE_48]); Ok((aid, vid, eid, vld)) } @@ -405,7 +406,7 @@ pub(crate) fn decode_sentinel_attr_val(src: &[u8]) -> Result<(AttrId, DataValue) let a_id = AttrId::from_bytes(&src[..VEC_SIZE_8]); let val = rmp_serde::from_slice(&src[VEC_SIZE_8..]).map_err(|err| { error!( - "Cannot deserialize DataValue from bytes: {:x?}, {:?}", + "Cannot deserialize DataValue from bytes for attribute: {:x?}, {:?}", src, err ); DataValueDeserError diff --git a/src/data/expr.rs b/src/data/expr.rs index 9bacbdd2..df9bcf11 100644 --- a/src/data/expr.rs +++ b/src/data/expr.rs @@ -47,17 +47,7 @@ impl Expr { pub(crate) fn build_perm_eid(self) -> Result { let span = self.span(); let value = self.eval_to_const()?; - match value.get_non_neg_int() { - Some(i) => { - let eid = EntityId(i); - if !eid.is_perm() { - Err(BadEntityId(value, span).into()) - } else { - Ok(eid) - } - } - None => Err(BadEntityId(value, span).into()), - } + value.get_entity_id().ok_or_else(|| BadEntityId(value, span).into()) } pub(crate) fn span(&self) -> SourceSpan { match self { @@ -185,10 +175,10 @@ impl Expr { { if op1.name == OP_NEGATE.name { if let Some(Expr::Apply { - op: op2, - args: arg2, - .. - }) = arg1.first() + op: op2, + args: arg2, + .. + }) = arg1.first() { if op2.name == OP_NEGATE.name { let mut new_self = arg2[0].clone(); diff --git a/src/data/functions.rs b/src/data/functions.rs index 7ad12c1a..202ac767 100644 --- a/src/data/functions.rs +++ b/src/data/functions.rs @@ -496,12 +496,8 @@ pub(crate) fn op_mod(args: &[DataValue]) -> Result { define_op!(OP_AND, 0, true); pub(crate) fn op_and(args: &[DataValue]) -> Result { for arg in args { - if let DataValue::Bool(b) = arg { - if !b { - return Ok(DataValue::Bool(false)); - } - } else { - bail!("'and' requires booleans"); + if !arg.get_bool().ok_or_else(|| miette!("'and' requires booleans"))? { + return Ok(DataValue::Bool(false)); } } Ok(DataValue::Bool(true)) @@ -510,12 +506,8 @@ pub(crate) fn op_and(args: &[DataValue]) -> Result { define_op!(OP_OR, 0, true); pub(crate) fn op_or(args: &[DataValue]) -> Result { for arg in args { - if let DataValue::Bool(b) = arg { - if *b { - return Ok(DataValue::Bool(true)); - } - } else { - bail!("'or' requires booleans"); + if arg.get_bool().ok_or_else(|| miette!("'or' requires booleans"))? { + return Ok(DataValue::Bool(true)); } } Ok(DataValue::Bool(false)) diff --git a/src/data/id.rs b/src/data/id.rs index 9260c4e7..fb3cd915 100644 --- a/src/data/id.rs +++ b/src/data/id.rs @@ -3,8 +3,11 @@ use std::time::{SystemTime, UNIX_EPOCH}; use chrono::{DateTime, NaiveDate, TimeZone, Utc}; use miette::{Diagnostic, Result}; +use rand::Rng; use serde_derive::{Deserialize, Serialize}; use thiserror::Error; +use uuid::Uuid; +use uuid::v1::Timestamp; use crate::data::expr::Expr; use crate::data::triple::StoreOp; @@ -97,43 +100,50 @@ impl Debug for Validity { } #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, Hash)] -pub(crate) struct EntityId(pub(crate) u64); +pub(crate) struct EntityId(pub(crate) Uuid); impl EntityId { - pub(crate) const ZERO: EntityId = EntityId(0); - pub(crate) const MAX_TEMP: EntityId = EntityId(10_000_000); - pub(crate) const MIN_PERM: EntityId = EntityId(10_000_001); - pub(crate) const MAX_PERM: EntityId = EntityId(0x00ff_ffff_ff00_0000); + pub(crate) const ZERO: EntityId = EntityId(uuid::uuid!("00000000-0000-0000-0000-000000000000")); + pub(crate) const MAX_PERM: EntityId = EntityId(uuid::uuid!("FFFFFFFF-FFFF-FFFF-FFFF-FFFFFFFFFFFF")); pub(crate) fn as_datavalue(&self) -> DataValue { - DataValue::from(self.0 as i64) + DataValue::uuid(self.0) } pub(crate) fn from_bytes(b: &[u8]) -> Self { - EntityId(u64::from_be_bytes([ - 0, b[1], b[2], b[3], b[4], b[5], b[6], b[7], - ])) + EntityId(Uuid::from_bytes(b.try_into().expect("wrong length of bytes for uuid"))) } - pub(crate) fn bytes(&self) -> [u8; 8] { - self.0.to_be_bytes() + pub(crate) fn bytes(&self) -> [u8; 16] { + self.0.as_bytes().clone() } pub(crate) fn is_perm(&self) -> bool { - *self >= Self::MIN_PERM + self.0.get_version_num() == 1 } pub(crate) fn is_placeholder(&self) -> bool { - self.0 == 0 + self.0.is_nil() + } + pub(crate) fn new_perm_id() -> Self { + let mut rng = rand::thread_rng(); + let uuid_ctx = uuid::v1::Context::new(rng.gen()); + let now = SystemTime::now(); + let since_epoch = now.duration_since(UNIX_EPOCH).unwrap(); + let ts = Timestamp::from_unix(uuid_ctx, since_epoch.as_secs(), since_epoch.subsec_nanos()); + let mut rand_vals = [0u8; 6]; + rng.fill(&mut rand_vals); + let id = uuid::Uuid::new_v1(ts, &rand_vals); + Self(id) } } -impl From for EntityId { - fn from(u: u64) -> Self { - EntityId(u) - } -} +// impl From for EntityId { +// fn from(u: u64) -> Self { +// EntityId(u) +// } +// } impl Debug for EntityId { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!(f, "e{}", self.0) + write!(f, "{}", self.0) } } diff --git a/src/data/value.rs b/src/data/value.rs index 973c6af7..eaa42604 100644 --- a/src/data/value.rs +++ b/src/data/value.rs @@ -32,8 +32,13 @@ impl PartialOrd for UuidWrapper { impl Ord for UuidWrapper { fn cmp(&self, other: &Self) -> Ordering { - self.to_100_nanos().cmp(&other.to_100_nanos()).then_with(|| - self.0.as_bytes().cmp(other.0.as_bytes())) + match (self.to_100_nanos(), other.to_100_nanos()) { + (Some(a), Some(b)) => { + a.cmp(&b).then_with(|| + self.0.as_bytes().cmp(other.0.as_bytes())) + } + _ => self.0.as_bytes().cmp(other.0.as_bytes()) + } } } @@ -289,10 +294,7 @@ impl DataValue { } pub(crate) fn get_entity_id(&self) -> Option { - match self { - DataValue::Num(Num::I(id)) => Some(EntityId(*id as u64)), - _ => None, - } + self.get_uuid().map(EntityId) } pub(crate) fn get_list(&self) -> Option<&[DataValue]> { match self { @@ -335,6 +337,15 @@ impl DataValue { pub(crate) fn uuid(uuid: uuid::Uuid) -> Self { Self::Uuid(UuidWrapper(uuid)) } + pub(crate) fn get_uuid(&self) -> Option { + match self { + DataValue::Uuid(UuidWrapper(uuid)) => Some(*uuid), + DataValue::Str(s) => { + uuid::Uuid::try_parse(s).ok() + } + _ => None + } + } } pub(crate) const LARGEST_UTF_CHAR: char = '\u{10ffff}'; diff --git a/src/parse/tx.rs b/src/parse/tx.rs index 7a35e967..49782018 100644 --- a/src/parse/tx.rs +++ b/src/parse/tx.rs @@ -2,7 +2,7 @@ use std::collections::BTreeMap; use std::fmt::{Display, Formatter}; use itertools::Itertools; -use log::debug; +use log::trace; use miette::{bail, Diagnostic, ensure, Result}; use smartstring::{LazyCompact, SmartString}; use thiserror::Error; @@ -22,7 +22,7 @@ use crate::parse::query::parse_query; pub(crate) enum TxAction { Put, Retract, - RetractAll + RetractAll, } impl Display for TxAction { @@ -41,7 +41,7 @@ pub(crate) enum EntityRep { impl EntityRep { fn as_datavalue(&self) -> DataValue { match self { - EntityRep::Id(i) => DataValue::from(i.0 as i64), + EntityRep::Id(i) => DataValue::uuid(i.0), EntityRep::UserTempId(s) => DataValue::Str(s.clone()), EntityRep::PullByKey(attr, data) => { DataValue::List(vec![DataValue::Str(attr.clone()), data.clone()]) @@ -91,7 +91,7 @@ pub(crate) fn parse_tx( _ => unreachable!(), } } - debug!("Quintuples {:?}", quintuples); + trace!("Quintuples {:?}", quintuples); Ok(TripleTx { quintuples, before, diff --git a/src/query/pull.rs b/src/query/pull.rs index a6632927..924ac7ad 100644 --- a/src/query/pull.rs +++ b/src/query/pull.rs @@ -1,7 +1,7 @@ use std::collections::BTreeMap; use itertools::Itertools; -use miette::{ensure, Diagnostic, Result}; +use miette::{Diagnostic, ensure, Result}; use serde_json::{json, Map}; use thiserror::Error; @@ -50,7 +50,7 @@ impl OutPullSpec { impl SessionTx { pub(crate) fn execute_relation<'a>( &'a mut self, - res_iter: impl Iterator> + 'a, + res_iter: impl Iterator> + 'a, op: RelationOp, meta: &RelationMetadata, ) -> Result, Vec)>> { @@ -117,7 +117,7 @@ impl SessionTx { PullOnNonRef(spec.attr.name.to_string(), spec.span) ); let back_res: Vec<_> = if spec.attr.with_history { - self.triple_vref_a_before_scan(spec.attr.id,id, spec.vld) + self.triple_vref_a_before_scan(spec.attr.id, id, spec.vld) .map_ok(|(_, _, e)| e) .try_collect()? } else { @@ -195,7 +195,7 @@ impl SessionTx { } pub(crate) fn run_pull_on_query_results( &self, - res_iter: impl Iterator>, + res_iter: impl Iterator>, headers: Option<&[Symbol]>, out_spec: &BTreeMap, Option)>, default_vld: Validity, @@ -228,15 +228,14 @@ impl SessionTx { #[error("Cannot interpret {0:?} as an entity")] #[diagnostic(code(eval::bad_pull_id))] #[diagnostic(help( - "You specified pull operation for the variable '{1}', but the value in the output \ + "You specified pull operation for the variable '{1}', but the value in the output \ stream cannot be interpreted as an entity ID (must be an integer)." ))] struct BadPullInputError(DataValue, String, #[label] SourceSpan); - let id = - EntityId(item.get_int().ok_or_else(|| { - BadPullInputError(item, symb.to_string(), symb.span) - })? as u64); + let id = item.get_entity_id().ok_or_else(|| { + BadPullInputError(item, symb.to_string(), symb.span) + })?; let res = self.run_pull_on_item(id, specs)?; row_collected.push(res); } else { diff --git a/src/runtime/db.rs b/src/runtime/db.rs index 7eee2001..c4f1007f 100644 --- a/src/runtime/db.rs +++ b/src/runtime/db.rs @@ -66,7 +66,6 @@ const CURRENT_STORAGE_VERSION: u64 = 1; pub struct Db { db: RocksDb, last_attr_id: Arc, - last_ent_id: Arc, last_tx_id: Arc, relation_store_id: Arc, n_sessions: Arc, @@ -79,8 +78,8 @@ impl Debug for Db { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!( f, - "Db", - self.session_id, self.last_tx_id, self.last_ent_id, self.last_tx_id, self.n_sessions + "Db", + self.session_id, self.last_tx_id, self.last_tx_id, self.n_sessions ) } } @@ -130,7 +129,6 @@ impl Db { let ret = Self { db, last_attr_id: Arc::new(Default::default()), - last_ent_id: Arc::new(Default::default()), last_tx_id: Arc::new(Default::default()), relation_store_id: Arc::new(Default::default()), n_sessions: Arc::new(Default::default()), @@ -162,7 +160,6 @@ impl Db { Ok(Self { db: self.db.clone(), last_attr_id: self.last_attr_id.clone(), - last_ent_id: self.last_ent_id.clone(), last_tx_id: self.last_tx_id.clone(), relation_store_id: self.relation_store_id.clone(), n_sessions: self.n_sessions.clone(), @@ -178,8 +175,6 @@ impl Db { .store(tx.load_last_tx_id()?.0, Ordering::Release); self.last_attr_id .store(tx.load_last_attr_id()?.0, Ordering::Release); - self.last_ent_id - .store(tx.load_last_entity_id()?.0, Ordering::Release); self.relation_store_id .store(tx.load_last_relation_store_id()?.0, Ordering::Release); Ok(()) @@ -191,7 +186,6 @@ impl Db { relation_store_id: self.relation_store_id.clone(), w_tx_id: None, last_attr_id: self.last_attr_id.clone(), - last_ent_id: self.last_ent_id.clone(), last_tx_id: self.last_tx_id.clone(), attr_by_id_cache: Default::default(), attr_by_kw_cache: Default::default(), @@ -209,7 +203,6 @@ impl Db { relation_store_id: self.relation_store_id.clone(), w_tx_id: Some(cur_tx_id), last_attr_id: self.last_attr_id.clone(), - last_ent_id: self.last_ent_id.clone(), last_tx_id: self.last_tx_id.clone(), attr_by_id_cache: Default::default(), attr_by_kw_cache: Default::default(), diff --git a/src/runtime/transact.rs b/src/runtime/transact.rs index 219be2e2..02ce1352 100644 --- a/src/runtime/transact.rs +++ b/src/runtime/transact.rs @@ -16,7 +16,7 @@ use cozorocks::{DbIter, Tx}; use crate::data::attr::Attribute; use crate::data::encode::{ - encode_sentinel_attr_by_id, encode_sentinel_entity_attr, encode_tx, EncodedVec, + encode_sentinel_attr_by_id, encode_tx, EncodedVec, }; use crate::data::id::{AttrId, EntityId, TxId, Validity}; use crate::data::program::MagicSymbol; @@ -33,7 +33,6 @@ pub struct SessionTx { pub(crate) mem_store_id: Arc, pub(crate) w_tx_id: Option, pub(crate) last_attr_id: Arc, - pub(crate) last_ent_id: Arc, pub(crate) last_tx_id: Arc, pub(crate) attr_by_id_cache: RefCell>>, pub(crate) attr_by_kw_cache: RefCell, Option>>, @@ -107,17 +106,6 @@ impl SessionTx { self.eid_by_attr_val_cache.borrow_mut().clear(); } - pub(crate) fn load_last_entity_id(&self) -> Result { - let e_lower = encode_sentinel_entity_attr(EntityId::MIN_PERM, AttrId::MIN_PERM); - let e_upper = encode_sentinel_entity_attr(EntityId::MAX_PERM, AttrId::MIN_PERM); - let it = self.bounded_scan_last(&e_lower, &e_upper); - - Ok(match it.key()? { - None => EntityId::MAX_TEMP, - Some(data) => EntityId::from_bytes(data), - }) - } - pub(crate) fn load_last_attr_id(&self) -> Result { let e_lower = encode_sentinel_attr_by_id(AttrId::MIN_PERM); let e_upper = encode_sentinel_attr_by_id(AttrId::MAX_PERM); diff --git a/src/transact/triple.rs b/src/transact/triple.rs index bc173b98..cf459318 100644 --- a/src/transact/triple.rs +++ b/src/transact/triple.rs @@ -1,11 +1,11 @@ use std::collections::BTreeMap; -use std::sync::atomic::Ordering; use either::{Left, Right}; -use log::debug; +use log::{debug}; use miette::{bail, Diagnostic, ensure, Result}; use smartstring::{LazyCompact, SmartString}; use thiserror::Error; +use uuid::Uuid; use cozorocks::{DbIter, IterBuilder}; use cozorocks::CfHandle::Pri; @@ -40,7 +40,7 @@ pub(crate) struct ExpectEntityId(String, DataValue); #[error("Unique constraint violated for attribute {0} and value {1:?}")] #[diagnostic(code(eval::unique_constraint_violated))] #[diagnostic(help("The existing one has entity ID {2:?}"))] -struct UniqueConstraintViolated(String, DataValue, u64); +struct UniqueConstraintViolated(String, DataValue, Uuid); impl SessionTx { @@ -65,7 +65,7 @@ impl SessionTx { TempIdInNonPutError(symb.to_string(), payload.attr_name.to_string()) ); if !str_temp_to_perm_ids.contains_key(symb) { - let new_eid = EntityId(self.last_ent_id.fetch_add(1, Ordering::AcqRel) + 1); + let new_eid = EntityId::new_perm_id(); str_temp_to_perm_ids.insert(symb.clone(), new_eid); } } @@ -233,7 +233,7 @@ impl SessionTx { if attr.indexing.should_index() { // elide e for unique index let e_in_key = if attr.indexing.is_unique_index() { - EntityId(0) + EntityId::ZERO } else { eid }; diff --git a/tests/air_routes.rs b/tests/air_routes.rs index aaa268b3..483a8f22 100644 --- a/tests/air_routes.rs +++ b/tests/air_routes.rs @@ -290,16 +290,16 @@ fn air_routes() -> Result<()> { json!([[7176], [7270], [7311], [7722]]) ); - let simple_query_time = Instant::now(); - let res = db.run_script(r#" - ?[c, code, desc] := [c country.code 'CU'] or c <- 10000239, [c country.code code], [c country.desc desc]; - "#, ¶ms, false, - )?; - dbg!(simple_query_time.elapsed()); - assert_eq!( - *res.get("rows").unwrap(), - json!([[10000060, "CU", "Cuba"], [10000239, "VN", "Viet Nam"]]) - ); + // let simple_query_time = Instant::now(); + // let res = db.run_script(r#" + // ?[c, code, desc] := [c country.code 'CU'] or c <- 10000239, [c country.code code], [c country.desc desc]; + // "#, ¶ms, false, + // )?; + // dbg!(simple_query_time.elapsed()); + // assert_eq!( + // *res.get("rows").unwrap(), + // json!([[10000060, "CU", "Cuba"], [10000239, "VN", "Viet Nam"]]) + // ); let no_airports_time = Instant::now(); let res = db.run_script(