Add remove and capacity optimization ops to VInline

Also updated the tests to make debugging easier (the previous suite
placed every operation in a single test, which made it harder to isolate a failing case)
next
Sayan Nandan 2 years ago
parent 10a36fa77d
commit 49dfac11b5
No known key found for this signature in database
GPG Key ID: 42EEDF4AE9D96B54

@ -74,10 +74,51 @@ impl<const N: usize, T> VInline<N, T> {
self.grow();
unsafe {
// UNSAFE(@ohsayan): grow allocated the cap we needed
self._as_mut_ptr().add(self.l).write(v);
self.push_unchecked(v);
}
self.l += 1;
}
#[inline(always)]
/// Drop all elements in place and reset the length to zero.
///
/// Capacity is deliberately left untouched (no deallocation); call
/// [`Self::optimize_capacity`] afterwards to release spare storage.
pub fn clear(&mut self) {
    unsafe {
        // UNSAFE(@ohsayan): as_slice_mut will always give a valid ptr
        ptr::drop_in_place(self._as_slice_mut());
    }
    // NOTE(review): if an element's Drop panics above, `l` is still the old
    // length during unwinding, so the container's Drop would drop the same
    // elements again — consider zeroing `l` before the drop (cf. Vec::clear's
    // SetLenOnDrop) — TODO confirm intended panic behavior
    self.l = 0;
}
#[inline(always)]
/// Remove and return the element at `idx`, shifting every later element one
/// slot to the left. Capacity is unchanged (see [`Self::remove_compact`]).
///
/// # Panics
/// Panics if `idx >= self.len()`.
pub fn remove(&mut self, idx: usize) -> T {
    if idx >= self.len() {
        panic!("index out of range");
    }
    unsafe {
        // UNSAFE(@ohsayan): Verified index is within range
        self.remove_unchecked(idx)
    }
}
#[inline(always)]
/// [`Self::remove`] followed by [`Self::optimize_capacity`]: removes the
/// element at `idx`, then shrinks the allocation (possibly moving the data
/// back to the inline stack buffer).
///
/// # Panics
/// Panics if `idx >= self.len()`.
pub fn remove_compact(&mut self, idx: usize) -> T {
    let r = self.remove(idx);
    self.optimize_capacity();
    r
}
#[inline(always)]
/// Remove the element at `idx` without a bounds check: the value is moved
/// out with `ptr::read`, the tail is shifted left, and the length shrinks.
///
/// SAFETY: `idx` must be < l
unsafe fn remove_unchecked(&mut self, idx: usize) -> T {
    // UNSAFE(@ohsayan): idx is in range
    let ptr = self.as_mut_ptr().add(idx);
    // UNSAFE(@ohsayan): idx is in range and is valid
    let ret = ptr::read(ptr);
    // shift the tail left by one; the regions overlap, which ptr::copy
    // (memmove semantics) handles correctly
    // UNSAFE(@ohsayan): move all elements to the left
    ptr::copy(ptr.add(1), ptr, self.len() - idx - 1);
    // UNSAFE(@ohsayan): this is our new length
    self.set_len(self.len() - 1);
    ret
}
#[inline(always)]
/// Set the logical length directly.
///
/// SAFETY: the first `len` slots must be initialized and `len` must not
/// exceed the current capacity.
unsafe fn set_len(&mut self, len: usize) {
    self.l = len;
}
}
impl<const N: usize, T> VInline<N, T> {
@ -86,9 +127,9 @@ impl<const N: usize, T> VInline<N, T> {
const ALLOC_MULTIPLIER: usize = 2;
const _ENSURE_ALIGN: () =
debug_assert!(mem::align_of::<Vec<String>>() == mem::align_of::<VInline<N, String>>());
#[cfg(test)]
fn will_be_on_stack(&self) -> bool {
N >= self.l + 1
#[inline(always)]
fn on_heap(&self) -> bool {
self.c > N
}
#[inline(always)]
fn on_stack(&self) -> bool {
@ -146,6 +187,39 @@ impl<const N: usize, T> VInline<N, T> {
p as *mut T
}
}
/// Write `v` into the first free slot WITHOUT growing and WITHOUT bumping
/// the length — the caller (see `push`) must have ensured spare capacity
/// and must increment `l` itself afterwards.
///
/// SAFETY: requires `self.l < self.capacity()`.
unsafe fn push_unchecked(&mut self, v: T) {
    self._as_mut_ptr().add(self.l).write(v);
}
/// Shrink the backing storage to fit the current length.
///
/// - No-op when the data is already inline, or the heap block is exactly full.
/// - If the elements now fit inline (`l <= N`), they are moved back onto the
///   stack and the heap block is freed.
/// - Otherwise the heap block is reallocated to exactly `len()` slots.
pub fn optimize_capacity(&mut self) {
    if self.on_stack() || self.len() == self.capacity() {
        return;
    }
    if self.l <= N {
        unsafe {
            // UNSAFE(@ohsayan): non-null heap
            self.mv_to_stack();
        }
    } else {
        // still too large for the inline buffer: move into a tight heap block
        let nb = Self::alloc_block(self.len());
        unsafe {
            // UNSAFE(@ohsayan): nonov; non-null
            ptr::copy_nonoverlapping(self.d.h, nb, self.len());
            // UNSAFE(@ohsayan): non-null heap — freed BEFORE self.c is
            // updated, so dealloc_heap sees the layout it was allocated with
            self.dealloc_heap(self.d.h);
        }
        self.d.h = nb;
        self.c = self.len();
    }
}
/// Move all elements from the heap block back into the inline stack buffer
/// and free the heap block; capacity becomes `N`. Length is unchanged.
///
/// SAFETY: (1) non-null heap
unsafe fn mv_to_stack(&mut self) {
    let heap = self.d.h;
    // the heap block and the inline buffer can never overlap, so the
    // cheaper non-overlapping copy is sound here
    // UNSAFE(@ohsayan): nonov; non-null (stack lol)
    ptr::copy_nonoverlapping(self.d.h, (&mut self.d).s.as_mut_ptr() as *mut T, self.len());
    // UNSAFE(@ohsayan): non-null heap — self.c still holds the heap
    // capacity, so the dealloc layout matches the allocation
    self.dealloc_heap(heap);
    self.c = N;
}
#[inline]
fn grow(&mut self) {
if !(self.l == self.capacity()) {
@ -165,15 +239,15 @@ impl<const N: usize, T> VInline<N, T> {
// UNSAFE(@ohsayan): non-null; valid len
ptr::copy_nonoverlapping(self.d.h.cast_const(), nb, self.l);
// UNSAFE(@ohsayan): non-null heap
self.dealloc_heap();
self.dealloc_heap(self.d.h);
}
}
self.d.h = nb;
self.c = nc;
}
#[inline(always)]
unsafe fn dealloc_heap(&mut self) {
dealloc(self.d.h as *mut u8, Self::layout(self.capacity()))
unsafe fn dealloc_heap(&mut self, heap: *mut T) {
dealloc(heap as *mut u8, Self::layout(self.capacity()))
}
}
@ -197,7 +271,7 @@ impl<const N: usize, T> Drop for VInline<N, T> {
ptr::drop_in_place(self._as_slice_mut());
if !self.on_stack() {
// UNSAFE(@ohsayan): non-null heap
self.dealloc_heap();
self.dealloc_heap(self.d.h);
}
}
}
@ -290,12 +364,37 @@ impl<const N: usize, T> UArray<N, T> {
panic!("stack,capof");
}
unsafe {
// UNSAFE(@ohsayan): verified correct offsets (N)
self.a.as_mut_ptr().add(self.l).write(MaybeUninit::new(v));
// UNSAFE(@ohsayan): all G since l =< N
self.incr_len();
// UNSAFE(@ohsayan): verified length is smaller
self.push_unchecked(v);
}
}
/// Remove and return the element at `idx`, shifting later elements left.
///
/// # Panics
/// Panics if `idx >= self.len()`, with a message naming both values.
pub fn remove(&mut self, idx: usize) -> T {
    if idx >= self.len() {
        panic!("out of range. idx is `{idx}` but len is `{}`", self.len());
    }
    unsafe {
        // UNSAFE(@ohsayan): verified idx < l
        self.remove_unchecked(idx)
    }
}
/// Remove the element at `idx` without a bounds check: move the value out,
/// shift the tail left, and shrink the length.
///
/// SAFETY: idx < self.l
unsafe fn remove_unchecked(&mut self, idx: usize) -> T {
    // UNSAFE(@ohsayan): Verified idx
    let target = self.a.as_mut_ptr().add(idx).cast::<T>();
    // UNSAFE(@ohsayan): Verified idx — moves ownership out of the slot
    let ret = ptr::read(target);
    // UNSAFE(@ohsayan): ov; not-null; correct len
    ptr::copy(target.add(1), target, self.len() - idx - 1);
    // FIX: the length was never decremented, so the now-duplicated last slot
    // stayed "live" — it would be observed through the slice views and
    // dropped a second time in Drop. Mirror VInline::remove_unchecked.
    self.l -= 1;
    ret
}
#[inline(always)]
/// Write `v` into slot `l` (wrapped in `MaybeUninit`) and bump the length;
/// no capacity check is performed.
///
/// SAFETY: self.l < N
unsafe fn push_unchecked(&mut self, v: T) {
    // UNSAFE(@ohsayan): verified correct offsets (N)
    self.a.as_mut_ptr().add(self.l).write(MaybeUninit::new(v));
    // UNSAFE(@ohsayan): all G since l =< N
    self.incr_len();
}
pub fn as_slice(&self) -> &[T] {
unsafe {
// UNSAFE(@ohsayan): ptr is always valid and len is correct, due to push impl

@ -28,40 +28,104 @@ use super::*;
mod vinline {
use super::*;
const CAP: usize = 512;
const CAP: usize = 8;
#[test]
fn vinline_drop_empty() {
let array = VInline::<CAP, String>::new();
drop(array);
fn drop_empty() {
let vi = VInline::<CAP, String>::new();
drop(vi);
}
/// This will:
/// - returns an array [0..upto]
/// - verify length
/// - verify payload
/// - verify capacity (if upto <= CAP)
/// - verify stack/heap logic
fn mkvi(upto: usize) -> VInline<CAP, usize> {
let r: VInline<CAP, _> = (0..upto).into_iter().collect();
assert_eq!(r.len(), upto);
if upto <= CAP {
assert_eq!(r.capacity(), CAP);
assert!(r.on_stack());
} else {
assert!(r.on_heap());
}
assert!((0..upto)
.into_iter()
.zip(r.iter())
.all(|(x, y)| { x == *y }));
r
}
#[test]
fn push_on_stack() {
let vi = mkvi(CAP);
assert!(vi.on_stack());
}
#[test]
fn push_on_heap() {
let vi = mkvi(CAP + 1);
assert_eq!(vi.capacity(), CAP * 2);
}
#[test]
fn remove_on_stack() {
let mut vi = mkvi(CAP);
assert_eq!(vi.remove(6), 6);
assert_eq!(vi.len(), CAP - 1);
assert_eq!(vi.capacity(), CAP);
assert_eq!(vi.as_ref(), [0, 1, 2, 3, 4, 5, 7]);
}
#[test]
fn remove_on_heap() {
let mut vi = mkvi(CAP + 1);
assert_eq!(vi.remove(6), 6);
assert_eq!(vi.len(), CAP);
assert_eq!(vi.capacity(), CAP * 2);
assert_eq!(vi.as_ref(), [0, 1, 2, 3, 4, 5, 7, 8]);
}
#[test]
fn optimize_capacity_none_on_stack() {
let mut vi = mkvi(CAP);
vi.optimize_capacity();
assert_eq!(vi.capacity(), CAP);
assert!(vi.on_stack());
}
#[test]
fn optimize_capacity_none_on_heap() {
let mut vi = mkvi(CAP + 1);
assert_eq!(vi.capacity(), CAP * 2);
vi.extend((CAP + 1..CAP * 2).into_iter());
assert_eq!(vi.capacity(), CAP * 2);
vi.optimize_capacity();
assert_eq!(vi.capacity(), CAP * 2);
}
#[test]
fn optimize_capacity_on_heap() {
let mut vi = mkvi(CAP + 1);
assert_eq!(vi.capacity(), CAP * 2);
vi.optimize_capacity();
assert_eq!(vi.capacity(), CAP + 1);
}
#[test]
fn optimize_capacity_mv_stack() {
let mut vi = mkvi(CAP + 1);
assert_eq!(vi.capacity(), CAP * 2);
let _ = vi.remove_compact(0);
assert_eq!(vi.len(), CAP);
assert_eq!(vi.capacity(), CAP);
assert!(vi.on_stack());
}
#[test]
fn clear_stack() {
let mut vi = mkvi(CAP);
vi.clear();
assert_eq!(vi.capacity(), CAP);
assert_eq!(vi.len(), 0);
}
#[test]
fn vinline_test() {
// first alloc on stack
let mut array = VInline::<CAP, String>::new();
(0..CAP).for_each(|i| array.push(format!("elem-{i}")));
// check meta methods
debug_assert!(array.on_stack());
debug_assert!(!array.will_be_on_stack());
// now iterate
array
.iter()
.enumerate()
.for_each(|(i, elem)| assert_eq!(elem, format!("elem-{i}").as_str()));
// now iter_mut
array
.iter_mut()
.enumerate()
.for_each(|(i, st)| *st = format!("elem-{}", i + 1));
// now let's get off the stack
(0..10).for_each(|i| array.push(format!("elem-{}", CAP + i + 1)));
// verify all elements
array
.iter()
.enumerate()
.for_each(|(i, st)| assert_eq!(st, format!("elem-{}", i + 1).as_str()));
debug_assert!(!array.on_stack());
debug_assert!(!array.will_be_on_stack());
fn clear_heap() {
let mut vi = mkvi(CAP + 1);
vi.clear();
assert_eq!(vi.capacity(), CAP * 2);
assert_eq!(vi.len(), 0);
}
}

Loading…
Cancel
Save