Implement Bitmap with bidirectional linked list

This commit is contained in:
Stjepan Glavina 2016-10-24 19:49:52 +02:00
parent fe7eee7359
commit f56b289659
2 changed files with 232 additions and 131 deletions

View File

@ -32,26 +32,25 @@ impl<T> Splay<T> where T: Ord + Eq + Clone {
}
fn rotate(&mut self, a: usize, b: usize) {
let x = &mut self.arena;
let p = x[a].parent;
let p = self.arena[a].parent;
let dir = if x[a].children[0] == b { 0 } else { 1 };
let t = x[b].children[dir ^ 1];
let dir = if self.arena[a].children[0] == b { 0 } else { 1 };
let t = self.arena[b].children[dir ^ 1];
x[a].children[dir] = t;
self.arena[a].children[dir] = t;
if t != !0 {
x[t].parent = a;
self.arena[t].parent = a;
}
x[b].children[dir ^ 1] = a;
x[a].parent = b;
self.arena[b].children[dir ^ 1] = a;
self.arena[a].parent = b;
if p == !0 {
self.root = b;
x[b].parent = !0;
self.arena[b].parent = !0;
} else {
let dir = if x[p].children[0] == a { 0 } else { 1 };
x[p].children[dir] = b;
x[b].parent = p;
let dir = if self.arena[p].children[0] == a { 0 } else { 1 };
self.arena[p].children[dir] = b;
self.arena[b].parent = p;
}
}
@ -82,7 +81,7 @@ impl<T> Splay<T> where T: Ord + Eq + Clone {
}
fn insert(&mut self, value: T) {
let node = self.arena.push(Node::new(value));
let node = self.arena.insert(Node::new(value));
if self.root == !0 {
self.root = node;

View File

@ -5,38 +5,165 @@ use std::ptr;
// TODO: Handle ZST differently
// TODO: Test ZST, make a ZST implementing Drop
// TODO: check for overflow
#[inline(always)]
fn bits() -> usize {
std::mem::size_of::<usize>() * 8
}
// TODO: Move into VecArena?
#[inline(always)]
fn num_blocks(cap: usize) -> usize {
(cap + bits() - 1) / bits()
// Occupancy bitmap backing `VecArena` (see its `bitmap` field). A single raw
// allocation of `3 * blocks` usize words holds three consecutive regions:
// occupancy mask words, then `next` links, then `prev` links of an intrusive
// doubly-linked list threading together blocks that still have free bits.
// `!0` is used as the null/none sentinel throughout.
struct Bitmap {
// Base pointer of the one `3 * blocks`-word allocation (masks|next|prev).
data: *mut usize,
// Number of slots tracked, in bits (allocate checks `index < cap`).
cap: usize,
// Number of mask words; each word covers `bits()` slots.
blocks: usize,
// First block on the free-block list, or `!0` when the list is empty.
head: usize,
}
impl Bitmap {
// An empty bitmap: no blocks, empty free list. The pointer comes from a
// zero-capacity Vec (dangling but properly aligned), so it must not be
// dereferenced until `resize` installs a real allocation.
fn new() -> Self {
let data = {
let mut v = Vec::with_capacity(0);
let ptr = v.as_mut_ptr();
mem::forget(v);
ptr
};
Bitmap {
data: data,
cap: 0,
blocks: 0,
head: !0,
}
}
// Pointer to the occupancy mask word of block `b` (region [0, blocks)).
// Unsafe: `b` is not bounds-checked.
#[inline(always)]
unsafe fn mask(&self, b: usize) -> *mut usize {
self.data.offset(b as isize)
}
// Pointer to block `b`'s `next` link (second region, offset `blocks`).
#[inline(always)]
unsafe fn next(&self, b: usize) -> *mut usize {
self.data.offset((self.blocks + b) as isize)
}
// Pointer to block `b`'s `prev` link (third region, offset `2 * blocks`).
#[inline(always)]
unsafe fn prev(&self, b: usize) -> *mut usize {
self.data.offset((2 * self.blocks + b) as isize)
}
// Claims the lowest free bit of a block with spare capacity and returns its
// slot index. Uses the head of the free-block list, falling back to the very
// last block when the list is empty (the last block is apparently kept off
// the list by design — see `resize`). Panics if that fallback block is full,
// i.e. the caller must grow before allocating at full capacity.
fn allocate(&mut self) -> usize {
assert!(self.blocks > 0);
let b = if self.head == !0 {
self.blocks - 1
} else {
self.head
};
unsafe {
// Lowest clear bit: invert the mask and count trailing zeros.
let i = {
let mask = *self.mask(b);
if self.head == !0 {
assert!(mask != !0);
}
(!mask).trailing_zeros() as usize
};
let index = b * bits() + i;
// NOTE(review): `0 <= index` / `0 <= i` are trivially true for usize;
// only the upper bounds are meaningful checks here.
debug_assert!(0 <= index && index < self.cap);
debug_assert!(0 <= i && i < bits());
debug_assert!(*self.mask(b) >> i & 1 == 0);
*self.mask(b) |= 1 << i;
// If the block just became full and it was the list head, unlink it so
// future allocations don't scan a full block. (`b` is rebound to the
// successor here.)
if *self.mask(b) == !0 && self.head == b {
let b = *self.next(b);
if b != !0 {
*self.prev(b) = !0;
}
self.head = b;
}
index
}
}
// Grows the bitmap to cover `cap` slots; shrinking is rejected by the
// asserts. Unsafe: relies on `data`/`blocks` describing a live allocation
// once `blocks > 0`.
unsafe fn resize(&mut self, cap: usize) {
assert!(self.cap <= cap);
self.cap = cap;
let new_blocks = (cap + bits() - 1) / bits();
assert!(self.blocks <= new_blocks);
let diff = new_blocks - self.blocks;
if diff == 0 {
return;
}
// The old last block is about to stop being last; if it still has free
// bits, push it onto the free list (done on the OLD buffer, before the
// swap below).
// NOTE(review): assumes the old last block is not already on the list —
// the invariant implied by allocate's `head == !0` fallback; otherwise
// this would self-link. Verify.
if self.blocks > 0 && *self.mask(self.blocks - 1) != !0 {
*self.next(self.blocks - 1) = self.head;
*self.prev(self.blocks - 1) = !0;
self.head = self.blocks - 1;
}
let new_data = {
let mut v = Vec::with_capacity(3 * new_blocks);
let ptr = v.as_mut_ptr();
mem::forget(v);
ptr
};
// Copy each of the three regions separately: old stride is `blocks`,
// new stride is `new_blocks`.
for i in 0..3 {
ptr::copy_nonoverlapping(
self.data.offset((self.blocks * i) as isize),
new_data.offset((new_blocks * i) as isize),
self.blocks);
}
// Free the old buffer by rebuilding a len-0 Vec over it (no element drops).
Vec::from_raw_parts(self.data, 0, 3 * self.blocks);
// Zero only the newly added MASK words; the new blocks' next/prev links
// are written below (or, for `diff == 1`, left unwritten — that single new
// block becomes the off-list last block used as allocate's fallback).
ptr::write_bytes(new_data.offset(self.blocks as isize), 0, new_blocks - self.blocks);
let old_blocks = self.blocks;
self.data = new_data;
self.blocks = new_blocks;
// Thread all brand-new blocks EXCEPT the new last one onto the front of
// the free list: old_blocks <-> old_blocks+1 <-> ... <-> new_blocks-2 -> old head.
if diff >= 2 {
for b in old_blocks .. new_blocks - 2 {
*self.next(b) = b + 1;
}
*self.next(new_blocks - 2) = self.head;
for b in old_blocks + 1 .. new_blocks - 1 {
*self.prev(b) = b - 1;
}
*self.prev(old_blocks) = !0;
if self.head != !0 {
*self.prev(self.head) = new_blocks - 2;
}
self.head = old_blocks;
}
}
// Whether slot `index` is currently marked allocated.
// Unsafe: `index` is not bounds-checked against `cap`.
#[inline]
unsafe fn is_allocated(&self, index: usize) -> bool {
let b = index / bits();
let i = index % bits();
unsafe {
*self.mask(b) >> i & 1 != 0
}
}
}
// NOTE(review): no `Drop` impl for `Bitmap` is visible in this chunk; the final
// `data` buffer appears to be freed only during `resize` — confirm it is not
// leaked when the owning `VecArena` is dropped.
pub struct VecArena<T> {
elems: *const T,
meta: *mut usize,
cap: usize,
head: usize,
count: usize,
cap: usize,
bitmap: Bitmap,
marker: PhantomData<T>,
}
impl<T> VecArena<T> {
#[inline(always)]
unsafe fn get_alive(&self, block: usize) -> *mut usize {
self.meta.offset(block as isize)
}
#[inline(always)]
unsafe fn get_next(&self, block: usize) -> *mut usize {
self.meta.offset((num_blocks(self.cap) + block) as isize)
}
pub fn new() -> Self {
let elems = {
let mut v = Vec::with_capacity(0);
@ -44,111 +171,76 @@ impl<T> VecArena<T> {
mem::forget(v);
ptr
};
let meta = {
let mut v = Vec::with_capacity(0);
let ptr = v.as_mut_ptr();
mem::forget(v);
ptr
};
VecArena {
elems: elems,
meta: meta,
cap: 0,
head: !0,
count: 0,
cap: 0,
bitmap: Bitmap::new(),
marker: PhantomData,
}
}
pub fn push(&mut self, value: T) -> usize {
pub fn insert(&mut self, value: T) -> usize {
if self.count == self.cap {
let new_cap = if self.cap == 0 { 4 } else { self.cap * 2 };
self.resize(new_cap);
}
unsafe {
if self.count == self.cap {
self.grow();
}
while self.head != !0 && *self.get_alive(self.head) == !0 {
self.head = *self.get_next(self.head);
}
if self.head == !0 {
self.grow();
}
let i = (!*self.get_alive(self.head)).trailing_zeros() as usize;
let index = self.head * bits() + i;
unsafe {
ptr::write(self.elems.offset(index as isize) as *mut T, value);
}
let block = self.head;
*self.get_alive(block) |= 1 << i;
let index = self.bitmap.allocate();
ptr::write(self.elems.offset(index as isize) as *mut T, value);
self.count += 1;
index
}
}
pub fn take(&mut self, index: usize) -> T {
self.validate_index(index);
let b = index / bits();
let i = index % bits();
// pub fn remove(&mut self, index: usize) -> T {
// self.validate_index(index);
//
// let b = index / bits();
// let i = index % bits();
//
// unsafe {
// self.count -= 1;
// *self.get_alive(b) ^= 1 << i;
//
// if *self.get_alive(b) == 0 {
// *self.get_next(b) = self.head;
// self.head = b;
// }
//
// ptr::read(self.elems.offset(index as isize) as *mut T)
// }
// }
#[cold]
fn resize(&mut self, new_cap: usize) {
unsafe {
self.count -= 1;
*self.get_alive(b) ^= 1 << i;
let new_elems = {
let mut v = Vec::with_capacity(new_cap);
let ptr = v.as_mut_ptr();
mem::forget(v);
ptr
};
ptr::copy_nonoverlapping(self.elems, new_elems, self.cap);
Vec::from_raw_parts(self.elems as *mut T, 0, self.cap);
if *self.get_alive(b) == 0 {
*self.get_next(b) = self.head;
self.head = b;
}
ptr::read(self.elems.offset(index as isize) as *mut T)
self.elems = new_elems;
self.cap = new_cap;
self.bitmap.resize(new_cap);
}
}
unsafe fn grow(&mut self) {
let new_cap = if self.cap == 0 { 4 } else { self.cap * 2 };
let blocks = num_blocks(self.cap);
let new_blocks = num_blocks(new_cap);
let new_elems = {
let mut v = Vec::with_capacity(new_cap);
let ptr = v.as_mut_ptr();
mem::forget(v);
ptr
};
ptr::copy_nonoverlapping(self.elems, new_elems, self.cap);
Vec::from_raw_parts(self.elems as *mut T, 0, self.cap);
self.elems = new_elems;
let new_meta = {
let mut v = Vec::from_raw_parts(self.meta, 2 * blocks, 2 * blocks);
v.reserve_exact(new_blocks * 2 - blocks * 2);
let ptr = v.as_mut_ptr();
mem::forget(v);
ptr
};
ptr::write_bytes(new_meta.offset(blocks as isize), 0, new_blocks - blocks);
for i in blocks .. new_blocks {
ptr::write(new_meta.offset((new_blocks + i) as isize), i.wrapping_sub(1));
}
self.meta = new_meta;
self.cap = new_cap;
self.head = new_blocks - 1;
}
#[inline]
fn validate_index(&self, index: usize) {
let b = index / bits();
let i = index % bits();
unsafe {
if index >= self.cap || *self.get_alive(b) >> i & 1 == 0 {
if index >= self.cap || !self.bitmap.is_allocated(index) {
self.panic_invalid_index(index);
}
}
}
#[cold]
#[inline(never)]
fn panic_invalid_index(&self, index: usize) {
if index >= self.cap {
@ -158,27 +250,27 @@ impl<T> VecArena<T> {
}
}
// NOTE(review): this is the pre-Bitmap version of Drop (it reads the `meta`
// buffer and `get_alive`, which the Bitmap-based layout replaces); the
// updated version appears commented out further below.
impl<T> Drop for VecArena<T> {
fn drop(&mut self) {
unsafe {
// Drop every live element: scan each block's occupancy word and
// drop_in_place the slots whose bit is set.
for b in 0 .. num_blocks(self.cap) {
let alive = *self.get_alive(b);
if alive != 0 {
for i in 0 .. bits() {
if alive & (1 << i) != 0 {
let index = b * bits() + i;
ptr::drop_in_place(self.elems.offset(index as isize) as *mut T);
}
}
}
}
// Free the two raw buffers by rebuilding Vecs with len 0 (no element
// drops — the live ones were already dropped above).
let blocks = num_blocks(self.cap);
Vec::from_raw_parts(self.elems as *mut T, 0, self.cap);
Vec::from_raw_parts(self.meta, 0, 2 * blocks);
}
}
}
// impl<T> Drop for VecArena<T> {
// fn drop(&mut self) {
// unsafe {
// for b in 0 .. self.num_blocks() {
// let alive = *self.get_alive(b);
// if alive != 0 {
// for i in 0 .. bits() {
// if alive & (1 << i) != 0 {
// let index = b * bits() + i;
// ptr::drop_in_place(self.elems.offset(index as isize) as *mut T);
// }
// }
// }
// }
//
// let blocks = self.num_blocks();
// Vec::from_raw_parts(self.elems as *mut T, 0, self.cap);
// Vec::from_raw_parts(self.meta, 0, 2 * blocks);
// }
// }
// }
impl<T> Index<usize> for VecArena<T> {
type Output = T;
@ -200,7 +292,11 @@ impl<T> IndexMut<usize> for VecArena<T> {
}
}
// TODO: impl Default
impl<T> Default for VecArena<T> {
fn default() -> Self {
VecArena::new()
}
}
#[cfg(test)]
mod tests {
@ -209,6 +305,12 @@ mod tests {
#[test]
fn it_works() {
let mut arena = VecArena::new();
arena.alloc(1);
for i in 0..10 {
assert_eq!(arena.insert(()), i);
assert!(arena[i] == ());
}
for i in 0..10 {
assert!(arena[i] == ());
}
}
}