// snippet of code @ 2019-01-03 09:34:24
// === Rust Playground ===
// This snippet is in: ~/.emacs.d/rust-playground/at-2019-01-03-093424/
// Execute the snippet: C-c C-c
// Delete the snippet completely: C-c k
// Toggle between main.rs and Cargo.toml: C-c b
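//
// ChunkyVec<T>: a growable sequence that stores its elements in fixed-size
// heap chunks and keeps a "map" of chunk pointers, in the style of C++'s
// std::deque. Pushing never moves existing elements; indexing costs one
// division to find the chunk plus an offset within it.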
use std::{
    alloc::{alloc, dealloc, handle_alloc_error, Layout},
    marker,
    mem,
    ops,
    ptr,
};
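// Pointer difference in units of T (what `offset_from` provides on newer
// compilers). Zero-sized T is rejected by the assert below.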
trait PtrDiff: Sized {
    fn diff(self, other: Self) -> isize;
}

impl<T> PtrDiff for *const T {
    fn diff(self, other: Self) -> isize {
        let pointee_size = mem::size_of::<T>();
        assert!(0 < pointee_size && pointee_size <= isize::max_value() as usize);
        let d = isize::wrapping_sub(self as _, other as _);
        d / pointee_size as isize
    }
}
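// Each chunk is 64 KiB, giving CHUNK_SIZE_IN_BYTES / size_of::<T>() elements
// per chunk (or a single element when T is larger than a chunk). The map
// starts with room for eight chunk pointers. Zero-sized T would divide by
// zero here and is not supported.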
const INITIAL_MAP_SIZE: usize = 8;
const CHUNK_SIZE_IN_BYTES: usize = 64 * 1024;

fn chunk_size(size: usize) -> usize {
    if size < CHUNK_SIZE_IN_BYTES {
        CHUNK_SIZE_IN_BYTES / size
    } else {
        1
    }
}

fn chunk_size_align<T>() -> (usize, usize) {
    (CHUNK_SIZE_IN_BYTES, mem::align_of::<T>())
}
#[inline]
unsafe fn construct<T>(ptr: *mut T, value: T) {
    ptr::write(ptr, value);
}
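// Internal state: `map` is the array of chunk pointers, `chunk` points at the
// map slot for the chunk currently being filled, `end` is one past the last
// element in that chunk, and `cap` is one past the last slot of that chunk.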
struct Inner<T> {
    map: Box<[*const T]>,
    end: *const T,
    cap: *const T,
    chunk: *const *const T,
}

impl<T> Inner<T> {
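    // Allocate the map and enough chunks to hold `num_elements`, then position
    // `chunk`/`end`/`cap` at the last chunk.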
    fn initialize_map(num_elements: usize) -> Self {
        let num_chunks = num_elements / Self::chunk_size() + 1;
        let map_size = num_chunks.max(INITIAL_MAP_SIZE);
        let mut map = Self::allocate_map(map_size);
        let start = map.as_mut_ptr();
        unsafe {
            let finish = start.add(num_chunks);
            Self::create_chunks(start, finish);
            let mut inner = Inner { map, end: ptr::null(), cap: ptr::null(), chunk: ptr::null() };
            inner.set_chunk(finish.offset(-1));
            inner.end = (*inner.chunk).add(num_elements % Self::chunk_size());
            inner
        }
    }
    fn chunk_size() -> usize {
        chunk_size(mem::size_of::<T>())
    }

    unsafe fn set_chunk(&mut self, new_chunk: *const *const T) {
        self.chunk = new_chunk;
        self.cap = (*self.chunk).add(Self::chunk_size());
    }

    #[inline(never)]
    #[cold]
    unsafe fn create_chunks(mut start: *const *const T, end: *const *const T) {
        while start < end {
            construct(start as *mut _, Self::allocate_chunk());
            start = start.add(1);
        }
    }
    #[inline(never)]
    #[cold]
    fn destroy_chunks(map: &mut [*const T]) {
        for cur in map {
            // Map slots past the last allocated chunk stay null; skip them.
            if !cur.is_null() {
                Self::deallocate_chunk(*cur);
            }
        }
    }
    #[inline(never)]
    #[cold]
    fn allocate_chunk() -> *const T {
        let (size, align) = chunk_size_align::<T>();
        unsafe {
            let layout = Layout::from_size_align_unchecked(size, align);
            let ptr = alloc(layout);
            if ptr.is_null() {
                // Abort on allocation failure instead of handing out a null chunk.
                handle_alloc_error(layout);
            }
            ptr as _
        }
    }
    #[inline(never)]
    #[cold]
    fn deallocate_chunk(ptr: *const T) {
        let (size, align) = chunk_size_align::<T>();
        unsafe {
            dealloc(ptr as _, Layout::from_size_align_unchecked(size, align));
        }
    }
    #[inline(never)]
    #[cold]
    fn allocate_map(n: usize) -> Box<[*const T]> {
        // Null-initialize every slot so Drop can tell allocated chunks from
        // unused map entries.
        vec![ptr::null(); n].into_boxed_slice()
    }
    #[inline(never)]
    #[cold]
    fn reallocate_map(&mut self, chunks_to_add: usize) {
        let new_map_size = self.map.len() + self.map.len().max(chunks_to_add);
        let chunk_index = self.chunk.diff(self.map.as_ptr());
        let mut new_map = Self::allocate_map(new_map_size);
        unsafe {
            ptr::copy_nonoverlapping(self.map.as_ptr(), new_map.as_mut_ptr(), self.map.len());
            self.map = new_map;
            // Rebase `chunk` into the new map; the old map allocation is freed
            // when the old Box is replaced above.
            self.chunk = self.map.as_ptr().offset(chunk_index);
        }
    }
}

impl<T> Drop for Inner<T> {
    // Frees the raw chunk allocations only; the elements themselves are never
    // dropped here.
    fn drop(&mut self) {
        Self::destroy_chunks(&mut self.map)
    }
}
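// A cursor over the chunked storage: `ptr` is the current element and `chunk`
// is the map slot it lives in. Adding an offset steps within the chunk on the
// fast path and hops across chunks otherwise.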
#[derive(Clone, Copy)]
struct Ptr<'a, T: 'a> {
    ptr: *const T,
    chunk: *const *const T,
    _marker: marker::PhantomData<&'a T>,
}
impl<'a, T> ops::AddAssign<isize> for Ptr<'a, T> {
    fn add_assign(&mut self, other: isize) {
        let chunk_size = chunk_size(mem::size_of::<T>()) as isize;
        unsafe {
            // Index of the target element relative to the start of the current chunk.
            let offset = self.ptr.diff(*self.chunk);
            let offset = offset.wrapping_add(other);
            if 0 <= offset && offset < chunk_size {
                // Still inside the current chunk: just advance the element pointer.
                self.ptr = self.ptr.offset(other);
            } else {
                // Hop to the chunk containing the target, rounding towards
                // negative infinity for backwards moves.
                let chunk_offset = if offset > 0 {
                    offset / chunk_size
                } else {
                    -((-offset - 1) / chunk_size) - 1
                };
                self.chunk = self.chunk.offset(chunk_offset);
                self.ptr = (*self.chunk).offset(offset - chunk_offset * chunk_size);
            }
        }
    }
}
impl<'a, T> ops::Add<isize> for Ptr<'a, T> {
    type Output = Ptr<'a, T>;

    fn add(mut self, other: isize) -> Self::Output {
        self += other;
        self
    }
}

impl<'a, T> ops::Deref for Ptr<'a, T> {
    type Target = T;

    fn deref(&self) -> &T {
        unsafe { &*self.ptr }
    }
}
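// The public type: a Vec-like container with stable element addresses, since
// chunks are never reallocated once created.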
pub struct ChunkyVec<T> {
    inner: Inner<T>,
}

impl<T> ChunkyVec<T> {
    pub fn new() -> Self {
        ChunkyVec {
            inner: Inner::initialize_map(0),
        }
    }
    pub fn len(&self) -> usize {
        Inner::<T>::chunk_size() * self.inner.chunk.diff(self.inner.map.as_ptr()) as usize +
            self.inner.end.diff(unsafe { *self.inner.chunk }) as usize
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    fn begin(&self) -> Ptr<T> {
        Ptr {
            ptr: self.inner.map[0],
            chunk: self.inner.map.as_ptr(),
            _marker: marker::PhantomData,
        }
    }
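    // Fast path: write into the current chunk while at least one slot beyond
    // `end` remains. The cold path fills the last slot, then allocates and
    // switches to a fresh chunk.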
    pub fn push(&mut self, value: T) {
        unsafe {
            if self.inner.end != self.inner.cap.sub(1) {
                construct(self.inner.end as _, value);
                self.inner.end = self.inner.end.add(1);
            } else {
                self.push_aux(value);
            }
        }
    }

    #[inline(never)]
    #[cold]
    fn push_aux(&mut self, value: T) {
        self.reserve_map(1);
        unsafe {
            let new_chunk = self.inner.chunk.add(1);
            construct(new_chunk as *mut _, Inner::<T>::allocate_chunk());
            construct(self.inner.end as *mut _, value);
            self.inner.set_chunk(new_chunk);
            self.inner.end = *self.inner.chunk;
        }
    }
    #[inline(never)]
    #[cold]
    fn reserve_map(&mut self, chunks_to_add: usize) {
        if chunks_to_add + 1 > self.inner.map.len() -
            (self.inner.chunk.diff(self.inner.map.as_ptr())) as usize {
            self.inner.reallocate_map(chunks_to_add);
        }
    }
}
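// Indexing walks from the very first element: one division to locate the
// chunk, then an offset within it. Bounds are checked against `len`.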
impl<T> ops::Index<usize> for ChunkyVec<T> {
    type Output = T;

    fn index(&self, index: usize) -> &Self::Output {
        assert!(index < self.len());
        assert!(index < isize::max_value() as usize);
        let ptr = self.begin() + index as isize;
        unsafe { &*ptr.ptr }
    }
}
/*
impl<T> ops::IndexMut<usize> for ChunkyVec<T> {
    fn index_mut(&mut self, index: usize) -> &mut Self::Output {
        assert!(index < isize::max_value() as usize);
        let ptr = self.begin() + index as isize;
        &mut *ptr
    }
}
*/
fn main() {
    struct Foo([u32; 512]);

    let v = ChunkyVec::<Foo>::new();
    assert_eq!(v.len(), 0);
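
    // A quick exercise of push and indexing (relies on the `Index` impl fix
    // above). `Foo` is 2 KiB, so a 64 KiB chunk holds 32 elements and 40
    // pushes spill into a second chunk.
    let mut w = ChunkyVec::<Foo>::new();
    for i in 0..40u32 {
        w.push(Foo([i; 512]));
    }
    assert_eq!(w.len(), 40);
    assert_eq!(w[39].0[0], 39);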
} |