use core::{
    alloc::Layout,
    ptr::{self, NonNull},
    sync::atomic::{AtomicPtr, Ordering},
};

/// The set of free memory blocks.
///
/// It operates by linking deallocated blocks together in a list, using the
/// first word of each free block as a pointer to the next, while untouched
/// memory is handed out by bumping the `uninit` pointer.
pub struct Pool {
    /// Block size. Doesn't change at run-time.
    size: usize,
    /// Address of the byte past the last element. Doesn't change at
    /// run-time.
    edge: *mut u8,
    /// Head of the free list of deallocated blocks.
    free: AtomicPtr<u8>,
    /// Pointer growing from the starting address until it reaches `edge`.
    uninit: AtomicPtr<u8>,
}

// SAFETY: all mutable state of `Pool` is accessed through atomic operations,
// so sharing it between threads is sound.
unsafe impl Sync for Pool {}
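// An illustrative sketch (not in the original source) of the free list after
// blocks A and then B have been deallocated. Each free block's first word
// stores the address of the next free block, so the list needs no side
// storage:
//
//     free ─▶ B[next] ─▶ A[next] ─▶ null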

impl Pool {
    /// Creates a new `Pool` managing `capacity` blocks of `size` bytes each,
    /// starting at `address`.
    pub const fn new(address: usize, size: usize, capacity: usize) -> Self {
        Self {
            size,
            edge: (address + size * capacity) as *mut u8,
            free: AtomicPtr::new(ptr::null_mut()),
            uninit: AtomicPtr::new(address as *mut u8),
        }
    }
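
    // An illustrative sketch (not from the original source): since `new` is
    // a `const fn`, a pool can be placed in a `static`, typically over a
    // linker-provided memory region. The address below is a made-up
    // placeholder:
    //
    //     static POOL: Pool = Pool::new(0x2000_0000, 8, 64);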

    /// Returns the block size.
    #[inline]
    pub fn size(&self) -> usize {
        self.size
    }

    /// Allocates one block of memory.
    ///
    /// If this method returns `Some(addr)`, then `addr` is a non-null
    /// address pointing to the block. If this method returns `None`, then
    /// the pool is exhausted.
    ///
    /// This operation is lock-free and has *O(1)* time complexity.
    pub fn allocate(&self) -> Option<NonNull<u8>> {
        unsafe { self.alloc_free().or_else(|| self.alloc_uninit()) }
    }

    /// Deallocates the block referenced by `ptr`.
    ///
    /// This operation is lock-free and has *O(1)* time complexity.
    ///
    /// # Safety
    ///
    /// * `ptr` must point to a block previously allocated by
    ///   [`allocate`](Pool::allocate).
    /// * `ptr` must not be used after deallocation.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn deallocate(&self, ptr: NonNull<u8>) {
        loop {
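            // Push the block onto the free list: store the current head in
            // the block's first word, then try to CAS the head to the block.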
            let curr = self.free.load(Ordering::Acquire);
            unsafe { ptr::write(ptr.as_ptr().cast::<*mut u8>(), curr) };
            let next = ptr.as_ptr();
            if self
                .free
                .compare_exchange_weak(curr, next, Ordering::AcqRel, Ordering::Acquire)
                .is_ok()
            {
                break;
            }
        }
    }

    #[allow(clippy::cast_ptr_alignment)]
    unsafe fn alloc_free(&self) -> Option<NonNull<u8>> {
        loop {
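            // Lock-free pop from the free list: read the head, load the next
            // pointer from the head block's first word, then CAS the head
            // forward.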
            let curr = self.free.load(Ordering::Acquire);
            if curr.is_null() {
                break None;
            }
            let next = unsafe { ptr::read(curr.cast::<*mut u8>()) };
            if self
                .free
                .compare_exchange_weak(curr, next, Ordering::AcqRel, Ordering::Acquire)
                .is_ok()
            {
                break Some(unsafe { NonNull::new_unchecked(curr) });
            }
        }
    }

    unsafe fn alloc_uninit(&self) -> Option<NonNull<u8>> {
        loop {
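            // Lock-free bump allocation: claim the current `uninit` pointer
            // by advancing it one block, until it reaches `edge`.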
            let curr = self.uninit.load(Ordering::Relaxed);
            if curr == self.edge {
                break None;
            }
            let next = unsafe { curr.add(self.size) };
            if self
                .uninit
                .compare_exchange_weak(curr, next, Ordering::Relaxed, Ordering::Relaxed)
                .is_ok()
            {
                break Some(unsafe { NonNull::new_unchecked(curr) });
            }
        }
    }
}

/// A trait for checking whether a value fits a particular [`Pool`].
pub trait Fits: Copy {
    /// Returns `true` if `self` fits `pool`.
    fn fits(self, pool: &Pool) -> bool;
}

impl<'a> Fits for &'a Layout {
    /// A layout fits if the requested size doesn't exceed the block size.
    #[inline]
    fn fits(self, pool: &Pool) -> bool {
        self.size() <= pool.size
    }
}

impl Fits for NonNull<u8> {
    /// A pointer fits if it lies below the pool's `edge`, i.e. the byte past
    /// the pool's last block.
    #[inline]
    fn fits(self, pool: &Pool) -> bool {
        self.as_ptr() < pool.edge
    }
}
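
// A minimal test sketch (not part of the original file); the block size,
// capacity, and alignment below are illustrative assumptions.
#[cfg(test)]
mod tests {
    use super::*;

    /// Backing storage aligned so each block can hold a `*mut u8` in its
    /// first word.
    #[repr(align(8))]
    struct Memory([u8; 32]);

    #[test]
    fn alloc_dealloc_roundtrip() {
        let mut memory = Memory([0; 32]);
        let pool = Pool::new(&mut memory as *mut Memory as usize, 8, 4);
        // Drain the uninitialized region: exactly `capacity` blocks fit.
        let first = pool.allocate().unwrap();
        for _ in 0..3 {
            assert!(pool.allocate().is_some());
        }
        assert!(pool.allocate().is_none());
        // A freed block is handed out again (LIFO order via the free list).
        unsafe { pool.deallocate(first) };
        assert_eq!(pool.allocate(), Some(first));
    }
}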