Commit a8320da0 authored by Carl Lerche

Lazily allocate the Arc

parent 93c08064
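This commit makes the `Arc` backing a `BytesMut` lazy: the struct now stores a raw `ptr`/`len`/`cap` plus an `UnsafeCell<Option<Arc<Vec<u8>>>>`, the `Arc` is allocated only the first time the buffer is shared (in `shallow_clone`), and `Drop` frees the memory manually when it was never shared. A `BytesMut` that is never split or cloned therefore never pays for the reference count. A minimal standalone sketch of the pattern, using a hypothetical `LazyShared` type rather than the crate's actual `BytesMut` (which tracks `ptr`/`len`/`cap` instead of a `Vec`):

    use std::mem;
    use std::sync::Arc;

    // Hypothetical stand-in for BytesMut: owns its buffer uniquely until
    // the first share, at which point the buffer is promoted to an Arc.
    struct LazyShared {
        data: Vec<u8>,
        arc: Option<Arc<Vec<u8>>>, // None until the first share
    }

    impl LazyShared {
        fn new(data: Vec<u8>) -> LazyShared {
            // No allocation beyond the buffer itself: a fresh handle is unique.
            LazyShared { data: data, arc: None }
        }

        fn share(&mut self) -> Arc<Vec<u8>> {
            // Promote to an Arc on the first share; afterwards a clone
            // only bumps the reference count.
            if self.arc.is_none() {
                self.arc = Some(Arc::new(mem::take(&mut self.data)));
            }
            self.arc.as_ref().unwrap().clone()
        }

        fn is_mut_safe(&mut self) -> bool {
            // Mutation is safe if we never shared, or if we are the only
            // remaining holder of the Arc.
            self.arc
                .as_mut()
                .map(|a| Arc::get_mut(a).is_some())
                .unwrap_or(true)
        }
    }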
 use {IntoBuf, ByteBuf, SliceBuf};
+use std::{cmp, fmt, mem, ops, slice};
 use std::cell::UnsafeCell;
 use std::sync::Arc;
-use std::{cmp, fmt, ops};
 
 /// A reference counted slice of bytes.
 ///
@@ -20,14 +20,17 @@ pub struct Bytes {
 /// A `BytesMut` is a unique handle to a slice of bytes allowing mutation of
 /// the underlying bytes.
 pub struct BytesMut {
-    mem: Mem,
-    pos: usize,
+    // Pointer to the start of the memory owned by this BytesMut
+    ptr: *mut u8,
+
+    // Number of bytes that have been initialized
     len: usize,
+
+    // Total number of bytes owned by this BytesMut
     cap: usize,
-}
 
-struct Mem {
-    inner: Arc<UnsafeCell<Box<[u8]>>>,
+    // If this pointer is set, then the BytesMut is backed by an Arc
+    arc: UnsafeCell<Option<Arc<Vec<u8>>>>,
 }
 
 /*
@@ -61,9 +64,10 @@ impl Bytes {
     pub fn slice(&self, start: usize, end: usize) -> Bytes {
         let mut ret = self.clone();
 
-        ret.inner
-            .set_end(end)
-            .set_start(start);
+        unsafe {
+            ret.inner.set_end(end);
+            ret.inner.set_start(start);
+        }
 
         ret
     }
@@ -113,7 +117,7 @@ impl Bytes {
     /// This will only succeed if there are no other outstanding references to
     /// the underlying chunk of memory.
     pub fn try_mut(mut self) -> Result<BytesMut, Bytes> {
-        if self.inner.mem.is_mut_safe() {
+        if self.inner.is_mut_safe() {
             Ok(self.inner)
         } else {
             Err(self)
@@ -207,12 +211,7 @@ unsafe impl Sync for Bytes {}
 impl BytesMut {
     /// Create a new `BytesMut` with the specified capacity.
     pub fn with_capacity(cap: usize) -> BytesMut {
-        BytesMut {
-            mem: Mem::with_capacity(cap),
-            pos: 0,
-            len: 0,
-            cap: cap,
-        }
+        BytesMut::from(Vec::with_capacity(cap))
     }
 
     /// Creates a new `BytesMut` and copies the given slice into it.
@@ -255,8 +254,10 @@ impl BytesMut {
     pub fn split_off(&mut self, at: usize) -> BytesMut {
         let mut other = self.shallow_clone();
 
-        other.set_start(at);
-        self.set_end(at);
+        unsafe {
+            other.set_start(at);
+            self.set_end(at);
+        }
 
         return other
     }
@@ -275,8 +276,10 @@ impl BytesMut {
     pub fn drain_to(&mut self, at: usize) -> BytesMut {
         let mut other = self.shallow_clone();
 
-        other.set_end(at);
-        self.set_start(at);
+        unsafe {
+            other.set_end(at);
+            self.set_start(at);
+        }
 
         return other
     }
@@ -290,8 +293,7 @@ impl BytesMut {
     ///
     /// This is a slice of bytes that have been initialized
     pub fn as_mut(&mut self) -> &mut [u8] {
-        let end = self.pos + self.len;
-        &mut self.mem.as_mut()[self.pos..end]
+        unsafe { slice::from_raw_parts_mut(self.ptr, self.len) }
     }
 
     /// Sets the length of the buffer
@@ -313,22 +315,23 @@ impl BytesMut {
     ///
     /// This is a slice of all bytes, including uninitialized memory
    pub unsafe fn as_raw(&mut self) -> &mut [u8] {
-        let end = self.pos + self.cap;
-        &mut self.mem.as_mut()[self.pos..end]
+        slice::from_raw_parts_mut(self.ptr, self.cap)
     }
 
     /// Changes the starting index of this window to the index specified.
     ///
-    /// Returns the window back to chain multiple calls to this method.
-    ///
     /// # Panics
     ///
     /// This method will panic if `start` is out of bounds for the underlying
     /// slice.
-    fn set_start(&mut self, start: usize) -> &mut BytesMut {
+    unsafe fn set_start(&mut self, start: usize) {
         assert!(start <= self.cap);
+        debug_assert!(self.is_shared());
+        debug_assert!(self.len <= self.cap);
 
-        self.pos += start;
+        self.ptr = self.ptr.offset(start as isize);
 
+        // TODO: This could probably be optimized with some bit fiddling
         if self.len >= start {
             self.len -= start;
         } else {
@@ -336,33 +339,75 @@ impl BytesMut {
         }
 
         self.cap -= start;
-
-        self
     }
 
     /// Changes the end index of this window to the index specified.
     ///
-    /// Returns the window back to chain multiple calls to this method.
-    ///
     /// # Panics
     ///
     /// This method will panic if `end` is out of bounds for the underlying
     /// slice.
-    fn set_end(&mut self, end: usize) -> &mut BytesMut {
+    unsafe fn set_end(&mut self, end: usize) {
         assert!(end <= self.cap);
+        debug_assert!(self.is_shared());
 
         self.cap = end;
         self.len = cmp::min(self.len, end);
+    }
 
-        self
+    /// Checks if it is safe to mutate the memory
+    fn is_mut_safe(&mut self) -> bool {
+        unsafe {
+            (*self.arc.get()).as_mut()
+                // Check if there is only one outstanding reference to the memory
+                .map(|a| Arc::get_mut(a).is_some())
+                // If there is no arc, then this is a unique pointer
+                .unwrap_or(true)
+        }
     }
 
     /// Increments the ref count. This should only be done if it is known that
     /// it can be done safely. As such, this fn is not public, instead other
     /// fns will use this one while maintaining the guarantees.
     fn shallow_clone(&self) -> BytesMut {
+        let arc = unsafe {
+            match *self.arc.get() {
+                Some(ref arc) => {
+                    // Already backed by an arc, just clone it
+                    arc.clone()
+                }
+                None => {
+                    // Promote this `BytesMut` to an arc, and clone it
+                    let v = Vec::from_raw_parts(self.ptr, self.len, self.cap);
+                    let a = Arc::new(v);
+                    *self.arc.get() = Some(a.clone());
+                    a
+                }
+            }
+        };
+
         BytesMut {
-            mem: self.mem.clone(),
+            arc: UnsafeCell::new(Some(arc)),
             .. *self
         }
     }
+
+    fn is_shared(&self) -> bool {
+        unsafe { (*self.arc.get()).is_some() }
+    }
+}
+
+impl Drop for BytesMut {
+    fn drop(&mut self) {
+        if !self.is_shared() {
+            unsafe {
+                // Not shared, manually free
+                let _ = Vec::from_raw_parts(self.ptr, self.len, self.cap);
+            }
+        }
+    }
 }
 
 impl IntoBuf for BytesMut {
@@ -383,8 +428,7 @@ impl<'a> IntoBuf for &'a BytesMut {
 impl AsRef<[u8]> for BytesMut {
     fn as_ref(&self) -> &[u8] {
-        let end = self.pos + self.len;
-        &self.mem.as_ref()[self.pos..end]
+        unsafe { slice::from_raw_parts(self.ptr, self.len) }
     }
 }
@@ -403,15 +447,18 @@ impl ops::DerefMut for BytesMut {
 }
 
 impl From<Vec<u8>> for BytesMut {
-    fn from(src: Vec<u8>) -> BytesMut {
+    fn from(mut src: Vec<u8>) -> BytesMut {
         let len = src.len();
         let cap = src.capacity();
+        let ptr = src.as_mut_ptr();
+
+        mem::forget(src);
 
         BytesMut {
-            mem: Mem::from_vec(src),
-            pos: 0,
+            ptr: ptr,
             len: len,
             cap: cap,
+            arc: UnsafeCell::new(None),
         }
     }
 }
@@ -439,44 +486,6 @@ impl fmt::Debug for BytesMut {
 unsafe impl Send for BytesMut {}
 
-/*
- *
- * ===== Mem =====
- *
- */
-
-impl Mem {
-    fn with_capacity(cap: usize) -> Mem {
-        let mut vec = Vec::with_capacity(cap);
-        unsafe { vec.set_len(cap); }
-
-        Mem { inner: Arc::new(UnsafeCell::new(vec.into_boxed_slice())) }
-    }
-
-    fn from_vec(mut vec: Vec<u8>) -> Mem {
-        let cap = vec.capacity();
-        unsafe { vec.set_len(cap); }
-
-        Mem { inner: Arc::new(UnsafeCell::new(vec.into_boxed_slice())) }
-    }
-
-    fn as_ref(&self) -> &[u8] {
-        unsafe { &*self.inner.get() }
-    }
-
-    fn as_mut(&mut self) -> &mut [u8] {
-        unsafe { &mut *self.inner.get() }
-    }
-
-    fn is_mut_safe(&mut self) -> bool {
-        Arc::get_mut(&mut self.inner).is_some()
-    }
-
-    fn clone(&self) -> Mem {
-        Mem { inner: self.inner.clone() }
-    }
-}
-
 /*
  *
  * ===== PartialEq =====
@@ -563,11 +572,7 @@ impl Clone for BytesMut {
     fn clone(&self) -> BytesMut {
         let mut v = Vec::with_capacity(self.len());
         v.extend_from_slice(&self[..]);
 
-        BytesMut {
-            mem: Mem { inner: Arc::new(UnsafeCell::new(v.into_boxed_slice())) },
-            pos: self.pos,
-            len: self.len,
-            cap: self.cap,
-        }
+        BytesMut::from(v)
     }
 }
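For a sense of how the lazy allocation plays out through the public API in this diff, a hedged usage sketch (assumes the crate's `BytesMut` is in scope; the literal values are illustrative):

    // Constructing from a Vec stores a raw pointer with arc: None, so no
    // Arc is allocated up front. split_off goes through shallow_clone,
    // which is where the Vec is promoted to an Arc shared by both halves.
    let mut buf = BytesMut::from(vec![1u8, 2, 3, 4]);
    let tail = buf.split_off(2);   // first share: the Arc is allocated here
    assert_eq!(&buf[..], [1, 2]);  // `buf` keeps the front half
    assert_eq!(&tail[..], [3, 4]); // `tail` takes the back half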