diff --git a/src/alloc/heap.rs b/src/alloc/heap.rs index 7dad574135f40cdc31cec48e7dbadf6c3b3945ce..2a8a73e7aa8abe4fd8dce410050f1181ebb73625 100644 --- a/src/alloc/heap.rs +++ b/src/alloc/heap.rs @@ -5,5 +5,5 @@ pub unsafe fn allocate(len: usize) -> MemRef { let mut v = Vec::with_capacity(len); v.set_len(len); - MemRef::new(Arc::new(v)) + MemRef::new(Arc::new(v.into_boxed_slice())) } diff --git a/src/alloc/mod.rs b/src/alloc/mod.rs index 49978ea59076b232efa3290fd28cf376e2c3ee4f..5ab497c8c6dd8882ed61ac82f11b8c0ba5ec4be8 100644 --- a/src/alloc/mod.rs +++ b/src/alloc/mod.rs @@ -7,7 +7,7 @@ mod heap; use std::sync::Arc; pub struct MemRef { - mem: Arc<Vec<u8>>, + mem: Arc<Box<[u8]>>, } /// Allocate a segment of memory and return a `MemRef`. @@ -17,7 +17,7 @@ pub unsafe fn heap(len: usize) -> MemRef { impl MemRef { #[inline] - pub unsafe fn new(mem: Arc<Vec<u8>>) -> MemRef { + pub unsafe fn new(mem: Arc<Box<[u8]>>) -> MemRef { MemRef { mem: mem } } @@ -52,6 +52,10 @@ impl MemRef { let ptr = self.mem.as_ptr().offset(start as isize); slice::from_raw_parts_mut(ptr as *mut u8, end - start) } + + pub fn get_ref(&self) -> &Arc<Box<[u8]>> { + &self.mem + } } impl Clone for MemRef { diff --git a/src/imp/alloc.rs b/src/imp/alloc.rs new file mode 100644 index 0000000000000000000000000000000000000000..fe38c7b4424c2accaeec506eca83afc081224c16 --- /dev/null +++ b/src/imp/alloc.rs @@ -0,0 +1,68 @@ +#![allow(warnings)] + +use std::sync::Arc; + +/// A sequential chunk of memory that is atomically reference counted. +pub struct Mem { + mem: Arc<Box<[u8]>>, +} + +pub unsafe fn with_capacity(mut capacity: usize) -> Box<[u8]> { + // Round up to the next power of two + capacity = capacity.next_power_of_two(); + + let mut v: Vec<u8> = Vec::with_capacity(capacity); + v.set_len(capacity); + v.into_boxed_slice() +} + +impl Mem { + /// Return a new `Mem` with the given capacity + pub unsafe fn with_capacity(capacity: usize) -> Mem { + let mem = Arc::new(with_capacity(capacity)); + Mem { mem: mem } + } + + pub unsafe fn from_boxed(src: Arc<Box<[u8]>>) -> Mem { + Mem { mem: src } + } + + /// Returns the length in bytes + pub fn len(&self) -> usize { + self.mem.len() + } + + /// View of the underlying memory. + /// + /// The memory could be uninitialized. + pub unsafe fn bytes(&self) -> &[u8] { + &*self.mem + } + + /// View of a range of the underlying memory. + /// + /// The offsets are not checked and the memory could be uninitialized. + pub unsafe fn slice(&self, start: usize, end: usize) -> &[u8] { + use std::slice; + let ptr = self.mem.as_ptr().offset(start as isize); + slice::from_raw_parts(ptr, end - start) + } + + /// Mutable view of the underlying memory. + /// + /// The memory could be uninitialized. + pub unsafe fn mut_bytes(&mut self) -> &mut [u8] { + use std::slice; + let len = self.mem.len(); + slice::from_raw_parts_mut(self.mem.as_ptr() as *mut u8, len) + } + + /// Mutable view of a range of the underlying memory. + /// + /// The offsets are not checked and the memory could be uninitialized. 
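Side note on the storage change above: swapping `Arc<Vec<u8>>` for `Arc<Box<[u8]>>` fixes the allocation length at creation time while keeping clones cheap, and the new `with_capacity` helper rounds the requested size up to the next power of two (which is why the tests further down expect 128 bytes from a 100-byte request). A minimal sketch of both properties, using only `std` so nothing crate-private is assumed:

```rust
use std::sync::Arc;

fn main() {
    // Allocation requests are rounded up to the next power of two,
    // so a 100-byte request yields a 128-byte backing buffer.
    assert_eq!(128, 100usize.next_power_of_two());

    // The backing storage is a fixed-length, reference-counted byte slice:
    // cloning the handle bumps the refcount instead of copying the bytes.
    let storage: Arc<Box<[u8]>> = Arc::new(vec![0u8; 128].into_boxed_slice());
    let shared = storage.clone();
    assert_eq!(128, shared.len());
}
```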
+ pub unsafe fn mut_bytes_slice(&mut self, start: usize, end: usize) -> &mut [u8] { + use std::slice; + let ptr = self.mem.as_ptr().offset(start as isize); + slice::from_raw_parts_mut(ptr as *mut u8, end - start) + } +} diff --git a/src/imp/buf/append.rs b/src/imp/buf/append.rs index 30f326c402321b4d91dea2ac7aa16b57ded9debd..d671f18fb22dc18bb5e09caae7e116239d208ad0 100644 --- a/src/imp/buf/append.rs +++ b/src/imp/buf/append.rs @@ -73,7 +73,7 @@ impl AppendBuf { assert!(begin <= end && end <= wr, "invalid range"); - unsafe { Bytes::from_mem_ref(self.mem.clone(), begin, end - begin) } + Bytes::from_boxed(self.mem.get_ref().clone(), begin as usize, (end - begin) as usize) } } @@ -111,3 +111,12 @@ impl AsRef<[u8]> for AppendBuf { self.bytes() } } + +impl From<AppendBuf> for Bytes { + fn from(src: AppendBuf) -> Bytes { + let rd = src.rd.get(); + let wr = src.wr; + + Bytes::from_boxed(src.mem.get_ref().clone(), rd as usize, (wr - rd) as usize) + } +} diff --git a/src/imp/buf/byte.rs b/src/imp/buf/byte.rs deleted file mode 100644 index 5120fd2a2debef8c2db7aae7f55076f6ed2e38c6..0000000000000000000000000000000000000000 --- a/src/imp/buf/byte.rs +++ /dev/null @@ -1,240 +0,0 @@ -use {alloc, Buf, MutBuf, Bytes, MAX_CAPACITY}; -use std::{cmp, fmt}; - -/* - * - * ===== ByteBuf ===== - * - */ - -/// A `Buf` backed by a contiguous region of memory. -/// -/// This `Buf` is better suited for cases where there is a clear delineation -/// between reading and writing. -pub struct ByteBuf { - mem: alloc::MemRef, - cap: u32, - pos: u32, - lim: u32, - mark: Option<u32>, -} - -impl ByteBuf { - /// Create a new `ByteBuf` by copying the contents of the given slice. - pub fn from_slice(bytes: &[u8]) -> ByteBuf { - let mut buf = MutByteBuf::with_capacity(bytes.len()); - buf.write_slice(bytes); - buf.flip() - } - - pub unsafe fn from_mem_ref(mem: alloc::MemRef, cap: u32, pos: u32, lim: u32) -> ByteBuf { - debug_assert!(pos <= lim && lim <= cap, "invalid arguments; cap={}; pos={}; lim={}", cap, pos, lim); - - ByteBuf { - mem: mem, - cap: cap, - pos: pos, - lim: lim, - mark: None, - } - } - - fn new(mut capacity: u32) -> ByteBuf { - // Round the capacity to the closest power of 2 - capacity = capacity.next_power_of_two(); - - unsafe { - // Allocate the memory - let mem = alloc::heap(capacity as usize); - - ByteBuf { - mem: mem, - cap: capacity, - pos: 0, - lim: capacity, - mark: None, - } - } - } - - pub fn capacity(&self) -> usize { - self.cap as usize - } - - pub fn flip(self) -> MutByteBuf { - let mut buf = MutByteBuf { buf: self }; - buf.clear(); - buf - } - - /// Flips the buffer back to mutable, resetting the write position - /// to the byte after the previous write. - pub fn resume(mut self) -> MutByteBuf { - self.pos = self.lim; - self.lim = self.cap; - MutByteBuf { buf: self } - } - - pub fn read_slice(&mut self, dst: &mut [u8]) { - assert!(self.remaining() >= dst.len()); - let len = dst.len(); - let cnt = len as u32; - let pos = self.pos as usize; - - unsafe { - dst.copy_from_slice(&self.mem.bytes()[pos..pos+len]); - } - - self.pos += cnt; - } - - /// Marks the current read location. - /// - /// Together with `reset`, this can be used to read from a section of the - /// buffer multiple times. The marked location will be cleared when the - /// buffer is flipped. - pub fn mark(&mut self) { - self.mark = Some(self.pos); - } - - /// Resets the read position to the previously marked position. - /// - /// Together with `mark`, this can be used to read from a section of the - /// buffer multiple times. 
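With `MemRef::get_ref` exposing the underlying `Arc<Box<[u8]>>`, the new `From<AppendBuf> for Bytes` impl above can hand off the written region by cloning the `Arc` rather than copying the payload. A rough usage sketch mirroring the updated `test_buf_fill` test; `copy_from` is assumed to be the same write helper that test already relies on:

```rust
extern crate bytes;

use bytes::{Buf, MutBuf, Bytes};
use bytes::buf::AppendBuf;

fn main() {
    let mut buf = AppendBuf::with_capacity(64);
    buf.copy_from(&b"foo"[..]);        // append into the write region

    // Converting consumes the buffer but shares its Arc<Box<[u8]>>,
    // so the bytes themselves are not copied.
    let bytes: Bytes = buf.into();
    assert_eq!(Bytes::from_slice(&b"foo"[..]), bytes);
}
```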
- /// - /// # Panics - /// - /// This method will panic if no mark has been set. - pub fn reset(&mut self) { - self.pos = self.mark.take().expect("no mark set"); - } - - #[inline] - fn pos(&self) -> usize { - self.pos as usize - } - - #[inline] - fn lim(&self) -> usize { - self.lim as usize - } - - #[inline] - fn remaining_u32(&self) -> u32 { - self.lim - self.pos - } -} - -impl Buf for ByteBuf { - - #[inline] - fn remaining(&self) -> usize { - self.remaining_u32() as usize - } - - #[inline] - fn bytes(&self) -> &[u8] { - unsafe { &self.mem.bytes()[self.pos()..self.lim()] } - } - - #[inline] - fn advance(&mut self, mut cnt: usize) { - cnt = cmp::min(cnt, self.remaining()); - self.pos += cnt as u32; - } - - #[inline] - fn read_slice(&mut self, dst: &mut [u8]) { - ByteBuf::read_slice(self, dst) - } -} - -impl From<ByteBuf> for Bytes { - fn from(src: ByteBuf) -> Bytes { - unsafe { - let ByteBuf { mem, pos, lim, .. } = src; - Bytes::from_mem_ref(mem, pos, lim - pos) - } - } -} - -impl fmt::Debug for ByteBuf { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - self.bytes().fmt(fmt) - } -} - -/* - * - * ===== MutByteBuf ===== - * - */ - -pub struct MutByteBuf { - buf: ByteBuf, -} - -impl MutByteBuf { - pub fn with_capacity(capacity: usize) -> MutByteBuf { - assert!(capacity <= MAX_CAPACITY); - MutByteBuf { buf: ByteBuf::new(capacity as u32) } - } - - pub fn capacity(&self) -> usize { - self.buf.capacity() as usize - } - - pub fn flip(self) -> ByteBuf { - let mut buf = self.buf; - - buf.lim = buf.pos; - buf.pos = 0; - buf - } - - pub fn clear(&mut self) { - self.buf.pos = 0; - self.buf.lim = self.buf.cap; - } - - #[inline] - pub fn write_slice(&mut self, src: &[u8]) -> usize { - let cnt = cmp::min(src.len(), self.buf.remaining()); - let pos = self.buf.pos as usize; - - unsafe { - self.buf.mem.mut_bytes()[pos..pos+cnt] - .copy_from_slice(&src[0..cnt]); - } - - self.buf.pos += cnt as u32; - - cnt - } - - pub fn bytes(&self) -> &[u8] { - unsafe { &self.buf.mem.bytes()[..self.buf.pos()] } - } -} - -impl MutBuf for MutByteBuf { - fn remaining(&self) -> usize { - self.buf.remaining() - } - - unsafe fn advance(&mut self, cnt: usize) { - self.buf.advance(cnt) - - } - unsafe fn mut_bytes(&mut self) -> &mut [u8] { - let pos = self.buf.pos(); - let lim = self.buf.lim(); - &mut self.buf.mem.mut_bytes()[pos..lim] - } -} - -impl fmt::Debug for MutByteBuf { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - self.bytes().fmt(fmt) - } -} diff --git a/src/imp/buf/mod.rs b/src/imp/buf/mod.rs index 7a82aa6ed83e0dd2cb431c42d936d859f8820670..ca41f616d572d3eb44b2908c001c9d9ea1398024 100644 --- a/src/imp/buf/mod.rs +++ b/src/imp/buf/mod.rs @@ -1,6 +1,6 @@ pub mod append; pub mod block; -pub mod byte; +pub mod slice_buf; pub mod ring; pub mod take; diff --git a/src/imp/buf/slice_buf.rs b/src/imp/buf/slice_buf.rs new file mode 100644 index 0000000000000000000000000000000000000000..b304de4f90d74a7a91cc334eeca7f846b0051210 --- /dev/null +++ b/src/imp/buf/slice_buf.rs @@ -0,0 +1,154 @@ +//! A buffer backed by a contiguous region of memory. + +use {Buf, MutBuf}; +use imp::alloc; +use std::fmt; + +/* + * + * ===== SliceBuf ===== + * + */ + +/// A `Buf` backed by a contiguous region of memory. +/// +/// This `Buf` is better suited for cases where there is a clear delineation +/// between reading and writing. 
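For reviewers tracking the removal: the `ByteBuf`/`MutByteBuf` pair deleted here modeled reading and writing as two modes joined by `flip()`/`resume()`, while the `SliceBuf` introduced below keeps independent read and write cursors and replaces `mark()`/`reset()` with `position()`/`set_position()`. A hedged migration sketch (the removed calls appear only as comments, since this patch deletes that API):

```rust
extern crate bytes;

use bytes::{Buf, MutBuf};
use bytes::buf::SliceBuf;

fn main() {
    // Old (removed): MutByteBuf::with_capacity(32) -> write_slice -> flip() -> read_slice.
    // New: one buffer with separate rd/wr cursors, no mode switch required.
    let mut buf = SliceBuf::with_capacity(32);
    buf.write_slice(&b"hello"[..]);

    let mut dst = [0u8; 5];
    buf.read_slice(&mut dst);
    assert_eq!(b"hello", &dst);

    // set_position replaces mark()/reset() for re-reading a region.
    buf.set_position(0);
    assert_eq!(&b"hello"[..], buf.bytes());
}
```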
+pub struct SliceBuf<T = Box<[u8]>> { + // Contiguous memory + mem: T, + // Current read position + rd: usize, + // Current write position + wr: usize, +} + +impl SliceBuf { + /// Constructs a new, empty `SliceBuf` with the specified capacity + /// + /// The `SliceBuf` will be backed by a `Box<[u8]>`. + pub fn with_capacity(capacity: usize) -> SliceBuf { + let mem = unsafe { alloc::with_capacity(capacity) }; + SliceBuf::new(mem) + } + + /// Create a new `SliceBuf` and copy the contents of the given slice into + /// it. + pub fn from_slice<T: AsRef<[u8]>>(bytes: &T) -> SliceBuf { + let mut buf = SliceBuf::with_capacity(bytes.as_ref().len()); + buf.write_slice(bytes.as_ref()); + buf + } +} + +impl<T: AsRef<[u8]>> SliceBuf<T> { + /// Creates a new `SliceBuf` wrapping the provided slice + pub fn new(mem: T) -> SliceBuf<T> { + SliceBuf { + mem: mem, + rd: 0, + wr: 0, + } + } + + /// Return the number of bytes the buffer can contain + pub fn capacity(&self) -> usize { + self.mem.as_ref().len() + } + + /// Return the read cursor position + pub fn position(&self) -> usize { + self.rd + } + + /// Set the read cursor position + pub fn set_position(&mut self, position: usize) { + assert!(position <= self.wr, "position out of bounds"); + self.rd = position + } + + /// Return the number of buffered bytes + pub fn len(&self) -> usize { + self.wr + } + + /// Returns `true` if the buffer contains no unread bytes + pub fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Clears the buffer, removing any written data + pub fn clear(&mut self) { + self.rd = 0; + self.wr = 0; + } + + /// Return the number of bytes left to read + pub fn remaining_read(&self) -> usize { + self.wr - self.rd + } + + /// Return the remaining write capacity + pub fn remaining_write(&self) -> usize { + self.capacity() - self.wr + } +} + +impl<T> Buf for SliceBuf<T> + where T: AsRef<[u8]>, +{ + fn remaining(&self) -> usize { + self.remaining_read() + } + + fn bytes(&self) -> &[u8] { + &self.mem.as_ref()[self.rd..self.wr] + } + + fn advance(&mut self, cnt: usize) { + assert!(cnt <= self.remaining(), "buffer overflow"); + self.rd += cnt; + } + + fn read_slice(&mut self, dst: &mut [u8]) { + assert!(self.remaining() >= dst.len()); + + let len = dst.len(); + dst.copy_from_slice(&self.mem.as_ref()[self.rd..self.rd+len]); + self.rd += len; + } +} + +impl<T> MutBuf for SliceBuf<T> + where T: AsRef<[u8]> + AsMut<[u8]>, +{ + fn remaining(&self) -> usize { + self.remaining_write() + } + + unsafe fn advance(&mut self, cnt: usize) { + assert!(cnt <= self.remaining_write()); + self.wr += cnt; + } + + unsafe fn mut_bytes(&mut self) -> &mut [u8] { + &mut self.mem.as_mut()[self.wr..] + } + + fn write_slice(&mut self, src: &[u8]) { + let wr = self.wr; + + self.mem.as_mut()[wr..wr+src.len()] + .copy_from_slice(src); + + self.wr += src.len(); + } +} + +impl<T> fmt::Debug for SliceBuf<T> + where T: AsRef<[u8]>, +{ + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + self.bytes().fmt(fmt) + } +} diff --git a/src/imp/bytes/mod.rs b/src/imp/bytes/mod.rs index d10ed24f461cae23bc02d81d8bf3c5d9b2f6952d..21f51aff17f06f03c1fb1234ceffa37591c10d6f 100644 --- a/src/imp/bytes/mod.rs +++ b/src/imp/bytes/mod.rs @@ -1,8 +1,8 @@ -mod rope; -mod seq; -mod small; +pub mod rope; +pub mod seq; +pub mod small; -use {alloc, Buf}; +use Buf; use self::seq::Seq; use self::small::Small; use self::rope::{Rope, RopeBuf}; @@ -38,16 +38,22 @@ impl Bytes { Bytes { kind: Kind::Small(Small::empty()) } } - /// Creates a new `Bytes` from a `MemRef`, an offset, and a length. 
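Because `SliceBuf<T = Box<[u8]>>` is generic over its storage, it can also wrap caller-owned memory: anything `AsRef<[u8]>` is readable through `Buf`, and anything that is also `AsMut<[u8]>` is writable through `MutBuf`. A small sketch assuming only the impls shown above (note that `new` starts with both cursors at zero, so wrapped memory begins logically empty):

```rust
extern crate bytes;

use bytes::MutBuf;
use bytes::buf::SliceBuf;

fn main() {
    // Any AsRef<[u8]> + AsMut<[u8]> storage works, e.g. a caller-owned Vec<u8>.
    let scratch = vec![0u8; 16];
    let mut buf = SliceBuf::new(scratch);    // rd = wr = 0: logically empty

    buf.write_slice(&b"abc"[..]);
    assert_eq!(3, buf.len());                // bytes written so far
    assert_eq!(13, buf.remaining_write());   // capacity() - len()
    assert_eq!(16, buf.capacity());
}
```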
- /// - /// This function is unsafe as there are no guarantees that the given - /// arguments are valid. + pub fn from_slice<T: AsRef<[u8]>>(slice: T) -> Bytes { + Small::from_slice(slice.as_ref()) + .map(|b| Bytes { kind: Kind::Small(b)}) + .unwrap_or_else(|| Seq::from_slice(slice.as_ref())) + } + + /// Creates a new `Bytes` from an `Arc<Box<[u8]>>`, an offset, and a length. #[inline] - pub unsafe fn from_mem_ref(mem: alloc::MemRef, pos: u32, len: u32) -> Bytes { - Small::from_slice(&mem.bytes_slice(pos as usize, pos as usize + len as usize)) + pub fn from_boxed(mem: Arc<Box<[u8]>>, pos: usize, len: usize) -> Bytes { + // Check ranges + assert!(pos + len <= mem.len(), "invalid arguments"); + + Small::from_slice(&mem[pos..pos + len]) .map(|b| Bytes { kind: Kind::Small(b) }) .unwrap_or_else(|| { - let seq = Seq::from_mem_ref(mem, pos, len); + let seq = Seq::new(mem, pos, len); Bytes { kind: Kind::Seq(seq) } }) } @@ -120,6 +126,21 @@ impl Bytes { } } +impl<'a> From<&'a [u8]> for Bytes { + fn from(src: &'a [u8]) -> Bytes { + Bytes::from_slice(src) + } +} + +impl From<Vec<u8>> for Bytes { + fn from(src: Vec<u8>) -> Bytes { + let mem = Arc::new(src.into_boxed_slice()); + let len = mem.len(); + + Bytes::from_boxed(mem, 0, len) + } +} + impl ops::Index<usize> for Bytes { type Output = u8; @@ -132,14 +153,6 @@ impl ops::Index<usize> for Bytes { } } -impl<T: AsRef<[u8]>> From<T> for Bytes { - fn from(src: T) -> Bytes { - Small::from_slice(src.as_ref()) - .map(|b| Bytes { kind: Kind::Small(b) }) - .unwrap_or_else(|| Seq::from_slice(src.as_ref())) - } -} - impl cmp::PartialEq<Bytes> for Bytes { fn eq(&self, other: &Bytes) -> bool { if self.len() != other.len() { diff --git a/src/imp/bytes/rope.rs b/src/imp/bytes/rope.rs index 584f3cb388d5e66d9838292879db0d6eebb8d3d8..d72eeb4c4232c890223d79c95d5024f26193d7d3 100644 --- a/src/imp/bytes/rope.rs +++ b/src/imp/bytes/rope.rs @@ -1,7 +1,7 @@ use {Buf, MutBuf, Bytes}; use super::seq::Seq; use super::small::{Small}; -use buf::{Source, MutByteBuf}; +use buf::{Source, AppendBuf}; use std::{cmp, ops}; use std::io::Cursor; use std::sync::Arc; @@ -351,12 +351,12 @@ impl ops::Index<usize> for Node { fn concat_bytes<S1, S2>(left: S1, right: S2, len: usize) -> Bytes where S1: Source, S2: Source, { - let mut buf = MutByteBuf::with_capacity(len); + let mut buf = AppendBuf::with_capacity(len as u32); buf.copy_from(left); buf.copy_from(right); - return buf.flip().into(); + return buf.into(); } fn depth_for_len(len: usize) -> u16 { diff --git a/src/imp/bytes/seq.rs b/src/imp/bytes/seq.rs index cdf2552ffcc9f704cec47ee49109b3ca52c3257a..08204c8d93cc6a3f65b795908e26ad76acc983ea 100644 --- a/src/imp/bytes/seq.rs +++ b/src/imp/bytes/seq.rs @@ -1,29 +1,23 @@ //! Immutable set of bytes sequential in memory. -use {alloc, MutBuf, Bytes}; -use buf::{MutByteBuf}; +use {MutBuf, Bytes}; +use buf::{AppendBuf}; use std::ops; use std::io::Cursor; +use std::sync::Arc; pub struct Seq { - mem: alloc::MemRef, - pos: u32, - len: u32, + mem: Arc<Box<[u8]>>, + pos: usize, + len: usize, } impl Seq { - pub fn from_slice(bytes: &[u8]) -> Bytes { - let mut buf = MutByteBuf::with_capacity(bytes.len()); - - buf.copy_from(bytes); - buf.flip().into() - } - /// Creates a new `SeqByteStr` from a `MemRef`, an offset, and a length. /// /// This function is unsafe as there are no guarantees that the given /// arguments are valid. 
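`from_boxed` is now the low-level constructor for `Bytes`: it bounds-checks a `(pos, len)` window over a shared `Arc<Box<[u8]>>` and only copies when the window is small enough for the inline `Small` representation. A sketch of carving two views out of one allocation; the payload values are illustrative only:

```rust
extern crate bytes;

use std::sync::Arc;
use bytes::Bytes;

fn main() {
    let mem: Arc<Box<[u8]>> = Arc::new(vec![7u8; 128].into_boxed_slice());

    // Views larger than the inline Small capacity keep a clone of the Arc,
    // so slicing out of `mem` shares the allocation rather than copying it.
    let head = Bytes::from_boxed(mem.clone(), 0, 64);
    let tail = Bytes::from_boxed(mem.clone(), 64, 64);

    assert_eq!(64, head.len());
    assert_eq!(64, tail.len());
    assert_eq!(head, tail);   // both views are 64 copies of the byte 7
}
```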
- pub unsafe fn from_mem_ref(mem: alloc::MemRef, pos: u32, len: u32) -> Seq { + pub fn new(mem: Arc<Box<[u8]>>, pos: usize, len: usize) -> Seq { Seq { mem: mem, pos: pos, @@ -31,6 +25,13 @@ impl Seq { } } + pub fn from_slice(bytes: &[u8]) -> Bytes { + let mut buf = AppendBuf::with_capacity(bytes.len() as u32); + + buf.copy_from(bytes); + buf.into() + } + pub fn len(&self) -> usize { self.len as usize } @@ -40,12 +41,10 @@ impl Seq { assert!(begin <= end && end <= self.len(), "invalid range"); - let seq = unsafe { - Seq::from_mem_ref( - self.mem.clone(), - self.pos + begin as u32, - (end - begin) as u32) - }; + let seq = Seq::new( + self.mem.clone(), + self.pos + begin, + end - begin); Bytes { kind: Kind::Seq(seq) } } @@ -55,7 +54,7 @@ impl Seq { } pub fn as_slice(&self) -> &[u8] { - unsafe { &self.mem.bytes()[self.pos as usize..self.pos as usize + self.len as usize] } + &self.mem[self.pos..self.pos+self.len] } } @@ -64,7 +63,7 @@ impl ops::Index<usize> for Seq { fn index(&self, index: usize) -> &u8 { assert!(index < self.len()); - unsafe { self.mem.bytes().index(index + self.pos as usize) } + self.mem.index(index + self.pos as usize) } } diff --git a/src/imp/bytes/small.rs b/src/imp/bytes/small.rs index 2cc208540feb0a153ef155d1139e577bb35efa78..3f010fb1bcd1bf3db28e6b2fcc33b74104213864 100644 --- a/src/imp/bytes/small.rs +++ b/src/imp/bytes/small.rs @@ -58,7 +58,7 @@ impl Small { } pub fn slice(&self, begin: usize, end: usize) -> Bytes { - Bytes::from(&self.as_ref()[begin..end]) + Bytes::from_slice(&self.as_ref()[begin..end]) } pub fn len(&self) -> usize { diff --git a/src/imp/mod.rs b/src/imp/mod.rs index 4c5772bf97c2ef5197a995df3a8dbc7ca59a2c67..9e453f763f924088c78032c38c47fdd523252e5e 100644 --- a/src/imp/mod.rs +++ b/src/imp/mod.rs @@ -1,4 +1,5 @@ //! 
Used for internal code structure +pub mod alloc; pub mod buf; pub mod bytes; diff --git a/src/lib.rs b/src/lib.rs index cf153ea000354bd5626df52ef506fb73d8984982..a3c92bad6e821859b5b3d6e58af2111bd6f86198 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,8 +7,8 @@ extern crate byteorder; // Implementation in here mod imp; - -pub mod alloc; +// TODO: delete +mod alloc; pub use imp::buf::{Buf, MutBuf}; pub use imp::bytes::Bytes; @@ -25,15 +25,11 @@ pub mod buf { WriteExt, Fmt, }; + + pub use imp::buf::slice_buf::SliceBuf; pub use imp::buf::append::AppendBuf; pub use imp::buf::block::{BlockBuf, BlockBufCursor}; - pub use imp::buf::byte::{ByteBuf, MutByteBuf}; pub use imp::buf::ring::RingBuf; pub use imp::buf::take::Take; - pub use imp::bytes::BytesBuf; } - -use std::u32; - -const MAX_CAPACITY: usize = u32::MAX as usize; diff --git a/test/test.rs b/test/test.rs index 51588564098c76043f23a2e7856d1e2fceafc081..e05924c165c1447c5c7c0c3dd841d0c96ee63bdf 100644 --- a/test/test.rs +++ b/test/test.rs @@ -9,7 +9,7 @@ mod test_append; mod test_block; mod test_buf; mod test_buf_fill; -mod test_byte_buf; +mod test_slice_buf; mod test_mut_buf; mod test_ring; diff --git a/test/test_buf_fill.rs b/test/test_buf_fill.rs index fbde4c0cf5a26f2ab907397efd747b8b28f8b1d8..6083b9f817fb8dffb9e7f5a084c0f8ca7356d5d0 100644 --- a/test/test_buf_fill.rs +++ b/test/test_buf_fill.rs @@ -5,10 +5,10 @@ use std::io; #[test] pub fn test_readijng_buf_from_reader() { let mut reader = chunks(vec![b"foo", b"bar", b"baz"]); - let mut buf = MutByteBuf::with_capacity(1024); + let mut buf = AppendBuf::with_capacity(1024); assert_eq!(3, reader.read_buf(&mut buf).unwrap()); - assert_eq!(Bytes::from(&b"foo"), Bytes::from(buf.flip())); + assert_eq!(Bytes::from_slice(&b"foo"), Bytes::from(buf)); } fn chunks(chunks: Vec<&'static [u8]>) -> Chunked { diff --git a/test/test_bytes.rs b/test/test_bytes.rs index ceb0cfc0bbc904ffdbb359887e74fbea0623c884..2d1681ea54c150d434aa79c3a2bfbc512c635723 100644 --- a/test/test_bytes.rs +++ b/test/test_bytes.rs @@ -2,7 +2,7 @@ use bytes::*; #[test] pub fn test_debug_short_str_valid_ascii() { - let b = Bytes::from(b"abcdefghij234"); + let b = Bytes::from_slice(b"abcdefghij234"); let d = format!("{:?}", b); assert_eq!(d, "Bytes[len=13; abcdefghij234]"); @@ -33,7 +33,7 @@ pub fn test_debug_long_str_valid_ascii() { #[test] pub fn test_short_string_invalid_ascii() { - let b = Bytes::from(b"foo\x00bar\xFFbaz"); + let b = Bytes::from_slice(b"foo\x00bar\xFFbaz"); let d = format!("{:?}", b); println!("{:?}", b); diff --git a/test/test_seq.rs b/test/test_seq.rs index a7b80ddff142cdc72727d3b6e28e266aef6fc1f8..2fb9bde074b4cca265d561546d0b9c2e43e474a1 100644 --- a/test/test_seq.rs +++ b/test/test_seq.rs @@ -6,7 +6,7 @@ pub fn test_slice_round_trip() { let mut dst = vec![]; let src = gen_bytes(2000); - let s = Bytes::from(&src); + let s = Bytes::from(src.clone()); assert_eq!(2000, s.len()); s.buf().copy_to(&mut dst); @@ -17,7 +17,7 @@ pub fn test_slice_round_trip() { pub fn test_index() { let src = gen_bytes(2000); - let s = Bytes::from(&src); + let s = Bytes::from(src.clone()); for i in 0..2000 { assert_eq!(src[i], s[i]); @@ -27,6 +27,6 @@ pub fn test_index() { #[test] #[should_panic] pub fn test_index_out_of_range() { - let s = Bytes::from(&gen_bytes(2000)); + let s = Bytes::from(gen_bytes(2000)); let _ = s[2001]; } diff --git a/test/test_byte_buf.rs b/test/test_slice_buf.rs similarity index 58% rename from test/test_byte_buf.rs rename to test/test_slice_buf.rs index 
a4a7fa26932b99bba02aa8648d209136aa6c9557..cb00c0eaddcd00fbf0d744f970d623209607271c 100644 --- a/test/test_byte_buf.rs +++ b/test/test_slice_buf.rs @@ -1,51 +1,43 @@ use bytes::{Buf, MutBuf}; -use bytes::buf::MutByteBuf; +use bytes::buf::SliceBuf; #[test] pub fn test_initial_buf_empty() { - let buf = MutByteBuf::with_capacity(100); + let buf = SliceBuf::with_capacity(100); assert!(buf.capacity() == 128); - assert!(buf.remaining() == 128); - - let buf = buf.flip(); - - assert!(buf.remaining() == 0); - - let buf = buf.flip(); - - assert!(buf.remaining() == 128); + assert!(buf.remaining_write() == 128); + assert!(buf.remaining_read() == 0); } #[test] -pub fn test_byte_buf_bytes() { - let mut buf = MutByteBuf::with_capacity(32); +pub fn test_slice_buf_bytes() { + let mut buf = SliceBuf::with_capacity(32); + buf.copy_from(&b"hello "[..]); assert_eq!(&b"hello "[..], buf.bytes()); buf.copy_from(&b"world"[..]); assert_eq!(&b"hello world"[..], buf.bytes()); - let buf = buf.flip(); - assert_eq!(&b"hello world"[..], buf.bytes()); } #[test] pub fn test_byte_buf_read_write() { - let mut buf = MutByteBuf::with_capacity(32); + let mut buf = SliceBuf::with_capacity(32); buf.copy_from(&b"hello world"[..]); - assert_eq!(21, buf.remaining()); + assert_eq!(21, buf.remaining_write()); buf.copy_from(&b" goodbye"[..]); - assert_eq!(13, buf.remaining()); + assert_eq!(13, buf.remaining_write()); - let mut buf = buf.flip(); let mut dst = [0; 5]; - buf.mark(); + let pos = buf.position(); assert_eq!(5, buf.copy_to(&mut dst[..])); assert_eq!(b"hello", &dst); - buf.reset(); + + buf.set_position(pos); assert_eq!(5, buf.copy_to(&mut dst[..])); assert_eq!(b"hello", &dst); @@ -60,12 +52,16 @@ pub fn test_byte_buf_read_write() { assert_eq!(7, buf.copy_to(&mut dst[..])); assert_eq!(b"goodbye", &dst); - let mut buf = buf.resume(); - assert_eq!(13, buf.remaining()); + assert_eq!(13, buf.remaining_write()); buf.copy_from(&b" have fun"[..]); - assert_eq!(4, buf.remaining()); + assert_eq!(4, buf.remaining_write()); - let buf = buf.flip(); + assert_eq!(buf.bytes(), b" have fun"); + + buf.set_position(0); assert_eq!(buf.bytes(), b"hello world goodbye have fun"); + + buf.clear(); + assert_eq!(buf.bytes(), b""); } diff --git a/test/test_small.rs b/test/test_small.rs index 2f3ff88d912915b33e396f2c71679453fb96ddb5..48176f8ee3bb39d745153d38e98ca4e55ce1e595 100644 --- a/test/test_small.rs +++ b/test/test_small.rs @@ -6,7 +6,7 @@ pub fn test_slice_round_trip() { let mut dst = vec![]; let src = gen_bytes(3); - let s = Bytes::from(&src); + let s = Bytes::from(src.clone()); assert_eq!(3, s.len()); s.buf().copy_to(&mut dst); @@ -17,7 +17,7 @@ pub fn test_slice_round_trip() { pub fn test_index() { let src = gen_bytes(3); - let s = Bytes::from(&src); + let s = Bytes::from(src.clone()); for i in 0..3 { assert_eq!(src[i], s[i]); @@ -27,6 +27,6 @@ pub fn test_index() { #[test] #[should_panic] pub fn test_index_out_of_range() { - let s = Bytes::from(&gen_bytes(3)); + let s = Bytes::from(gen_bytes(3)); let _ = s[2001]; }
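The test updates above follow from dropping the blanket `From<T: AsRef<[u8]>>` impl: `Bytes::from(&src)` no longer compiles, so callers either move a `Vec<u8>` in or convert from a borrowed slice (the new `From<&[u8]>` impl simply delegates to `from_slice`). A short sketch of the remaining caller-side options:

```rust
extern crate bytes;

use bytes::Bytes;

fn main() {
    let src: Vec<u8> = (0..32).collect();

    // The blanket From<T: AsRef<[u8]>> impl is gone, so pick an explicit conversion.
    let owned = Bytes::from(src.clone());   // moves the Vec's allocation into Bytes
    let copied = Bytes::from(&src[..]);     // copies out of a borrowed slice

    assert_eq!(owned, copied);
    assert_eq!(32, owned.len());
    assert_eq!(src[7], owned[7]);
}
```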