From 70deac1e0084011c8b908193bf5109872fe4990b Mon Sep 17 00:00:00 2001
From: Andre Bogus
Date: Wed, 15 Mar 2017 07:24:43 +0100
Subject: [PATCH] fixed some clippy warnings in libcollections

---
 src/libcollections/binary_heap.rs |   8 +--
 src/libcollections/borrow.rs      |   4 +-
 src/libcollections/btree/map.rs   |  30 ++++----
 src/libcollections/btree/node.rs  |  10 +--
 src/libcollections/btree/set.rs   |   2 +-
 src/libcollections/enum_set.rs    |   2 +-
 src/libcollections/lib.rs         |   2 +-
 src/libcollections/linked_list.rs |  12 ++--
 src/libcollections/range.rs       |   2 +-
 src/libcollections/slice.rs       |   2 +-
 src/libcollections/str.rs         |   4 +-
 src/libcollections/string.rs      |  12 ++--
 src/libcollections/vec.rs         |  44 ++++++------
 src/libcollections/vec_deque.rs   | 111 +++++++++++++++---------------
 14 files changed, 119 insertions(+), 126 deletions(-)

diff --git a/src/libcollections/binary_heap.rs b/src/libcollections/binary_heap.rs
index a5a2f70492dc9..92f6374d1b67d 100644
--- a/src/libcollections/binary_heap.rs
+++ b/src/libcollections/binary_heap.rs
@@ -941,7 +941,7 @@ impl<'a, T> Hole<'a, T> {
     /// Unsafe because index must be within the data slice and not equal to pos.
     #[inline]
     unsafe fn get(&self, index: usize) -> &T {
-        debug_assert!(index != self.pos);
+        debug_assert_ne!(index, self.pos);
         debug_assert!(index < self.data.len());
         self.data.get_unchecked(index)
     }
@@ -951,7 +951,7 @@ impl<'a, T> Hole<'a, T> {
     /// Unsafe because index must be within the data slice and not equal to pos.
     #[inline]
     unsafe fn move_to(&mut self, index: usize) {
-        debug_assert!(index != self.pos);
+        debug_assert_ne!(index, self.pos);
         debug_assert!(index < self.data.len());
         let index_ptr: *const _ = self.data.get_unchecked(index);
         let hole_ptr = self.data.get_unchecked_mut(self.pos);
@@ -1194,8 +1194,8 @@ impl> SpecExtend for BinaryHeap {
 }
 
 impl SpecExtend> for BinaryHeap {
-    fn spec_extend(&mut self, ref mut other: BinaryHeap) {
-        self.append(other);
+    fn spec_extend(&mut self, mut other: BinaryHeap) {
+        self.append(&mut other);
     }
 }
diff --git a/src/libcollections/borrow.rs b/src/libcollections/borrow.rs
index 65056121f05a0..d625a94fe072d 100644
--- a/src/libcollections/borrow.rs
+++ b/src/libcollections/borrow.rs
@@ -254,7 +254,7 @@ impl<'a, B: ?Sized> fmt::Debug for Cow<'a, B>
 {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match *self {
-            Borrowed(ref b) => fmt::Debug::fmt(b, f),
+            Borrowed(b) => fmt::Debug::fmt(b, f),
             Owned(ref o) => fmt::Debug::fmt(o, f),
         }
     }
@@ -267,7 +267,7 @@ impl<'a, B: ?Sized> fmt::Display for Cow<'a, B>
 {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match *self {
-            Borrowed(ref b) => fmt::Display::fmt(b, f),
+            Borrowed(b) => fmt::Display::fmt(b, f),
             Owned(ref o) => fmt::Display::fmt(o, f),
         }
     }
diff --git a/src/libcollections/btree/map.rs b/src/libcollections/btree/map.rs
index 7218d15ded5f8..b2dbe8ba17b61 100644
--- a/src/libcollections/btree/map.rs
+++ b/src/libcollections/btree/map.rs
@@ -141,7 +141,7 @@ pub struct BTreeMap {
 unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap {
     fn drop(&mut self) {
         unsafe {
-            for _ in ptr::read(self).into_iter() {
+            for _ in ptr::read(self) {
             }
         }
     }
 }
@@ -263,7 +263,7 @@ impl super::Recover for BTreeMap
     }
 }
 
-/// An iterator over a BTreeMap's entries.
+/// An iterator over a `BTreeMap`'s entries.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Iter<'a, K: 'a, V: 'a> {
     range: Range<'a, K, V>,
@@ -277,7 +277,7 @@ impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for Iter<'a, K, V> {
     }
 }
 
-/// A mutable iterator over a BTreeMap's entries.
+/// A mutable iterator over a `BTreeMap`'s entries.
 #[stable(feature = "rust1", since = "1.0.0")]
 #[derive(Debug)]
 pub struct IterMut<'a, K: 'a, V: 'a> {
@@ -285,7 +285,7 @@ pub struct IterMut<'a, K: 'a, V: 'a> {
     length: usize,
 }
 
-/// An owning iterator over a BTreeMap's entries.
+/// An owning iterator over a `BTreeMap`'s entries.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct IntoIter {
     front: Handle, marker::Edge>,
@@ -304,7 +304,7 @@ impl fmt::Debug for IntoIter {
     }
 }
 
-/// An iterator over a BTreeMap's keys.
+/// An iterator over a `BTreeMap`'s keys.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Keys<'a, K: 'a, V: 'a> {
     inner: Iter<'a, K, V>,
@@ -317,7 +317,7 @@ impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for Keys<'a, K, V> {
     }
 }
 
-/// An iterator over a BTreeMap's values.
+/// An iterator over a `BTreeMap`'s values.
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Values<'a, K: 'a, V: 'a> {
     inner: Iter<'a, K, V>,
@@ -330,14 +330,14 @@ impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for Values<'a, K, V>
     }
 }
 
-/// A mutable iterator over a BTreeMap's values.
+/// A mutable iterator over a `BTreeMap`'s values.
 #[stable(feature = "map_values_mut", since = "1.10.0")]
 #[derive(Debug)]
 pub struct ValuesMut<'a, K: 'a, V: 'a> {
     inner: IterMut<'a, K, V>,
 }
 
-/// An iterator over a sub-range of BTreeMap's entries.
+/// An iterator over a sub-range of `BTreeMap`'s entries.
 pub struct Range<'a, K: 'a, V: 'a> {
     front: Handle, K, V, marker::Leaf>, marker::Edge>,
     back: Handle, K, V, marker::Leaf>, marker::Edge>,
@@ -350,7 +350,7 @@ impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for Range<'a, K, V>
     }
 }
 
-/// A mutable iterator over a sub-range of BTreeMap's entries.
+/// A mutable iterator over a sub-range of `BTreeMap`'s entries.
 pub struct RangeMut<'a, K: 'a, V: 'a> {
     front: Handle, K, V, marker::Leaf>, marker::Edge>,
     back: Handle, K, V, marker::Leaf>, marker::Edge>,
@@ -684,12 +684,12 @@ impl BTreeMap {
     #[stable(feature = "btree_append", since = "1.11.0")]
     pub fn append(&mut self, other: &mut Self) {
         // Do we have to append anything at all?
-        if other.len() == 0 {
+        if other.is_empty() {
             return;
         }
 
         // We can just swap `self` and `other` if `self` is empty.
-        if self.len() == 0 {
+        if self.is_empty() {
             mem::swap(self, other);
             return;
         }
@@ -1901,7 +1901,7 @@ impl BTreeMap {
     /// assert_eq!(keys, [1, 2]);
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn keys<'a>(&'a self) -> Keys<'a, K, V> {
+    pub fn keys(&self) -> Keys {
         Keys { inner: self.iter() }
     }
@@ -1922,7 +1922,7 @@ impl BTreeMap {
     /// assert_eq!(values, ["hello", "goodbye"]);
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn values<'a>(&'a self) -> Values<'a, K, V> {
+    pub fn values(&self) -> Values {
         Values { inner: self.iter() }
     }
@@ -2361,8 +2361,8 @@ enum UnderflowResult<'a, K, V> {
     Stole(NodeRef, K, V, marker::Internal>),
 }
 
-fn handle_underfull_node<'a, K, V>(node: NodeRef, K, V, marker::LeafOrInternal>)
-                                   -> UnderflowResult<'a, K, V> {
+fn handle_underfull_node(node: NodeRef)
+                         -> UnderflowResult {
     let parent = if let Ok(parent) = node.ascend() {
         parent
     } else {
diff --git a/src/libcollections/btree/node.rs b/src/libcollections/btree/node.rs
index e9bc29118d508..e4cee815315fc 100644
--- a/src/libcollections/btree/node.rs
+++ b/src/libcollections/btree/node.rs
@@ -347,7 +347,7 @@ impl NodeRef {
     }
 
     /// Temporarily takes out another, immutable reference to the same node.
-    fn reborrow<'a>(&'a self) -> NodeRef, K, V, Type> {
+    fn reborrow(&self) -> NodeRef {
         NodeRef {
             height: self.height,
             node: self.node,
@@ -964,7 +964,7 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker::
     fn insert_fit(&mut self, key: K, val: V, edge: Root) {
         // Necessary for correctness, but in an internal module
         debug_assert!(self.node.len() < CAPACITY);
-        debug_assert!(edge.height == self.node.height - 1);
+        debug_assert_eq!(edge.height, self.node.height - 1);
 
         unsafe {
             // This cast is a lie, but it allows us to reuse the key/value insertion logic.
@@ -992,7 +992,7 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker::
                   -> InsertResult<'a, K, V, marker::Internal> {
         // Necessary for correctness, but this is an internal module
-        debug_assert!(edge.height == self.node.height - 1);
+        debug_assert_eq!(edge.height, self.node.height - 1);
 
         if self.node.len() < CAPACITY {
             self.insert_fit(key, val, edge);
@@ -1488,8 +1488,8 @@ impl<'a, K, V> Handle, K, V, marker::LeafOrInternal>, ma
         let right_new_len = left_node.len() - left_new_len;
         let mut right_node = right.reborrow_mut();
 
-        debug_assert!(right_node.len() == 0);
-        debug_assert!(left_node.height == right_node.height);
+        debug_assert_eq!(right_node.len(), 0);
+        debug_assert_eq!(left_node.height, right_node.height);
 
         let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
         let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
diff --git a/src/libcollections/btree/set.rs b/src/libcollections/btree/set.rs
index e3c990c80decf..3d1f6c4dd1398 100644
--- a/src/libcollections/btree/set.rs
+++ b/src/libcollections/btree/set.rs
@@ -1056,7 +1056,7 @@ impl<'a, T: Ord> Iterator for Intersection<'a, T> {
     fn next(&mut self) -> Option<&'a T> {
         loop {
             let o_cmp = match (self.a.peek(), self.b.peek()) {
-                (None, _) => None,
+                (None, _) | (_, None) => None,
                 (Some(a1), Some(b1)) => Some(a1.cmp(b1)),
             };
diff --git a/src/libcollections/enum_set.rs b/src/libcollections/enum_set.rs
index 602e874aaeec0..18df904c0f6b3 100644
--- a/src/libcollections/enum_set.rs
+++ b/src/libcollections/enum_set.rs
@@ -215,7 +215,7 @@ impl BitXor for EnumSet {
     }
 }
 
-/// An iterator over an EnumSet
+/// An iterator over an `EnumSet`
 pub struct Iter {
     index: usize,
     bits: usize,
diff --git a/src/libcollections/lib.rs b/src/libcollections/lib.rs
index f88bdd0ecf382..f0cc60f565fbb 100644
--- a/src/libcollections/lib.rs
+++ b/src/libcollections/lib.rs
@@ -10,7 +10,7 @@
 //! Collection types.
 //!
-//! See [std::collections](../std/collections/index.html) for a detailed discussion of
+//! See [`std::collections`](../std/collections/index.html) for a detailed discussion of
 //! collections in Rust.
#![crate_name = "collections"] diff --git a/src/libcollections/linked_list.rs b/src/libcollections/linked_list.rs index d4f77d625b361..0ac27cc33b4a4 100644 --- a/src/libcollections/linked_list.rs +++ b/src/libcollections/linked_list.rs @@ -95,7 +95,7 @@ pub struct IterMut<'a, T: 'a> { impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("IterMut") - .field(self.clone()) + .field(self) .finish() } } @@ -111,7 +111,7 @@ pub struct IntoIter { impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("IntoIter") - .field(self.clone()) + .field(self) .finish() } } @@ -1020,8 +1020,8 @@ impl SpecExtend for LinkedList { } impl SpecExtend> for LinkedList { - fn spec_extend(&mut self, ref mut other: LinkedList) { - self.append(other); + fn spec_extend(&mut self, mut other: LinkedList) { + self.append(&mut other); } } @@ -1110,7 +1110,7 @@ pub struct FrontPlace<'a, T: 'a> { impl<'a, T: 'a + fmt::Debug> fmt::Debug for FrontPlace<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("FrontPlace") - .field(self.clone()) + .field(self) .finish() } } @@ -1165,7 +1165,7 @@ pub struct BackPlace<'a, T: 'a> { impl<'a, T: 'a + fmt::Debug> fmt::Debug for BackPlace<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("BackPlace") - .field(self.clone()) + .field(self) .finish() } } diff --git a/src/libcollections/range.rs b/src/libcollections/range.rs index e4b94a1d70ee4..2e367d1c54cbc 100644 --- a/src/libcollections/range.rs +++ b/src/libcollections/range.rs @@ -17,7 +17,7 @@ use core::ops::{RangeFull, Range, RangeTo, RangeFrom, RangeInclusive, RangeToInclusive}; use Bound::{self, Excluded, Included, Unbounded}; -/// **RangeArgument** is implemented by Rust's built-in range types, produced +/// **`RangeArgument`** is implemented by Rust's built-in range types, produced /// by range syntax like `..`, `a..`, `..b` or `c..d`. pub trait RangeArgument { /// Start index bound diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index 2ea953df87357..31fbf36862799 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -1538,7 +1538,7 @@ unsafe fn merge(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F) } } -/// This merge sort borrows some (but not all) ideas from TimSort, which is described in detail +/// This merge sort borrows some (but not all) ideas from `TimSort`, which is described in detail /// [here](http://svn.python.org/projects/python/trunk/Objects/listsort.txt). /// /// The algorithm identifies strictly descending and non-descending subsequences, which are called diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs index e27c45773441a..d6295a13ac861 100644 --- a/src/libcollections/str.rs +++ b/src/libcollections/str.rs @@ -1702,7 +1702,7 @@ impl str { fn map_uppercase_sigma(from: &str, i: usize, to: &mut String) { // See http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992 // for the definition of `Final_Sigma`. 
-            debug_assert!('Σ'.len_utf8() == 2);
+            debug_assert_eq!('Σ'.len_utf8(), 2);
             let is_word_final = case_ignoreable_then_cased(from[..i].chars().rev()) &&
                                 !case_ignoreable_then_cased(from[i + 2..].chars());
             to.push_str(if is_word_final { "ς" } else { "σ" });
@@ -1749,7 +1749,7 @@ impl str {
     pub fn to_uppercase(&self) -> String {
         let mut s = String::with_capacity(self.len());
         s.extend(self.chars().flat_map(|c| c.to_uppercase()));
-        return s;
+        s
     }
 
     /// Escapes each char in `s` with `char::escape_debug`.
diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs
index 43323676ab459..972b9720282c4 100644
--- a/src/libcollections/string.rs
+++ b/src/libcollections/string.rs
@@ -533,7 +533,7 @@ impl String {
     /// assert_eq!("Hello �World", output);
     /// ```
     #[stable(feature = "rust1", since = "1.0.0")]
-    pub fn from_utf8_lossy<'a>(v: &'a [u8]) -> Cow<'a, str> {
+    pub fn from_utf8_lossy(v: &[u8]) -> Cow {
         let mut i;
         match str::from_utf8(v) {
             Ok(s) => return Cow::Borrowed(s),
@@ -591,9 +591,9 @@ impl String {
                 }
                 3 => {
                     match (byte, safe_get(v, i, total)) {
-                        (0xE0, 0xA0...0xBF) => (),
-                        (0xE1...0xEC, 0x80...0xBF) => (),
-                        (0xED, 0x80...0x9F) => (),
+                        (0xE0, 0xA0...0xBF) |
+                        (0xE1...0xEC, 0x80...0xBF) |
+                        (0xED, 0x80...0x9F) |
                         (0xEE...0xEF, 0x80...0xBF) => (),
                         _ => {
                             error!();
@@ -609,8 +609,8 @@ impl String {
                 }
                 4 => {
                     match (byte, safe_get(v, i, total)) {
-                        (0xF0, 0x90...0xBF) => (),
-                        (0xF1...0xF3, 0x80...0xBF) => (),
+                        (0xF0, 0x90...0xBF) |
+                        (0xF1...0xF3, 0x80...0xBF) |
                         (0xF4, 0x80...0x8F) => (),
                         _ => {
                             error!();
diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs
index d38c9f6e1cf80..aba37d30e1459 100644
--- a/src/libcollections/vec.rs
+++ b/src/libcollections/vec.rs
@@ -2011,21 +2011,19 @@ impl Iterator for IntoIter {
         unsafe {
             if self.ptr as *const _ == self.end {
                 None
+            } else if mem::size_of::() == 0 {
+                // purposefully don't use 'ptr.offset' because for
+                // vectors with 0-size elements this would return the
+                // same pointer.
+                self.ptr = arith_offset(self.ptr as *const i8, 1) as *mut T;
+
+                // Use a non-null pointer value
+                Some(ptr::read(EMPTY as *mut T))
             } else {
-                if mem::size_of::() == 0 {
-                    // purposefully don't use 'ptr.offset' because for
-                    // vectors with 0-size elements this would return the
-                    // same pointer.
-                    self.ptr = arith_offset(self.ptr as *const i8, 1) as *mut T;
-
-                    // Use a non-null pointer value
-                    Some(ptr::read(EMPTY as *mut T))
-                } else {
-                    let old = self.ptr;
-                    self.ptr = self.ptr.offset(1);
-
-                    Some(ptr::read(old))
-                }
+                let old = self.ptr;
+                self.ptr = self.ptr.offset(1);
+
+                Some(ptr::read(old))
             }
         }
     }
@@ -2056,18 +2054,16 @@ impl DoubleEndedIterator for IntoIter {
         unsafe {
             if self.end == self.ptr {
                 None
-            } else {
-                if mem::size_of::() == 0 {
-                    // See above for why 'ptr.offset' isn't used
-                    self.end = arith_offset(self.end as *const i8, -1) as *mut T;
+            } else if mem::size_of::() == 0 {
+                // See above for why 'ptr.offset' isn't used
+                self.end = arith_offset(self.end as *const i8, -1) as *mut T;
 
-                    // Use a non-null pointer value
-                    Some(ptr::read(EMPTY as *mut T))
-                } else {
-                    self.end = self.end.offset(-1);
+                // Use a non-null pointer value
+                Some(ptr::read(EMPTY as *mut T))
+            } else {
+                self.end = self.end.offset(-1);
 
-                    Some(ptr::read(self.end))
-                }
+                Some(ptr::read(self.end))
             }
         }
     }
diff --git a/src/libcollections/vec_deque.rs b/src/libcollections/vec_deque.rs
index 1985be7f901c6..074fdcfb9bd51 100644
--- a/src/libcollections/vec_deque.rs
+++ b/src/libcollections/vec_deque.rs
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-//! VecDeque is a double-ended queue, which is implemented with the help of a
+//! `VecDeque` is a double-ended queue, which is implemented with the help of a
 //! growing ring buffer.
 //!
 //! This queue has `O(1)` amortized inserts and removals from both ends of the
@@ -357,7 +357,7 @@ impl VecDeque {
         }
         debug_assert!(self.head < self.cap());
         debug_assert!(self.tail < self.cap());
-        debug_assert!(self.cap().count_ones() == 1);
+        debug_assert_eq!(self.cap().count_ones(), 1);
     }
 }
@@ -631,7 +631,7 @@ impl VecDeque {
 
         debug_assert!(self.head < self.cap());
         debug_assert!(self.tail < self.cap());
-        debug_assert!(self.cap().count_ones() == 1);
+        debug_assert_eq!(self.cap().count_ones(), 1);
     }
 }
@@ -1614,7 +1614,7 @@ impl VecDeque {
             }
         }
 
-        return elem;
+        elem
     }
 
     /// Splits the collection into two at the given index.
@@ -1847,7 +1847,7 @@ fn wrap_index(index: usize, size: usize) -> usize {
     index & (size - 1)
 }
 
-/// Returns the two slices that cover the VecDeque's valid range
+/// Returns the two slices that cover the `VecDeque`'s valid range
 trait RingSlices: Sized {
     fn slice(self, from: usize, to: usize) -> Self;
     fn split_at(self, i: usize) -> (Self, Self);
@@ -1983,7 +1983,7 @@ pub struct IterMut<'a, T: 'a> {
 impl<'a, T: 'a + fmt::Debug> fmt::Debug for IterMut<'a, T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.debug_tuple("IterMut")
-         .field(&self.clone())
+         .field(&self)
          .finish()
     }
 }
@@ -2047,7 +2047,7 @@ impl<'a, T> ExactSizeIterator for IterMut<'a, T> {
 #[unstable(feature = "fused", issue = "35602")]
 impl<'a, T> FusedIterator for IterMut<'a, T> {}
 
-/// A by-value VecDeque iterator
+/// A by-value `VecDeque` iterator
 #[derive(Clone)]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct IntoIter {
@@ -2058,7 +2058,7 @@ pub struct IntoIter {
 impl fmt::Debug for IntoIter {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.debug_tuple("IntoIter")
-         .field(&self.clone())
+         .field(&self)
          .finish()
     }
 }
@@ -2097,7 +2097,7 @@ impl ExactSizeIterator for IntoIter {
 #[unstable(feature = "fused", issue = "35602")]
 impl FusedIterator for IntoIter {}
 
-/// A draining VecDeque iterator
+/// A draining `VecDeque` iterator
 #[stable(feature = "drain", since = "1.6.0")]
 pub struct Drain<'a, T: 'a> {
     after_tail: usize,
@@ -2110,7 +2110,7 @@ pub struct Drain<'a, T: 'a> {
 impl<'a, T: 'a + fmt::Debug> fmt::Debug for Drain<'a, T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.debug_tuple("Drain")
-         .field(&self.clone())
+         .field(&self)
          .finish()
     }
 }
@@ -2426,57 +2426,54 @@ impl From> for Vec {
             // Need to move the ring to the front of the buffer, as vec will expect this.
             if other.is_contiguous() {
                 ptr::copy(buf.offset(tail as isize), buf, len);
-            } else {
-                if (tail - head) >= cmp::min((cap - tail), head) {
-                    // There is enough free space in the centre for the shortest block so we can
-                    // do this in at most three copy moves.
-                    if (cap - tail) > head {
-                        // right hand block is the long one; move that enough for the left
-                        ptr::copy(buf.offset(tail as isize),
-                                  buf.offset((tail - head) as isize),
-                                  cap - tail);
-                        // copy left in the end
-                        ptr::copy(buf, buf.offset((cap - head) as isize), head);
-                        // shift the new thing to the start
-                        ptr::copy(buf.offset((tail - head) as isize), buf, len);
-                    } else {
-                        // left hand block is the long one, we can do it in two!
-                        ptr::copy(buf, buf.offset((cap - tail) as isize), head);
-                        ptr::copy(buf.offset(tail as isize), buf, cap - tail);
-                    }
+            } else if (tail - head) >= cmp::min((cap - tail), head) {
+                // There is enough free space in the centre for the shortest block so we can
+                // do this in at most three copy moves.
+                if (cap - tail) > head {
+                    // right hand block is the long one; move that enough for the left
+                    ptr::copy(buf.offset(tail as isize),
+                              buf.offset((tail - head) as isize),
+                              cap - tail);
+                    // copy left in the end
+                    ptr::copy(buf, buf.offset((cap - head) as isize), head);
+                    // shift the new thing to the start
+                    ptr::copy(buf.offset((tail - head) as isize), buf, len);
                 } else {
-                    // Need to use N swaps to move the ring
-                    // We can use the space at the end of the ring as a temp store
-
-                    let mut left_edge: usize = 0;
-                    let mut right_edge: usize = tail;
-
-                    // The general problem looks like this
-                    // GHIJKLM...ABCDEF - before any swaps
-                    // ABCDEFM...GHIJKL - after 1 pass of swaps
-                    // ABCDEFGHIJM...KL - swap until the left edge reaches the temp store
-                    //    - then restart the algorithm with a new (smaller) store
-                    // Sometimes the temp store is reached when the right edge is at the end
-                    // of the buffer - this means we've hit the right order with fewer swaps!
-                    // E.g
-                    // EF..ABCD
-                    // ABCDEF.. - after four only swaps we've finished
-
-                    while left_edge < len && right_edge != cap {
-                        let mut right_offset = 0;
-                        for i in left_edge..right_edge {
-                            right_offset = (i - left_edge) % (cap - right_edge);
-                            let src: isize = (right_edge + right_offset) as isize;
-                            ptr::swap(buf.offset(i as isize), buf.offset(src));
-                        }
-                        let n_ops = right_edge - left_edge;
-                        left_edge += n_ops;
-                        right_edge += right_offset + 1;
-
+                    // left hand block is the long one, we can do it in two!
+                    ptr::copy(buf, buf.offset((cap - tail) as isize), head);
+                    ptr::copy(buf.offset(tail as isize), buf, cap - tail);
+                }
+            } else {
+                // Need to use N swaps to move the ring
+                // We can use the space at the end of the ring as a temp store
+
+                let mut left_edge: usize = 0;
+                let mut right_edge: usize = tail;
+
+                // The general problem looks like this
+                // GHIJKLM...ABCDEF - before any swaps
+                // ABCDEFM...GHIJKL - after 1 pass of swaps
+                // ABCDEFGHIJM...KL - swap until the left edge reaches the temp store
+                //    - then restart the algorithm with a new (smaller) store
+                // Sometimes the temp store is reached when the right edge is at the end
+                // of the buffer - this means we've hit the right order with fewer swaps!
+                // E.g
+                // EF..ABCD
+                // ABCDEF.. - after four only swaps we've finished
+
+                while left_edge < len && right_edge != cap {
+                    let mut right_offset = 0;
+                    for i in left_edge..right_edge {
+                        right_offset = (i - left_edge) % (cap - right_edge);
+                        let src: isize = (right_edge + right_offset) as isize;
+                        ptr::swap(buf.offset(i as isize), buf.offset(src));
                    }
+                    let n_ops = right_edge - left_edge;
+                    left_edge += n_ops;
+                    right_edge += right_offset + 1;
                }
-            }
+
             let out = Vec::from_raw_parts(buf, len, cap);
             mem::forget(other);
             out