The Final Code

#![feature(ptr_internals)]
#![feature(allocator_api)]
#![feature(alloc_layout_extra)]

use std::ptr::{Unique, NonNull, self};
use std::mem;
use std::ops::{Deref, DerefMut};
use std::marker::PhantomData;
use std::alloc::{Alloc, GlobalAlloc, Layout, Global, handle_alloc_error};

struct RawVec<T> {
    ptr: Unique<T>,
    cap: usize,
}

impl<T> RawVec<T> {
    fn new() -> Self {
        // !0 is usize::MAX. This branch should be stripped at compile time.
        let cap = if mem::size_of::<T>() == 0 { !0 } else { 0 };

        // Unique::empty() doubles as "unallocated" and "zero-sized allocation"
        RawVec { ptr: Unique::empty(), cap: cap }
    }

    fn grow(&mut self) {
        unsafe {
            let elem_size = mem::size_of::<T>();

            // since we set the capacity to usize::MAX when elem_size is
            // 0, getting to here necessarily means the Vec is overfull.
            assert!(elem_size != 0, "capacity overflow");

            let (new_cap, ptr) = if self.cap == 0 {
                let ptr = Global.alloc(Layout::array::<T>(1).unwrap());
                (1, ptr)
            } else {
                let new_cap = 2 * self.cap;
                let c: NonNull<T> = self.ptr.into();
                let ptr = Global.realloc(c.cast(),
                                         Layout::array::<T>(self.cap).unwrap(),
                                         Layout::array::<T>(new_cap).unwrap().size());
                (new_cap, ptr)
            };

            // If allocate or reallocate fail, oom
            if ptr.is_err() {
                handle_alloc_error(Layout::from_size_align_unchecked(
                    new_cap * elem_size,
                    mem::align_of::<T>(),
                ))
            }
            let ptr = ptr.unwrap();

            self.ptr = Unique::new_unchecked(ptr.as_ptr() as *mut _);
            self.cap = new_cap;
        }
    }
}

impl<T> Drop for RawVec<T> {
    fn drop(&mut self) {
        let elem_size = mem::size_of::<T>();
        if self.cap != 0 && elem_size != 0 {
            unsafe {
                let c: NonNull<T> = self.ptr.into();
                Global.dealloc(c.cast(),
                               Layout::array::<T>(self.cap).unwrap());
            }
        }
    }
}

pub struct Vec<T> {
    buf: RawVec<T>,
    len: usize,
}

impl<T> Vec<T> {
    fn ptr(&self) -> *mut T { self.buf.ptr.as_ptr() }

    fn cap(&self) -> usize { self.buf.cap }

    pub fn new() -> Self {
        Vec { buf: RawVec::new(), len: 0 }
    }

    pub fn push(&mut self, elem: T) {
        if self.len == self.cap() { self.buf.grow(); }

        unsafe {
            ptr::write(self.ptr().offset(self.len as isize), elem);
        }

        // Can't fail, we'll OOM first.
        self.len += 1;
    }

    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            None
        } else {
            self.len -= 1;
            unsafe {
                Some(ptr::read(self.ptr().offset(self.len as isize)))
            }
        }
    }

    pub fn insert(&mut self, index: usize, elem: T) {
        assert!(index <= self.len, "index out of bounds");
        if self.cap() == self.len { self.buf.grow(); }

        unsafe {
            if index < self.len {
                ptr::copy(self.ptr().offset(index as isize),
                          self.ptr().offset(index as isize + 1),
                          self.len - index);
            }
            ptr::write(self.ptr().offset(index as isize), elem);
            self.len += 1;
        }
    }

    pub fn remove(&mut self, index: usize) -> T {
        assert!(index < self.len, "index out of bounds");
        unsafe {
            self.len -= 1;
            let result = ptr::read(self.ptr().offset(index as isize));
            ptr::copy(self.ptr().offset(index as isize + 1),
                      self.ptr().offset(index as isize),
                      self.len - index);
            result
        }
    }

    pub fn into_iter(self) -> IntoIter<T> {
        unsafe {
            let iter = RawValIter::new(&self);
            let buf = ptr::read(&self.buf);
            mem::forget(self);

            IntoIter {
                iter: iter,
                _buf: buf,
            }
        }
    }

    pub fn drain(&mut self) -> Drain<T> {
        unsafe {
            let iter = RawValIter::new(&self);

            // this is a mem::forget safety thing. If Drain is forgotten, we just
            // leak the whole Vec's contents. Also we need to do this *eventually*
            // anyway, so why not do it now?
            self.len = 0;

            Drain {
                iter: iter,
                vec: PhantomData,
            }
        }
    }
}

impl<T> Drop for Vec<T> {
    fn drop(&mut self) {
        while let Some(_) = self.pop() {}
        // allocation is handled by RawVec
    }
}

impl<T> Deref for Vec<T> {
    type Target = [T];
    fn deref(&self) -> &[T] {
        unsafe {
            ::std::slice::from_raw_parts(self.ptr(), self.len)
        }
    }
}

impl<T> DerefMut for Vec<T> {
    fn deref_mut(&mut self) -> &mut [T] {
        unsafe {
            ::std::slice::from_raw_parts_mut(self.ptr(), self.len)
        }
    }
}

struct RawValIter<T> {
    start: *const T,
    end: *const T,
}

impl<T> RawValIter<T> {
    unsafe fn new(slice: &[T]) -> Self {
        RawValIter {
            start: slice.as_ptr(),
            end: if mem::size_of::<T>() == 0 {
                ((slice.as_ptr() as usize) + slice.len()) as *const _
            } else if slice.len() == 0 {
                slice.as_ptr()
            } else {
                slice.as_ptr().offset(slice.len() as isize)
            }
        }
    }
}

impl<T> Iterator for RawValIter<T> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        if self.start == self.end {
            None
        } else {
            unsafe {
                let result = ptr::read(self.start);
                self.start = if mem::size_of::<T>() == 0 {
                    (self.start as usize + 1) as *const _
                } else {
                    self.start.offset(1)
                };
                Some(result)
            }
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let elem_size = mem::size_of::<T>();
        let len = (self.end as usize - self.start as usize)
                  / if elem_size == 0 { 1 } else { elem_size };
        (len, Some(len))
    }
}

impl<T> DoubleEndedIterator for RawValIter<T> {
    fn next_back(&mut self) -> Option<T> {
        if self.start == self.end {
            None
        } else {
            unsafe {
                self.end = if mem::size_of::<T>() == 0 {
                    (self.end as usize - 1) as *const _
                } else {
                    self.end.offset(-1)
                };
                Some(ptr::read(self.end))
            }
        }
    }
}

pub struct IntoIter<T> {
    _buf: RawVec<T>, // we don't actually care about this. Just need it to live.
    iter: RawValIter<T>,
}

impl<T> Iterator for IntoIter<T> {
    type Item = T;
    fn next(&mut self) -> Option<T> { self.iter.next() }
    fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}

impl<T> DoubleEndedIterator for IntoIter<T> {
    fn next_back(&mut self) -> Option<T> { self.iter.next_back() }
}

impl<T> Drop for IntoIter<T> {
    fn drop(&mut self) {
        for _ in &mut *self {}
    }
}

pub struct Drain<'a, T: 'a> {
    vec: PhantomData<&'a mut Vec<T>>,
    iter: RawValIter<T>,
}

impl<'a, T> Iterator for Drain<'a, T> {
    type Item = T;
    fn next(&mut self) -> Option<T> { self.iter.next() }
    fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}

impl<'a, T> DoubleEndedIterator for Drain<'a, T> {
    fn next_back(&mut self) -> Option<T> { self.iter.next_back() }
}

impl<'a, T> Drop for Drain<'a, T> {
    fn drop(&mut self) {
        // pre-drain the iter
        for _ in &mut self.iter {}
    }
}

# fn main() {
#     tests::create_push_pop();
#     tests::iter_test();
#     tests::test_drain();
#     tests::test_zst();
#     println!("All tests finished OK");
# }
#
# mod tests {
#     use super::*;
#
#     pub fn create_push_pop() {
#         let mut v = Vec::new();
#         v.push(1);
#         assert_eq!(1, v.len());
#         assert_eq!(1, v[0]);
#         for i in v.iter_mut() {
#             *i += 1;
#         }
#         v.insert(0, 5);
#         let x = v.pop();
#         assert_eq!(Some(2), x);
#         assert_eq!(1, v.len());
#         v.push(10);
#         let x = v.remove(0);
#         assert_eq!(5, x);
#         assert_eq!(1, v.len());
#     }
#
#     pub fn iter_test() {
#         let mut v = Vec::new();
#         for i in 0..10 {
#             v.push(Box::new(i))
#         }
#         let mut iter = v.into_iter();
#         let first = iter.next().unwrap();
#         let last = iter.next_back().unwrap();
#         drop(iter);
#         assert_eq!(0, *first);
#         assert_eq!(9, *last);
#     }
#
#     pub fn test_drain() {
#         let mut v = Vec::new();
#         for i in 0..10 {
#             v.push(Box::new(i))
#         }
#         {
#             let mut drain = v.drain();
#             let first = drain.next().unwrap();
#             let last = drain.next_back().unwrap();
#             assert_eq!(0, *first);
#             assert_eq!(9, *last);
#         }
#         assert_eq!(0, v.len());
#         v.push(Box::new(1));
#         assert_eq!(1, *v.pop().unwrap());
#     }
#
#     pub fn test_zst() {
#         let mut v = Vec::new();
#         for _i in 0..10 {
#             v.push(())
#         }
#
#         let mut count = 0;
#
#         for _ in v.into_iter() {
#             count += 1
#         }
#
#         assert_eq!(10, count);
#     }
# }