// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BASE_CONTAINERS_CIRCULAR_DEQUE_H_
#define BASE_CONTAINERS_CIRCULAR_DEQUE_H_

#include <algorithm>
#include <cstddef>
#include <iterator>
#include <type_traits>
#include <utility>

#include "base/containers/vector_buffer.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/template_util.h"

// base::circular_deque is similar to std::deque. Unlike std::deque, the
// storage is provided in a flat circular buffer conceptually similar to a
// vector. The beginning and end will wrap around as necessary so that
// pushes and pops will be constant time as long as a capacity expansion is
// not required.
//
// The API should be identical to std::deque with the following differences:
//
//  - ITERATORS ARE NOT STABLE. Mutating the container will invalidate all
//    iterators.
//
//  - Insertions may resize the vector and so are not constant time (std::deque
//    guarantees constant time for insertions at the ends).
//
//  - Container-wide comparisons are not implemented. If you want to compare
//    two containers, use an algorithm so the expensive iteration is explicit.
//
// If you want a similar container with only a queue API, use base::queue in
// base/containers/queue.h.
//
// Constructors:
//   circular_deque();
//   circular_deque(size_t count);
//   circular_deque(size_t count, const T& value);
//   circular_deque(InputIterator first, InputIterator last);
//   circular_deque(const circular_deque&);
//   circular_deque(circular_deque&&);
//   circular_deque(std::initializer_list<value_type>);
//
// Assignment functions:
//   circular_deque& operator=(const circular_deque&);
//   circular_deque& operator=(circular_deque&&);
//   circular_deque& operator=(std::initializer_list<T>);
//   void assign(size_t count, const T& value);
//   void assign(InputIterator first, InputIterator last);
//   void assign(std::initializer_list<T> value);
//
// Random accessors:
//   T& at(size_t);
//   const T& at(size_t) const;
//   T& operator[](size_t);
//   const T& operator[](size_t) const;
//
// End accessors:
//   T& front();
//   const T& front() const;
//   T& back();
//   const T& back() const;
//
// Iterator functions:
//   iterator begin();
//   const_iterator begin() const;
//   const_iterator cbegin() const;
//   iterator end();
//   const_iterator end() const;
//   const_iterator cend() const;
//   reverse_iterator rbegin();
//   const_reverse_iterator rbegin() const;
//   const_reverse_iterator crbegin() const;
//   reverse_iterator rend();
//   const_reverse_iterator rend() const;
//   const_reverse_iterator crend() const;
//
// Memory management:
//   void reserve(size_t);  // SEE IMPLEMENTATION FOR SOME GOTCHAS.
//   size_t capacity() const;
//   void shrink_to_fit();
//
// Size management:
//   void clear();
//   bool empty() const;
//   size_t size() const;
//   void resize(size_t);
//   void resize(size_t count, const T& value);
//
// Positional insert and erase:
//   void insert(const_iterator pos, size_type count, const T& value);
//   void insert(const_iterator pos,
//               InputIterator first, InputIterator last);
//   iterator insert(const_iterator pos, const T& value);
//   iterator insert(const_iterator pos, T&& value);
//   iterator emplace(const_iterator pos, Args&&... args);
//   iterator erase(const_iterator pos);
//   iterator erase(const_iterator first, const_iterator last);
//
// End insert and erase:
//   void push_front(const T&);
//   void push_front(T&&);
//   void push_back(const T&);
//   void push_back(T&&);
//   T& emplace_front(Args&&...);
//   T& emplace_back(Args&&...);
//   void pop_front();
//   void pop_back();
//
// General:
//   void swap(circular_deque&);
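//
// Example usage (an illustrative sketch only; see the full API listing above):
//
//   base::circular_deque<int> queue;
//   queue.push_back(1);
//   queue.push_back(2);
//   queue.push_front(0);         // queue is now {0, 1, 2}.
//   int first = queue.front();   // 0
//   queue.pop_front();           // queue is now {1, 2}.
//
//   // Remember that any mutation invalidates iterators, so re-acquire
//   // begin()/end() after a push, pop, insert, or erase.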

namespace base {

template <class T>
class circular_deque;

namespace internal {

// Start allocating nonempty buffers with this many entries. This is the
// external capacity so the internal buffer will be one larger (= 4) which is
// more even for the allocator. See the descriptions of internal vs. external
// capacity on the comment above the buffer_ variable below.
constexpr size_t kCircularBufferInitialCapacity = 3;

template <typename T>
class circular_deque_const_iterator {
 public:
  using difference_type = std::ptrdiff_t;
  using value_type = T;
  using pointer = const T*;
  using reference = const T&;
  using iterator_category = std::random_access_iterator_tag;

  circular_deque_const_iterator() : parent_deque_(nullptr), index_(0) {
#if DCHECK_IS_ON()
    created_generation_ = 0;
#endif  // DCHECK_IS_ON()
  }

  // Dereferencing.
  const T& operator*() const {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndex(index_);
    return parent_deque_->buffer_[index_];
  }
  const T* operator->() const {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndex(index_);
    return &parent_deque_->buffer_[index_];
  }
  const value_type& operator[](difference_type i) const {
    return *(*this + i);
  }

  // Increment and decrement.
  circular_deque_const_iterator& operator++() {
    Increment();
    return *this;
  }
  circular_deque_const_iterator operator++(int) {
    circular_deque_const_iterator ret = *this;
    Increment();
    return ret;
  }
  circular_deque_const_iterator& operator--() {
    Decrement();
    return *this;
  }
  circular_deque_const_iterator operator--(int) {
    circular_deque_const_iterator ret = *this;
    Decrement();
    return ret;
  }

  // Random access mutation.
  friend circular_deque_const_iterator operator+(
      const circular_deque_const_iterator& iter,
      difference_type offset) {
    circular_deque_const_iterator ret = iter;
    ret.Add(offset);
    return ret;
  }
  circular_deque_const_iterator& operator+=(difference_type offset) {
    Add(offset);
    return *this;
  }
  friend circular_deque_const_iterator operator-(
      const circular_deque_const_iterator& iter,
      difference_type offset) {
    circular_deque_const_iterator ret = iter;
    ret.Add(-offset);
    return ret;
  }
  circular_deque_const_iterator& operator-=(difference_type offset) {
    Add(-offset);
    return *this;
  }

  friend std::ptrdiff_t operator-(const circular_deque_const_iterator& lhs,
                                  const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.OffsetFromBegin() - rhs.OffsetFromBegin();
  }

  // Comparisons.
  friend bool operator==(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.index_ == rhs.index_;
  }
  friend bool operator!=(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    return !(lhs == rhs);
  }
  friend bool operator<(const circular_deque_const_iterator& lhs,
                        const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.OffsetFromBegin() < rhs.OffsetFromBegin();
  }
  friend bool operator<=(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    return !(lhs > rhs);
  }
  friend bool operator>(const circular_deque_const_iterator& lhs,
                        const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.OffsetFromBegin() > rhs.OffsetFromBegin();
  }
  friend bool operator>=(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    return !(lhs < rhs);
  }

 protected:
  friend class circular_deque<T>;

  circular_deque_const_iterator(const circular_deque<T>* parent, size_t index)
      : parent_deque_(parent), index_(index) {
#if DCHECK_IS_ON()
    created_generation_ = parent->generation_;
#endif  // DCHECK_IS_ON()
  }

  // Returns the offset from the beginning index of the buffer to the current
  // item.
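  //
  // For example (illustrative numbers only): with buffer_.capacity() == 8,
  // begin_ == 6, and index_ == 1, the item is on the wrapped-around side, so
  // the offset is 8 - 6 + 1 = 3.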
  size_t OffsetFromBegin() const {
    if (index_ >= parent_deque_->begin_)
      return index_ - parent_deque_->begin_;  // On the same side as begin.
    return parent_deque_->buffer_.capacity() - parent_deque_->begin_ + index_;
  }

  // Most uses will be ++ and -- so use a simplified implementation.
  void Increment() {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndex(index_);
    index_++;
    if (index_ == parent_deque_->buffer_.capacity())
      index_ = 0;
  }
  void Decrement() {
    CheckUnstableUsage();
    parent_deque_->CheckValidIndexOrEnd(index_);
    if (index_ == 0)
      index_ = parent_deque_->buffer_.capacity() - 1;
    else
      index_--;
  }
  void Add(difference_type delta) {
    CheckUnstableUsage();
#if DCHECK_IS_ON()
    if (delta <= 0)
      parent_deque_->CheckValidIndexOrEnd(index_);
    else
      parent_deque_->CheckValidIndex(index_);
#endif
    // It should be valid to add 0 to any iterator, even if the container is
    // empty and the iterator points to end(). The modulo below will divide
    // by 0 if the buffer capacity is empty, so it's important to check for
    // this case explicitly.
    if (delta == 0)
      return;

    difference_type new_offset = OffsetFromBegin() + delta;
    DCHECK(new_offset >= 0 &&
           new_offset <= static_cast<difference_type>(parent_deque_->size()));
    index_ = (new_offset + parent_deque_->begin_) %
             parent_deque_->buffer_.capacity();
  }

#if DCHECK_IS_ON()
  void CheckUnstableUsage() const {
    DCHECK(parent_deque_);
    // Since circular_deque doesn't guarantee stability, any attempt to
    // dereference this iterator after a mutation (i.e. the generation doesn't
    // match the original) in the container is illegal.
    DCHECK_EQ(created_generation_, parent_deque_->generation_)
        << "circular_deque iterator dereferenced after mutation.";
  }
  void CheckComparable(const circular_deque_const_iterator& other) const {
    DCHECK_EQ(parent_deque_, other.parent_deque_);
    // Since circular_deque doesn't guarantee stability, two iterators that
    // are compared must have been generated without mutating the container.
    // If this fires, the container was mutated between generating the two
    // iterators being compared.
    DCHECK_EQ(created_generation_, other.created_generation_);
  }
#else
  inline void CheckUnstableUsage() const {}
  inline void CheckComparable(const circular_deque_const_iterator&) const {}
#endif  // DCHECK_IS_ON()

  const circular_deque<T>* parent_deque_;
  size_t index_;

#if DCHECK_IS_ON()
  // The generation of the parent deque when this iterator was created. The
  // container will update the generation for every modification so we can
  // test if the container was modified by comparing them.
  uint64_t created_generation_;
#endif  // DCHECK_IS_ON()
};

template <typename T>
class circular_deque_iterator : public circular_deque_const_iterator<T> {
  using base = circular_deque_const_iterator<T>;

 public:
  friend class circular_deque<T>;

  using difference_type = std::ptrdiff_t;
  using value_type = T;
  using pointer = T*;
  using reference = T&;
  using iterator_category = std::random_access_iterator_tag;

  // Expose the base class' constructor.
  circular_deque_iterator() : circular_deque_const_iterator<T>() {}

  // Dereferencing.
  T& operator*() const { return const_cast<T&>(base::operator*()); }
  T* operator->() const { return const_cast<T*>(base::operator->()); }
  T& operator[](difference_type i) {
    return const_cast<T&>(base::operator[](i));
  }

  // Random access mutation.
  friend circular_deque_iterator operator+(const circular_deque_iterator& iter,
                                           difference_type offset) {
    circular_deque_iterator ret = iter;
    ret.Add(offset);
    return ret;
  }
  circular_deque_iterator& operator+=(difference_type offset) {
    base::Add(offset);
    return *this;
  }
  friend circular_deque_iterator operator-(const circular_deque_iterator& iter,
                                           difference_type offset) {
    circular_deque_iterator ret = iter;
    ret.Add(-offset);
    return ret;
  }
  circular_deque_iterator& operator-=(difference_type offset) {
    base::Add(-offset);
    return *this;
  }

  // Increment and decrement.
  circular_deque_iterator& operator++() {
    base::Increment();
    return *this;
  }
  circular_deque_iterator operator++(int) {
    circular_deque_iterator ret = *this;
    base::Increment();
    return ret;
  }
  circular_deque_iterator& operator--() {
    base::Decrement();
    return *this;
  }
  circular_deque_iterator operator--(int) {
    circular_deque_iterator ret = *this;
    base::Decrement();
    return ret;
  }

 private:
  circular_deque_iterator(const circular_deque<T>* parent, size_t index)
      : circular_deque_const_iterator<T>(parent, index) {}
};

}  // namespace internal

template <typename T>
class circular_deque {
 private:
  using VectorBuffer = internal::VectorBuffer<T>;

 public:
  using value_type = T;
  using size_type = std::size_t;
  using difference_type = std::ptrdiff_t;
  using reference = value_type&;
  using const_reference = const value_type&;
  using pointer = value_type*;
  using const_pointer = const value_type*;

  using iterator = internal::circular_deque_iterator<T>;
  using const_iterator = internal::circular_deque_const_iterator<T>;
  using reverse_iterator = std::reverse_iterator<iterator>;
  using const_reverse_iterator = std::reverse_iterator<const_iterator>;

  // ---------------------------------------------------------------------------
  // Constructor

  constexpr circular_deque() = default;

  // Constructs with |count| copies of |value| or default constructed version.
  circular_deque(size_type count) { resize(count); }
  circular_deque(size_type count, const T& value) { resize(count, value); }

  // Range constructor.
  template <class InputIterator>
  circular_deque(InputIterator first, InputIterator last) {
    assign(first, last);
  }

  // Copy/move.
  circular_deque(const circular_deque& other) : buffer_(other.size() + 1) {
    assign(other.begin(), other.end());
  }
  circular_deque(circular_deque&& other) noexcept
      : buffer_(std::move(other.buffer_)),
        begin_(other.begin_),
        end_(other.end_) {
    other.begin_ = 0;
    other.end_ = 0;
  }

  circular_deque(std::initializer_list<value_type> init) { assign(init); }

  ~circular_deque() { DestructRange(begin_, end_); }

  // ---------------------------------------------------------------------------
  // Assignments.
  //
  // All of these may invalidate iterators and references.

  circular_deque& operator=(const circular_deque& other) {
    if (&other == this)
      return *this;

    reserve(other.size());
    assign(other.begin(), other.end());
    return *this;
  }
  circular_deque& operator=(circular_deque&& other) noexcept {
    if (&other == this)
      return *this;

    // We're about to overwrite the buffer, so don't free it in clear to
    // avoid doing it twice.
    ClearRetainCapacity();
    buffer_ = std::move(other.buffer_);
    begin_ = other.begin_;
    end_ = other.end_;

    other.begin_ = 0;
    other.end_ = 0;

    IncrementGeneration();
    return *this;
  }
  circular_deque& operator=(std::initializer_list<value_type> ilist) {
    reserve(ilist.size());
    assign(std::begin(ilist), std::end(ilist));
    return *this;
  }

  void assign(size_type count, const value_type& value) {
    ClearRetainCapacity();
    reserve(count);
    for (size_t i = 0; i < count; i++)
      emplace_back(value);
    IncrementGeneration();
  }

  // This variant should be enabled only when InputIterator is an iterator.
  template <typename InputIterator>
  typename std::enable_if<::base::internal::is_iterator<InputIterator>::value,
                          void>::type
  assign(InputIterator first, InputIterator last) {
    // Possible future enhancement, dispatch on iterator tag type. For forward
    // iterators we can use std::difference to preallocate the space required
    // and only do one copy.
    ClearRetainCapacity();
    for (; first != last; ++first)
      emplace_back(*first);
    IncrementGeneration();
  }

  void assign(std::initializer_list<value_type> value) {
    reserve(std::distance(value.begin(), value.end()));
    assign(value.begin(), value.end());
  }

  // ---------------------------------------------------------------------------
  // Accessors.
  //
  // Since this class assumes no exceptions, at() and operator[] are equivalent.

  const value_type& at(size_type i) const {
    DCHECK(i < size());
    size_t right_size = buffer_.capacity() - begin_;
    if (begin_ <= end_ || i < right_size)
      return buffer_[begin_ + i];
    return buffer_[i - right_size];
  }
  value_type& at(size_type i) {
    return const_cast<value_type&>(
        const_cast<const circular_deque*>(this)->at(i));
  }

  value_type& operator[](size_type i) { return at(i); }
  const value_type& operator[](size_type i) const {
    return const_cast<circular_deque*>(this)->at(i);
  }

  value_type& front() {
    DCHECK(!empty());
    return buffer_[begin_];
  }
  const value_type& front() const {
    DCHECK(!empty());
    return buffer_[begin_];
  }

  value_type& back() {
    DCHECK(!empty());
    return *(--end());
  }
  const value_type& back() const {
    DCHECK(!empty());
    return *(--end());
  }

  // ---------------------------------------------------------------------------
  // Iterators.

  iterator begin() { return iterator(this, begin_); }
  const_iterator begin() const { return const_iterator(this, begin_); }
  const_iterator cbegin() const { return const_iterator(this, begin_); }

  iterator end() { return iterator(this, end_); }
  const_iterator end() const { return const_iterator(this, end_); }
  const_iterator cend() const { return const_iterator(this, end_); }

  reverse_iterator rbegin() { return reverse_iterator(end()); }
  const_reverse_iterator rbegin() const {
    return const_reverse_iterator(end());
  }
  const_reverse_iterator crbegin() const { return rbegin(); }

  reverse_iterator rend() { return reverse_iterator(begin()); }
  const_reverse_iterator rend() const {
    return const_reverse_iterator(begin());
  }
  const_reverse_iterator crend() const { return rend(); }

  // ---------------------------------------------------------------------------
  // Memory management.

  // IMPORTANT NOTE ON reserve(...): This class implements auto-shrinking of
  // the buffer when elements are deleted and there is "too much" wasted space.
  // So if you call reserve() with a large size in anticipation of pushing many
  // elements, but pop an element before the queue is full, the capacity you
  // reserved may be lost.
  //
  // As a result, it's only worthwhile to call reserve() when you're adding
  // many things at once with no intermediate operations.
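  //
  // For example (an illustrative sketch only):
  //
  //   base::circular_deque<int> q;
  //   q.reserve(1000);
  //   q.push_back(1);
  //   q.push_back(2);
  //   q.pop_front();  // May auto-shrink: the reserved capacity can be lost.
  //
  // Prefer calling reserve() immediately before a burst of pushes with no
  // pops in between.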
  void reserve(size_type new_capacity) {
    if (new_capacity > capacity())
      SetCapacityTo(new_capacity);
  }

  size_type capacity() const {
    // One item is wasted to indicate end().
    return buffer_.capacity() == 0 ? 0 : buffer_.capacity() - 1;
  }

  void shrink_to_fit() {
    if (empty()) {
      // Optimize empty case to really delete everything if there was
      // something.
      if (buffer_.capacity())
        buffer_ = VectorBuffer();
    } else {
      SetCapacityTo(size());
    }
  }

  // ---------------------------------------------------------------------------
  // Size management.

  // This will additionally reset the capacity() to 0.
  void clear() {
    // This can't resize(0) because that requires a default constructor to
    // compile, which not all contained classes may implement.
    ClearRetainCapacity();
    buffer_ = VectorBuffer();
  }

  bool empty() const { return begin_ == end_; }

  size_type size() const {
    if (begin_ <= end_)
      return end_ - begin_;
    return buffer_.capacity() - begin_ + end_;
  }

  // When reducing size, the elements are deleted from the end. When expanding
  // size, elements are added to the end with |value| or the default
  // constructed version. Even when using resize(count) to shrink, a default
  // constructor is required for the code to compile, even though it will not
  // be called.
  //
  // There are two versions rather than using a default value to avoid
  // creating a temporary when shrinking (when it's not needed). Plus if
  // the default constructor is desired when expanding usually just calling it
  // for each element is faster than making a default-constructed temporary and
  // copying it.
  void resize(size_type count) {
    // SEE BELOW VERSION if you change this. The code is mostly the same.
    if (count > size()) {
      // This could be slightly more efficient but expanding a queue with
      // identical elements is unusual and the extra computations of emplacing
      // one-by-one will typically be small relative to calling the constructor
      // for every item.
      ExpandCapacityIfNecessary(count - size());
      while (size() < count)
        emplace_back();
    } else if (count < size()) {
      size_t new_end = (begin_ + count) % buffer_.capacity();
      DestructRange(new_end, end_);
      end_ = new_end;

      ShrinkCapacityIfNecessary();
    }
    IncrementGeneration();
  }
  void resize(size_type count, const value_type& value) {
    // SEE ABOVE VERSION if you change this. The code is mostly the same.
    if (count > size()) {
      ExpandCapacityIfNecessary(count - size());
      while (size() < count)
        emplace_back(value);
    } else if (count < size()) {
      size_t new_end = (begin_ + count) % buffer_.capacity();
      DestructRange(new_end, end_);
      end_ = new_end;

      ShrinkCapacityIfNecessary();
    }
    IncrementGeneration();
  }

  // ---------------------------------------------------------------------------
  // Insert and erase.
  //
  // Insertion and deletion in the middle is O(n) and invalidates all existing
  // iterators.
  //
  // The implementation of insert isn't optimized as much as it could be. If
  // the insertion requires that the buffer be grown, it will first be grown
  // and everything moved, and then the items will be inserted, potentially
  // moving some items twice. This simplifies the implementation substantially
  // and means less generated templatized code. Since this is an uncommon
  // operation for deques, and already relatively slow, it doesn't seem worth
  // the benefit to optimize this.

  void insert(const_iterator pos, size_type count, const T& value) {
    ValidateIterator(pos);

    // Optimize insert at the beginning.
    if (pos == begin()) {
      ExpandCapacityIfNecessary(count);
      for (size_t i = 0; i < count; i++)
        push_front(value);
      return;
    }

    iterator insert_cur(this, pos.index_);
    iterator insert_end;
    MakeRoomFor(count, &insert_cur, &insert_end);
    while (insert_cur < insert_end) {
      new (&buffer_[insert_cur.index_]) T(value);
      ++insert_cur;
    }

    IncrementGeneration();
  }

  // This enable_if keeps this call from getting confused with the (pos, count,
  // value) version when value is an integer.
  template <class InputIterator>
  typename std::enable_if<::base::internal::is_iterator<InputIterator>::value,
                          void>::type
  insert(const_iterator pos, InputIterator first, InputIterator last) {
    ValidateIterator(pos);

    size_t inserted_items = std::distance(first, last);
    if (inserted_items == 0)
      return;  // Can divide by 0 when doing modulo below, so return early.

    // Make a hole to copy the items into.
    iterator insert_cur;
    iterator insert_end;
    if (pos == begin()) {
      // Optimize insert at the beginning, nothing needs to be shifted and the
      // hole is the |inserted_items| block immediately before |begin_|.
      ExpandCapacityIfNecessary(inserted_items);
      insert_end = begin();
      begin_ =
          (begin_ + buffer_.capacity() - inserted_items) % buffer_.capacity();
      insert_cur = begin();
    } else {
      insert_cur = iterator(this, pos.index_);
      MakeRoomFor(inserted_items, &insert_cur, &insert_end);
    }

    // Copy the items.
    while (insert_cur < insert_end) {
      new (&buffer_[insert_cur.index_]) T(*first);
      ++insert_cur;
      ++first;
    }

    IncrementGeneration();
  }

  // These all return an iterator to the inserted item. Existing iterators will
  // be invalidated.
  iterator insert(const_iterator pos, const T& value) {
    return emplace(pos, value);
  }
  iterator insert(const_iterator pos, T&& value) {
    return emplace(pos, std::move(value));
  }
  template <class... Args>
  iterator emplace(const_iterator pos, Args&&... args) {
    ValidateIterator(pos);

    // Optimize insert at beginning which doesn't require shifting.
    if (pos == cbegin()) {
      emplace_front(std::forward<Args>(args)...);
      return begin();
    }

    // Do this before we make the new iterators we return.
    IncrementGeneration();

    iterator insert_begin(this, pos.index_);
    iterator insert_end;
    MakeRoomFor(1, &insert_begin, &insert_end);
    new (&buffer_[insert_begin.index_]) T(std::forward<Args>(args)...);

    return insert_begin;
  }

  // Calling erase() won't automatically resize the buffer smaller like resize
  // or the pop functions. Erase is slow and relatively uncommon, and for
  // normal deque usage a pop will normally be done on a regular basis that
  // will prevent excessive buffer usage over long periods of time. It's not
  // worth having the extra code for every template instantiation of erase()
  // to resize capacity downward to a new buffer.
  iterator erase(const_iterator pos) { return erase(pos, pos + 1); }
  iterator erase(const_iterator first, const_iterator last) {
    ValidateIterator(first);
    ValidateIterator(last);

    IncrementGeneration();

    // First, call the destructor on the deleted items.
    if (first.index_ == last.index_) {
      // Nothing deleted. Need to return early to avoid falling through to
      // moving items on top of themselves.
      return iterator(this, first.index_);
    } else if (first.index_ < last.index_) {
      // Contiguous range.
      buffer_.DestructRange(&buffer_[first.index_], &buffer_[last.index_]);
    } else {
      // Deleted range wraps around.
      buffer_.DestructRange(&buffer_[first.index_],
                            &buffer_[buffer_.capacity()]);
      buffer_.DestructRange(&buffer_[0], &buffer_[last.index_]);
    }

    if (first.index_ == begin_) {
      // This deletion is from the beginning. Nothing needs to be copied, only
      // begin_ needs to be updated.
      begin_ = last.index_;
      return iterator(this, last.index_);
    }

    // In an erase operation, the shifted items all move logically to the left,
    // so move them from left-to-right.
    iterator move_src(this, last.index_);
    iterator move_src_end = end();
    iterator move_dest(this, first.index_);
    for (; move_src < move_src_end; move_src++, move_dest++) {
      buffer_.MoveRange(&buffer_[move_src.index_],
                        &buffer_[move_src.index_ + 1],
                        &buffer_[move_dest.index_]);
    }

    end_ = move_dest.index_;

    // Since we did not reallocate and only changed things after the erase
    // element(s), the input iterator's index points to the thing following the
    // deletion.
    return iterator(this, first.index_);
  }

  // ---------------------------------------------------------------------------
  // Begin/end operations.

  void push_front(const T& value) { emplace_front(value); }
  void push_front(T&& value) { emplace_front(std::move(value)); }

  void push_back(const T& value) { emplace_back(value); }
  void push_back(T&& value) { emplace_back(std::move(value)); }

  template <class... Args>
  reference emplace_front(Args&&... args) {
    ExpandCapacityIfNecessary(1);
    if (begin_ == 0)
      begin_ = buffer_.capacity() - 1;
    else
      begin_--;
    IncrementGeneration();
    new (&buffer_[begin_]) T(std::forward<Args>(args)...);
    return front();
  }

  template <class... Args>
  reference emplace_back(Args&&... args) {
    ExpandCapacityIfNecessary(1);
    new (&buffer_[end_]) T(std::forward<Args>(args)...);
    if (end_ == buffer_.capacity() - 1)
      end_ = 0;
    else
      end_++;
    IncrementGeneration();
    return back();
  }

  void pop_front() {
    DCHECK(size());
    buffer_.DestructRange(&buffer_[begin_], &buffer_[begin_ + 1]);
    begin_++;
    if (begin_ == buffer_.capacity())
      begin_ = 0;

    ShrinkCapacityIfNecessary();

    // Technically popping will not invalidate any iterators since the
    // underlying buffer will be stable. But in the future we may want to add a
    // feature that resizes the buffer smaller if there is too much wasted
    // space. This ensures we can make such a change safely.
    IncrementGeneration();
  }
  void pop_back() {
    DCHECK(size());
    if (end_ == 0)
      end_ = buffer_.capacity() - 1;
    else
      end_--;
    buffer_.DestructRange(&buffer_[end_], &buffer_[end_ + 1]);

    ShrinkCapacityIfNecessary();

    // See pop_front comment about why this is here.
    IncrementGeneration();
  }

  // ---------------------------------------------------------------------------
  // General operations.

  void swap(circular_deque& other) {
    std::swap(buffer_, other.buffer_);
    std::swap(begin_, other.begin_);
    std::swap(end_, other.end_);
    IncrementGeneration();
  }

  friend void swap(circular_deque& lhs, circular_deque& rhs) { lhs.swap(rhs); }

 private:
  friend internal::circular_deque_iterator<T>;
  friend internal::circular_deque_const_iterator<T>;

  // Moves the items in the given circular buffer to the current one. The
  // source is moved from so will become invalid. The destination buffer must
  // have already been allocated with enough size.
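  //
  // For example (illustrative numbers only): with from_capacity == 8,
  // from_begin == 6, and from_end == 2, the wrapped contents are moved in two
  // pieces: indices [6, 8) land at the start of the destination, followed by
  // indices [0, 2), giving *to_begin == 0 and *to_end == 4.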
  static void MoveBuffer(VectorBuffer& from_buf,
                         size_t from_begin,
                         size_t from_end,
                         VectorBuffer* to_buf,
                         size_t* to_begin,
                         size_t* to_end) {
    size_t from_capacity = from_buf.capacity();

    *to_begin = 0;
    if (from_begin < from_end) {
      // Contiguous.
      from_buf.MoveRange(&from_buf[from_begin], &from_buf[from_end],
                         to_buf->begin());
      *to_end = from_end - from_begin;
    } else if (from_begin > from_end) {
      // Discontiguous, copy the right side to the beginning of the new buffer.
      from_buf.MoveRange(&from_buf[from_begin], &from_buf[from_capacity],
                         to_buf->begin());
      size_t right_size = from_capacity - from_begin;
      // Append the left side.
      from_buf.MoveRange(&from_buf[0], &from_buf[from_end],
                         &(*to_buf)[right_size]);
      *to_end = right_size + from_end;
    } else {
      // No items.
      *to_end = 0;
    }
  }

  // Expands the buffer size. This assumes the size is larger than the
  // number of elements in the vector (it won't call delete on anything).
  void SetCapacityTo(size_t new_capacity) {
    // Use the capacity + 1 as the internal buffer size to differentiate
    // empty and full (see definition of buffer_ below).
    VectorBuffer new_buffer(new_capacity + 1);
    MoveBuffer(buffer_, begin_, end_, &new_buffer, &begin_, &end_);
    buffer_ = std::move(new_buffer);
  }
  void ExpandCapacityIfNecessary(size_t additional_elts) {
    size_t min_new_capacity = size() + additional_elts;
    if (capacity() >= min_new_capacity)
      return;  // Already enough room.

    min_new_capacity =
        std::max(min_new_capacity, internal::kCircularBufferInitialCapacity);

    // std::vector always grows by at least 50%. WTF::Deque grows by at least
    // 25%. We expect queue workloads to generally stay at a similar size and
    // grow less than a vector might, so use 25%.
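    //
    // For example (illustrative numbers only): at capacity() == 100, a push
    // that needs one more slot grows to max(101, 100 + 25) == 125 external
    // capacity.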
    size_t new_capacity =
        std::max(min_new_capacity, capacity() + capacity() / 4);
    SetCapacityTo(new_capacity);
  }

  void ShrinkCapacityIfNecessary() {
    // Don't auto-shrink below this size.
    if (capacity() <= internal::kCircularBufferInitialCapacity)
      return;

    // Shrink when 100% of the size() is wasted.
    size_t sz = size();
    size_t empty_spaces = capacity() - sz;
    if (empty_spaces < sz)
      return;

    // Leave 1/4 the size as free capacity, not going below the initial
    // capacity.
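    //
    // For example (illustrative numbers only): with size() == 8 and
    // capacity() == 20, 12 slots are wasted (at least the size), so the
    // buffer shrinks to max(3, 8 + 2) == 10 external capacity.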
    size_t new_capacity =
        std::max(internal::kCircularBufferInitialCapacity, sz + sz / 4);
    if (new_capacity < capacity()) {
      // Count extra item to convert to internal capacity.
      SetCapacityTo(new_capacity);
    }
  }

  // Backend for clear() but does not resize the internal buffer.
  void ClearRetainCapacity() {
    // This can't resize(0) because that requires a default constructor to
    // compile, which not all contained classes may implement.
    DestructRange(begin_, end_);
    begin_ = 0;
    end_ = 0;
    IncrementGeneration();
  }

  // Calls destructors for the given begin->end indices. The indices may wrap
  // around. The buffer is not resized, and the begin_ and end_ members are
  // not changed.
  void DestructRange(size_t begin, size_t end) {
    if (end == begin) {
      return;
    } else if (end > begin) {
      buffer_.DestructRange(&buffer_[begin], &buffer_[end]);
    } else {
      buffer_.DestructRange(&buffer_[begin], &buffer_[buffer_.capacity()]);
      buffer_.DestructRange(&buffer_[0], &buffer_[end]);
    }
  }

  // Makes room for |count| items starting at |*insert_begin|. Since iterators
  // are not stable across buffer resizes, |*insert_begin| will be updated to
  // point to the beginning of the newly opened position in the new array
  // (it's in/out), and |*insert_end| will be set to the end of the newly
  // opened position (it's out-only).
  void MakeRoomFor(size_t count, iterator* insert_begin, iterator* insert_end) {
    if (count == 0) {
      *insert_end = *insert_begin;
      return;
    }

    // The offset from the beginning will be stable across reallocations.
    size_t begin_offset = insert_begin->OffsetFromBegin();
    ExpandCapacityIfNecessary(count);

    insert_begin->index_ = (begin_ + begin_offset) % buffer_.capacity();
    *insert_end =
        iterator(this, (insert_begin->index_ + count) % buffer_.capacity());

    // Update the new end and prepare the iterators for copying.
    iterator src = end();
    end_ = (end_ + count) % buffer_.capacity();
    iterator dest = end();

    // Move the elements. This will always involve shifting logically to the
    // right, so move in a right-to-left order.
    while (true) {
      if (src == *insert_begin)
        break;
      --src;
      --dest;
      buffer_.MoveRange(&buffer_[src.index_], &buffer_[src.index_ + 1],
                        &buffer_[dest.index_]);
    }
  }

#if DCHECK_IS_ON()
  // Asserts the given index is dereferenceable. The index is an index into
  // the buffer, not an index used by operator[] or at() which will be offsets
  // from begin.
  void CheckValidIndex(size_t i) const {
    if (begin_ <= end_)
      DCHECK(i >= begin_ && i < end_);
    else
      DCHECK((i >= begin_ && i < buffer_.capacity()) || i < end_);
  }

  // Asserts the given index is either dereferenceable or points to end().
  void CheckValidIndexOrEnd(size_t i) const {
    if (i != end_)
      CheckValidIndex(i);
  }

  void ValidateIterator(const const_iterator& i) const {
    DCHECK(i.parent_deque_ == this);
    i.CheckUnstableUsage();
  }

  // See generation_ below.
  void IncrementGeneration() { generation_++; }
#else
  // No-op versions of these functions for release builds.
  void CheckValidIndex(size_t) const {}
  void CheckValidIndexOrEnd(size_t) const {}
  void ValidateIterator(const const_iterator& i) const {}
  void IncrementGeneration() {}
#endif

  // Danger, the buffer_.capacity() is the "internal capacity" which is
  // capacity() + 1 since there is an extra item to indicate the end. Otherwise
  // being completely empty and completely full are indistinguishable (begin ==
  // end). We could add a separate flag to avoid it, but that adds significant
  // extra complexity since every computation will have to check for it. Always
  // keeping one extra unused element in the buffer makes iterator computations
  // much simpler.
  //
  // Container internal code will want to use buffer_.capacity() for offset
  // computations rather than capacity().
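  //
  // For example (illustrative numbers only): a deque with capacity() == 3
  // allocates buffer_.capacity() == 4 slots. When it holds 3 elements, the
  // one unused slot keeps begin_ != end_, so "full" remains distinguishable
  // from "empty" (where begin_ == end_).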
  VectorBuffer buffer_;
  size_type begin_ = 0;
  size_type end_ = 0;

#if DCHECK_IS_ON()
  // Incremented every time a modification is made that could affect iterator
  // invalidations.
  uint64_t generation_ = 0;
#endif
};

// Implementations of base::Erase[If] (see base/stl_util.h).
template <class T, class Value>
void Erase(circular_deque<T>& container, const Value& value) {
  container.erase(std::remove(container.begin(), container.end(), value),
                  container.end());
}

template <class T, class Predicate>
void EraseIf(circular_deque<T>& container, Predicate pred) {
  container.erase(std::remove_if(container.begin(), container.end(), pred),
                  container.end());
}
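
// Example use of the helpers above (an illustrative sketch only):
//
//   base::circular_deque<int> d = {1, 2, 3, 2};
//   base::Erase(d, 2);                              // d is now {1, 3}.
//   base::EraseIf(d, [](int x) { return x > 1; });  // d is now {1}.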

}  // namespace base

#endif  // BASE_CONTAINERS_CIRCULAR_DEQUE_H_