1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_PROFILER_UNBOUND_QUEUE_INL_H_
6 #define V8_PROFILER_UNBOUND_QUEUE_INL_H_
7 
8 #include "src/profiler/unbound-queue.h"
9 
10 namespace v8 {
11 namespace internal {
12 
template<typename Record>
struct UnboundQueue<Record>::Node: public Malloced {
  // Singly-linked-list node holding one queued record. Nodes are allocated
  // by the producer (Enqueue) and freed by whichever side runs DeleteFirst.
  explicit Node(const Record& value) : value(value), next(nullptr) {}

  Record value;
  Node* next;  // Next (newer) node, or nullptr if this is the tail.
};
20 
21 
22 template<typename Record>
23 UnboundQueue<Record>::UnboundQueue() {
24   first_ = new Node(Record());
25   divider_ = last_ = reinterpret_cast<base::AtomicWord>(first_);
26 }
27 
28 
29 template<typename Record>
30 UnboundQueue<Record>::~UnboundQueue() {
31   while (first_ != nullptr) DeleteFirst();
32 }
33 
34 
35 template<typename Record>
36 void UnboundQueue<Record>::DeleteFirst() {
37   Node* tmp = first_;
38   first_ = tmp->next;
39   delete tmp;
40 }
41 
42 
template<typename Record>
bool UnboundQueue<Record>::Dequeue(Record* rec) {
  // Consumer side: copy the next record into *rec and advance divider_.
  // Returns false if the queue is empty. Only ever called from the single
  // consumer thread; divider_ is written only here (and in the ctor).
  //
  // The Acquire_Load pairs with Enqueue's Release_Store of last_, making the
  // newly linked node (and its value) visible before we dereference it.
  if (divider_ == base::Acquire_Load(&last_)) return false;
  Node* next = reinterpret_cast<Node*>(divider_)->next;
  // Copy the value BEFORE publishing the new divider_: once divider_ moves,
  // Enqueue is free to delete the nodes behind it.
  *rec = next->value;
  base::Release_Store(&divider_, reinterpret_cast<base::AtomicWord>(next));
  return true;
}
51 
52 
template<typename Record>
void UnboundQueue<Record>::Enqueue(const Record& rec) {
  // Producer side: append a new node after last_, then publish it with a
  // Release_Store so the consumer's Acquire_Load in Dequeue/Peek observes a
  // fully constructed node. Only ever called from the single producer thread.
  Node*& next = reinterpret_cast<Node*>(last_)->next;
  next = new Node(rec);
  base::Release_Store(&last_, reinterpret_cast<base::AtomicWord>(next));

  // Lazily reclaim nodes the consumer has already moved past. The
  // Acquire_Load pairs with Dequeue's Release_Store of divider_, so the
  // consumer is guaranteed to be done with every node we delete here.
  while (first_ != reinterpret_cast<Node*>(base::Acquire_Load(&divider_))) {
    DeleteFirst();
  }
}
63 
64 
65 template<typename Record>
66 bool UnboundQueue<Record>::IsEmpty() const {
67   return base::Relaxed_Load(&divider_) == base::Relaxed_Load(&last_);
68 }
69 
70 
71 template<typename Record>
72 Record* UnboundQueue<Record>::Peek() const {
73   if (divider_ == base::Acquire_Load(&last_)) return nullptr;
74   Node* next = reinterpret_cast<Node*>(divider_)->next;
75   return &next->value;
76 }
77 
78 }  // namespace internal
79 }  // namespace v8
80 
81 #endif  // V8_PROFILER_UNBOUND_QUEUE_INL_H_
82