1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #ifndef V8_PROFILER_UNBOUND_QUEUE_INL_H_
6 #define V8_PROFILER_UNBOUND_QUEUE_INL_H_
7 
8 #include "src/profiler/unbound-queue.h"
9 
10 namespace v8 {
11 namespace internal {
12 
13 template<typename Record>
14 struct UnboundQueue<Record>::Node: public Malloced {
15   explicit Node(const Record& value)
16       : value(value), next(NULL) {
17   }
18 
19   Record value;
20   Node* next;
21 };
22 
23 
// Sets up the queue with a single sentinel (dummy) node: |first_| points at
// it, and both |divider_| and |last_| hold its address as an AtomicWord.
// Invariant established here: nodes in (first_ .. divider_] have been
// consumed and may be reclaimed; nodes in (divider_ .. last_] are pending.
template<typename Record>
UnboundQueue<Record>::UnboundQueue() {
  first_ = new Node(Record());
  divider_ = last_ = reinterpret_cast<base::AtomicWord>(first_);
}
29 
30 
31 template<typename Record>
32 UnboundQueue<Record>::~UnboundQueue() {
33   while (first_ != NULL) DeleteFirst();
34 }
35 
36 
37 template<typename Record>
38 void UnboundQueue<Record>::DeleteFirst() {
39   Node* tmp = first_;
40   first_ = tmp->next;
41   delete tmp;
42 }
43 
44 
// Consumer side: copies the oldest pending record into |*rec| and advances
// |divider_| past it. Returns false if the queue is empty. The design uses a
// single divider and no compare-and-swap, so it presumably assumes exactly
// one consumer thread — concurrent dequeuers would race on |divider_|.
template<typename Record>
bool UnboundQueue<Record>::Dequeue(Record* rec) {
  // Empty when the divider has caught up with the last published node. The
  // acquire-load pairs with the release-store of |last_| in Enqueue, making
  // the new node's contents visible before we read them below.
  if (divider_ == base::Acquire_Load(&last_)) return false;
  Node* next = reinterpret_cast<Node*>(divider_)->next;
  *rec = next->value;
  // Release-store so the producer's acquire-load of |divider_| in Enqueue
  // observes the copy above as complete before it reclaims the old node.
  base::Release_Store(&divider_, reinterpret_cast<base::AtomicWord>(next));
  return true;
}
53 
54 
// Producer side: appends a copy of |rec| and then lazily reclaims nodes the
// consumer has already passed. Only |last_| and |first_| are written here,
// so this presumably assumes a single producer thread.
template<typename Record>
void UnboundQueue<Record>::Enqueue(const Record& rec) {
  // Link the new node onto the current tail, then publish it with a
  // release-store of |last_| (pairs with the acquire-load in Dequeue/Peek),
  // ensuring the node is fully constructed before it becomes visible.
  Node*& next = reinterpret_cast<Node*>(last_)->next;
  next = new Node(rec);
  base::Release_Store(&last_, reinterpret_cast<base::AtomicWord>(next));

  // Garbage-collect: everything strictly before |divider_| has already been
  // consumed (the consumer's release-store of |divider_| guarantees its copy
  // finished), so those nodes can be deleted safely by the producer.
  while (first_ != reinterpret_cast<Node*>(base::Acquire_Load(&divider_))) {
    DeleteFirst();
  }
}
65 
66 
// Returns true when no unconsumed records are pending (divider has caught up
// with last). Uses unsynchronized (NoBarrier) loads, so the answer is only a
// racy snapshot — it may be stale by the time the caller acts on it.
template<typename Record>
bool UnboundQueue<Record>::IsEmpty() const {
  return base::NoBarrier_Load(&divider_) == base::NoBarrier_Load(&last_);
}
71 
72 
73 template<typename Record>
74 Record* UnboundQueue<Record>::Peek() const {
75   if (divider_ == base::Acquire_Load(&last_)) return NULL;
76   Node* next = reinterpret_cast<Node*>(divider_)->next;
77   return &next->value;
78 }
79 
80 }  // namespace internal
81 }  // namespace v8
82 
83 #endif  // V8_PROFILER_UNBOUND_QUEUE_INL_H_
84