/*
 *
 * Copyright 2017 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

// \file Arena-based allocator.
// Allows very fast allocation of memory, but that memory cannot be freed
// until the arena as a whole is freed.
// Tracks the total memory allocated against it, so that future arenas can
// pre-allocate the right amount of memory.
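//
// Example usage (a minimal sketch; Foo stands in for any caller-defined
// type):
//
//   Arena* arena = Arena::Create(1024);   // 1 KiB in zone 0
//   void* buf = arena->Alloc(64);         // bump-allocated from zone 0
//   Foo* foo = arena->New<Foo>();         // constructed in arena storage
//   ...
//   size_t total = arena->Destroy();      // frees every zone at once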

#ifndef GRPC_CORE_LIB_GPRPP_ARENA_H
#define GRPC_CORE_LIB_GPRPP_ARENA_H

#include <grpc/support/port_platform.h>

#include <stddef.h>

#include <new>
#include <utility>

#include <grpc/support/alloc.h>
#include <grpc/support/sync.h>

#include "src/core/lib/gpr/alloc.h"
#include "src/core/lib/gpr/spinlock.h"
#include "src/core/lib/gprpp/atomic.h"

namespace grpc_core {

class Arena {
 public:
  // Create an arena, with \a initial_size bytes in the first allocated buffer.
  static Arena* Create(size_t initial_size);

  // Create an arena, with \a initial_size bytes in the first allocated buffer,
  // and return both a pointer to the new arena and a void* to the first
  // allocation of \a alloc_size bytes.
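  //
  // Example (a sketch; MyState is a hypothetical caller-defined type whose
  // alignment fits the arena's 16-byte guarantee):
  //
  //   auto pair = Arena::CreateWithAlloc(1024, sizeof(MyState));
  //   Arena* arena = pair.first;
  //   MyState* state = new (pair.second) MyState();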
  static std::pair<Arena*, void*> CreateWithAlloc(size_t initial_size,
                                                  size_t alloc_size);

  // Destroy an arena, returning the total number of bytes allocated.
  size_t Destroy();
  // Allocate \a size bytes from the arena.
  void* Alloc(size_t size) {
    static constexpr size_t base_size =
        GPR_ROUND_UP_TO_ALIGNMENT_SIZE(sizeof(Arena));
    size = GPR_ROUND_UP_TO_ALIGNMENT_SIZE(size);
    // Reserve the next \a size bytes with a single relaxed fetch-add; the
    // fast path takes no lock.
    size_t begin = total_used_.FetchAdd(size, MemoryOrder::RELAXED);
    if (begin + size <= initial_zone_size_) {
      // Fast path: the reservation fits in zone 0, which lives immediately
      // after the Arena object itself.
      return reinterpret_cast<char*>(this) + base_size + begin;
    } else {
      // Slow path: zone 0 is exhausted, so serve this allocation from an
      // additional zone.
      return AllocZone(size);
    }
  }

  // TODO(roth): We currently assume that all callers need alignment of 16
  // bytes, which may be wrong in some cases. When we have time, we should
  // change this to instead use the alignment of the type being allocated by
  // this method.
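  //
  // Example (a sketch; Foo is a hypothetical caller-defined type). Note that
  // nothing ever runs ~Foo(): Destroy() releases the arena's memory without
  // invoking destructors.
  //
  //   Foo* foo = arena->New<Foo>(1, "two");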
  template <typename T, typename... Args>
  T* New(Args&&... args) {
    T* t = static_cast<T*>(Alloc(sizeof(T)));
    new (t) T(std::forward<Args>(args)...);
    return t;
  }

 private:
  // Header prepended to each additional zone; links to the previously
  // allocated zone (see last_zone_ below).
  struct Zone {
    Zone* prev;
  };

  // Initialize an arena.
  // Parameters:
  //   initial_size: The initial size of the whole arena in bytes. These bytes
  //   are contained within 'zone 0'. If the arena user ends up requiring more
  //   memory than the arena contains in zone 0, subsequent zones are allocated
  //   on demand and maintained in a tail-linked list.
  //
  //   initial_alloc: Optionally, construct the arena as though a call to
  //   Alloc() had already been made for initial_alloc bytes. This provides a
  //   quick optimization (avoiding an atomic fetch-add) for the common case
  //   where we wish to create an arena and then perform an immediate
  //   allocation.
  explicit Arena(size_t initial_size, size_t initial_alloc = 0)
      : total_used_(GPR_ROUND_UP_TO_ALIGNMENT_SIZE(initial_alloc)),
        initial_zone_size_(initial_size) {}

  ~Arena();

  // Slow path for Alloc(): takes arena_growth_spinlock_, allocates an
  // additional zone big enough for \a size bytes, links it into the zone
  // list, and returns a pointer into the new zone.
  void* AllocZone(size_t size);

  // Keep track of the total used size. We use this in our call sizing
  // hysteresis.
  Atomic<size_t> total_used_;
  const size_t initial_zone_size_;
  gpr_spinlock arena_growth_spinlock_ = GPR_SPINLOCK_STATIC_INITIALIZER;
  // If the initial arena allocation wasn't enough, we allocate additional
  // zones in a reverse linked list. Each additional zone consists of (1) a
  // pointer to the zone added before this zone (null if this is the first
  // additional zone) and (2) the allocated memory. The arena itself maintains
  // a pointer to the last zone; the zone list is reverse-walked during arena
  // destruction only.
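  //
  // Zone memory layout (a sketch, assuming the Zone header is padded with
  // GPR_ROUND_UP_TO_ALIGNMENT_SIZE just as the Arena header is in Alloc()):
  //
  //   [ Zone | padding to 16 bytes | memory handed out by AllocZone()... ]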
  Zone* last_zone_ = nullptr;
};

}  // namespace grpc_core

#endif /* GRPC_CORE_LIB_GPRPP_ARENA_H */