comparison: osx/include/kj/arena.h @ 49:3ab5a40c4e3b

Add Capnp and KJ builds for OSX

author   | Chris Cannam <cannam@all-day-breakfast.com>
date     | Tue, 25 Oct 2016 14:48:23 +0100
parents  |
children | 0994c39f1e94
comparing 48:9530b331f8c1 with 49:3ab5a40c4e3b

// Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

#ifndef KJ_ARENA_H_
#define KJ_ARENA_H_

#if defined(__GNUC__) && !KJ_HEADER_WARNINGS
#pragma GCC system_header
#endif

#include "memory.h"
#include "array.h"
#include "string.h"

namespace kj {

class Arena {
  // A class which allows several objects to be allocated in contiguous chunks of memory, then
  // frees them all at once.
  //
  // Allocating from the same Arena in multiple threads concurrently is NOT safe, because making
  // it safe would require atomic operations that would slow down allocation even when
  // single-threaded. If you need to use arena allocation in a multithreaded context, consider
  // allocating thread-local arenas.
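  //
  // Basic usage (a minimal sketch; `MyObject` stands in for any user-defined type):
  //
  //     kj::Arena arena;
  //     MyObject& obj = arena.allocate<MyObject>(123);
  //     kj::ArrayPtr<int> numbers = arena.allocateArray<int>(16);
  //     // ... use obj and numbers ...
  //     // When `arena` is destroyed, all chunks are freed and any non-trivial destructors
  //     // (such as MyObject's) run in reverse order of allocation.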

public:
  explicit Arena(size_t chunkSizeHint = 1024);
  // Create an Arena. `chunkSizeHint` hints at where to start when allocating chunks, but is only
  // a hint -- the Arena will, for example, allocate progressively larger chunks as time goes on,
  // in order to reduce overall allocation overhead.

  explicit Arena(ArrayPtr<byte> scratch);
  // Allocates from the given scratch space first, only resorting to the heap when it runs out.
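  //
  // For example (a minimal sketch; the buffer size is arbitrary):
  //
  //     kj::byte scratch[1024];
  //     kj::Arena arena(kj::arrayPtr(scratch, sizeof(scratch)));
  //     // Allocations are served from `scratch` until it is exhausted, then from the heap.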

  KJ_DISALLOW_COPY(Arena);
  ~Arena() noexcept(false);

  template <typename T, typename... Params>
  T& allocate(Params&&... params);
  template <typename T>
  ArrayPtr<T> allocateArray(size_t size);
  // Allocate an object or array of type T. If T has a non-trivial destructor, that destructor
  // will be run during the Arena's destructor. Such destructors are run in opposite order of
  // allocation. Note that these methods must maintain a list of destructors to call, which has
  // overhead, but this overhead only applies if T has a non-trivial destructor.
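  //
  // For example, given an Arena `arena` (a minimal sketch; `Foo` is a placeholder type):
  //
  //     Foo& foo = arena.allocate<Foo>(1, 2);              // constructed in the arena
  //     kj::ArrayPtr<Foo> foos = arena.allocateArray<Foo>(8);
  //     // Neither needs to be freed explicitly; Foo's destructor (if non-trivial) runs when
  //     // the Arena itself is destroyed.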

  template <typename T, typename... Params>
  Own<T> allocateOwn(Params&&... params);
  template <typename T>
  Array<T> allocateOwnArray(size_t size);
  template <typename T>
  ArrayBuilder<T> allocateOwnArrayBuilder(size_t capacity);
  // Allocate an object or array of type T. Destructors are executed when the returned Own<T>
  // or Array<T> goes out-of-scope, which must happen before the Arena is destroyed. This variant
  // is useful when you need to control when the destructor is called. This variant also avoids
  // the need for the Arena itself to keep track of destructors to call later, which may make it
  // slightly more efficient.
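  //
  // For example (a minimal sketch; `Foo` is a placeholder type):
  //
  //     {
  //       kj::Own<Foo> foo = arena.allocateOwn<Foo>();
  //       kj::Array<int> nums = arena.allocateOwnArray<int>(8);
  //     }  // foo's destructor runs here -- before the Arena is destroyed -- and the Arena
  //        // never had to record it.
  //
  // allocateOwnArrayBuilder() works the same way but lets elements be added one at a time; see
  // allocateOwnArray()'s implementation below for how it is used.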

  template <typename T>
  inline T& copy(T&& value) { return allocate<Decay<T>>(kj::fwd<T>(value)); }
  // Allocate a copy of the given value in the arena. This is just a shortcut for calling the
  // type's copy (or move) constructor.

  StringPtr copyString(StringPtr content);
  // Make a copy of the given string inside the arena, and return a pointer to the copy.
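  //
  // For example (a minimal sketch, continuing with an Arena named `arena`):
  //
  //     kj::StringPtr greeting = arena.copyString("Hello, world!");
  //     int& answer = arena.copy(42);
  //     // Both live in arena-owned memory and remain valid until the Arena is destroyed.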

private:
  struct ChunkHeader {
    ChunkHeader* next;
    byte* pos;  // first unallocated byte in this chunk
    byte* end;  // end of this chunk
  };
  struct ObjectHeader {
    void (*destructor)(void*);
    ObjectHeader* next;
  };

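  // Note on the members below: `chunkList` heads a singly-linked list of the chunks allocated so
  // far, `currentChunk` is the chunk currently being allocated from, `nextChunkSize` is the size
  // to request for the next chunk (it grows over time, per the constructor comment above), and
  // `objectList` links the headers of allocations whose non-trivial destructors still need to be
  // run when the Arena is destroyed.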
  size_t nextChunkSize;
  ChunkHeader* chunkList = nullptr;
  ObjectHeader* objectList = nullptr;

  ChunkHeader* currentChunk = nullptr;

  void cleanup();
  // Run all destructors, leaving the above pointers null. If a destructor throws, the Arena is
  // left in a consistent state, such that if cleanup() is called again, it will pick up where
  // it left off.

  void* allocateBytes(size_t amount, uint alignment, bool hasDisposer);
  // Allocate the given number of bytes. `hasDisposer` must be true if `setDestructor()` may be
  // called on this pointer later.

  void* allocateBytesInternal(size_t amount, uint alignment);
  // Try to allocate the given number of bytes from the current chunk. Fails if and only if there
  // is no space left in the current chunk.

  void setDestructor(void* ptr, void (*destructor)(void*));
  // Schedule the given destructor to be executed when the Arena is destroyed. `ptr` must be a
  // pointer previously returned by an `allocateBytes()` call for which `hasDisposer` was true.

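  // The two helpers below are the destructor thunks registered via setDestructor(). For an array
  // of non-trivially-destructible T, allocateArray() (defined later in this header) stores the
  // count of successfully constructed elements in a size_t at the start of the allocation,
  // padded out to kj::max(alignof(T), sizeof(size_t)) bytes so the elements themselves remain
  // aligned; destroyArray() reads that count back and destroys exactly those elements.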
  template <typename T>
  static void destroyArray(void* pointer) {
    size_t elementCount = *reinterpret_cast<size_t*>(pointer);
    constexpr size_t prefixSize = kj::max(alignof(T), sizeof(size_t));
    DestructorOnlyArrayDisposer::instance.disposeImpl(
        reinterpret_cast<byte*>(pointer) + prefixSize,
        sizeof(T), elementCount, elementCount, &destroyObject<T>);
  }

  template <typename T>
  static void destroyObject(void* pointer) {
    dtor(*reinterpret_cast<T*>(pointer));
  }
};

// =======================================================================================
// Inline implementation details

template <typename T, typename... Params>
T& Arena::allocate(Params&&... params) {
  T& result = *reinterpret_cast<T*>(allocateBytes(
      sizeof(T), alignof(T), !__has_trivial_destructor(T)));
  if (!__has_trivial_constructor(T) || sizeof...(Params) > 0) {
    ctor(result, kj::fwd<Params>(params)...);
  }
  if (!__has_trivial_destructor(T)) {
    setDestructor(&result, &destroyObject<T>);
  }
  return result;
}

template <typename T>
ArrayPtr<T> Arena::allocateArray(size_t size) {
  if (__has_trivial_destructor(T)) {
    ArrayPtr<T> result =
        arrayPtr(reinterpret_cast<T*>(allocateBytes(
            sizeof(T) * size, alignof(T), false)), size);
    if (!__has_trivial_constructor(T)) {
      for (size_t i = 0; i < size; i++) {
        ctor(result[i]);
      }
    }
    return result;
  } else {
    // Allocate with a 64-bit prefix in which we store the array size.
    constexpr size_t prefixSize = kj::max(alignof(T), sizeof(size_t));
    void* base = allocateBytes(sizeof(T) * size + prefixSize, alignof(T), true);
    size_t& tag = *reinterpret_cast<size_t*>(base);
    ArrayPtr<T> result =
        arrayPtr(reinterpret_cast<T*>(reinterpret_cast<byte*>(base) + prefixSize), size);
    setDestructor(base, &destroyArray<T>);

    if (__has_trivial_constructor(T)) {
      tag = size;
    } else {
      // In case of constructor exceptions, we need the tag to end up storing the number of
      // objects that were successfully constructed, so that they'll be properly destroyed.
      tag = 0;
      for (size_t i = 0; i < size; i++) {
        ctor(result[i]);
        tag = i + 1;
      }
    }
    return result;
  }
}

template <typename T, typename... Params>
Own<T> Arena::allocateOwn(Params&&... params) {
  T& result = *reinterpret_cast<T*>(allocateBytes(sizeof(T), alignof(T), false));
  if (!__has_trivial_constructor(T) || sizeof...(Params) > 0) {
    ctor(result, kj::fwd<Params>(params)...);
  }
  return Own<T>(&result, DestructorOnlyDisposer<T>::instance);
}

template <typename T>
Array<T> Arena::allocateOwnArray(size_t size) {
  ArrayBuilder<T> result = allocateOwnArrayBuilder<T>(size);
  for (size_t i = 0; i < size; i++) {
    result.add();
  }
  return result.finish();
}

template <typename T>
ArrayBuilder<T> Arena::allocateOwnArrayBuilder(size_t capacity) {
  return ArrayBuilder<T>(
      reinterpret_cast<T*>(allocateBytes(sizeof(T) * capacity, alignof(T), false)),
      capacity, DestructorOnlyArrayDisposer::instance);
}

}  // namespace kj

#endif  // KJ_ARENA_H_