// Copyright (c) 2000, Google Inc.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// ---
//
// Reorganized by Craig Silverstein
// "Handles" by Ilan Horn
//
// Sometimes it is necessary to allocate a large number of small
// objects. Doing this the usual way (malloc, new) is slow,
// especially for multithreaded programs. A BaseArena provides a
// mark/release method of memory management: it asks for a large chunk
// from the operating system and doles it out bit by bit as required.
// Then you free all the memory at once by calling BaseArena::Reset().
//
// Use SafeArena for multi-threaded programs where multiple threads
// could access the same arena at once. Use UnsafeArena otherwise.
// Usually you'll want UnsafeArena.
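//
// A minimal sketch of the mark/release lifecycle (the block size of 1000
// bytes is just an example value):
//    UnsafeArena arena(1000);        // grab an initial block
//    char* buf = arena.Alloc(100);   // dole out 100 bytes of it
//    ...                             // more Alloc()/Strdup()/Memdup() calls
//    arena.Reset();                  // release everything at once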
//
// There are four ways to use the arena. arena.h and arena.cc are
// sufficient for the MALLOC and STRINGS uses. For NEW and STL you'll
// also need to include arena-inl.h in the appropriate .cc file.
// However, we do *declare* (but not define) the template types here.
//
// LIKE MALLOC: --Uses UnsafeArena (or SafeArena)--
//    This is the simplest way. Just create an arena, and whenever you
//    need a block of memory to put something in, call BaseArena::Alloc(). eg
//        s = arena.Alloc(100);
//        snprintf(s, 100, "%s:%d", host, port);
//        arena.Shrink(strlen(s)+1);      // optional; see below for use
//
//    You'll probably use the convenience routines more often:
//        s = arena.Strdup(host);         // a copy of host lives in the arena
//        s = arena.Strndup(host, 100);   // we guarantee to NUL-terminate!
//        s = arena.Memdup(protobuf, sizeof(protobuf));
//
//    If you go the Alloc() route, you'll probably allocate too much space.
//    You can reclaim the extra space by calling Shrink() before the next
//    Alloc() (or Strdup(), or whatever), with the #bytes you actually used.
//    If you use this method, memory management is easy: just call Alloc()
//    and friends a lot, and call Reset() when you're done with the data.
//
// FOR STRINGS: --Uses UnsafeArena (or SafeArena)--
//    This is a special case of STL (below), but is simpler. Use an
//    astring, which acts like a string but allocates from the passed-in
//    arena:
//        astring s(arena);             // or "sastring" to use a SafeArena
//        s.assign(host);
//        astring s2(host, hostlen, arena);
//
// WITH NEW: --Uses BaseArena, Gladiator (or ArenaOnlyGladiator)--
//    Use this to allocate a C++ class object (or any other object you
//    have to get via new/delete rather than malloc/free).
//    There are several things you have to do in this case:
//       1) Your class (the one you new) must inherit from Gladiator.
//       2) To actually allocate this class on the arena, use
//             myclass = new (AllocateInArena, arena) MyClass(constructor, args)
//
//    Note that MyClass doesn't need to have the arena passed in.
//    But if it, in turn, wants to call "new" on some of its member
//    variables, and you want those member vars to be on the arena
//    too, you better pass in an arena so it can call new(0,arena).
//
//    If you can guarantee that everyone who ever calls new on
//    MyClass uses the new(0,arena) form (ie nobody ever just says
//    new), you can have MyClass subclass from ArenaOnlyGladiator
//    rather than from Gladiator. ArenaOnlyGladiator is a bit more
//    efficient (faster and smaller), but is otherwise identical.
//
//    If you allocate myclass using new(0,arena), and MyClass only
//    does memory management in the destructor, it's not necessary
//    to even call "delete myclass;", you can just call arena.Reset();
//    If the destructor does something else (closes a file, logs
//    a message, whatever), you'll have to call destructor and Reset()
//    both: "delete myclass; arena.Reset();"
//
//    Note that you can not allocate an array of classes this way:
//        noway = new (AllocateInArena, arena) MyClass[5];   // not supported!
//    It's not difficult to program, we just haven't done it. Arrays
//    are typically big and so there's little point to arena-izing them.
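//
//    For instance, a minimal sketch ("Query" is just a made-up example
//    class, not part of this library):
//        class Query : public Gladiator {
//         public:
//          explicit Query(const char* text) : text_(text) { }
//          // trivial destructor, so arena.Reset() alone is enough to clean up
//         private:
//          const char* text_;
//        };
//        UnsafeArena arena(4096);
//        Query* q = new (AllocateInArena, arena) Query("some text");
//        ...                // use *q
//        arena.Reset();     // frees q's memory; no "delete q;" needed here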
//
// WITH NEW: --Uses UnsafeArena--
//    There are cases where you can't inherit the class from Gladiator,
//    or inheriting would be too expensive. Examples of this include
//    plain-old-data (allocated using new) and third-party classes (such
//    as STL containers). arena-inl.h provides a global operator new
//    that can be used as follows:
//
//    #include "base/arena-inl.h"
//
//    UnsafeArena arena(1000);
//    Foo* foo = new (AllocateInArena, &arena) Foo;
//    Foo* foo_array = new (AllocateInArena, &arena) Foo[10];
//
// IN STL: --Uses BaseArena, ArenaAllocator--
//    All STL containers (vector, hash_map, etc) take an allocator.
//    You can use the arena as an allocator. Then whenever the vector
//    (or whatever) wants to allocate memory, it will take it from the
//    arena. To use, you just indicate in the type that you want to use the
//    arena, and then actually give a pointer to the arena as the last
//    constructor arg:
//        vector<int, ArenaAllocator<int, UnsafeArena> > v(&arena);
//        v.push_back(3);
//
// WARNING: Careless use of STL within an arena-allocated object can
//          result in memory leaks if you rely on arena.Reset() to free
//          memory and do not call the object destructor. This is actually
//          a subclass of a more general hazard: If an arena-allocated
//          object creates (and owns) objects that are not also
//          arena-allocated, then the creating object must have a
//          destructor that deletes them, or they will not be deleted.
//          However, since the outer object is arena allocated, it's easy to
//          forget to call delete on it, and needing to do so may seem to
//          negate much of the benefit of arena allocation. A specific
//          example is use of vector<string> in an arena-allocated object,
//          since type string is not atomic and is always allocated by the
//          default runtime allocator. The arena definition provided here
//          allows for much flexibility, but you ought to carefully consider
//          before defining arena-allocated objects which in turn create
//          non-arena allocated objects.
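//
// A minimal sketch of that hazard ("Row" is a made-up example class, and
// "arena" is an UnsafeArena as in the examples above):
//        class Row : public Gladiator {
//         public:
//          vector<string> cols;    // strings use the default heap allocator
//        };
//        Row* r = new (AllocateInArena, arena) Row;
//        r->cols.push_back("data");   // heap memory owned by *r
//        arena.Reset();    // leaks the vector's memory unless you first
//                          // call "delete r;" to run ~Row()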
//
// WITH HANDLES:
//    The various arena classes can supply compact handles to data kept
//    in the arena. These handles consume only 4 bytes each, and are thus
//    more efficient than pointers - this may be interesting in cases
//    where a very large number of references to memory in the arena need
//    to be kept.
//    Note that handles are limited in the amount of data that can be referenced
//    in the arena, typically to 4GB*the number given to set_handle_alignment()
//    (which defaults to 1). The number of allocations that can have handles
//    is, of course, smaller than 4G (that's what's representable by 32 bits).
//    It does depend on their sizes, however. In a worst-case scenario each
//    allocation consumes a page of its own, and we will run out of handles
//    after approximately (4G/block_size)*handle_alignment allocations.
//    When we run out of handles or allocate data over the amount of memory
//    that handles can reference, an invalid handle will be returned (but
//    the requested memory will still be allocated in the arena).
//    Handle memory use is most efficient when the arena block size is a power
//    of two. When this is not the case, we can run out of handles even when
//    as much as half of the addressable space (as described above) is unused.
//    At worst handles can reference at least 2GB*handle_alignment.
//    Example use:
//        UnsafeArena arena(16384);
//        arena.set_handle_alignment(4);
//        // Assume you want to keep the string s in the arena.
//        Handle h = arena.MemdupWithHandle(s.c_str(), s.length());
//        // Later, to get the memory from the handle, use:
//        void* p = arena.HandleToPointer(h);
//        // Note that there's no way to retrieve the size from the handle.
//        // It probably makes sense to encode the size into the buffer saved,
//        // unless the size is known/fixed.
// Internal machinery of handles:
//    The handle consists of the block index in the arena and the offset
//    inside the block, encoded into a single uint32 value. Note that the
//    rightmost alignment bits (controlled by set_handle_alignment()) are
//    shaved off the saved offset in the Handle, to give some extra capacity :)
//    set_handle_alignment() can only be called when the arena is empty,
//    as changing it invalidates any handles that are still in flight.
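//    As a rough illustration (the exact bit layout is an implementation
//    detail of arena.cc, so treat this only as a sketch): with a block size
//    of 16384 (2^14) and a handle alignment of 4 (2 alignment bits), an
//    aligned offset needs 14-2 = 12 bits, so a handle could look like
//        handle = (block_index << 12) | (offset_in_block >> 2)
//    leaving 32-12 = 20 bits of block index, i.e. about 1M blocks of 16KB
//    each, which matches the 4GB*handle_alignment = 16GB limit above.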
//
//
// PUTTING IT ALL TOGETHER
//    Here's a program that uses all of the above. Note almost all the
//    examples are the various ways to use "new" and STL. Using the
//    malloc-like features and the string type are much easier!
//
//    class A : public Gladiator {
//     public:
//      int i;
//      vector<int> v1;
//      vector<int, ArenaAllocator<int, UnsafeArena> >* v3;
//      vector<int, ArenaAllocator<int, UnsafeArena> >* v4;
//      vector<int>* v5;
//      vector<string> vs;
//      vector<astring> va;
//      char *s;
//      A() : v1(), v3(NULL), v4(NULL), vs(), va(), s(NULL) {
//        // v1 is allocated on the arena whenever A is. Its ints never are.
//        v5 = new vector<int>;
//        // v5 is not allocated on the arena, and neither are any of its ints.
//      }
//      ~A() {
//        delete v5;   // needed since v5 wasn't allocated on the arena
//        printf("I'm done!\n");
//      }
//    };
//
//    class B : public A {   // we inherit from Gladiator, but indirectly
//     public:
//      UnsafeArena* arena_;
//      vector<int, ArenaAllocator<int, UnsafeArena> > v2;
//      vector<A> va1;
//      vector<A, ArenaAllocator<A, UnsafeArena> > va2;
//      vector<A>* pva;
//      vector<A, ArenaAllocator<A, UnsafeArena> >* pva2;
//      astring a;
//
//      B(UnsafeArena* arena)
//          : arena_(arena), v2(arena_), va1(), va2(arena_), a("initval", arena_) {
//        v3 = new vector<int, ArenaAllocator<int, UnsafeArena> >(arena_);
//        v4 = new (AllocateInArena, arena_)
//                 vector<int, ArenaAllocator<int, UnsafeArena> >(arena_);
//        v5 = new (AllocateInArena, arena_) vector<int>;
//        // v2 is allocated on the arena whenever B is. Its ints always are.
//        // v3 is not allocated on the arena, but the ints you give it are.
//        // v4 is allocated on the arena, and so are the ints you give it.
//        // v5 is allocated on the arena, but the ints you give it are not.
//        // va1 is allocated on the arena whenever B is. No A ever is.
//        // va2 is allocated on the arena whenever B is. Its A's always are.
//        pva = new (AllocateInArena, arena_) vector<A>;
//        pva2 = new (AllocateInArena, arena_)
//                   vector<A, ArenaAllocator<A, UnsafeArena> >(arena_);
//        // pva is allocated on the arena, but its A's are not.
//        // pva2 is allocated on the arena, and so are its A's.
//        // a's value "initval" is stored on the arena. If we reassign a,
//        // the new value will be stored on the arena too.
//      }
//      ~B() {
//        delete v3;   // necessary to free v3's memory, though not its ints'
//        // no need to delete v4: arena_.Reset() will do just as well
//        delete v5;   // necessary to free v5's ints' memory, though not v5 itself
//        v5 = NULL;   // so ~A() doesn't delete it a second time
//        delete pva;  // necessary to make sure you reclaim space used by A's
//        delete pva2; // safe to call this; needed if you want to see the printfs
//        // pva2->clear() -- not necessary, arena_.Reset() will do just as well
//      }
//    };
//
//    int main() {
//      UnsafeArena arena(1000);
//      A a1;                                   // a1 is not on the arena
//      a1.vs.push_back(string("hello"));       // hello is not copied onto the arena
//      a1.va.push_back(astring("hello", &arena));  // hello is on the arena,
//                                                  // astring container isn't
//      a1.s = arena.Strdup("hello");           // hello is on the arena
//
//      A* a2 = new (AllocateInArena, arena) A; // a2 is on the arena
//      a2->vs.push_back(string("hello"));      // hello is *still* not on the arena
//      a2->s = arena.Strdup("world");          // world is on the arena. a1.s is ok
//
//      B b1(&arena);                           // B is not allocated on the arena
//      b1.a.assign("hello");                   // hello is on the arena
//      b1.pva2->push_back(a1);                 // our copy of a1 will be stored on
//                                              // the arena, though a1 itself wasn't
//      arena.Reset();                          // all done with our memory!
//    }

#ifndef BASE_ARENA_H_
#define BASE_ARENA_H_

#include <config.h>
#include "base/mutex.h"   // must go first to get _XOPEN_SOURCE
#include <assert.h>
#include <string.h>
#include <vector>
#include "base/thread_annotations.h"
#include "base/macros.h"  // for uint32
#include "base/util.h"    // for CHECK, etc

namespace ctemplate {

// Annoying stuff for windows -- make sure clients (in this case
// unittests) can import the class definitions and variables.
#ifndef CTEMPLATE_DLL_DECL
# ifdef _MSC_VER
#   define CTEMPLATE_DLL_DECL  __declspec(dllimport)
# else
#   define CTEMPLATE_DLL_DECL  /* should be the empty string for non-windows */
# endif
#endif

// This class is "thread-compatible": different threads can access the
// arena at the same time without locking, as long as they use only
// const methods.
class CTEMPLATE_DLL_DECL BaseArena {
 protected:   // You can't make an arena directly; only a subclass of one
  BaseArena(char* first_block, const size_t block_size, bool align_to_page);
 public:
  virtual ~BaseArena();

  virtual void Reset();

  // A handle to a pointer in an arena. An opaque type, with default
  // copy and assignment semantics.
  class Handle {
   public:
    static const uint32 kInvalidValue = 0xFFFFFFFF;   // uint32-max

    Handle() : handle_(kInvalidValue) { }
    // Default copy constructors are fine here.
    bool operator==(const Handle& h) const { return handle_ == h.handle_; }
    bool operator!=(const Handle& h) const { return handle_ != h.handle_; }

    uint32 hash() const { return handle_; }
    bool valid() const { return handle_ != kInvalidValue; }

   private:
    // Arena needs to be able to access the internal data.
    friend class BaseArena;

    explicit Handle(uint32 handle) : handle_(handle) { }

    uint32 handle_;
  };

  // they're "slow" only 'cause they're virtual (subclasses define "fast" ones)
  virtual char* SlowAlloc(size_t size) = 0;
  virtual void SlowFree(void* memory, size_t size) = 0;
  virtual char* SlowRealloc(char* memory, size_t old_size, size_t new_size) = 0;
  virtual char* SlowAllocWithHandle(const size_t size, Handle* handle) = 0;

  // Set the alignment to be used when Handles are requested. This can only
  // be set for an arena that is empty - it cannot be changed on the fly.
  // The alignment must be a power of 2 that the block size is divisible by.
  // The default alignment is 1.
  // Trying to set an alignment that does not meet the above constraints will
  // cause a CHECK-failure.
  void set_handle_alignment(int align);

  // Retrieve the memory pointer that the supplied handle refers to.
  // Calling this with an invalid handle will CHECK-fail.
  void* HandleToPointer(const Handle& h) const;


  class Status {
   private:
    friend class BaseArena;
    size_t bytes_allocated_;
   public:
    Status() : bytes_allocated_(0) { }
    size_t bytes_allocated() const {
      return bytes_allocated_;
    }
  };

  // Accessors and stats counters
  // This accessor isn't so useful here, but is included so we can be
  // type-compatible with ArenaAllocator (in arena-inl.h). That is,
  // we define arena() because ArenaAllocator does, and that way you
  // can template on either of these and know it's safe to call arena().
  virtual BaseArena* arena() { return this; }
  size_t block_size() const { return block_size_; }
  int block_count() const;
  bool is_empty() const {
    // must check block count in case we allocated a block larger than blksize
    return freestart_ == freestart_when_empty_ && 1 == block_count();
  }

  // This should be the worst-case alignment for any type. This is
  // good for IA-32, SPARC version 7 (the last one I know), and
  // supposedly Alpha. i386 would be more time-efficient with a
  // default alignment of 8, but ::operator new() uses alignment of 4,
  // and an assertion will fail below after the call to MakeNewBlock()
  // if you try to use a larger alignment.
#ifdef __i386__
  static const int kDefaultAlignment = 4;
#else
  static const int kDefaultAlignment = 8;
#endif

 protected:
  void MakeNewBlock();
  void* GetMemoryFallback(const size_t size, const int align);
  void* GetMemory(const size_t size, const int align) {
    assert(remaining_ <= block_size_);                    // an invariant
    if ( size > 0 && size < remaining_ && align == 1 ) {  // common case
      last_alloc_ = freestart_;
      freestart_ += size;
      remaining_ -= size;
      return reinterpret_cast<void*>(last_alloc_);
    }
    return GetMemoryFallback(size, align);
  }

  // This doesn't actually free any memory except for the last piece allocated
  void ReturnMemory(void* memory, const size_t size) {
    if ( memory == last_alloc_ && size == freestart_ - last_alloc_ ) {
      remaining_ += size;
      freestart_ = last_alloc_;
    }
  }

  // This is used by Realloc() -- usually we Realloc just by copying to a
  // bigger space, but for the last alloc we can realloc by growing the region.
  bool AdjustLastAlloc(void* last_alloc, const size_t newsize);

  // Since using different alignments for different handles would make
  // the handles incompatible (e.g., we could end up with the same handle
  // value referencing two different allocations), the alignment is not passed
  // as an argument to GetMemoryWithHandle, and handle_alignment_ is used
  // automatically for all GetMemoryWithHandle calls.
  void* GetMemoryWithHandle(const size_t size, Handle* handle);

  Status status_;
  size_t remaining_;

 private:
  struct AllocatedBlock {
    char *mem;
    size_t size;
  };

  // The returned AllocatedBlock* is valid until the next call to AllocNewBlock
  // or Reset (i.e. anything that might affect overflow_blocks_).
  AllocatedBlock *AllocNewBlock(const size_t block_size);

  const AllocatedBlock *IndexToBlock(int index) const;

  const int first_block_we_own_;  // 1 if they pass in 1st block, 0 else
  const size_t block_size_;
  char* freestart_;             // beginning of the free space in most recent block
  char* freestart_when_empty_;  // beginning of the free space when we're empty
  char* last_alloc_;            // used to make sure ReturnMemory() is safe
  // STL vector isn't as efficient as it could be, so we use an array at first
  int blocks_alloced_;          // how many of the first_blocks_ have been alloced
  AllocatedBlock first_blocks_[16];  // the length of this array is arbitrary
  // if the first_blocks_ aren't enough, expand into overflow_blocks_.
  std::vector<AllocatedBlock>* overflow_blocks_;
  const bool page_aligned_;     // when true, all blocks need to be page aligned
  int handle_alignment_;        // Alignment to be used when Handles are requested.
  int handle_alignment_bits_;   // log2(handle_alignment_).
  // The number of bits required to keep block_size_ (ceil(log2(block_size_))).
  size_t block_size_bits_;

  void FreeBlocks();            // Frees all except first block

  // This subclass needs to alter permissions for all allocated blocks.
  friend class ProtectableUnsafeArena;

  DISALLOW_COPY_AND_ASSIGN(BaseArena);
};

class CTEMPLATE_DLL_DECL UnsafeArena : public BaseArena {
 public:
  // Allocates a thread-compatible arena with the specified block size.
  explicit UnsafeArena(const size_t block_size)
    : BaseArena(NULL, block_size, false) { }
  UnsafeArena(const size_t block_size, bool align)
    : BaseArena(NULL, block_size, align) { }

  // Allocates a thread-compatible arena with the specified block
  // size. "first_block" must have size "block_size". Memory is
  // allocated from "first_block" until it is exhausted; after that
  // memory is allocated by allocating new blocks from the heap.
  UnsafeArena(char* first_block, const size_t block_size)
    : BaseArena(first_block, block_size, false) { }
  UnsafeArena(char* first_block, const size_t block_size, bool align)
    : BaseArena(first_block, block_size, align) { }

  char* Alloc(const size_t size) {
    return reinterpret_cast<char*>(GetMemory(size, 1));
  }
  void* AllocAligned(const size_t size, const int align) {
    return GetMemory(size, align);
  }
  char* Calloc(const size_t size) {
    void* return_value = Alloc(size);
    memset(return_value, 0, size);
    return reinterpret_cast<char*>(return_value);
  }
  void* CallocAligned(const size_t size, const int align) {
    void* return_value = AllocAligned(size, align);
    memset(return_value, 0, size);
    return return_value;
  }
  // Free does nothing except for the last piece allocated.
  void Free(void* memory, size_t size) {
    ReturnMemory(memory, size);
  }
  typedef BaseArena::Handle Handle;
  char* AllocWithHandle(const size_t size, Handle* handle) {
    return reinterpret_cast<char*>(GetMemoryWithHandle(size, handle));
  }
  virtual char* SlowAlloc(size_t size) {   // "slow" 'cause it's virtual
    return Alloc(size);
  }
  virtual void SlowFree(void* memory, size_t size) {  // "slow" 'cause it's virt
    Free(memory, size);
  }
  virtual char* SlowRealloc(char* memory, size_t old_size, size_t new_size) {
    return Realloc(memory, old_size, new_size);
  }
  virtual char* SlowAllocWithHandle(const size_t size, Handle* handle) {
    return AllocWithHandle(size, handle);
  }

  char* Memdup(const char* s, size_t bytes) {
    char* newstr = Alloc(bytes);
    memcpy(newstr, s, bytes);
    return newstr;
  }
  char* MemdupPlusNUL(const char* s, size_t bytes) {  // like "string(s, len)"
    char* newstr = Alloc(bytes+1);
    memcpy(newstr, s, bytes);
    newstr[bytes] = '\0';
    return newstr;
  }
  Handle MemdupWithHandle(const char* s, size_t bytes) {
    Handle handle;
    char* newstr = AllocWithHandle(bytes, &handle);
    memcpy(newstr, s, bytes);
    return handle;
  }
  char* Strdup(const char* s) {
    return Memdup(s, strlen(s) + 1);
  }
  // Unlike libc's strncpy, I always NUL-terminate. libc's semantics are dumb.
  // This will allocate at most n+1 bytes (+1 is for the NULL terminator).
  char* Strndup(const char* s, size_t n) {
    // Use memchr so we don't walk past n.
    // We can't use the one in //strings since this is the base library,
    // so we have to reinterpret_cast from the libc void *.
    const char* eos = reinterpret_cast<const char*>(memchr(s, '\0', n));
    // if no null terminator found, use full n
    const size_t bytes = (eos == NULL) ? n + 1 : eos - s + 1;
    char* ret = Memdup(s, bytes);
    ret[bytes-1] = '\0';  // make sure the string is NUL-terminated
    return ret;
  }

  // You can realloc a previously-allocated string either bigger or smaller.
  // We can be more efficient if you realloc a string right after you allocate
  // it (eg allocate way-too-much space, fill it, realloc to just-big-enough)
  char* Realloc(char* s, size_t oldsize, size_t newsize);
  // If you know the new size is smaller (or equal), you don't need to know
  // oldsize. We don't check that newsize is smaller, so you'd better be sure!
  char* Shrink(char* s, size_t newsize) {
    AdjustLastAlloc(s, newsize);   // reclaim space if we can
    return s;                      // never need to move if we go smaller
  }
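  // A short usage sketch for the pair above ("host" stands for some
  // NUL-terminated string available in the caller):
  //    char* s = arena.Alloc(100);            // over-allocate
  //    snprintf(s, 100, "copy of %s", host);
  //    s = arena.Shrink(s, strlen(s) + 1);    // give back the unused tail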

  // We make a copy so you can keep track of status at a given point in time
  Status status() const { return status_; }

  // Number of bytes remaining before the arena has to allocate another block.
  size_t bytes_until_next_allocation() const { return remaining_; }

 private:
  DISALLOW_COPY_AND_ASSIGN(UnsafeArena);
};
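
// A sketch of the stats accessors above ("arena" is an UnsafeArena
// constructed elsewhere):
//    size_t used_so_far = arena.status().bytes_allocated();
//    size_t room_left = arena.bytes_until_next_allocation();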


// we inherit from BaseArena instead of UnsafeArena so that we don't need
// virtual methods for allocation/deallocation. This means, however,
// I have to copy the definitions of strdup, strndup, etc. :-(

class CTEMPLATE_DLL_DECL SafeArena : public BaseArena {
 public:
  // Allocates a thread-safe arena with the specified block size.
  explicit SafeArena(const size_t block_size)
    : BaseArena(NULL, block_size, false) { }

  // Allocates a thread-safe arena with the specified block size.
  // "first_block" must have size "block_size". Memory is allocated
  // from "first_block" until it is exhausted; after that memory is
  // allocated by allocating new blocks from the heap.
  SafeArena(char* first_block, const size_t block_size)
    : BaseArena(first_block, block_size, false) { }

  virtual void Reset() LOCKS_EXCLUDED(mutex_) {
    MutexLock lock(&mutex_);    // in case two threads Reset() at same time
    BaseArena::Reset();
  }

  char* Alloc(const size_t size) LOCKS_EXCLUDED(mutex_) {
    MutexLock lock(&mutex_);
    return reinterpret_cast<char*>(GetMemory(size, 1));
  }
  void* AllocAligned(const size_t size, const int align)
      LOCKS_EXCLUDED(mutex_) {
    MutexLock lock(&mutex_);
    return GetMemory(size, align);
  }
  char* Calloc(const size_t size) {
    void* return_value = Alloc(size);
    memset(return_value, 0, size);
    return reinterpret_cast<char*>(return_value);
  }
  void* CallocAligned(const size_t size, const int align) {
    void* return_value = AllocAligned(size, align);
    memset(return_value, 0, size);
    return return_value;
  }
  // Free does nothing except for the last piece allocated.
  void Free(void* memory, size_t size) LOCKS_EXCLUDED(mutex_) {
    MutexLock lock(&mutex_);
    ReturnMemory(memory, size);
  }
  typedef BaseArena::Handle Handle;
  char* AllocWithHandle(const size_t size, Handle* handle)
      LOCKS_EXCLUDED(mutex_) {
    MutexLock lock(&mutex_);
    return reinterpret_cast<char*>(GetMemoryWithHandle(size, handle));
  }
  virtual char* SlowAlloc(size_t size) {   // "slow" 'cause it's virtual
    return Alloc(size);
  }
  virtual void SlowFree(void* memory, size_t size) {  // "slow" 'cause it's virt
    Free(memory, size);
  }
  virtual char* SlowRealloc(char* memory, size_t old_size, size_t new_size) {
    return Realloc(memory, old_size, new_size);
  }
  virtual char* SlowAllocWithHandle(const size_t size, Handle* handle) {
    return AllocWithHandle(size, handle);
  }

  char* Memdup(const char* s, size_t bytes) {
    char* newstr = Alloc(bytes);
    memcpy(newstr, s, bytes);
    return newstr;
  }
  char* MemdupPlusNUL(const char* s, size_t bytes) {  // like "string(s, len)"
    char* newstr = Alloc(bytes+1);
    memcpy(newstr, s, bytes);
    newstr[bytes] = '\0';
    return newstr;
  }
  Handle MemdupWithHandle(const char* s, size_t bytes) {
    Handle handle;
    char* newstr = AllocWithHandle(bytes, &handle);
    memcpy(newstr, s, bytes);
    return handle;
  }
  char* Strdup(const char* s) {
    return Memdup(s, strlen(s) + 1);
  }
  // Unlike libc's strncpy, I always NUL-terminate. libc's semantics are dumb.
  // This will allocate at most n+1 bytes (+1 is for the NULL terminator).
  char* Strndup(const char* s, size_t n) {
    // Use memchr so we don't walk past n.
    // We can't use the one in //strings since this is the base library,
    // so we have to reinterpret_cast from the libc void *.
    const char* eos = reinterpret_cast<const char*>(memchr(s, '\0', n));
    // if no null terminator found, use full n
    const size_t bytes = (eos == NULL) ? n + 1 : eos - s + 1;
    char* ret = Memdup(s, bytes);
    ret[bytes-1] = '\0';  // make sure the string is NUL-terminated
    return ret;
  }

  // You can realloc a previously-allocated string either bigger or smaller.
  // We can be more efficient if you realloc a string right after you allocate
  // it (eg allocate way-too-much space, fill it, realloc to just-big-enough)
  char* Realloc(char* s, size_t oldsize, size_t newsize)
      LOCKS_EXCLUDED(mutex_);
  // If you know the new size is smaller (or equal), you don't need to know
  // oldsize. We don't check that newsize is smaller, so you'd better be sure!
  char* Shrink(char* s, size_t newsize) LOCKS_EXCLUDED(mutex_) {
    MutexLock lock(&mutex_);
    AdjustLastAlloc(s, newsize);   // reclaim space if we can
    return s;                      // we never need to move if we go smaller
  }

  Status status() LOCKS_EXCLUDED(mutex_) {
    MutexLock lock(&mutex_);
    return status_;
  }

  // Number of bytes remaining before the arena has to allocate another block.
  size_t bytes_until_next_allocation() LOCKS_EXCLUDED(mutex_) {
    MutexLock lock(&mutex_);
    return remaining_;
  }

 protected:
  Mutex mutex_;

 private:
  DISALLOW_COPY_AND_ASSIGN(SafeArena);
};
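
// A sketch of sharing one SafeArena between threads (thread creation and
// joining machinery omitted; every thread goes through the locking methods
// above, and Reset() runs only after all workers are done):
//    SafeArena arena(8192);
//    // worker 1:  char* a = arena.Strdup("alpha");
//    // worker 2:  char* b = arena.Strdup("beta");
//    // main, after joining both workers:  arena.Reset();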

}  // namespace ctemplate

#endif  // BASE_ARENA_H_