// Copyright (c) 2013, Kenton Varda <temporal@gmail.com>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this
//    list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
//    this list of conditions and the following disclaimer in the documentation
//    and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef KJ_ARENA_H_
#define KJ_ARENA_H_

#include "memory.h"
#include "array.h"
#include "string.h"
#include "mutex.h"

namespace kj {

class Arena {
  // A class which allows several objects to be allocated in contiguous chunks of memory, then
  // frees them all at once.
37 38 39 40
  //
  // Allocating from the same Arena in multiple threads concurrently is safe but not particularly
  // performant due to contention.  The class could be optimized in the future to use per-thread
  // chunks to solve this.
41 42

public:
43 44 45 46
  explicit Arena(size_t chunkSizeHint = 1024);
  // Create an Arena.  `chunkSizeHint` hints at where to start when allocating chunks, but is only
  // a hint -- the Arena will, for example, allocate progressively larger chunks as time goes on,
  // in order to reduce overall allocation overhead.
47

48
  explicit Arena(ArrayPtr<byte> scratch);
49 50 51 52 53 54
  // Allocates from the given scratch space first, only resorting to the heap when it runs out.

  KJ_DISALLOW_COPY(Arena);
  ~Arena() noexcept(false);

  template <typename T, typename... Params>
55
  T& allocate(Params&&... params) const;
56
  template <typename T>
57
  ArrayPtr<T> allocateArray(size_t size) const;
58 59 60 61 62 63
  // Allocate an object or array of type T.  If T has a non-trivial destructor, that destructor
  // will be run during the Arena's destructor.  Such destructors are run in opposite order of
  // allocation.  Note that these methods must maintain a list of destructors to call, which has
  // overhead, but this overhead only applies if T has a non-trivial destructor.

  template <typename T, typename... Params>
64
  Own<T> allocateOwn(Params&&... params) const;
65
  template <typename T>
66
  Array<T> allocateOwnArray(size_t size) const;
67
  template <typename T>
68
  ArrayBuilder<T> allocateOwnArrayBuilder(size_t capacity) const;
69 70 71 72 73 74
  // Allocate an object or array of type T.  Destructors are executed when the returned Own<T>
  // or Array<T> goes out-of-scope, which must happen before the Arena is destroyed.  This variant
  // is useful when you need to control when the destructor is called.  This variant also avoids
  // the need for the Arena itself to keep track of destructors to call later, which may make it
  // slightly more efficient.

75
  template <typename T>
76
  inline T& copy(T&& value) const { return allocate<Decay<T>>(kj::fwd<T>(value)); }
77 78 79
  // Allocate a copy of the given value in the arena.  This is just a shortcut for calling the
  // type's copy (or move) constructor.

80
  StringPtr copyString(StringPtr content) const;
81 82 83 84 85
  // Make a copy of the given string inside the arena, and return a pointer to the copy.

private:
  struct ChunkHeader {
    ChunkHeader* next;
86 87
    byte* pos;  // first unallocated byte in this chunk
    byte* end;  // end of this chunk
88 89 90 91 92 93 94
  };
  struct ObjectHeader {
    void (*destructor)(void*);
    ObjectHeader* next;
  };

  struct State {
95
    size_t nextChunkSize;
96
    ChunkHeader* chunkList;
97
    mutable ObjectHeader* objectList;
98

99 100 101 102 103
    ChunkHeader* currentChunk;

    inline State(size_t nextChunkSize)
        : nextChunkSize(nextChunkSize), chunkList(nullptr),
          objectList(nullptr), currentChunk(nullptr) {}
104 105 106 107 108 109 110
    inline ~State() noexcept(false) { cleanup(); }

    void cleanup();
    // Run all destructors, leaving the above pointers null.  If a destructor throws, the State is
    // left in a consistent state, such that if cleanup() is called again, it will pick up where
    // it left off.
  };
111
  MutexGuarded<State> state;
112

113
  void* allocateBytes(size_t amount, uint alignment, bool hasDisposer) const;
114 115 116
  // Allocate the given number of bytes.  `hasDisposer` must be true if `setDisposer()` may be
  // called on this pointer later.

Kenton Varda's avatar
Kenton Varda committed
117 118 119 120
  void* allocateBytesLockless(size_t amount, uint alignment) const;
  // Try to allocate the given number of bytes without taking a lock.  Fails if and only if there
  // is no space left in the current chunk.

121 122 123 124
  void* allocateBytesFallback(size_t amount, uint alignment) const;
  // Fallback used when the current chunk is out of space.

  void setDestructor(void* ptr, void (*destructor)(void*)) const;
125 126 127 128 129 130 131
  // Schedule the given destructor to be executed when the Arena is destroyed.  `ptr` must be a
  // pointer previously returned by an `allocateBytes()` call for which `hasDisposer` was true.

  template <typename T>
  static void destroyArray(void* pointer) {
    size_t elementCount = *reinterpret_cast<size_t*>(pointer);
    constexpr size_t prefixSize = kj::max(alignof(T), sizeof(size_t));
132
    DestructorOnlyArrayDisposer::instance.disposeImpl(
133 134 135 136 137 138 139 140 141 142 143 144 145 146
        reinterpret_cast<byte*>(pointer) + prefixSize,
        sizeof(T), elementCount, elementCount, &destroyObject<T>);
  }

  template <typename T>
  static void destroyObject(void* pointer) {
    dtor(*reinterpret_cast<T*>(pointer));
  }
};

// =======================================================================================
// Inline implementation details

template <typename T, typename... Params>
147
T& Arena::allocate(Params&&... params) const {
148 149 150 151 152 153 154 155 156 157 158 159
  T& result = *reinterpret_cast<T*>(allocateBytes(
      sizeof(T), alignof(T), !__has_trivial_destructor(T)));
  if (!__has_trivial_constructor(T) || sizeof...(Params) > 0) {
    ctor(result, kj::fwd<Params>(params)...);
  }
  if (!__has_trivial_destructor(T)) {
    setDestructor(&result, &destroyObject<T>);
  }
  return result;
}

template <typename T>
160
ArrayPtr<T> Arena::allocateArray(size_t size) const {
161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195
  if (__has_trivial_destructor(T)) {
    ArrayPtr<T> result =
        arrayPtr(reinterpret_cast<T*>(allocateBytes(
            sizeof(T) * size, alignof(T), false)), size);
    if (!__has_trivial_constructor(T)) {
      for (size_t i = 0; i < size; i++) {
        ctor(result[i]);
      }
    }
    return result;
  } else {
    // Allocate with a 64-bit prefix in which we store the array size.
    constexpr size_t prefixSize = kj::max(alignof(T), sizeof(size_t));
    void* base = allocateBytes(sizeof(T) * size + prefixSize, alignof(T), true);
    size_t& tag = *reinterpret_cast<size_t*>(base);
    ArrayPtr<T> result =
        arrayPtr(reinterpret_cast<T*>(reinterpret_cast<byte*>(base) + prefixSize), size);
    setDestructor(base, &destroyArray<T>);

    if (__has_trivial_constructor(T)) {
      tag = size;
    } else {
      // In case of constructor exceptions, we need the tag to end up storing the number of objects
      // that were successfully constructed, so that they'll be properly destroyed.
      tag = 0;
      for (size_t i = 0; i < size; i++) {
        ctor(result[i]);
        tag = i + 1;
      }
    }
    return result;
  }
}

template <typename T, typename... Params>
196
Own<T> Arena::allocateOwn(Params&&... params) const {
197 198 199 200
  T& result = *reinterpret_cast<T*>(allocateBytes(sizeof(T), alignof(T), false));
  if (!__has_trivial_constructor(T) || sizeof...(Params) > 0) {
    ctor(result, kj::fwd<Params>(params)...);
  }
201
  return Own<T>(&result, DestructorOnlyDisposer<T>::instance);
202 203 204
}

template <typename T>
205
Array<T> Arena::allocateOwnArray(size_t size) const {
206 207 208 209 210 211 212 213
  ArrayBuilder<T> result = allocateOwnArrayBuilder<T>(size);
  for (size_t i = 0; i < size; i++) {
    result.add();
  }
  return result.finish();
}

template <typename T>
214
ArrayBuilder<T> Arena::allocateOwnArrayBuilder(size_t capacity) const {
215 216
  return ArrayBuilder<T>(
      reinterpret_cast<T*>(allocateBytes(sizeof(T) * capacity, alignof(T), false)),
217
      capacity, DestructorOnlyArrayDisposer::instance);
218 219 220 221 222
}

}  // namespace kj

#endif  // KJ_ARENA_H_