// Copyright (c) 2013-2014 Sandstorm Development Group, Inc. and contributors
// Licensed under the MIT License:
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.

#include "arena.h"
#include "debug.h"
#include <stdint.h>
#include <string.h>  // memcpy(), used by copyString() below

namespace kj {
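
// Typical usage, as a minimal sketch (see arena.h for the full interface):
//
//   kj::Arena arena;
//   Foo& foo = arena.allocate<Foo>(...);          // destroyed when the arena is destroyed
//   kj::StringPtr s = arena.copyString("hello");  // NUL-terminated copy inside the arena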

Arena::Arena(size_t chunkSizeHint): nextChunkSize(kj::max(sizeof(ChunkHeader), chunkSizeHint)) {}

Arena::Arena(ArrayPtr<byte> scratch)
    : nextChunkSize(kj::max(sizeof(ChunkHeader), scratch.size())) {
  if (scratch.size() > sizeof(ChunkHeader)) {
    ChunkHeader* chunk = reinterpret_cast<ChunkHeader*>(scratch.begin());
    chunk->end = scratch.end();
    chunk->pos = reinterpret_cast<byte*>(chunk + 1);
    chunk->next = nullptr;  // Never actually observed.

    // Don't place the chunk in the chunk list because it's not ours to delete.  Just make it the
    // current chunk so that we'll allocate from it until it is empty.
    currentChunk = chunk;
  }
}

Arena::~Arena() noexcept(false) {
  // Run cleanup() explicitly, but if it throws an exception, make sure to run it again as part of
  // unwind.  The second call will not throw because destructors are required to guard against
  // exceptions when already unwinding.
  KJ_ON_SCOPE_FAILURE(cleanup());
  cleanup();
}

void Arena::cleanup() {
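  // Destroy objects before freeing chunks, since the objects live inside the chunks.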
  while (objectList != nullptr) {
    void* ptr = objectList + 1;
    auto destructor = objectList->destructor;
    objectList = objectList->next;
    destructor(ptr);
  }

  while (chunkList != nullptr) {
    void* ptr = chunkList;
    chunkList = chunkList->next;
    operator delete(ptr);
  }
}

namespace {

constexpr bool isPowerOfTwo(size_t value) {
  return (value & (value - 1)) == 0;
}

inline byte* alignTo(byte* p, uint alignment) {
  // Round the pointer up to the next aligned value.
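  //
  // E.g. (illustrative addresses): aligning p = 0x1005 to alignment = 8 uses
  // mask = 0x7, so (0x1005 + 0x7) & ~0x7 = 0x1008, the next multiple of 8.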

  KJ_DASSERT(isPowerOfTwo(alignment), alignment);
  uintptr_t mask = alignment - 1;
  uintptr_t i = reinterpret_cast<uintptr_t>(p);
  return reinterpret_cast<byte*>((i + mask) & ~mask);
}

inline size_t alignTo(size_t s, uint alignment) {
  // Round the value up to the next aligned value.

  KJ_DASSERT(isPowerOfTwo(alignment), alignment);
  size_t mask = alignment - 1;
  return (s + mask) & ~mask;
}

}  // namespace

void* Arena::allocateBytes(size_t amount, uint alignment, bool hasDisposer) {
  if (hasDisposer) {
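    // Make room in front of the object for an ObjectHeader, aligned so the object
    // itself still lands on an `alignment` boundary; setDestructor() fills it in later.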
    alignment = kj::max(alignment, alignof(ObjectHeader));
    amount += alignTo(sizeof(ObjectHeader), alignment);
  }

  void* result = allocateBytesInternal(amount, alignment);

  if (hasDisposer) {
    // Skip past the space that was reserved for the ObjectHeader above.  The header
    // isn't added to the object list yet; that happens in setDestructor().
    result = alignTo(reinterpret_cast<byte*>(result) + sizeof(ObjectHeader), alignment);
  }

  KJ_DASSERT(reinterpret_cast<uintptr_t>(result) % alignment == 0);
  return result;
}

void* Arena::allocateBytesInternal(size_t amount, uint alignment) {
  if (currentChunk != nullptr) {
    ChunkHeader* chunk = currentChunk;
    byte* alignedPos = alignTo(chunk->pos, alignment);

    // Careful about overflow here: compare sizes rather than computing
    // alignedPos + amount, which could wrap past the end of the address space.
    if (amount + (alignedPos - chunk->pos) <= chunk->end - chunk->pos) {
      // There's enough space in this chunk.
      chunk->pos = alignedPos + amount;
      return alignedPos;
    }
  }

  // Not enough space in the current chunk.  Allocate a new one.

  // We need to allocate at least enough space for the ChunkHeader and the requested allocation.

  // If the alignment is less than that of the chunk header, we'll need to increase it.
  alignment = kj::max(alignment, alignof(ChunkHeader));

  // If the ChunkHeader size does not match the alignment, we'll need to pad it up.
  amount += alignTo(sizeof(ChunkHeader), alignment);

  // Make sure we're going to allocate enough space.
  while (nextChunkSize < amount) {
    nextChunkSize *= 2;
  }
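  // (For example, with illustrative numbers: if nextChunkSize is 1024 and amount
  // is 3000, two doublings take it to 4096, which is enough.)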

  // Allocate.
  byte* bytes = reinterpret_cast<byte*>(operator new(nextChunkSize));

  // Set up the ChunkHeader at the beginning of the allocation.
  ChunkHeader* newChunk = reinterpret_cast<ChunkHeader*>(bytes);
  newChunk->next = chunkList;
  newChunk->pos = bytes + amount;
  newChunk->end = bytes + nextChunkSize;
  currentChunk = newChunk;
  chunkList = newChunk;
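
  // Double the target size for the next chunk, keeping the number of chunks
  // logarithmic in the total bytes allocated.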
  nextChunkSize *= 2;

  // Move past the ChunkHeader to find the position of the allocated object.
  return alignTo(bytes + sizeof(ChunkHeader), alignment);
}

StringPtr Arena::copyString(StringPtr content) {
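  // Copy content.size() + 1 bytes so the NUL terminator comes along; the returned
  // StringPtr, like all StringPtrs, points at NUL-terminated text.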
  char* data = reinterpret_cast<char*>(allocateBytes(content.size() + 1, 1, false));
  memcpy(data, content.cStr(), content.size() + 1);
  return StringPtr(data, content.size());
}

void Arena::setDestructor(void* ptr, void (*destructor)(void*)) {
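  // allocateBytes(..., hasDisposer = true) reserved aligned space for an ObjectHeader
  // directly before the object, so stepping back one header from ptr lands on it.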
  ObjectHeader* header = reinterpret_cast<ObjectHeader*>(ptr) - 1;
  KJ_DASSERT(reinterpret_cast<uintptr_t>(header) % alignof(ObjectHeader) == 0);
  header->destructor = destructor;
  header->next = objectList;
  objectList = header;
}

}  // namespace kj