/*
 * Copyright (c) 2014-2016, Siemens AG. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <embb/base/c/memory_allocation.h>
#include <embb/base/c/internal/config.h>
#include <embb/base/c/atomic.h>
#include <embb/base/c/internal/unused.h>
#include <stdlib.h>
#include <stdint.h>  /* uintptr_t, used by embb_alloc_aligned in debug mode */
#include <assert.h>

#ifdef EMBB_DEBUG

static EMBB_BASE_BASIC_TYPE_SIZE_4 embb_bytes_allocated = 0;

enum {
  // Marker values chosen so that other data is unlikely to match them by accident
  INVALID_ALLOCATION = 0x91919191,
  ALIGNED_ALLOCATION = 0x99AABB11,
  UNALIGNED_ALLOCATION = 0x11AABB99
};
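
/*
 * Debug layout used by embb_alloc/embb_free below: two size_t header words
 * are placed in front of the payload that is handed to the caller.
 *
 *   [ bytes_to_allocate ][ UNALIGNED_ALLOCATION ][ payload ... ]
 *   ^ address returned by malloc                 ^ pointer returned to caller
 */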

void* embb_alloc(size_t bytes) {
  // Reserve two extra size_t words in front of the payload for the header
  size_t bytes_to_allocate = 2 * sizeof(size_t) + bytes;
  void* allocated = malloc(bytes_to_allocate);

  if (allocated == NULL)
    return NULL;

  embb_internal__atomic_fetch_and_add_4(
    &embb_bytes_allocated, (long)bytes_to_allocate);

  size_t* x_as_size_type = (size_t*)allocated;

  x_as_size_type[1] = UNALIGNED_ALLOCATION;
  x_as_size_type[0] = (size_t)bytes_to_allocate;

  // Return the pointer to the payload
  return (void*)((size_t*)allocated + 2);
}
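
/*
 * Note: in debug builds the pointer handed out above is offset into a larger
 * block, so memory obtained from embb_alloc must be released with embb_free
 * below, never with plain free().
 */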

void embb_free(void * ptr) {
  assert(ptr != NULL);

  size_t * alloc_type = (size_t*)ptr - 1;
  size_t * bytes_allocated = (size_t*)ptr - 2;

  // Check whether this memory was allocated unaligned
  assert((*alloc_type) == UNALIGNED_ALLOCATION);

  (*alloc_type) = (size_t)INVALID_ALLOCATION;

  embb_internal__atomic_fetch_and_add_4(
    &embb_bytes_allocated, (long)(0 - (size_t)(*bytes_allocated)));

  free((size_t*)ptr - 2);
}

void* embb_alloc_aligned(size_t alignment, size_t size) {
  // In debug mode, we count the number of allocated bytes to be able to detect
  // memory leaks. For that purpose, we allocate more memory than necessary for
  // the payload. In the extra memory before the payload, we store a pointer to
  // the originally allocated block, a flag indicating whether the allocation
  // was aligned, and the number of allocated bytes. This requires an
  // additional 3*sizeof(size_t) bytes of memory (additional_bytes).
  // To guarantee at least n aligned blocks for the payload plus
  // additional_bytes of space in front of them, we need the following number
  // of bytes: (n+1)*alignment + (additional_bytes-1)
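  //
  // A purely illustrative example of the formula (values chosen here, not
  // taken from any caller), assuming a 64-bit platform with sizeof(size_t) == 8:
  //   alignment = 64, size = 100
  //   n = (100 + 63) / 64 = 2
  //   additional_bytes = 3 * 8 = 24
  //   bytes_to_allocate = (2 + 1) * 64 + (24 - 1) = 215
  // The header words written further below then sit directly in front of the
  // aligned pointer x that is returned to the caller:
  //   x[-3] = bytes_to_allocate, x[-2] = original malloc address,
  //   x[-1] = ALIGNED_ALLOCATION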

  // n specifies the number of alignment blocks we need for the payload
  size_t n = (size + (alignment - 1)) / alignment;

  // additional_bytes specifies the number of bytes we need to store in addition
  // to the payload
  size_t additional_bytes = sizeof(size_t) * 3;

  size_t bytes_to_allocate = (n + 1) * alignment + (additional_bytes - 1);

  char* allocated = (char *)malloc(bytes_to_allocate);

  if (allocated == NULL)
    return NULL;

  // Get the next aligned pointer
  char* x = (char*)(((uintptr_t)(allocated+alignment)) & ~(alignment - 1));

  // If we do not have enough space before, get the next allocated position
  while ((x-allocated) < (long)additional_bytes)
    x += alignment;

  // Now check if our allocation algorithm worked correctly
  // 1. Enough space for the payload
  assert((size_t)((allocated + bytes_to_allocate) - x) >= size);

  // 2. x is aligned
  assert(((size_t)x % alignment) == 0);

  // 3. Enough space for the additional information
  assert((x - additional_bytes) >= allocated);

  // x is now the first aligned position (this is the return value)
  size_t* x_as_size_type = (size_t*)x;

  x_as_size_type[-1] = (size_t)ALIGNED_ALLOCATION;
  x_as_size_type[-2] = (size_t)allocated;
  x_as_size_type[-3] = bytes_to_allocate;

  embb_internal__atomic_fetch_and_add_4(
      &embb_bytes_allocated, (long)bytes_to_allocate);

  return x;
}

void embb_free_aligned(void* ptr) {
  assert(ptr != NULL);

  size_t* ptr_conv = (size_t*)ptr;

  // If embb_free_aligned is called, the memory block should have been allocated
  // using embb_alloc_aligned.
  assert(ptr_conv[-1] == ALIGNED_ALLOCATION);

  ptr_conv[-1] = (size_t)INVALID_ALLOCATION;

  embb_internal__atomic_fetch_and_add_4(
    &embb_bytes_allocated, (long)((long)0 - ptr_conv[-3]));

  free((void*)ptr_conv[-2]);
}

size_t embb_get_bytes_allocated() {
  return (size_t)(embb_internal__atomic_load_4(&embb_bytes_allocated));
}

#else // EMBB_DEBUG

void * embb_alloc(size_t bytes) {
  return malloc(bytes);
}

void embb_free(void * ptr) {
  assert(ptr != NULL);
  free(ptr);
}

void *embb_alloc_aligned(size_t alignment, size_t size) {
  void* malloc_addr = NULL;
#ifdef EMBB_PLATFORM_COMPILER_MSVC
  /*
   * From the Documentation:
   * Allocates memory on a specified alignment boundary.
   *
   * Return:
   * A pointer to the memory block that was allocated or NULL if the operation
   * failed. The pointer is a multiple of alignment.
   */
  malloc_addr = _aligned_malloc(size, alignment);
#elif defined EMBB_PLATFORM_COMPILER_GNUC
  /*
   * From the Documentation:
   * The posix_memalign() function shall allocate size bytes aligned on a
   * boundary specified by alignment, and shall return a pointer to the
   * allocated memory in memptr. The value of alignment shall be a multiple
   * of sizeof(void *), that is also a power of two. Upon successful
   * completion, the value pointed to by memptr shall be a multiple of
   * alignment.
   */
  int status = posix_memalign(&malloc_addr, alignment, size);
  EMBB_UNUSED(status);
#endif
  return malloc_addr;
}

void embb_free_aligned(void* ptr) {
  assert(ptr != NULL);
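  /*
   * _aligned_malloc'ed memory on MSVC must be released with _aligned_free,
   * whereas memory obtained from posix_memalign is released with plain free;
   * hence the compiler-specific branches below.
   */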

#ifdef EMBB_PLATFORM_COMPILER_MSVC
  _aligned_free(ptr);
#else
#ifdef EMBB_PLATFORM_COMPILER_GNUC
  free(ptr);
#else
#error Unsupported compiler
#endif
#endif
}

size_t embb_get_bytes_allocated() {
  return 0;
}

#endif // EMBB_DEBUG

void *embb_alloc_cache_aligned(size_t size) {
  return embb_alloc_aligned(EMBB_PLATFORM_CACHE_LINE_SIZE, size);
}
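
/*
 * Illustrative usage sketch (not part of the library): with EMBB_DEBUG
 * defined, the byte counter maintained above allows a simple leak check.
 *
 *   void* buffer = embb_alloc_cache_aligned(256);
 *   if (buffer != NULL) {
 *     // ... use the buffer ...
 *     embb_free_aligned(buffer);
 *   }
 *   // With no other embb allocations outstanding, the counter is back to 0
 *   // (in release builds it is always 0).
 *   assert(embb_get_bytes_allocated() == 0);
 */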