Diffstat (limited to 'src/vppinfra/mem_dlmalloc.c')
-rw-r--r--  src/vppinfra/mem_dlmalloc.c  360
1 file changed, 290 insertions(+), 70 deletions(-)
diff --git a/src/vppinfra/mem_dlmalloc.c b/src/vppinfra/mem_dlmalloc.c
index e2a0f71e084..a188164a7ba 100644
--- a/src/vppinfra/mem_dlmalloc.c
+++ b/src/vppinfra/mem_dlmalloc.c
@@ -19,7 +19,6 @@
#include <vppinfra/lock.h>
#include <vppinfra/hash.h>
#include <vppinfra/elf_clib.h>
-#include <vppinfra/sanitizer.h>
typedef struct
{
@@ -39,7 +38,6 @@ typedef struct
typedef struct
{
clib_spinlock_t lock;
- uword enabled;
mheap_trace_t *traces;
@@ -53,22 +51,24 @@ typedef struct
uword *trace_index_by_offset;
/* So we can easily shut off current segment trace, if any */
- void *current_traced_mheap;
+ const clib_mem_heap_t *current_traced_mheap;
} mheap_trace_main_t;
mheap_trace_main_t mheap_trace_main;
-void
-mheap_get_trace (uword offset, uword size)
+static __thread int mheap_trace_thread_disable;
+
+static void
+mheap_get_trace_internal (const clib_mem_heap_t *heap, uword offset,
+ uword size)
{
mheap_trace_main_t *tm = &mheap_trace_main;
mheap_trace_t *t;
uword i, n_callers, trace_index, *p;
mheap_trace_t trace;
- uword save_enabled;
- if (tm->enabled == 0 || (clib_mem_get_heap () != tm->current_traced_mheap))
+ if (heap != tm->current_traced_mheap || mheap_trace_thread_disable)
return;
/* Spurious Coverity warnings be gone. */
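The patch replaces the global tm->enabled toggle with this per-thread guard. The reason shows up in the hunks that follow: recording a trace itself allocates (backtrace storage, hash and vector updates), so the tracer must ignore its own allocations, and a global off-switch would also drop traces from every other thread in the meantime. A minimal sketch of the pattern, with a hypothetical record_alloc() standing in for mheap_get_trace_internal():

#include <vppinfra/clib.h>

static __thread int tracing_disabled;

static void
record_alloc (uword offset, uword size)
{
  if (tracing_disabled)
    return;	/* our own bookkeeping allocation: skip it */

  tracing_disabled = 1;
  /* hash/vector updates here may themselves allocate and re-enter
   * record_alloc (), which now returns immediately on the flag above */
  tracing_disabled = 0;
}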
@@ -76,9 +76,12 @@ mheap_get_trace (uword offset, uword size)
clib_spinlock_lock (&tm->lock);
- /* Turn off tracing to avoid embarrassment... */
- save_enabled = tm->enabled;
- tm->enabled = 0;
+ /* heap could have changed while we were waiting on the lock */
+ if (heap != tm->current_traced_mheap)
+ goto out;
+
+ /* Turn off tracing for this thread to avoid embarrassment... */
+ mheap_trace_thread_disable = 1;
/* Skip our frame and mspace_get_aligned's frame */
n_callers = clib_backtrace (trace.callers, ARRAY_LEN (trace.callers), 2);
@@ -101,7 +104,7 @@ mheap_get_trace (uword offset, uword size)
if (i > 0)
{
trace_index = tm->trace_free_list[i - 1];
- _vec_len (tm->trace_free_list) = i - 1;
+ vec_set_len (tm->trace_free_list, i - 1);
}
else
{
@@ -114,14 +117,12 @@ mheap_get_trace (uword offset, uword size)
{
hash_pair_t *p;
mheap_trace_t *q;
- /* *INDENT-OFF* */
hash_foreach_pair (p, tm->trace_by_callers,
({
q = uword_to_pointer (p->key, mheap_trace_t *);
ASSERT (q >= old_start && q < old_end);
p->key = pointer_to_uword (tm->traces + (q - old_start));
}));
- /* *INDENT-ON* */
}
trace_index = t - tm->traces;
}
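A subtlety in the hunk above: vec_add2() can grow and move tm->traces, while trace_by_callers is keyed by pointers into that vector, so every key is rebased onto the new block. The fixup relies on q - old_start being the element's index, which is stable across the move; an equivalent index-based spelling:

uword idx = q - old_start;	/* the index survives the reallocation */
p->key = pointer_to_uword (tm->traces + idx);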
@@ -139,34 +140,33 @@ mheap_get_trace (uword offset, uword size)
hash_set (tm->trace_index_by_offset, offset, t - tm->traces);
out:
- tm->enabled = save_enabled;
+ mheap_trace_thread_disable = 0;
clib_spinlock_unlock (&tm->lock);
}
-void
-mheap_put_trace (uword offset, uword size)
+static void
+mheap_put_trace_internal (const clib_mem_heap_t *heap, uword offset,
+ uword size)
{
mheap_trace_t *t;
uword trace_index, *p;
mheap_trace_main_t *tm = &mheap_trace_main;
- uword save_enabled;
- if (tm->enabled == 0)
+ if (heap != tm->current_traced_mheap || mheap_trace_thread_disable)
return;
clib_spinlock_lock (&tm->lock);
- /* Turn off tracing for a moment */
- save_enabled = tm->enabled;
- tm->enabled = 0;
+ /* heap could have changed while we were waiting on the lock */
+ if (heap != tm->current_traced_mheap)
+ goto out;
+
+ /* Turn off tracing for this thread for a moment */
+ mheap_trace_thread_disable = 1;
p = hash_get (tm->trace_index_by_offset, offset);
if (!p)
- {
- tm->enabled = save_enabled;
- clib_spinlock_unlock (&tm->lock);
- return;
- }
+ goto out;
trace_index = p[0];
hash_unset (tm->trace_index_by_offset, offset);
@@ -183,17 +183,34 @@ mheap_put_trace (uword offset, uword size)
vec_add1 (tm->trace_free_list, trace_index);
clib_memset (t, 0, sizeof (t[0]));
}
- tm->enabled = save_enabled;
+
+out:
+ mheap_trace_thread_disable = 0;
clib_spinlock_unlock (&tm->lock);
}
+void
+mheap_get_trace (uword offset, uword size)
+{
+ mheap_get_trace_internal (clib_mem_get_heap (), offset, size);
+}
+
+void
+mheap_put_trace (uword offset, uword size)
+{
+ mheap_put_trace_internal (clib_mem_get_heap (), offset, size);
+}
+
always_inline void
mheap_trace_main_free (mheap_trace_main_t * tm)
{
+ CLIB_SPINLOCK_ASSERT_LOCKED (&tm->lock);
+ tm->current_traced_mheap = 0;
vec_free (tm->traces);
vec_free (tm->trace_free_list);
hash_free (tm->trace_by_callers);
hash_free (tm->trace_index_by_offset);
+ mheap_trace_thread_disable = 0;
}
static clib_mem_heap_t *
@@ -235,7 +252,7 @@ clib_mem_create_heap_internal (void *base, uword size,
mspace_disable_expand (h->mspace);
- CLIB_MEM_POISON (mspace_least_addr (h->mspace),
+ clib_mem_poison (mspace_least_addr (h->mspace),
mspace_footprint (h->mspace));
return h;
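This hunk is part of the sanitizer cleanup that also drops the vppinfra/sanitizer.h include at the top of the file: the CLIB_MEM_POISON macro becomes the clib_mem_poison() helper. The convention throughout the patch is unpoison-on-alloc, poison-on-free; on a non-sanitized build both calls are no-ops. A hedged sketch of the observable effect, assuming an AddressSanitizer build:

#include <vppinfra/mem.h>

static void
poison_example (void)
{
  u8 *p = clib_mem_alloc (64);	/* alloc path unpoisons the usable bytes */
  p[0] = 1;			/* valid while the object is live */
  clib_mem_free (p);		/* free path poisons before mspace_free */
  /* any later access through p is reported as a use-after-free by ASan */
}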
@@ -257,7 +274,14 @@ clib_mem_init_internal (void *base, uword size,
clib_mem_set_heap (h);
if (mheap_trace_main.lock == 0)
- clib_spinlock_init (&mheap_trace_main.lock);
+ {
+ /* clib_spinlock_init() dynamically allocates the spinlock in the current
+ * per-cpu heap, but it is used for all traces across all heaps and
+ * hence we can't really allocate it in the current per-cpu heap as it
+ * could be destroyed later */
+ static struct clib_spinlock_s mheap_trace_main_lock = {};
+ mheap_trace_main.lock = &mheap_trace_main_lock;
+ }
return h;
}
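Zero-initialized static storage is a valid unlocked clib_spinlock_s, and unlike a clib_spinlock_init() allocation it cannot disappear when the heap it came from is destroyed. The same shape in isolation, with illustrative names:

#include <vppinfra/lock.h>

static void
use_static_lock (void)
{
  static struct clib_spinlock_s lock_storage = {};	/* zero = unlocked */
  clib_spinlock_t lock = &lock_storage;			/* no heap allocation */

  clib_spinlock_lock (&lock);
  /* critical section remains valid across heap create/destroy cycles */
  clib_spinlock_unlock (&lock);
}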
@@ -288,13 +312,12 @@ clib_mem_destroy (void)
{
mheap_trace_main_t *tm = &mheap_trace_main;
clib_mem_heap_t *heap = clib_mem_get_heap ();
- void *base = mspace_least_addr (heap->mspace);
- if (tm->enabled && heap->mspace == tm->current_traced_mheap)
- tm->enabled = 0;
+ if (heap->mspace == tm->current_traced_mheap)
+ mheap_trace (heap, 0);
destroy_mspace (heap->mspace);
- clib_mem_vm_unmap (base);
+ clib_mem_vm_unmap (heap);
}
__clib_export u8 *
@@ -357,6 +380,7 @@ format_mheap_trace (u8 * s, va_list * va)
int verbose = va_arg (*va, int);
int have_traces = 0;
int i;
+ int n = 0;
clib_spinlock_lock (&tm->lock);
if (vec_len (tm->traces) > 0 &&
@@ -383,9 +407,10 @@ format_mheap_trace (u8 * s, va_list * va)
total_objects_traced += t->n_allocations;
- /* When not verbose only report allocations of more than 1k. */
- if (!verbose && t->n_bytes < 1024)
+ /* When not verbose only report the 50 biggest allocations */
+ if (!verbose && n >= 50)
continue;
+ n++;
if (t == traces_copy)
s = format (s, "%=9s%=9s %=10s Traceback\n", "Bytes", "Count",
@@ -464,13 +489,13 @@ format_clib_mem_heap (u8 * s, va_list * va)
format_white_space, indent + 2, format_msize, mi.usmblks);
}
- if (mspace_is_traced (heap->mspace))
+ if (heap->flags & CLIB_MEM_HEAP_F_TRACED)
s = format (s, "\n%U", format_mheap_trace, tm, verbose);
return s;
}
-__clib_export void
-clib_mem_get_heap_usage (clib_mem_heap_t * heap, clib_mem_usage_t * usage)
+__clib_export __clib_flatten void
+clib_mem_get_heap_usage (clib_mem_heap_t *heap, clib_mem_usage_t *usage)
{
struct dlmallinfo mi = mspace_mallinfo (heap->mspace);
@@ -493,42 +518,50 @@ uword clib_mem_validate_serial = 0;
__clib_export void
mheap_trace (clib_mem_heap_t * h, int enable)
{
- (void) mspace_enable_disable_trace (h->mspace, enable);
+ mheap_trace_main_t *tm = &mheap_trace_main;
+
+ clib_spinlock_lock (&tm->lock);
+
+ if (tm->current_traced_mheap != 0 && tm->current_traced_mheap != h)
+ {
+ clib_warning ("tracing already enabled for another heap, ignoring");
+ goto out;
+ }
- if (enable == 0)
- mheap_trace_main_free (&mheap_trace_main);
+ if (enable)
+ {
+ h->flags |= CLIB_MEM_HEAP_F_TRACED;
+ tm->current_traced_mheap = h;
+ }
+ else
+ {
+ h->flags &= ~CLIB_MEM_HEAP_F_TRACED;
+ mheap_trace_main_free (&mheap_trace_main);
+ }
+
+out:
+ clib_spinlock_unlock (&tm->lock);
}
__clib_export void
clib_mem_trace (int enable)
{
- mheap_trace_main_t *tm = &mheap_trace_main;
void *current_heap = clib_mem_get_heap ();
-
- tm->enabled = enable;
mheap_trace (current_heap, enable);
-
- if (enable)
- tm->current_traced_mheap = current_heap;
- else
- tm->current_traced_mheap = 0;
}
int
clib_mem_is_traced (void)
{
clib_mem_heap_t *h = clib_mem_get_heap ();
- return mspace_is_traced (h->mspace);
+ return (h->flags & CLIB_MEM_HEAP_F_TRACED) != 0;
}
__clib_export uword
clib_mem_trace_enable_disable (uword enable)
{
- uword rv;
- mheap_trace_main_t *tm = &mheap_trace_main;
-
- rv = tm->enabled;
- tm->enabled = enable;
+ uword rv = !mheap_trace_thread_disable;
+ mheap_trace_thread_disable = !enable;
return rv;
}
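With tm->enabled gone, clib_mem_trace_enable_disable() now toggles only the calling thread's suppression flag and returns the previous per-thread state, so the usual save/restore idiom still works. A hedged usage sketch:

#include <vppinfra/mem.h>

static void
untraced_section (void)
{
  uword was_enabled = clib_mem_trace_enable_disable (0);

  /* allocations here are not recorded for this thread;
   * other threads keep tracing undisturbed */

  clib_mem_trace_enable_disable (was_enabled);
}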
@@ -567,37 +600,224 @@ clib_mem_destroy_heap (clib_mem_heap_t * h)
{
mheap_trace_main_t *tm = &mheap_trace_main;
- if (tm->enabled && h->mspace == tm->current_traced_mheap)
- tm->enabled = 0;
+ if (h->mspace == tm->current_traced_mheap)
+ mheap_trace (h, 0);
destroy_mspace (h->mspace);
if (h->flags & CLIB_MEM_HEAP_F_UNMAP_ON_DESTROY)
clib_mem_vm_unmap (h->base);
}
-__clib_export uword
-clib_mem_get_heap_free_space (clib_mem_heap_t * h)
+__clib_export __clib_flatten uword
+clib_mem_get_heap_free_space (clib_mem_heap_t *h)
{
struct dlmallinfo dlminfo = mspace_mallinfo (h->mspace);
return dlminfo.fordblks;
}
-__clib_export void *
-clib_mem_get_heap_base (clib_mem_heap_t * h)
+__clib_export __clib_flatten void *
+clib_mem_get_heap_base (clib_mem_heap_t *h)
{
return h->base;
}
-__clib_export uword
-clib_mem_get_heap_size (clib_mem_heap_t * heap)
+__clib_export __clib_flatten uword
+clib_mem_get_heap_size (clib_mem_heap_t *heap)
{
return heap->size;
}
-/*
- * fd.io coding-style-patch-verification: ON
- *
- * Local Variables:
- * eval: (c-set-style "gnu")
- * End:
- */
+/* Memory allocator which may call os_out_of_memory() if it fails */
+static inline void *
+clib_mem_heap_alloc_inline (void *heap, uword size, uword align,
+ int os_out_of_memory_on_failure)
+{
+ clib_mem_heap_t *h = heap ? heap : clib_mem_get_per_cpu_heap ();
+ void *p;
+
+ align = clib_max (CLIB_MEM_MIN_ALIGN, align);
+
+ p = mspace_memalign (h->mspace, align, size);
+
+ if (PREDICT_FALSE (0 == p))
+ {
+ if (os_out_of_memory_on_failure)
+ os_out_of_memory ();
+ return 0;
+ }
+
+ if (PREDICT_FALSE (h->flags & CLIB_MEM_HEAP_F_TRACED))
+ mheap_get_trace_internal (h, pointer_to_uword (p), clib_mem_size (p));
+
+ clib_mem_unpoison (p, size);
+ return p;
+}
+
+/* Memory allocator which calls os_out_of_memory() when it fails */
+__clib_export __clib_flatten void *
+clib_mem_alloc (uword size)
+{
+ return clib_mem_heap_alloc_inline (0, size, CLIB_MEM_MIN_ALIGN,
+ /* os_out_of_memory */ 1);
+}
+
+__clib_export __clib_flatten void *
+clib_mem_alloc_aligned (uword size, uword align)
+{
+ return clib_mem_heap_alloc_inline (0, size, align,
+ /* os_out_of_memory */ 1);
+}
+
+/* Memory allocator which returns NULL when it fails */
+__clib_export __clib_flatten void *
+clib_mem_alloc_or_null (uword size)
+{
+ return clib_mem_heap_alloc_inline (0, size, CLIB_MEM_MIN_ALIGN,
+ /* os_out_of_memory */ 0);
+}
+
+__clib_export __clib_flatten void *
+clib_mem_alloc_aligned_or_null (uword size, uword align)
+{
+ return clib_mem_heap_alloc_inline (0, size, align,
+ /* os_out_of_memory */ 0);
+}
+
+__clib_export __clib_flatten void *
+clib_mem_heap_alloc (void *heap, uword size)
+{
+ return clib_mem_heap_alloc_inline (heap, size, CLIB_MEM_MIN_ALIGN,
+ /* os_out_of_memory */ 1);
+}
+
+__clib_export __clib_flatten void *
+clib_mem_heap_alloc_aligned (void *heap, uword size, uword align)
+{
+ return clib_mem_heap_alloc_inline (heap, size, align,
+ /* os_out_of_memory */ 1);
+}
+
+__clib_export __clib_flatten void *
+clib_mem_heap_alloc_or_null (void *heap, uword size)
+{
+ return clib_mem_heap_alloc_inline (heap, size, CLIB_MEM_MIN_ALIGN,
+ /* os_out_of_memory */ 0);
+}
+
+__clib_export __clib_flatten void *
+clib_mem_heap_alloc_aligned_or_null (void *heap, uword size, uword align)
+{
+ return clib_mem_heap_alloc_inline (heap, size, align,
+ /* os_out_of_memory */ 0);
+}
+
+__clib_export __clib_flatten void *
+clib_mem_heap_realloc_aligned (void *heap, void *p, uword new_size,
+ uword align)
+{
+ uword old_alloc_size;
+ clib_mem_heap_t *h = heap ? heap : clib_mem_get_per_cpu_heap ();
+ void *new;
+
+ ASSERT (count_set_bits (align) == 1);
+
+ old_alloc_size = p ? mspace_usable_size (p) : 0;
+
+ if (new_size == old_alloc_size)
+ return p;
+
+ if (p && pointer_is_aligned (p, align) &&
+ mspace_realloc_in_place (h->mspace, p, new_size))
+ {
+ clib_mem_unpoison (p, new_size);
+ if (PREDICT_FALSE (h->flags & CLIB_MEM_HEAP_F_TRACED))
+ {
+ mheap_put_trace_internal (h, pointer_to_uword (p), old_alloc_size);
+ mheap_get_trace_internal (h, pointer_to_uword (p),
+ clib_mem_size (p));
+ }
+ }
+ else
+ {
+ new = clib_mem_heap_alloc_inline (h, new_size, align, 1);
+
+ clib_mem_unpoison (new, new_size);
+ if (old_alloc_size)
+ {
+ clib_mem_unpoison (p, old_alloc_size);
+ clib_memcpy_fast (new, p, clib_min (new_size, old_alloc_size));
+ clib_mem_heap_free (h, p);
+ }
+ p = new;
+ }
+
+ return p;
+}
+
+__clib_export __clib_flatten void *
+clib_mem_heap_realloc (void *heap, void *p, uword new_size)
+{
+ return clib_mem_heap_realloc_aligned (heap, p, new_size, CLIB_MEM_MIN_ALIGN);
+}
+
+__clib_export __clib_flatten void *
+clib_mem_realloc_aligned (void *p, uword new_size, uword align)
+{
+ return clib_mem_heap_realloc_aligned (0, p, new_size, align);
+}
+
+__clib_export __clib_flatten void *
+clib_mem_realloc (void *p, uword new_size)
+{
+ return clib_mem_heap_realloc_aligned (0, p, new_size, CLIB_MEM_MIN_ALIGN);
+}
+
+__clib_export __clib_flatten uword
+clib_mem_heap_is_heap_object (void *heap, void *p)
+{
+ clib_mem_heap_t *h = heap ? heap : clib_mem_get_per_cpu_heap ();
+ return mspace_is_heap_object (h->mspace, p);
+}
+
+__clib_export __clib_flatten uword
+clib_mem_is_heap_object (void *p)
+{
+ return clib_mem_heap_is_heap_object (0, p);
+}
+
+__clib_export __clib_flatten void
+clib_mem_heap_free (void *heap, void *p)
+{
+ clib_mem_heap_t *h = heap ? heap : clib_mem_get_per_cpu_heap ();
+ uword size = clib_mem_size (p);
+
+ /* Make sure object is in the correct heap. */
+ ASSERT (clib_mem_heap_is_heap_object (h, p));
+
+ if (PREDICT_FALSE (h->flags & CLIB_MEM_HEAP_F_TRACED))
+ mheap_put_trace_internal (h, pointer_to_uword (p), size);
+ clib_mem_poison (p, clib_mem_size (p));
+
+ mspace_free (h->mspace, p);
+}
+
+__clib_export __clib_flatten void
+clib_mem_free (void *p)
+{
+ clib_mem_heap_free (0, p);
+}
+
+__clib_export __clib_flatten uword
+clib_mem_size (void *p)
+{
+ return mspace_usable_size (p);
+}
+
+__clib_export void
+clib_mem_free_s (void *p)
+{
+ uword size = clib_mem_size (p);
+ clib_mem_unpoison (p, size);
+ memset_s_inline (p, size, 0, size);
+ clib_mem_free (p);
+}
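Taken together, the new __clib_flatten entry points form the heap-aware allocation API. A hedged end-to-end sketch, assuming the clib_mem_create_heap (base, size, is_locked, name) signature used elsewhere in vppinfra; the sizes and heap name are illustrative:

#include <vppinfra/mem.h>

static void
heap_api_example (void)
{
  clib_mem_heap_t *h =
    clib_mem_create_heap (0, 1 << 20, 1 /* locked */, "example");
  void *p = clib_mem_heap_alloc_aligned_or_null (h, 512, 64);

  if (p)
    {
      ASSERT (clib_mem_heap_is_heap_object (h, p));
      p = clib_mem_heap_realloc (h, p, 1024);	/* may move the object */
      clib_mem_heap_free (h, p);
    }
  clib_mem_destroy_heap (h);
}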