return;
}
- while (__sync_lock_test_and_set (&h->lock, 1))
+ while (clib_atomic_test_and_set (&h->lock))
;
h->owner_cpu = my_cpu;
clib_mem_vm_map (h, sizeof (h[0]));
/* Zero vector header: both heap header and vector length. */
- memset (h, 0, sizeof (h[0]));
+ clib_memset (h, 0, sizeof (h[0]));
_vec_len (v) = 0;
h->vm_alloc_offset_from_header = (void *) h - memory;
(clib_address_t) v, h->max_size);
/* Initialize free list heads to empty. */
- memset (h->first_free_elt_uoffset_by_bin, 0xFF,
- sizeof (h->first_free_elt_uoffset_by_bin));
+ clib_memset (h->first_free_elt_uoffset_by_bin, 0xFF,
+ sizeof (h->first_free_elt_uoffset_by_bin));
return v;
}
return mheap_alloc_with_flags (memory, size, flags);
}
+/* Allocate an mheap of SIZE bytes.  If MEMORY is non-zero the heap is
+   built in caller-supplied memory and MHEAP_FLAG_DISABLE_VM is set
+   (presumably to stop the heap from growing via VM mapping — confirm
+   flag semantics in mheap.h).  When LOCKED is non-zero the returned
+   heap is marked MHEAP_FLAG_THREAD_SAFE so subsequent operations take
+   the heap lock.  Returns the heap vector, or 0 on allocation failure. */
+void *
+mheap_alloc_with_lock (void *memory, uword size, int locked)
+{
+ uword flags = 0;
+ void *rv;
+
+ if (memory != 0)
+ flags |= MHEAP_FLAG_DISABLE_VM;
+
+/* Small-object cache needs 128-bit vector ops; enable only when available. */
+#ifdef CLIB_HAVE_VEC128
+ flags |= MHEAP_FLAG_SMALL_OBJECT_CACHE;
+#endif
+
+ rv = mheap_alloc_with_flags (memory, size, flags);
+
+ /* Only tag the heap thread-safe if allocation actually succeeded. */
+ if (rv && locked)
+ {
+ mheap_t *h = mheap_header (rv);
+ h->flags |= MHEAP_FLAG_THREAD_SAFE;
+ }
+ return rv;
+}
+
void *
_mheap_free (void *v)
{
mheap_elt_t *e;
uword i, n_hist;
- memset (hist, 0, sizeof (hist));
+ clib_memset (hist, 0, sizeof (hist));
n_hist = 0;
for (e = v;
mheap_trace_t trace;
/* Spurious Coverity warnings be gone. */
- memset (&trace, 0, sizeof (trace));
+ clib_memset (&trace, 0, sizeof (trace));
n_callers = clib_backtrace (trace.callers, ARRAY_LEN (trace.callers),
/* Skip mheap_get_aligned's frame */ 1);
{
hash_unset_mem (tm->trace_by_callers, t->callers);
vec_add1 (tm->trace_free_list, trace_index);
- memset (t, 0, sizeof (t[0]));
+ clib_memset (t, 0, sizeof (t[0]));
}
}