vec_validate_aligned (pm->lookup_table, vec_len (pm->pages) *
elts_per_page - 1, CLIB_CACHE_LINE_BYTES);
- p = first * elts_per_page;
+ p = (uword) first *elts_per_page;
if (pm->flags & CLIB_PMALLOC_F_NO_PAGEMAP)
{
while (p < (uword) elts_per_page * count)
if ((pm->flags & CLIB_PMALLOC_F_NO_PAGEMAP) == 0)
mmap_flags |= MAP_LOCKED;
- if (a->log2_subpage_sz != pm->sys_log2_page_sz)
- mmap_flags |= MAP_HUGETLB | MAP_LOCKED;
-
if (a->flags & CLIB_PMALLOC_ARENA_F_SHARED_MEM)
{
mmap_flags |= MAP_SHARED;
- if (mmap_flags & MAP_HUGETLB)
+ if (a->log2_subpage_sz != pm->sys_log2_page_sz)
pm->error = clib_mem_create_hugetlb_fd ((char *) a->name, &a->fd);
else
pm->error = clib_mem_create_fd ((char *) a->name, &a->fd);
}
else
{
+ if (a->log2_subpage_sz != pm->sys_log2_page_sz)
+ mmap_flags |= MAP_HUGETLB;
+
mmap_flags |= MAP_PRIVATE | MAP_ANONYMOUS;
a->fd = -1;
}
return 0;
}
- return pm->base + (pp->index << pm->def_log2_page_sz);
+ return pm->base + ((uword) pp->index << pm->def_log2_page_sz);
}
static inline void *
int verbose = va_arg (*va, int);
u32 indent = format_get_indent (s);
- s = format (s, "page %u: phys-addr %p ", pp->index, pp->pa);
-
if (pp->chunks == 0)
return s;
return s;
}
+/* Format callback: dump the pmalloc virtual-to-physical lookup table.
+ * va_args: clib_pmalloc_main_t *pm.
+ * Emits a header row, then one row per lookup-table entry showing the
+ * virtual address of the lookup page, the physical address recovered
+ * from the table, and the lookup page size.
+ * NOTE(review): assumes each lookup_table entry stores (va - pa) for its
+ * page — consistent with `pa = va - *lookup_val` below; confirm against
+ * the table-population code (not visible in this hunk). */
+u8 *
+format_pmalloc_map (u8 * s, va_list * va)
+{
+  clib_pmalloc_main_t *pm = va_arg (*va, clib_pmalloc_main_t *);
+
+  u32 index;
+  s = format (s, "%16s %13s %8s", "virtual-addr", "physical-addr", "size");
+  vec_foreach_index (index, pm->lookup_table)
+  {
+    uword *lookup_val, pa, va;
+    lookup_val = vec_elt_at_index (pm->lookup_table, index);
+    /* VA of this lookup page: base + index scaled by the lookup page size.
+     * The uword cast keeps the shift from overflowing on large indices. */
+    va =
+      pointer_to_uword (pm->base) +
+      ((uword) index << pm->lookup_log2_page_sz);
+    /* Recover the physical address from the stored VA-PA delta. */
+    pa = va - *lookup_val;
+    s =
+      format (s, "\n %16p %13p %8U", uword_to_pointer (va, u64),
+	      uword_to_pointer (pa, u64), format_log2_page_size,
+	      pm->lookup_log2_page_sz);
+  }
+  return s;
+}
+
/*
* fd.io coding-style-patch-verification: ON
*