@@ -283,7 +283,7 @@ test_memzone_reserve_max(void)
/* align everything */
last_addr = RTE_PTR_ALIGN_CEIL(ms[memseg_idx].addr, RTE_CACHE_LINE_SIZE);
len = ms[memseg_idx].len - RTE_PTR_DIFF(last_addr, ms[memseg_idx].addr);
- len &= ~((size_t) CACHE_LINE_MASK);
+ len &= ~((size_t) RTE_CACHE_LINE_MASK);
/* cycle through all memzones */
for (memzone_idx = 0; memzone_idx < RTE_MAX_MEMZONE; memzone_idx++) {
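With a 64-byte cache line, RTE_CACHE_LINE_MASK is 63 (0x3f), so the masking above clears the low six bits of len and truncates it down to a whole number of cache lines (1000 becomes 960, for example); a length that is already a multiple of 64 is left unchanged.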
@@ -376,7 +376,7 @@ test_memzone_reserve_max_aligned(void)
/* align everything */
last_addr = RTE_PTR_ALIGN_CEIL(ms[memseg_idx].addr, RTE_CACHE_LINE_SIZE);
len = ms[memseg_idx].len - RTE_PTR_DIFF(last_addr, ms[memseg_idx].addr);
- len &= ~((size_t) CACHE_LINE_MASK);
+ len &= ~((size_t) RTE_CACHE_LINE_MASK);
/* cycle through all memzones */
for (memzone_idx = 0; memzone_idx < RTE_MAX_MEMZONE; memzone_idx++) {
@@ -474,11 +474,11 @@ test_memzone_aligned(void)
printf("Unable to reserve 64-byte aligned memzone!\n");
return -1;
}
- if ((memzone_aligned_32->phys_addr & CACHE_LINE_MASK) != 0)
+ if ((memzone_aligned_32->phys_addr & RTE_CACHE_LINE_MASK) != 0)
return -1;
- if (((uintptr_t) memzone_aligned_32->addr & CACHE_LINE_MASK) != 0)
+ if (((uintptr_t) memzone_aligned_32->addr & RTE_CACHE_LINE_MASK) != 0)
return -1;
- if ((memzone_aligned_32->len & CACHE_LINE_MASK) != 0)
+ if ((memzone_aligned_32->len & RTE_CACHE_LINE_MASK) != 0)
return -1;
if (memzone_aligned_128 == NULL) {
@@ -489,7 +489,7 @@ test_memzone_aligned(void)
return -1;
if (((uintptr_t) memzone_aligned_128->addr & 127) != 0)
return -1;
- if ((memzone_aligned_128->len & CACHE_LINE_MASK) != 0)
+ if ((memzone_aligned_128->len & RTE_CACHE_LINE_MASK) != 0)
return -1;
if (memzone_aligned_256 == NULL) {
@@ -500,7 +500,7 @@ test_memzone_aligned(void)
return -1;
if (((uintptr_t) memzone_aligned_256->addr & 255) != 0)
return -1;
- if ((memzone_aligned_256->len & CACHE_LINE_MASK) != 0)
+ if ((memzone_aligned_256->len & RTE_CACHE_LINE_MASK) != 0)
return -1;
if (memzone_aligned_512 == NULL) {
@@ -511,7 +511,7 @@ test_memzone_aligned(void)
return -1;
if (((uintptr_t) memzone_aligned_512->addr & 511) != 0)
return -1;
- if ((memzone_aligned_512->len & CACHE_LINE_MASK) != 0)
+ if ((memzone_aligned_512->len & RTE_CACHE_LINE_MASK) != 0)
return -1;
if (memzone_aligned_1024 == NULL) {
@@ -522,7 +522,7 @@ test_memzone_aligned(void)
return -1;
if (((uintptr_t) memzone_aligned_1024->addr & 1023) != 0)
return -1;
- if ((memzone_aligned_1024->len & CACHE_LINE_MASK) != 0)
+ if ((memzone_aligned_1024->len & RTE_CACHE_LINE_MASK) != 0)
return -1;
/* check that zones don't overlap */
@@ -588,7 +588,7 @@ check_memzone_bounded(const char *name, uint32_t len, uint32_t align,
return (-1);
}
- if ((mz->len & CACHE_LINE_MASK) != 0 || mz->len < len ||
+ if ((mz->len & RTE_CACHE_LINE_MASK) != 0 || mz->len < len ||
mz->len < RTE_CACHE_LINE_SIZE) {
printf("%s(%s): invalid length\n",
__func__, mz->name);
@@ -952,17 +952,17 @@ test_memzone(void)
/* check cache-line alignments */
printf("check alignments and lengths\n");
- if ((memzone1->phys_addr & CACHE_LINE_MASK) != 0)
+ if ((memzone1->phys_addr & RTE_CACHE_LINE_MASK) != 0)
return -1;
- if ((memzone2->phys_addr & CACHE_LINE_MASK) != 0)
+ if ((memzone2->phys_addr & RTE_CACHE_LINE_MASK) != 0)
return -1;
- if (memzone3 != NULL && (memzone3->phys_addr & CACHE_LINE_MASK) != 0)
+ if (memzone3 != NULL && (memzone3->phys_addr & RTE_CACHE_LINE_MASK) != 0)
return -1;
- if ((memzone1->len & CACHE_LINE_MASK) != 0 || memzone1->len == 0)
+ if ((memzone1->len & RTE_CACHE_LINE_MASK) != 0 || memzone1->len == 0)
return -1;
- if ((memzone2->len & CACHE_LINE_MASK) != 0 || memzone2->len == 0)
+ if ((memzone2->len & RTE_CACHE_LINE_MASK) != 0 || memzone2->len == 0)
return -1;
- if (memzone3 != NULL && ((memzone3->len & CACHE_LINE_MASK) != 0 ||
+ if (memzone3 != NULL && ((memzone3->len & RTE_CACHE_LINE_MASK) != 0 ||
memzone3->len == 0))
return -1;
if (memzone4->len != 1024)
@@ -181,7 +181,7 @@ rte_rdtsc(void)
*/
#define SOCKET_ID_ANY -1 /**< Any NUMA socket. */
#define RTE_CACHE_LINE_SIZE 64 /**< Cache line size. */
-#define CACHE_LINE_MASK (RTE_CACHE_LINE_SIZE-1) /**< Cache line mask. */
+#define RTE_CACHE_LINE_MASK (RTE_CACHE_LINE_SIZE-1) /**< Cache line mask. */
/**
* Force alignment to cache line.
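The mask is what makes the one-line cache-line arithmetic used throughout this patch work. A minimal sketch of the three idioms, with the two macros repeated so the snippet stands alone; the helper names are illustrative, not DPDK API:

    #include <stdint.h>
    #include <stddef.h>

    #define RTE_CACHE_LINE_SIZE 64                        /* cache line size */
    #define RTE_CACHE_LINE_MASK (RTE_CACHE_LINE_SIZE - 1) /* low-bit mask */

    /* true when an address or length already sits on a cache-line boundary */
    static inline int is_cache_aligned(uintptr_t v)
    {
        return (v & RTE_CACHE_LINE_MASK) == 0;
    }

    /* round a length down to a whole number of cache lines */
    static inline size_t cache_floor(size_t len)
    {
        return len & ~((size_t) RTE_CACHE_LINE_MASK);
    }

    /* round a length up to the next cache line (caller guards against overflow) */
    static inline size_t cache_ceil(size_t len)
    {
        return (len + RTE_CACHE_LINE_MASK) & ~((size_t) RTE_CACHE_LINE_MASK);
    }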
@@ -450,7 +450,7 @@ rte_distributor_create(const char *name,
const struct rte_memzone *mz;
/* compilation-time checks */
- RTE_BUILD_BUG_ON((sizeof(*d) & CACHE_LINE_MASK) != 0);
+ RTE_BUILD_BUG_ON((sizeof(*d) & RTE_CACHE_LINE_MASK) != 0);
RTE_BUILD_BUG_ON((RTE_DISTRIB_MAX_WORKERS & 7) != 0);
RTE_BUILD_BUG_ON(RTE_DISTRIB_MAX_WORKERS >
sizeof(d->in_flight_bitmask) * CHAR_BIT);
@@ -169,13 +169,13 @@ memzone_reserve_aligned_thread_unsafe(const char *name, size_t len,
/* align length on cache boundary. Check for overflow before doing so */
- if (len > SIZE_MAX - CACHE_LINE_MASK) {
+ if (len > SIZE_MAX - RTE_CACHE_LINE_MASK) {
rte_errno = EINVAL; /* requested size too big */
return NULL;
}
- len += CACHE_LINE_MASK;
- len &= ~((size_t) CACHE_LINE_MASK);
+ len += RTE_CACHE_LINE_MASK;
+ len &= ~((size_t) RTE_CACHE_LINE_MASK);
/* save minimal requested length */
requested_len = RTE_MAX((size_t)RTE_CACHE_LINE_SIZE, len);
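The order matters: adding RTE_CACHE_LINE_MASK (63) before masking rounds len up to the next multiple of 64, but a len within 63 bytes of SIZE_MAX would wrap, which is why the overflow test comes first. A len of 0 still rounds to 0; the RTE_MAX against RTE_CACHE_LINE_SIZE just above then guarantees that at least one full cache line is requested.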
@@ -421,8 +421,8 @@ memseg_sanitize(struct rte_memseg *memseg)
unsigned virt_align;
unsigned off;
- phys_align = memseg->phys_addr & CACHE_LINE_MASK;
- virt_align = (unsigned long)memseg->addr & CACHE_LINE_MASK;
+ phys_align = memseg->phys_addr & RTE_CACHE_LINE_MASK;
+ virt_align = (unsigned long)memseg->addr & RTE_CACHE_LINE_MASK;
/*
* sanity check: phys_addr and addr must have the same
@@ -438,13 +438,13 @@ memseg_sanitize(struct rte_memseg *memseg)
}
/* align start address */
- off = (RTE_CACHE_LINE_SIZE - phys_align) & CACHE_LINE_MASK;
+ off = (RTE_CACHE_LINE_SIZE - phys_align) & RTE_CACHE_LINE_MASK;
memseg->phys_addr += off;
memseg->addr = (char *)memseg->addr + off;
memseg->len -= off;
/* align end address */
- memseg->len &= ~((uint64_t)CACHE_LINE_MASK);
+ memseg->len &= ~((uint64_t)RTE_CACHE_LINE_MASK);
return 0;
}
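Worked through with 64-byte lines: if phys_align is 24, off = (64 - 24) & 63 = 40, so the segment start advances 40 bytes to the next cache-line boundary and len shrinks by the same amount; if phys_align is already 0, the trailing mask turns (64 - 0) into 0 and the start is left untouched. The final len &= ~mask then drops any partial cache line at the end of the segment.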
@@ -62,7 +62,7 @@ enum rte_page_sizes {
#ifndef RTE_CACHE_LINE_SIZE
#define RTE_CACHE_LINE_SIZE 64 /**< Cache line size. */
#endif
-#define CACHE_LINE_MASK (RTE_CACHE_LINE_SIZE-1) /**< Cache line mask. */
+#define RTE_CACHE_LINE_MASK (RTE_CACHE_LINE_SIZE-1) /**< Cache line mask. */
#define CACHE_LINE_ROUNDUP(size) \
(RTE_CACHE_LINE_SIZE * ((size + RTE_CACHE_LINE_SIZE - 1) / RTE_CACHE_LINE_SIZE))
@@ -114,7 +114,7 @@ static unsigned optimize_object_size(unsigned obj_size)
nrank = 1;
/* process new object size */
- new_obj_size = (obj_size + CACHE_LINE_MASK) / RTE_CACHE_LINE_SIZE;
+ new_obj_size = (obj_size + RTE_CACHE_LINE_MASK) / RTE_CACHE_LINE_SIZE;
while (get_gcd(new_obj_size, nrank * nchan) != 1)
new_obj_size++;
return new_obj_size * RTE_CACHE_LINE_SIZE;
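The point of the gcd loop is to keep consecutive objects from always starting on the same memory channel and rank: the object size is first expressed in cache lines, rounded up, then grown until it is coprime with nrank * nchan. For example, with 4 channels and 1 rank, a 256-byte object gives (256 + 63) / 64 = 4 lines, gcd(4, 4) != 1, so the size is bumped to 5 lines and 320 bytes are returned; a 320-byte object already maps to 5 lines and comes back unchanged.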
@@ -270,8 +270,8 @@ rte_mempool_calc_obj_size(uint32_t elt_size, uint32_t flags,
sz->total_size = sz->header_size + sz->elt_size +
sz->trailer_size;
sz->trailer_size += ((RTE_CACHE_LINE_SIZE -
- (sz->total_size & CACHE_LINE_MASK)) &
- CACHE_LINE_MASK);
+ (sz->total_size & RTE_CACHE_LINE_MASK)) &
+ RTE_CACHE_LINE_MASK);
}
/*
@@ -418,18 +418,18 @@ rte_mempool_xmem_create(const char *name, unsigned n, unsigned elt_size,
/* compilation-time checks */
RTE_BUILD_BUG_ON((sizeof(struct rte_mempool) &
- CACHE_LINE_MASK) != 0);
+ RTE_CACHE_LINE_MASK) != 0);
#if RTE_MEMPOOL_CACHE_MAX_SIZE > 0
RTE_BUILD_BUG_ON((sizeof(struct rte_mempool_cache) &
- CACHE_LINE_MASK) != 0);
+ RTE_CACHE_LINE_MASK) != 0);
RTE_BUILD_BUG_ON((offsetof(struct rte_mempool, local_cache) &
- CACHE_LINE_MASK) != 0);
+ RTE_CACHE_LINE_MASK) != 0);
#endif
#ifdef RTE_LIBRTE_MEMPOOL_DEBUG
RTE_BUILD_BUG_ON((sizeof(struct rte_mempool_debug_stats) &
- CACHE_LINE_MASK) != 0);
+ RTE_CACHE_LINE_MASK) != 0);
RTE_BUILD_BUG_ON((offsetof(struct rte_mempool, stats) &
- CACHE_LINE_MASK) != 0);
+ RTE_CACHE_LINE_MASK) != 0);
#endif
/* check that we have an initialised tail queue */
@@ -489,7 +489,7 @@ rte_mempool_xmem_create(const char *name, unsigned n, unsigned elt_size,
* cache-aligned
*/
private_data_size = (private_data_size +
- CACHE_LINE_MASK) & (~CACHE_LINE_MASK);
+ RTE_CACHE_LINE_MASK) & (~RTE_CACHE_LINE_MASK);
if (! rte_eal_has_hugepages()) {
/*
@@ -120,18 +120,18 @@ rte_ring_init(struct rte_ring *r, const char *name, unsigned count,
{
/* compilation-time checks */
RTE_BUILD_BUG_ON((sizeof(struct rte_ring) &
- CACHE_LINE_MASK) != 0);
+ RTE_CACHE_LINE_MASK) != 0);
#ifdef RTE_RING_SPLIT_PROD_CONS
RTE_BUILD_BUG_ON((offsetof(struct rte_ring, cons) &
- CACHE_LINE_MASK) != 0);
+ RTE_CACHE_LINE_MASK) != 0);
#endif
RTE_BUILD_BUG_ON((offsetof(struct rte_ring, prod) &
- CACHE_LINE_MASK) != 0);
+ RTE_CACHE_LINE_MASK) != 0);
#ifdef RTE_LIBRTE_RING_DEBUG
RTE_BUILD_BUG_ON((sizeof(struct rte_ring_debug_stats) &
- CACHE_LINE_MASK) != 0);
+ RTE_CACHE_LINE_MASK) != 0);
RTE_BUILD_BUG_ON((offsetof(struct rte_ring, stats) &
- CACHE_LINE_MASK) != 0);
+ RTE_CACHE_LINE_MASK) != 0);
#endif
/* init the ring structure */
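These build-time asserts break the compile as soon as a structure stops being a whole number of cache lines, or a field that is meant to start on its own line drifts off a 64-byte boundary. A sketch of the same pattern on a made-up structure (my_counters and its explicit padding are hypothetical, placed inside an init function as the library does above):

    /* hypothetical structure kept at exactly one cache line by explicit padding */
    struct my_counters {
        uint64_t enq;
        uint64_t deq;
        char pad[RTE_CACHE_LINE_SIZE - 2 * sizeof(uint64_t)];
    };

    static void
    my_counters_layout_checks(void)
    {
        /* fails to compile if the padding is ever removed or miscounted */
        RTE_BUILD_BUG_ON((sizeof(struct my_counters) & RTE_CACHE_LINE_MASK) != 0);
    }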
@@ -249,7 +249,7 @@ rte_bitmap_init(uint32_t n_bits, uint8_t *mem, uint32_t mem_size)
return NULL;
}
- if ((mem == NULL) || (((uintptr_t) mem) & CACHE_LINE_MASK)) {
+ if ((mem == NULL) || (((uintptr_t) mem) & RTE_CACHE_LINE_MASK)) {
return NULL;
}
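Unlike the compile-time checks above, this is a run-time guard on a caller-supplied buffer: a pointer whose low six bits are not all zero is rejected outright, since the rest of the bitmap layout assumes a cache-line-aligned base and the constructor refuses an unaligned one rather than fixing it up.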