@@ -37,12 +37,12 @@
/* reference count offset in mbuf rearm data */
#define REFCNT_BITS_OFFSET ((offsetof(struct rte_mbuf, refcnt) - \
- offsetof(struct rte_mbuf, rearm_data)) * BYTE_SIZE)
+ offsetof(struct rte_mbuf, mbuf_rearm_data)) * BYTE_SIZE)
#ifdef CC_AVX512_SUPPORT
/* segment number offset in mbuf rearm data */
#define SEG_NUM_BITS_OFFSET ((offsetof(struct rte_mbuf, nb_segs) - \
- offsetof(struct rte_mbuf, rearm_data)) * BYTE_SIZE)
+ offsetof(struct rte_mbuf, mbuf_rearm_data)) * BYTE_SIZE)
/* default rearm data */
#define DEFAULT_REARM_DATA (1ULL << SEG_NUM_BITS_OFFSET | \
1ULL << REFCNT_BITS_OFFSET)
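The two shifts above place refcnt and nb_segs at their byte positions within the 8-byte rearm word, so a single 64-bit store can set both to 1. A minimal standalone sketch of the same arithmetic, built on a toy struct that mirrors the rearm region's field order (data_off, refcnt, nb_segs, port) but is not the authoritative struct rte_mbuf layout:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define BYTE_SIZE 8

/* toy stand-in for the rearm region; the real layout lives in
 * rte_mbuf_core.h and this only mirrors its field order */
struct toy_mbuf {
	union {
		uint64_t mbuf_rearm_data;
		struct {
			uint16_t data_off;
			uint16_t refcnt;
			uint16_t nb_segs;
			uint16_t port;
		};
	};
};

#define REFCNT_BITS_OFFSET ((offsetof(struct toy_mbuf, refcnt) - \
	offsetof(struct toy_mbuf, mbuf_rearm_data)) * BYTE_SIZE)
#define SEG_NUM_BITS_OFFSET ((offsetof(struct toy_mbuf, nb_segs) - \
	offsetof(struct toy_mbuf, mbuf_rearm_data)) * BYTE_SIZE)
#define DEFAULT_REARM_DATA (1ULL << SEG_NUM_BITS_OFFSET | \
	1ULL << REFCNT_BITS_OFFSET)

int main(void)
{
	struct toy_mbuf m = { .mbuf_rearm_data = DEFAULT_REARM_DATA };

	/* on a little-endian target this prints refcnt=1 nb_segs=1 */
	printf("refcnt=%u nb_segs=%u\n", m.refcnt, m.nb_segs);
	return 0;
}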
@@ -36,10 +36,10 @@
/* Load four mbufs rearm data */
RTE_BUILD_BUG_ON(REFCNT_BITS_OFFSET >= 64);
RTE_BUILD_BUG_ON(SEG_NUM_BITS_OFFSET >= 64);
- __m256i mbufs = _mm256_set_epi64x(*tx_pkts[3]->rearm_data,
- *tx_pkts[2]->rearm_data,
- *tx_pkts[1]->rearm_data,
- *tx_pkts[0]->rearm_data);
+ __m256i mbufs = _mm256_set_epi64x(*(uint64_t *)&tx_pkts[3]->mbuf_rearm_data,
+ *(uint64_t *)&tx_pkts[2]->mbuf_rearm_data,
+ *(uint64_t *)&tx_pkts[1]->mbuf_rearm_data,
+ *(uint64_t *)&tx_pkts[0]->mbuf_rearm_data);
/* refcnt=1 and nb_segs=1 */
__m256i mbuf_ref = _mm256_set1_epi64x(DEFAULT_REARM_DATA);
@@ -54,7 +54,7 @@
/* Check headroom is enough */
const __mmask16 data_mask = 0x1 | 0x1 << 4 | 0x1 << 8 | 0x1 << 12;
RTE_BUILD_BUG_ON(offsetof(struct rte_mbuf, data_off) !=
- offsetof(struct rte_mbuf, rearm_data));
+ offsetof(struct rte_mbuf, mbuf_rearm_data));
cmp = _mm256_mask_cmplt_epu16_mask(data_mask, mbufs, head_rooms);
if (unlikely(cmp))
return -1;
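The masked compare is a vectorized headroom test: data_off is lane 0 of each mbuf's rearm word (hence the build-time assert), and the batch is rejected if any packet's headroom is smaller than the virtio-net header. A scalar sketch of the same check, assuming head_size and the PACKED_BATCH_SIZE of 4 from the enclosing function:

	/* scalar equivalent of the masked compare above (sketch only;
	 * head_size is the virtio-net header size from the caller) */
	int i;

	for (i = 0; i < PACKED_BATCH_SIZE; i++) {
		if (unlikely(tx_pkts[i]->data_off < head_size))
			return -1;
	}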
@@ -187,7 +187,7 @@
rx_pkts[i] = (struct rte_mbuf *)vq->vq_descx[id + i].cookie;
rte_packet_prefetch(rte_pktmbuf_mtod(rx_pkts[i], void *));
- addrs[i] = (uintptr_t)rx_pkts[i]->rx_descriptor_fields1;
+ addrs[i] = (uintptr_t)&rx_pkts[i]->mbuf_rx_descriptor_fields1;
}
/*
@@ -205,7 +205,7 @@
/* assert offset of data_len */
RTE_BUILD_BUG_ON(offsetof(struct rte_mbuf, data_len) !=
- offsetof(struct rte_mbuf, rx_descriptor_fields1) + 8);
+ offsetof(struct rte_mbuf, mbuf_rx_descriptor_fields1) + 8);
__m512i v_index = _mm512_set_epi64(addrs[3] + 8, addrs[3],
addrs[2] + 8, addrs[2],
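v_index pairs two byte addresses per mbuf: addrs[i], the start of the descriptor-fields region (packet_type and pkt_len), and addrs[i] + 8, where the assert above pins data_len. Per mbuf, the 64-bit scatter this index feeds amounts to the sketch below, with v_value[] as a hypothetical stand-in for the gathered lane contents:

	/* sketch of the per-mbuf stores behind the scatter; v_value[]
	 * is a hypothetical stand-in for the computed vector lanes */
	for (i = 0; i < PACKED_BATCH_SIZE; i++) {
		*(uint64_t *)addrs[i] = v_value[2 * i];
		*(uint64_t *)(addrs[i] + 8) = v_value[2 * i + 1];
	}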
@@ -59,10 +59,10 @@
uint8x16x2_t mbuf;
/* Load four mbufs rearm data. */
RTE_BUILD_BUG_ON(REFCNT_BITS_OFFSET >= 64);
- pkts[0] = vld1_u16((uint16_t *)&tx_pkts[0]->rearm_data);
- pkts[1] = vld1_u16((uint16_t *)&tx_pkts[1]->rearm_data);
- pkts[2] = vld1_u16((uint16_t *)&tx_pkts[2]->rearm_data);
- pkts[3] = vld1_u16((uint16_t *)&tx_pkts[3]->rearm_data);
+ pkts[0] = vld1_u16((uint16_t *)&tx_pkts[0]->mbuf_rearm_data);
+ pkts[1] = vld1_u16((uint16_t *)&tx_pkts[1]->mbuf_rearm_data);
+ pkts[2] = vld1_u16((uint16_t *)&tx_pkts[2]->mbuf_rearm_data);
+ pkts[3] = vld1_u16((uint16_t *)&tx_pkts[3]->mbuf_rearm_data);
mbuf.val[0] = vreinterpretq_u8_u16(vcombine_u16(pkts[0], pkts[1]));
mbuf.val[1] = vreinterpretq_u8_u16(vcombine_u16(pkts[2], pkts[3]));
@@ -78,7 +78,7 @@
/* Check headroom is enough. */
uint16x4_t head_rooms = vdup_n_u16(head_size);
RTE_BUILD_BUG_ON(offsetof(struct rte_mbuf, data_off) !=
- offsetof(struct rte_mbuf, rearm_data));
+ offsetof(struct rte_mbuf, mbuf_rearm_data));
uint16x4_t data_offset = vreinterpret_u16_u8(vqtbl2_u8(mbuf, data_msk));
uint64x1_t cmp2 = vreinterpret_u64_u16(vclt_u16(data_offset, head_rooms));
if (unlikely(vget_lane_u64(cmp2, 0)))
@@ -263,10 +263,10 @@
pkt_mb[3] = vreinterpretq_u64_u16(vsubq_u16(
vreinterpretq_u16_u64(pkt_mb[3]), len_adjust));
- vst1q_u64((void *)&rx_pkts[0]->rx_descriptor_fields1, pkt_mb[0]);
- vst1q_u64((void *)&rx_pkts[1]->rx_descriptor_fields1, pkt_mb[1]);
- vst1q_u64((void *)&rx_pkts[2]->rx_descriptor_fields1, pkt_mb[2]);
- vst1q_u64((void *)&rx_pkts[3]->rx_descriptor_fields1, pkt_mb[3]);
+ vst1q_u64((void *)&rx_pkts[0]->mbuf_rx_descriptor_fields1, pkt_mb[0]);
+ vst1q_u64((void *)&rx_pkts[1]->mbuf_rx_descriptor_fields1, pkt_mb[1]);
+ vst1q_u64((void *)&rx_pkts[2]->mbuf_rx_descriptor_fields1, pkt_mb[2]);
+ vst1q_u64((void *)&rx_pkts[3]->mbuf_rx_descriptor_fields1, pkt_mb[3]);
if (hw->has_rx_offload) {
virtio_for_each_try_unroll(i, 0, PACKED_BATCH_SIZE) {
@@ -39,9 +39,9 @@
mb_def.port = vq->hw->port_id;
rte_mbuf_refcnt_set(&mb_def, 1);
- /* prevent compiler reordering: rearm_data covers previous fields */
+ /* prevent compiler reordering: mbuf_rearm_data covers previous fields */
rte_compiler_barrier();
- p = (uintptr_t)&mb_def.rearm_data;
+ p = (uintptr_t)&mb_def.mbuf_rearm_data;
rxq->mbuf_initializer = *(uint64_t *)p;
return 0;
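mbuf_initializer thus caches the template's whole 8-byte rearm word (data_off, refcnt, nb_segs, port), letting the rearm loop in the next hunk reset all four fields with one 64-bit store. With the toy_mbuf from the first sketch, the consuming side of the pattern is roughly:

	/* sketch of the template-store pattern using the toy_mbuf above;
	 * one 64-bit assignment rearms all four header fields at once */
	static inline void
	toy_rearm(struct toy_mbuf **ring, int n, uint64_t mbuf_initializer)
	{
		int i;

		for (i = 0; i < n; i++)
			ring[i]->mbuf_rearm_data = mbuf_initializer;
	}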
@@ -41,7 +41,7 @@
for (i = 0; i < RTE_VIRTIO_VPMD_RX_REARM_THRESH; i++) {
uintptr_t p;
- p = (uintptr_t)&sw_ring[i]->rearm_data;
+ p = (uintptr_t)&sw_ring[i]->mbuf_rearm_data;
*(uint64_t *)p = rxvq->mbuf_initializer;
start_dp[i].addr = VIRTIO_MBUF_ADDR(sw_ring[i], vq) +
@@ -138,9 +138,9 @@
((__vector unsigned short)pkt_mb[0] + len_adjust);
pkt_mb[1] = (__vector unsigned char)
((__vector unsigned short)pkt_mb[1] + len_adjust);
- *(__vector unsigned char *)&rx_pkts[0]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)&rx_pkts[0]->mbuf_rx_descriptor_fields1 =
pkt_mb[0];
- *(__vector unsigned char *)&rx_pkts[1]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)&rx_pkts[1]->mbuf_rx_descriptor_fields1 =
pkt_mb[1];
pkt_mb[2] = vec_perm(desc[1], zero, shuf_msk1);
@@ -149,9 +149,9 @@
((__vector unsigned short)pkt_mb[2] + len_adjust);
pkt_mb[3] = (__vector unsigned char)
((__vector unsigned short)pkt_mb[3] + len_adjust);
- *(__vector unsigned char *)&rx_pkts[2]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)&rx_pkts[2]->mbuf_rx_descriptor_fields1 =
pkt_mb[2];
- *(__vector unsigned char *)&rx_pkts[3]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)&rx_pkts[3]->mbuf_rx_descriptor_fields1 =
pkt_mb[3];
pkt_mb[4] = vec_perm(desc[2], zero, shuf_msk1);
@@ -160,9 +160,9 @@
((__vector unsigned short)pkt_mb[4] + len_adjust);
pkt_mb[5] = (__vector unsigned char)
((__vector unsigned short)pkt_mb[5] + len_adjust);
- *(__vector unsigned char *)&rx_pkts[4]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)&rx_pkts[4]->mbuf_rx_descriptor_fields1 =
pkt_mb[4];
- *(__vector unsigned char *)&rx_pkts[5]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)&rx_pkts[5]->mbuf_rx_descriptor_fields1 =
pkt_mb[5];
pkt_mb[6] = vec_perm(desc[3], zero, shuf_msk1);
@@ -171,9 +171,9 @@
((__vector unsigned short)pkt_mb[6] + len_adjust);
pkt_mb[7] = (__vector unsigned char)
((__vector unsigned short)pkt_mb[7] + len_adjust);
- *(__vector unsigned char *)&rx_pkts[6]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)&rx_pkts[6]->mbuf_rx_descriptor_fields1 =
pkt_mb[6];
- *(__vector unsigned char *)&rx_pkts[7]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)&rx_pkts[7]->mbuf_rx_descriptor_fields1 =
pkt_mb[7];
if (unlikely(nb_used <= RTE_VIRTIO_DESC_PER_LOOP)) {
@@ -138,9 +138,9 @@
vreinterpretq_u16_u64(pkt_mb[1]), len_adjust));
pkt_mb[0] = vreinterpretq_u64_u16(vsubq_u16(
vreinterpretq_u16_u64(pkt_mb[0]), len_adjust));
- vst1q_u64((void *)&rx_pkts[1]->rx_descriptor_fields1,
+ vst1q_u64((void *)&rx_pkts[1]->mbuf_rx_descriptor_fields1,
pkt_mb[1]);
- vst1q_u64((void *)&rx_pkts[0]->rx_descriptor_fields1,
+ vst1q_u64((void *)&rx_pkts[0]->mbuf_rx_descriptor_fields1,
pkt_mb[0]);
pkt_mb[3] = vreinterpretq_u64_u8(vqtbl1q_u8(
@@ -151,9 +151,9 @@
vreinterpretq_u16_u64(pkt_mb[3]), len_adjust));
pkt_mb[2] = vreinterpretq_u64_u16(vsubq_u16(
vreinterpretq_u16_u64(pkt_mb[2]), len_adjust));
- vst1q_u64((void *)&rx_pkts[3]->rx_descriptor_fields1,
+ vst1q_u64((void *)&rx_pkts[3]->mbuf_rx_descriptor_fields1,
pkt_mb[3]);
- vst1q_u64((void *)&rx_pkts[2]->rx_descriptor_fields1,
+ vst1q_u64((void *)&rx_pkts[2]->mbuf_rx_descriptor_fields1,
pkt_mb[2]);
pkt_mb[5] = vreinterpretq_u64_u8(vqtbl1q_u8(
@@ -164,9 +164,9 @@
vreinterpretq_u16_u64(pkt_mb[5]), len_adjust));
pkt_mb[4] = vreinterpretq_u64_u16(vsubq_u16(
vreinterpretq_u16_u64(pkt_mb[4]), len_adjust));
- vst1q_u64((void *)&rx_pkts[5]->rx_descriptor_fields1,
+ vst1q_u64((void *)&rx_pkts[5]->mbuf_rx_descriptor_fields1,
pkt_mb[5]);
- vst1q_u64((void *)&rx_pkts[4]->rx_descriptor_fields1,
+ vst1q_u64((void *)&rx_pkts[4]->mbuf_rx_descriptor_fields1,
pkt_mb[4]);
pkt_mb[7] = vreinterpretq_u64_u8(vqtbl1q_u8(
@@ -177,9 +177,9 @@
vreinterpretq_u16_u64(pkt_mb[7]), len_adjust));
pkt_mb[6] = vreinterpretq_u64_u16(vsubq_u16(
vreinterpretq_u16_u64(pkt_mb[6]), len_adjust));
- vst1q_u64((void *)&rx_pkts[7]->rx_descriptor_fields1,
+ vst1q_u64((void *)&rx_pkts[7]->mbuf_rx_descriptor_fields1,
pkt_mb[7]);
- vst1q_u64((void *)&rx_pkts[6]->rx_descriptor_fields1,
+ vst1q_u64((void *)&rx_pkts[6]->mbuf_rx_descriptor_fields1,
pkt_mb[6]);
if (unlikely(nb_used <= RTE_VIRTIO_DESC_PER_LOOP)) {
@@ -134,36 +134,36 @@
pkt_mb[0] = _mm_shuffle_epi8(desc[0], shuf_msk1);
pkt_mb[1] = _mm_add_epi16(pkt_mb[1], len_adjust);
pkt_mb[0] = _mm_add_epi16(pkt_mb[0], len_adjust);
- _mm_storeu_si128((void *)&rx_pkts[1]->rx_descriptor_fields1,
+ _mm_storeu_si128((void *)&rx_pkts[1]->mbuf_rx_descriptor_fields1,
pkt_mb[1]);
- _mm_storeu_si128((void *)&rx_pkts[0]->rx_descriptor_fields1,
+ _mm_storeu_si128((void *)&rx_pkts[0]->mbuf_rx_descriptor_fields1,
pkt_mb[0]);
pkt_mb[3] = _mm_shuffle_epi8(desc[1], shuf_msk2);
pkt_mb[2] = _mm_shuffle_epi8(desc[1], shuf_msk1);
pkt_mb[3] = _mm_add_epi16(pkt_mb[3], len_adjust);
pkt_mb[2] = _mm_add_epi16(pkt_mb[2], len_adjust);
- _mm_storeu_si128((void *)&rx_pkts[3]->rx_descriptor_fields1,
+ _mm_storeu_si128((void *)&rx_pkts[3]->mbuf_rx_descriptor_fields1,
pkt_mb[3]);
- _mm_storeu_si128((void *)&rx_pkts[2]->rx_descriptor_fields1,
+ _mm_storeu_si128((void *)&rx_pkts[2]->mbuf_rx_descriptor_fields1,
pkt_mb[2]);
pkt_mb[5] = _mm_shuffle_epi8(desc[2], shuf_msk2);
pkt_mb[4] = _mm_shuffle_epi8(desc[2], shuf_msk1);
pkt_mb[5] = _mm_add_epi16(pkt_mb[5], len_adjust);
pkt_mb[4] = _mm_add_epi16(pkt_mb[4], len_adjust);
- _mm_storeu_si128((void *)&rx_pkts[5]->rx_descriptor_fields1,
+ _mm_storeu_si128((void *)&rx_pkts[5]->mbuf_rx_descriptor_fields1,
pkt_mb[5]);
- _mm_storeu_si128((void *)&rx_pkts[4]->rx_descriptor_fields1,
+ _mm_storeu_si128((void *)&rx_pkts[4]->mbuf_rx_descriptor_fields1,
pkt_mb[4]);
pkt_mb[7] = _mm_shuffle_epi8(desc[3], shuf_msk2);
pkt_mb[6] = _mm_shuffle_epi8(desc[3], shuf_msk1);
pkt_mb[7] = _mm_add_epi16(pkt_mb[7], len_adjust);
pkt_mb[6] = _mm_add_epi16(pkt_mb[6], len_adjust);
- _mm_storeu_si128((void *)&rx_pkts[7]->rx_descriptor_fields1,
+ _mm_storeu_si128((void *)&rx_pkts[7]->mbuf_rx_descriptor_fields1,
pkt_mb[7]);
- _mm_storeu_si128((void *)&rx_pkts[6]->rx_descriptor_fields1,
+ _mm_storeu_si128((void *)&rx_pkts[6]->mbuf_rx_descriptor_fields1,
pkt_mb[6]);
if (unlikely(nb_used <= RTE_VIRTIO_DESC_PER_LOOP)) {