@@ -37,12 +37,12 @@
/* reference count offset in mbuf rearm data */
#define REFCNT_BITS_OFFSET ((offsetof(struct rte_mbuf, refcnt) - \
- offsetof(struct rte_mbuf, rearm_data)) * BYTE_SIZE)
+ offsetof(struct rte_mbuf, data_off)) * BYTE_SIZE)
#ifdef CC_AVX512_SUPPORT
/* segment number offset in mbuf rearm data */
#define SEG_NUM_BITS_OFFSET ((offsetof(struct rte_mbuf, nb_segs) - \
- offsetof(struct rte_mbuf, rearm_data)) * BYTE_SIZE)
+ offsetof(struct rte_mbuf, data_off)) * BYTE_SIZE)
/* default rearm data */
#define DEFAULT_REARM_DATA (1ULL << SEG_NUM_BITS_OFFSET | \
1ULL << REFCNT_BITS_OFFSET)
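
For context, the helpers this series switches to are thin accessors over the first field of each region. A minimal sketch, assuming the conventional rte_mbuf layout (the authoritative definitions live in rte_mbuf.h; that the Rx descriptor region starts at packet_type is an assumption here):

/* Sketch only, not part of the patch: the rearm region is the 64-bit
 * word starting at data_off, and the Rx descriptor region is assumed
 * to start at packet_type. */
static inline uint64_t *
rte_mbuf_rearm_data(struct rte_mbuf *m)
{
	return (uint64_t *)&m->data_off;
}

static inline void *
rte_mbuf_rx_descriptor_fields1(struct rte_mbuf *m)
{
	return &m->packet_type;
}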
@@ -36,10 +36,10 @@
/* Load four mbufs rearm data */
RTE_BUILD_BUG_ON(REFCNT_BITS_OFFSET >= 64);
RTE_BUILD_BUG_ON(SEG_NUM_BITS_OFFSET >= 64);
- __m256i mbufs = _mm256_set_epi64x(*tx_pkts[3]->rearm_data,
- *tx_pkts[2]->rearm_data,
- *tx_pkts[1]->rearm_data,
- *tx_pkts[0]->rearm_data);
+ __m256i mbufs = _mm256_set_epi64x(*rte_mbuf_rearm_data(tx_pkts[3]),
+ *rte_mbuf_rearm_data(tx_pkts[2]),
+ *rte_mbuf_rearm_data(tx_pkts[1]),
+ *rte_mbuf_rearm_data(tx_pkts[0]));
/* refcnt=1 and nb_segs=1 */
__m256i mbuf_ref = _mm256_set1_epi64x(DEFAULT_REARM_DATA);
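
To make the constants concrete: assuming the usual packing of the rearm region (refcnt 2 bytes after data_off, nb_segs 4 bytes after it, BYTE_SIZE = 8), the offsets evaluate as below; the RTE_BUILD_BUG_ONs above are what actually enforce the bounds the shifts need:

/* Worked example, not part of the patch; values assume the usual layout. */
_Static_assert(REFCNT_BITS_OFFSET == 16, "refcnt 2 bytes after data_off");
_Static_assert(SEG_NUM_BITS_OFFSET == 32, "nb_segs 4 bytes after data_off");
/* DEFAULT_REARM_DATA == (1ULL << 32) | (1ULL << 16): a reference word
 * with nb_segs = 1 and refcnt = 1, which mbuf_ref broadcasts for the
 * lane-wise compare against the loaded rearm words. */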
@@ -53,8 +53,6 @@
/* Check headroom is enough */
const __mmask16 data_mask = 0x1 | 0x1 << 4 | 0x1 << 8 | 0x1 << 12;
- RTE_BUILD_BUG_ON(offsetof(struct rte_mbuf, data_off) !=
- offsetof(struct rte_mbuf, rearm_data));
cmp = _mm256_mask_cmplt_epu16_mask(data_mask, mbufs, head_rooms);
if (unlikely(cmp))
return -1;
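
data_mask selects the first 16-bit lane of each 64-bit rearm word, i.e. data_off, so the masked compare is a four-wide version of the following scalar check (a sketch; head_size stands for the headroom requirement that head_rooms broadcasts, the name taken from the NEON variant below):

/* Scalar equivalent of the masked lane compare (illustrative only). */
for (int k = 0; k < 4; k++)
	if (tx_pkts[k]->data_off < head_size)
		return -1; /* not enough headroom for the virtio-net header */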
@@ -187,7 +185,7 @@
rx_pkts[i] = (struct rte_mbuf *)vq->vq_descx[id + i].cookie;
rte_packet_prefetch(rte_pktmbuf_mtod(rx_pkts[i], void *));
- addrs[i] = (uintptr_t)rx_pkts[i]->rx_descriptor_fields1;
+ addrs[i] = (uintptr_t)rte_mbuf_rx_descriptor_fields1(rx_pkts[i]);
}
/*
@@ -203,10 +201,6 @@
__m512i v_value = _mm512_add_epi32(values, mbuf_len_offset);
- /* assert offset of data_len */
- RTE_BUILD_BUG_ON(offsetof(struct rte_mbuf, data_len) !=
- offsetof(struct rte_mbuf, rx_descriptor_fields1) + 8);
-
__m512i v_index = _mm512_set_epi64(addrs[3] + 8, addrs[3],
addrs[2] + 8, addrs[2],
addrs[1] + 8, addrs[1],
addrs[0] + 8, addrs[0]);
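
The addrs[k] / addrs[k] + 8 pairs are the scatter destinations for the two 64-bit halves of each mbuf's rx_descriptor_fields1 region; the deleted assert pinned data_len to the start of the second half, a layout contract presumably now enforced next to the accessor rather than per driver. A scalar sketch of the scatter this index vector drives (lo_qword and hi_qword are hypothetical stand-ins for lanes of v_value):

/* One 64-bit store at the start of the region (packet_type, pkt_len)
 * and one 8 bytes in (data_len, vlan_tci, hash), per mbuf. */
for (int k = 0; k < 4; k++) {
	uint64_t *dst = (uint64_t *)addrs[k];
	dst[0] = lo_qword[k];
	dst[1] = hi_qword[k];
}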
@@ -59,10 +59,10 @@
uint8x16x2_t mbuf;
/* Load four mbufs rearm data. */
RTE_BUILD_BUG_ON(REFCNT_BITS_OFFSET >= 64);
- pkts[0] = vld1_u16((uint16_t *)&tx_pkts[0]->rearm_data);
- pkts[1] = vld1_u16((uint16_t *)&tx_pkts[1]->rearm_data);
- pkts[2] = vld1_u16((uint16_t *)&tx_pkts[2]->rearm_data);
- pkts[3] = vld1_u16((uint16_t *)&tx_pkts[3]->rearm_data);
+ pkts[0] = vld1_u16((uint16_t *)rte_mbuf_rearm_data(tx_pkts[0]));
+ pkts[1] = vld1_u16((uint16_t *)rte_mbuf_rearm_data(tx_pkts[1]));
+ pkts[2] = vld1_u16((uint16_t *)rte_mbuf_rearm_data(tx_pkts[2]));
+ pkts[3] = vld1_u16((uint16_t *)rte_mbuf_rearm_data(tx_pkts[3]));
mbuf.val[0] = vreinterpretq_u8_u16(vcombine_u16(pkts[0], pkts[1]));
mbuf.val[1] = vreinterpretq_u8_u16(vcombine_u16(pkts[2], pkts[3]));
@@ -77,8 +77,6 @@
/* Check headroom is enough. */
uint16x4_t head_rooms = vdup_n_u16(head_size);
- RTE_BUILD_BUG_ON(offsetof(struct rte_mbuf, data_off) !=
- offsetof(struct rte_mbuf, rearm_data));
uint16x4_t data_offset = vreinterpret_u16_u8(vqtbl2_u8(mbuf, data_msk));
uint64x1_t cmp2 = vreinterpret_u64_u16(vclt_u16(data_offset, head_rooms));
if (unlikely(vget_lane_u64(cmp2, 0)))
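
data_msk is a byte-index table for vqtbl2_u8 that picks the data_off halfword out of each 64-bit rearm word. Given the packing above (pkts 0-1 in mbuf.val[0], pkts 2-3 in mbuf.val[1]), its contents can be inferred to be (an inference from the packing, not quoted from the source):

/* Bytes 0-1 and 8-9 pull data_off from val[0]; bytes 16-17 and
 * 24-25 pull it from val[1]. */
uint8x8_t data_msk = {0, 1, 8, 9, 16, 17, 24, 25};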
@@ -263,10 +261,10 @@
pkt_mb[3] = vreinterpretq_u64_u16(vsubq_u16(
vreinterpretq_u16_u64(pkt_mb[3]), len_adjust));
- vst1q_u64((void *)&rx_pkts[0]->rx_descriptor_fields1, pkt_mb[0]);
- vst1q_u64((void *)&rx_pkts[1]->rx_descriptor_fields1, pkt_mb[1]);
- vst1q_u64((void *)&rx_pkts[2]->rx_descriptor_fields1, pkt_mb[2]);
- vst1q_u64((void *)&rx_pkts[3]->rx_descriptor_fields1, pkt_mb[3]);
+ vst1q_u64(rte_mbuf_rx_descriptor_fields1(rx_pkts[0]), pkt_mb[0]);
+ vst1q_u64(rte_mbuf_rx_descriptor_fields1(rx_pkts[1]), pkt_mb[1]);
+ vst1q_u64(rte_mbuf_rx_descriptor_fields1(rx_pkts[2]), pkt_mb[2]);
+ vst1q_u64(rte_mbuf_rx_descriptor_fields1(rx_pkts[3]), pkt_mb[3]);
if (hw->has_rx_offload) {
virtio_for_each_try_unroll(i, 0, PACKED_BATCH_SIZE) {
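
len_adjust removes the virtio-net header length from the pkt_len and data_len lanes before the stores; the split-ring altivec, NEON, and SSE variants further below follow the same shuffle-then-adjust pattern. Per packet the adjustment is equivalent to this sketch (desc_len is a hypothetical stand-in for the length the device reported, hdr_size for vq->hw->vtnet_hdr_size):

/* Scalar equivalent of one shuffle + len_adjust + store (sketch). */
uint32_t len = desc_len[k];            /* length reported by the device */
rx_pkts[k]->pkt_len  = len - hdr_size; /* strip the virtio-net header */
rx_pkts[k]->data_len = len - hdr_size;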
@@ -31,7 +31,6 @@
virtio_rxq_vec_setup(struct virtnet_rx *rxq)
{
struct virtqueue *vq = virtnet_rxq_to_vq(rxq);
- uintptr_t p;
struct rte_mbuf mb_def = { .buf_addr = 0 }; /* zeroed mbuf */
mb_def.nb_segs = 1;
@@ -41,8 +40,7 @@
/* prevent compiler reordering: rearm_data covers previous fields */
rte_compiler_barrier();
- p = (uintptr_t)&mb_def.rearm_data;
- rxq->mbuf_initializer = *(uint64_t *)p;
+ rxq->mbuf_initializer = *rte_mbuf_rearm_data(&mb_def);
return 0;
}
@@ -39,10 +39,7 @@
}
for (i = 0; i < RTE_VIRTIO_VPMD_RX_REARM_THRESH; i++) {
- uintptr_t p;
-
- p = (uintptr_t)&sw_ring[i]->rearm_data;
- *(uint64_t *)p = rxvq->mbuf_initializer;
+ *rte_mbuf_rearm_data(sw_ring[i]) = rxvq->mbuf_initializer;
start_dp[i].addr = VIRTIO_MBUF_ADDR(sw_ring[i], vq) +
RTE_PKTMBUF_HEADROOM - vq->hw->vtnet_hdr_size;
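
The single 64-bit write replays the fields captured from mb_def in one store; field by field it is equivalent to the sketch below (illustrative; the vector path uses the one-word copy precisely to avoid four separate stores):

/* What *rte_mbuf_rearm_data(sw_ring[i]) = rxvq->mbuf_initializer amounts
 * to, assuming mb_def was populated as in virtio_rxq_vec_setup() above. */
sw_ring[i]->data_off = RTE_PKTMBUF_HEADROOM;
rte_mbuf_refcnt_set(sw_ring[i], 1);
sw_ring[i]->nb_segs = 1;
sw_ring[i]->port = rxvq->port_id; /* hypothetical: the port mb_def captured */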
@@ -138,9 +138,9 @@
((__vector unsigned short)pkt_mb[0] + len_adjust);
pkt_mb[1] = (__vector unsigned char)
((__vector unsigned short)pkt_mb[1] + len_adjust);
- *(__vector unsigned char *)&rx_pkts[0]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)rte_mbuf_rx_descriptor_fields1(rx_pkts[0]) =
pkt_mb[0];
- *(__vector unsigned char *)&rx_pkts[1]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)rte_mbuf_rx_descriptor_fields1(rx_pkts[1]) =
pkt_mb[1];
pkt_mb[2] = vec_perm(desc[1], zero, shuf_msk1);
@@ -149,9 +149,9 @@
((__vector unsigned short)pkt_mb[2] + len_adjust);
pkt_mb[3] = (__vector unsigned char)
((__vector unsigned short)pkt_mb[3] + len_adjust);
- *(__vector unsigned char *)&rx_pkts[2]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)rte_mbuf_rx_descriptor_fields1(rx_pkts[2]) =
pkt_mb[2];
- *(__vector unsigned char *)&rx_pkts[3]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)rte_mbuf_rx_descriptor_fields1(rx_pkts[3]) =
pkt_mb[3];
pkt_mb[4] = vec_perm(desc[2], zero, shuf_msk1);
@@ -160,9 +160,9 @@
((__vector unsigned short)pkt_mb[4] + len_adjust);
pkt_mb[5] = (__vector unsigned char)
((__vector unsigned short)pkt_mb[5] + len_adjust);
- *(__vector unsigned char *)&rx_pkts[4]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)rte_mbuf_rx_descriptor_fields1(rx_pkts[4]) =
pkt_mb[4];
- *(__vector unsigned char *)&rx_pkts[5]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)rte_mbuf_rx_descriptor_fields1(rx_pkts[5]) =
pkt_mb[5];
pkt_mb[6] = vec_perm(desc[3], zero, shuf_msk1);
@@ -171,9 +171,9 @@
((__vector unsigned short)pkt_mb[6] + len_adjust);
pkt_mb[7] = (__vector unsigned char)
((__vector unsigned short)pkt_mb[7] + len_adjust);
- *(__vector unsigned char *)&rx_pkts[6]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)rte_mbuf_rx_descriptor_fields1(rx_pkts[6]) =
pkt_mb[6];
- *(__vector unsigned char *)&rx_pkts[7]->rx_descriptor_fields1 =
+ *(__vector unsigned char *)rte_mbuf_rx_descriptor_fields1(rx_pkts[7]) =
pkt_mb[7];
if (unlikely(nb_used <= RTE_VIRTIO_DESC_PER_LOOP)) {
@@ -138,10 +138,8 @@
vreinterpretq_u16_u64(pkt_mb[1]), len_adjust));
pkt_mb[0] = vreinterpretq_u64_u16(vsubq_u16(
vreinterpretq_u16_u64(pkt_mb[0]), len_adjust));
- vst1q_u64((void *)&rx_pkts[1]->rx_descriptor_fields1,
- pkt_mb[1]);
- vst1q_u64((void *)&rx_pkts[0]->rx_descriptor_fields1,
- pkt_mb[0]);
+ vst1q_u64(rte_mbuf_rx_descriptor_fields1(rx_pkts[1]), pkt_mb[1]);
+ vst1q_u64(rte_mbuf_rx_descriptor_fields1(rx_pkts[0]), pkt_mb[0]);
pkt_mb[3] = vreinterpretq_u64_u8(vqtbl1q_u8(
vreinterpretq_u8_u64(desc[1]), shuf_msk2));
@@ -151,10 +149,8 @@
vreinterpretq_u16_u64(pkt_mb[3]), len_adjust));
pkt_mb[2] = vreinterpretq_u64_u16(vsubq_u16(
vreinterpretq_u16_u64(pkt_mb[2]), len_adjust));
- vst1q_u64((void *)&rx_pkts[3]->rx_descriptor_fields1,
- pkt_mb[3]);
- vst1q_u64((void *)&rx_pkts[2]->rx_descriptor_fields1,
- pkt_mb[2]);
+ vst1q_u64(rte_mbuf_rx_descriptor_fields1(rx_pkts[3]), pkt_mb[3]);
+ vst1q_u64(rte_mbuf_rx_descriptor_fields1(rx_pkts[2]), pkt_mb[2]);
pkt_mb[5] = vreinterpretq_u64_u8(vqtbl1q_u8(
vreinterpretq_u8_u64(desc[2]), shuf_msk2));
@@ -164,10 +160,8 @@
vreinterpretq_u16_u64(pkt_mb[5]), len_adjust));
pkt_mb[4] = vreinterpretq_u64_u16(vsubq_u16(
vreinterpretq_u16_u64(pkt_mb[4]), len_adjust));
- vst1q_u64((void *)&rx_pkts[5]->rx_descriptor_fields1,
- pkt_mb[5]);
- vst1q_u64((void *)&rx_pkts[4]->rx_descriptor_fields1,
- pkt_mb[4]);
+ vst1q_u64(rte_mbuf_rx_descriptor_fields1(rx_pkts[5]), pkt_mb[5]);
+ vst1q_u64(rte_mbuf_rx_descriptor_fields1(rx_pkts[4]), pkt_mb[4]);
pkt_mb[7] = vreinterpretq_u64_u8(vqtbl1q_u8(
vreinterpretq_u8_u64(desc[3]), shuf_msk2));
@@ -177,10 +171,8 @@
vreinterpretq_u16_u64(pkt_mb[7]), len_adjust));
pkt_mb[6] = vreinterpretq_u64_u16(vsubq_u16(
vreinterpretq_u16_u64(pkt_mb[6]), len_adjust));
- vst1q_u64((void *)&rx_pkts[7]->rx_descriptor_fields1,
- pkt_mb[7]);
- vst1q_u64((void *)&rx_pkts[6]->rx_descriptor_fields1,
- pkt_mb[6]);
+ vst1q_u64(rte_mbuf_rx_descriptor_fields1(rx_pkts[7]), pkt_mb[7]);
+ vst1q_u64(rte_mbuf_rx_descriptor_fields1(rx_pkts[6]), pkt_mb[6]);
if (unlikely(nb_used <= RTE_VIRTIO_DESC_PER_LOOP)) {
if (sw_ring + nb_used <= sw_ring_end)
@@ -134,36 +134,36 @@
pkt_mb[0] = _mm_shuffle_epi8(desc[0], shuf_msk1);
pkt_mb[1] = _mm_add_epi16(pkt_mb[1], len_adjust);
pkt_mb[0] = _mm_add_epi16(pkt_mb[0], len_adjust);
- _mm_storeu_si128((void *)&rx_pkts[1]->rx_descriptor_fields1,
+ _mm_storeu_si128(rte_mbuf_rx_descriptor_fields1(rx_pkts[1]),
pkt_mb[1]);
- _mm_storeu_si128((void *)&rx_pkts[0]->rx_descriptor_fields1,
+ _mm_storeu_si128(rte_mbuf_rx_descriptor_fields1(rx_pkts[0]),
pkt_mb[0]);
pkt_mb[3] = _mm_shuffle_epi8(desc[1], shuf_msk2);
pkt_mb[2] = _mm_shuffle_epi8(desc[1], shuf_msk1);
pkt_mb[3] = _mm_add_epi16(pkt_mb[3], len_adjust);
pkt_mb[2] = _mm_add_epi16(pkt_mb[2], len_adjust);
- _mm_storeu_si128((void *)&rx_pkts[3]->rx_descriptor_fields1,
+ _mm_storeu_si128(rte_mbuf_rx_descriptor_fields1(rx_pkts[3]),
pkt_mb[3]);
- _mm_storeu_si128((void *)&rx_pkts[2]->rx_descriptor_fields1,
+ _mm_storeu_si128(rte_mbuf_rx_descriptor_fields1(rx_pkts[2]),
pkt_mb[2]);
pkt_mb[5] = _mm_shuffle_epi8(desc[2], shuf_msk2);
pkt_mb[4] = _mm_shuffle_epi8(desc[2], shuf_msk1);
pkt_mb[5] = _mm_add_epi16(pkt_mb[5], len_adjust);
pkt_mb[4] = _mm_add_epi16(pkt_mb[4], len_adjust);
- _mm_storeu_si128((void *)&rx_pkts[5]->rx_descriptor_fields1,
+ _mm_storeu_si128(rte_mbuf_rx_descriptor_fields1(rx_pkts[5]),
pkt_mb[5]);
- _mm_storeu_si128((void *)&rx_pkts[4]->rx_descriptor_fields1,
+ _mm_storeu_si128(rte_mbuf_rx_descriptor_fields1(rx_pkts[4]),
pkt_mb[4]);
pkt_mb[7] = _mm_shuffle_epi8(desc[3], shuf_msk2);
pkt_mb[6] = _mm_shuffle_epi8(desc[3], shuf_msk1);
pkt_mb[7] = _mm_add_epi16(pkt_mb[7], len_adjust);
pkt_mb[6] = _mm_add_epi16(pkt_mb[6], len_adjust);
- _mm_storeu_si128((void *)&rx_pkts[7]->rx_descriptor_fields1,
+ _mm_storeu_si128(rte_mbuf_rx_descriptor_fields1(rx_pkts[7]),
pkt_mb[7]);
- _mm_storeu_si128((void *)&rx_pkts[6]->rx_descriptor_fields1,
+ _mm_storeu_si128(rte_mbuf_rx_descriptor_fields1(rx_pkts[6]),
pkt_mb[6]);
if (unlikely(nb_used <= RTE_VIRTIO_DESC_PER_LOOP)) {