[v2,31/45] baseband/acc: use rte stdatomic API

Message ID 1711048652-7512-32-git-send-email-roretzla@linux.microsoft.com (mailing list archive)
State Superseded, archived
Delegated to: Thomas Monjalon
Series use stdatomic API

Checks

Context         Check     Description
ci/checkpatch   success   coding style OK

Commit Message

Tyler Retzlaff March 21, 2024, 7:17 p.m. UTC
  Replace the use of the gcc builtin __atomic_xxx intrinsics with the
corresponding rte_atomic_xxx calls from the optional rte stdatomic API.
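
A minimal standalone sketch of the mapping applied to every descriptor
load in this diff is shown below. It is illustrative only and not part
of the patch: the load_desc_hdr() helper is hypothetical, and the sketch
assumes only DPDK's <rte_stdatomic.h> wrappers (rte_atomic_load_explicit,
rte_memory_order_relaxed and the __rte_atomic qualifier).

#include <stdint.h>
#include <rte_stdatomic.h>

/* Hypothetical helper, for illustration only. */
static uint64_t
load_desc_hdr(uint64_t *desc)
{
	uint64_t hdr;

	/* Before: gcc builtin with relaxed ordering. */
	hdr = __atomic_load_n(desc, __ATOMIC_RELAXED);

	/* After: optional rte stdatomic wrapper; the cast adds the
	 * __rte_atomic qualifier the wrapper expects.
	 */
	hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
			rte_memory_order_relaxed);

	return hdr;
}

Both forms should produce the same relaxed 64-bit load; only the
spelling changes, so the driver can build against either the compiler
builtins or C11 stdatomic depending on how DPDK is configured.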

Signed-off-by: Tyler Retzlaff <roretzla@linux.microsoft.com>
---
 drivers/baseband/acc/rte_acc100_pmd.c | 36 +++++++++++++--------------
 drivers/baseband/acc/rte_vrb_pmd.c    | 46 +++++++++++++++++++++++------------
 2 files changed, 48 insertions(+), 34 deletions(-)
  

Patch

diff --git a/drivers/baseband/acc/rte_acc100_pmd.c b/drivers/baseband/acc/rte_acc100_pmd.c
index 4f666e5..ee50b9c 100644
--- a/drivers/baseband/acc/rte_acc100_pmd.c
+++ b/drivers/baseband/acc/rte_acc100_pmd.c
@@ -3673,8 +3673,8 @@ 
 
 	desc_idx = acc_desc_idx_tail(q, *dequeued_descs);
 	desc = q->ring_addr + desc_idx;
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc,
-			__ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+			rte_memory_order_relaxed);
 
 	/* Check fdone bit */
 	if (!(atom_desc.rsp.val & ACC_FDONE))
@@ -3728,8 +3728,8 @@ 
 	uint16_t current_dequeued_descs = 0, descs_in_tb;
 
 	desc = acc_desc_tail(q, *dequeued_descs);
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc,
-			__ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+			rte_memory_order_relaxed);
 
 	/* Check fdone bit */
 	if (!(atom_desc.rsp.val & ACC_FDONE))
@@ -3742,8 +3742,8 @@ 
 	/* Check if last CB in TB is ready to dequeue (and thus
 	 * the whole TB) - checking sdone bit. If not return.
 	 */
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)last_desc,
-			__ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)last_desc,
+			rte_memory_order_relaxed);
 	if (!(atom_desc.rsp.val & ACC_SDONE))
 		return -1;
 
@@ -3755,8 +3755,8 @@ 
 
 	while (i < descs_in_tb) {
 		desc = acc_desc_tail(q, *dequeued_descs);
-		atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc,
-				__ATOMIC_RELAXED);
+		atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+				rte_memory_order_relaxed);
 		rsp.val = atom_desc.rsp.val;
 		rte_bbdev_log_debug("Resp. desc %p: %x descs %d cbs %d\n",
 				desc, rsp.val, descs_in_tb, desc->req.numCBs);
@@ -3793,8 +3793,8 @@ 
 	struct rte_bbdev_dec_op *op;
 
 	desc = acc_desc_tail(q, dequeued_cbs);
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc,
-			__ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+			rte_memory_order_relaxed);
 
 	/* Check fdone bit */
 	if (!(atom_desc.rsp.val & ACC_FDONE))
@@ -3846,8 +3846,8 @@ 
 	struct rte_bbdev_dec_op *op;
 
 	desc = acc_desc_tail(q, dequeued_cbs);
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc,
-			__ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+			rte_memory_order_relaxed);
 
 	/* Check fdone bit */
 	if (!(atom_desc.rsp.val & ACC_FDONE))
@@ -3902,8 +3902,8 @@ 
 	uint8_t cbs_in_tb = 1, cb_idx = 0;
 
 	desc = acc_desc_tail(q, dequeued_cbs);
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc,
-			__ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+			rte_memory_order_relaxed);
 
 	/* Check fdone bit */
 	if (!(atom_desc.rsp.val & ACC_FDONE))
@@ -3919,8 +3919,8 @@ 
 	/* Check if last CB in TB is ready to dequeue (and thus
 	 * the whole TB) - checking sdone bit. If not return.
 	 */
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)last_desc,
-			__ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)last_desc,
+			rte_memory_order_relaxed);
 	if (!(atom_desc.rsp.val & ACC_SDONE))
 		return -1;
 
@@ -3930,8 +3930,8 @@ 
 	/* Read remaining CBs if exists */
 	while (cb_idx < cbs_in_tb) {
 		desc = acc_desc_tail(q, dequeued_cbs);
-		atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc,
-				__ATOMIC_RELAXED);
+		atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+				rte_memory_order_relaxed);
 		rsp.val = atom_desc.rsp.val;
 		rte_bbdev_log_debug("Resp. desc %p: %x r %d c %d\n",
 						desc, rsp.val, cb_idx, cbs_in_tb);
diff --git a/drivers/baseband/acc/rte_vrb_pmd.c b/drivers/baseband/acc/rte_vrb_pmd.c
index 88b1104..f7c54be 100644
--- a/drivers/baseband/acc/rte_vrb_pmd.c
+++ b/drivers/baseband/acc/rte_vrb_pmd.c
@@ -3119,7 +3119,8 @@ 
 
 	desc_idx = acc_desc_idx_tail(q, *dequeued_descs);
 	desc = q->ring_addr + desc_idx;
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc, __ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+	    rte_memory_order_relaxed);
 
 	if (*dequeued_ops + desc->req.numCBs > max_requested_ops)
 		return -1;
@@ -3157,7 +3158,8 @@ 
 	struct rte_bbdev_enc_op *op;
 
 	desc = acc_desc_tail(q, *dequeued_descs);
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc, __ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+	    rte_memory_order_relaxed);
 
 	/* Check fdone bit. */
 	if (!(atom_desc.rsp.val & ACC_FDONE))
@@ -3192,7 +3194,8 @@ 
 	uint16_t current_dequeued_descs = 0, descs_in_tb;
 
 	desc = acc_desc_tail(q, *dequeued_descs);
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc, __ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+	    rte_memory_order_relaxed);
 
 	if (*dequeued_ops + 1 > max_requested_ops)
 		return -1;
@@ -3208,7 +3211,8 @@ 
 	/* Check if last CB in TB is ready to dequeue (and thus
 	 * the whole TB) - checking sdone bit. If not return.
 	 */
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)last_desc, __ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)last_desc,
+	    rte_memory_order_relaxed);
 	if (!(atom_desc.rsp.val & ACC_SDONE))
 		return -1;
 
@@ -3220,7 +3224,8 @@ 
 
 	while (i < descs_in_tb) {
 		desc = acc_desc_tail(q, *dequeued_descs);
-		atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc, __ATOMIC_RELAXED);
+		atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+		    rte_memory_order_relaxed);
 		rsp.val = atom_desc.rsp.val;
 
 		vrb_update_dequeued_operation(desc, rsp, &op->status, aq_dequeued, true, false);
@@ -3246,7 +3251,8 @@ 
 	struct rte_bbdev_dec_op *op;
 
 	desc = acc_desc_tail(q, dequeued_cbs);
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc, __ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+	    rte_memory_order_relaxed);
 
 	/* Check fdone bit. */
 	if (!(atom_desc.rsp.val & ACC_FDONE))
@@ -3290,7 +3296,8 @@ 
 	struct rte_bbdev_dec_op *op;
 
 	desc = acc_desc_tail(q, dequeued_cbs);
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc, __ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+	    rte_memory_order_relaxed);
 
 	/* Check fdone bit. */
 	if (!(atom_desc.rsp.val & ACC_FDONE))
@@ -3346,7 +3353,8 @@ 
 	uint32_t tb_crc_check = 0;
 
 	desc = acc_desc_tail(q, dequeued_cbs);
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc, __ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+	    rte_memory_order_relaxed);
 
 	/* Check fdone bit. */
 	if (!(atom_desc.rsp.val & ACC_FDONE))
@@ -3362,7 +3370,8 @@ 
 	/* Check if last CB in TB is ready to dequeue (and thus the whole TB) - checking sdone bit.
 	 * If not return.
 	 */
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)last_desc, __ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)last_desc,
+	    rte_memory_order_relaxed);
 	if (!(atom_desc.rsp.val & ACC_SDONE))
 		return -1;
 
@@ -3372,7 +3381,8 @@ 
 	/* Read remaining CBs if exists. */
 	while (cb_idx < cbs_in_tb) {
 		desc = acc_desc_tail(q, dequeued_cbs);
-		atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc, __ATOMIC_RELAXED);
+		atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+		    rte_memory_order_relaxed);
 		rsp.val = atom_desc.rsp.val;
 		rte_bbdev_log_debug("Resp. desc %p: %x %x %x", desc,
 				rsp.val, desc->rsp.add_info_0,
@@ -3790,7 +3800,8 @@ 
 	struct rte_bbdev_fft_op *op;
 
 	desc = acc_desc_tail(q, dequeued_cbs);
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc, __ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+	    rte_memory_order_relaxed);
 
 	/* Check fdone bit */
 	if (!(atom_desc.rsp.val & ACC_FDONE))
@@ -4116,7 +4127,8 @@ 
 	uint8_t descs_in_op, i;
 
 	desc = acc_desc_tail(q, dequeued_ops);
-	atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc, __ATOMIC_RELAXED);
+	atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+	    rte_memory_order_relaxed);
 
 	/* Check fdone bit. */
 	if (!(atom_desc.rsp.val & ACC_FDONE))
@@ -4127,7 +4139,8 @@ 
 		/* Get last CB. */
 		last_desc = acc_desc_tail(q, dequeued_ops + descs_in_op - 1);
 		/* Check if last op is ready to dequeue by checking fdone bit. If not exit. */
-		atom_desc.atom_hdr = __atomic_load_n((uint64_t *)last_desc, __ATOMIC_RELAXED);
+		atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)last_desc,
+		    rte_memory_order_relaxed);
 		if (!(atom_desc.rsp.val & ACC_FDONE))
 			return -1;
 #ifdef RTE_LIBRTE_BBDEV_DEBUG
@@ -4137,8 +4150,8 @@ 
 		for (i = 1; i < descs_in_op - 1; i++) {
 			last_desc = q->ring_addr + ((q->sw_ring_tail + dequeued_ops + i)
 					& q->sw_ring_wrap_mask);
-			atom_desc.atom_hdr = __atomic_load_n((uint64_t *)last_desc,
-					__ATOMIC_RELAXED);
+			atom_desc.atom_hdr = rte_atomic_load_explicit(
+			    (uint64_t __rte_atomic *)last_desc, rte_memory_order_relaxed);
 			if (!(atom_desc.rsp.val & ACC_FDONE))
 				return -1;
 		}
@@ -4154,7 +4167,8 @@ 
 
 	for (i = 0; i < descs_in_op; i++) {
 		desc = q->ring_addr + ((q->sw_ring_tail + dequeued_ops + i) & q->sw_ring_wrap_mask);
-		atom_desc.atom_hdr = __atomic_load_n((uint64_t *)desc, __ATOMIC_RELAXED);
+		atom_desc.atom_hdr = rte_atomic_load_explicit((uint64_t __rte_atomic *)desc,
+		    rte_memory_order_relaxed);
 		rsp.val = atom_desc.rsp.val;
 
 		vrb_update_dequeued_operation(desc, rsp, &op->status, aq_dequeued, true, false);