[v3,2/7] stack: replace rte atomics with GCC builtin atomics

Message ID 1679612036-30773-3-git-send-email-roretzla@linux.microsoft.com (mailing list archive)
State Superseded, archived
Delegated to: David Marchand
Series: replace rte atomics with GCC builtin atomics

Checks

Context        Check     Description
ci/checkpatch  success   coding style OK

Commit Message

Tyler Retzlaff March 23, 2023, 10:53 p.m. UTC
  Replace the use of rte_atomic.h types and functions; instead, use
GCC-supplied C++11 memory model builtins.

Signed-off-by: Tyler Retzlaff <roretzla@linux.microsoft.com>
---
 lib/stack/rte_stack_lf_generic.h | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)
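
For context, the change is a one-to-one mapping from the legacy rte_atomic64_* helpers onto the GCC __atomic builtins. A minimal sketch of that mapping on a hypothetical shared 64-bit counter (names are illustrative only, not the DPDK structures):

#include <stdint.h>

static uint64_t counter;	/* hypothetical shared 64-bit value */

static uint64_t
read_counter(void)
{
	/* old: rte_atomic64_read((rte_atomic64_t *)&counter) */
	return __atomic_load_n(&counter, __ATOMIC_SEQ_CST);
}

static void
add_counter(uint64_t n)
{
	/* old: rte_atomic64_add((rte_atomic64_t *)&counter, n) */
	__atomic_fetch_add(&counter, n, __ATOMIC_SEQ_CST);
}

static int
cmpset_counter(uint64_t expected, uint64_t desired)
{
	/* old: rte_atomic64_cmpset(&counter, expected, desired);
	 * like the rte helper, returns non-zero on success. */
	return __atomic_compare_exchange_n(&counter, &expected, desired,
			0 /* strong */, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
}

__ATOMIC_SEQ_CST mirrors the patch's conservative ordering choice; the NOTE comments in the diff flag each site for possible relaxation later.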
  

Comments

David Marchand May 24, 2023, 8:08 p.m. UTC | #1
Hello Olivier,

Review, please.

On Thu, Mar 23, 2023 at 11:54 PM Tyler Retzlaff
<roretzla@linux.microsoft.com> wrote:
>
> Replace the use of rte_atomic.h types and functions; instead, use
> GCC-supplied C++11 memory model builtins.
>
> Signed-off-by: Tyler Retzlaff <roretzla@linux.microsoft.com>
> ---
>  lib/stack/rte_stack_lf_generic.h | 16 +++++++++-------
>  1 file changed, 9 insertions(+), 7 deletions(-)
>
> diff --git a/lib/stack/rte_stack_lf_generic.h b/lib/stack/rte_stack_lf_generic.h
> index 7fa29ce..aad3747 100644
> --- a/lib/stack/rte_stack_lf_generic.h
> +++ b/lib/stack/rte_stack_lf_generic.h
> @@ -26,8 +26,8 @@
>          * elements. If the mempool is near-empty to the point that this is a
>          * concern, the user should consider increasing the mempool size.
>          */
> -       return (unsigned int)rte_atomic64_read((rte_atomic64_t *)
> -                       &s->stack_lf.used.len);
> +       /* NOTE: review for potential ordering optimization */
> +       return __atomic_load_n(&s->stack_lf.used.len, __ATOMIC_SEQ_CST);
>  }
>
>  static __rte_always_inline void
> @@ -67,8 +67,8 @@
>                                 1, __ATOMIC_RELEASE,
>                                 __ATOMIC_RELAXED);
>         } while (success == 0);
> -
> -       rte_atomic64_add((rte_atomic64_t *)&list->len, num);
> +       /* NOTE: review for potential ordering optimization */
> +       __atomic_fetch_add(&list->len, num, __ATOMIC_SEQ_CST);
>  }
>
>  static __rte_always_inline struct rte_stack_lf_elem *
> @@ -82,14 +82,16 @@
>
>         /* Reserve num elements, if available */
>         while (1) {
> -               uint64_t len = rte_atomic64_read((rte_atomic64_t *)&list->len);
> +               /* NOTE: review for potential ordering optimization */
> +               uint64_t len = __atomic_load_n(&list->len, __ATOMIC_SEQ_CST);
>
>                 /* Does the list contain enough elements? */
>                 if (unlikely(len < num))
>                         return NULL;
>
> -               if (rte_atomic64_cmpset((volatile uint64_t *)&list->len,
> -                                       len, len - num))
> +               /* NOTE: review for potential ordering optimization */
> +               if (__atomic_compare_exchange_n(&list->len, &len, len - num,
> +                       0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
>                         break;
>         }
>
> --
> 1.8.3.1
>
  

Patch

diff --git a/lib/stack/rte_stack_lf_generic.h b/lib/stack/rte_stack_lf_generic.h
index 7fa29ce..aad3747 100644
--- a/lib/stack/rte_stack_lf_generic.h
+++ b/lib/stack/rte_stack_lf_generic.h
@@ -26,8 +26,8 @@ 
 	 * elements. If the mempool is near-empty to the point that this is a
 	 * concern, the user should consider increasing the mempool size.
 	 */
-	return (unsigned int)rte_atomic64_read((rte_atomic64_t *)
-			&s->stack_lf.used.len);
+	/* NOTE: review for potential ordering optimization */
+	return __atomic_load_n(&s->stack_lf.used.len, __ATOMIC_SEQ_CST);
 }
 
 static __rte_always_inline void
@@ -67,8 +67,8 @@ 
 				1, __ATOMIC_RELEASE,
 				__ATOMIC_RELAXED);
 	} while (success == 0);
-
-	rte_atomic64_add((rte_atomic64_t *)&list->len, num);
+	/* NOTE: review for potential ordering optimization */
+	__atomic_fetch_add(&list->len, num, __ATOMIC_SEQ_CST);
 }
 
 static __rte_always_inline struct rte_stack_lf_elem *
@@ -82,14 +82,16 @@ 
 
 	/* Reserve num elements, if available */
 	while (1) {
-		uint64_t len = rte_atomic64_read((rte_atomic64_t *)&list->len);
+		/* NOTE: review for potential ordering optimization */
+		uint64_t len = __atomic_load_n(&list->len, __ATOMIC_SEQ_CST);
 
 		/* Does the list contain enough elements? */
 		if (unlikely(len < num))
 			return NULL;
 
-		if (rte_atomic64_cmpset((volatile uint64_t *)&list->len,
-					len, len - num))
+		/* NOTE: review for potential ordering optimization */
+		if (__atomic_compare_exchange_n(&list->len, &len, len - num,
+			0, __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
 			break;
 	}
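
For readers less familiar with the builtin CAS, here is a minimal standalone sketch of the reserve-loop pattern that the last hunk implements, assuming a hypothetical list_len counter (illustrative only, not the DPDK code):

#include <stdint.h>

static uint64_t list_len;	/* hypothetical element count */

/* Try to reserve num elements; returns 1 on success, 0 if too few remain. */
static int
reserve(uint64_t num)
{
	uint64_t len = __atomic_load_n(&list_len, __ATOMIC_SEQ_CST);

	while (1) {
		if (len < num)
			return 0;
		/* On failure the builtin stores the current value back into
		 * len, so the loop retries with a fresh snapshot. */
		if (__atomic_compare_exchange_n(&list_len, &len, len - num,
				0 /* strong */, __ATOMIC_SEQ_CST,
				__ATOMIC_SEQ_CST))
			return 1;
	}
}

Unlike the patch, which re-reads len at the top of each iteration, this sketch relies on the failure path of __atomic_compare_exchange_n updating the expected value; both forms are correct.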