author     Sirshak Das <sirshak.das@arm.com>    2018-10-03 22:53:51 +0000
committer  Damjan Marion <dmarion@me.com>       2018-10-19 07:10:47 +0000
commit     2f6d7bb93c157b874efb79a2d1583a4c368bf89a (patch)
tree       05dc2867c598cbb8d711f074b4b0eb62dd464f41 /src/vppinfra
parent     bf3443b0f852f5a4c551d12f926defbd047f2161 (diff)
vppinfra: add atomic macros for __sync builtins
This is the first part of the addition of atomic macros, covering only the
macros for the __sync builtins.

- Based on an earlier patch by Damjan (https://gerrit.fd.io/r/#/c/10729/)

Additionally:
- clib_atomic_release macro added and used in the absence of any memory barrier.
- clib_atomic_bool_cmp_and_swap added.

Change-Id: Ie4e48c1e184a652018d1d0d87c4be80ddd180a3b
Original-patch-by: Damjan Marion <damarion@cisco.com>
Signed-off-by: Sirshak Das <sirshak.das@arm.com>
Reviewed-by: Honnappa Nagarahalli <honnappa.nagarahalli@arm.com>
Reviewed-by: Ola Liljedahl <ola.liljedahl@arm.com>
Reviewed-by: Steve Capper <steve.capper@arm.com>
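For orientation, here is a hedged sketch of how callers could use the new macros once this change is merged. The structure, field, and function names (demo_shared_t, demo_count_packet, demo_bump_epoch) are illustrative and not part of the patch; only the clib_atomic_* macro names come from it.

/* Hypothetical caller of the new macros; only the macro names are from the patch. */
#include <vppinfra/clib.h>  /* after this patch, clib.h pulls in vppinfra/atomics.h */

typedef struct
{
  u64 n_packets;     /* shared counter, bumped from several threads */
  u32 config_epoch;  /* advanced with compare-and-swap */
} demo_shared_t;

static inline void
demo_count_packet (demo_shared_t * s)
{
  /* full-barrier atomic increment, replaces a raw __sync_fetch_and_add */
  clib_atomic_fetch_add (&s->n_packets, 1);
}

static inline int
demo_bump_epoch (demo_shared_t * s, u32 expected)
{
  /* non-zero only if the field matched `expected` and was swapped,
     replaces a raw __sync_bool_compare_and_swap */
  return clib_atomic_bool_cmp_and_swap (&s->config_epoch, expected,
                                        expected + 1);
}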
Diffstat (limited to 'src/vppinfra')
-rw-r--r--  src/vppinfra/CMakeLists.txt |  1
-rw-r--r--  src/vppinfra/atomics.h      | 45
-rw-r--r--  src/vppinfra/clib.h         |  1
-rw-r--r--  src/vppinfra/elog.c         |  2
-rw-r--r--  src/vppinfra/elog.h         |  2
-rw-r--r--  src/vppinfra/lock.h         | 10
-rw-r--r--  src/vppinfra/maplog.h       |  2
-rw-r--r--  src/vppinfra/mheap.c        |  2
-rw-r--r--  src/vppinfra/smp.h          |  2
9 files changed, 56 insertions(+), 11 deletions(-)
diff --git a/src/vppinfra/CMakeLists.txt b/src/vppinfra/CMakeLists.txt
index b279f90ae2a..7103d600780 100644
--- a/src/vppinfra/CMakeLists.txt
+++ b/src/vppinfra/CMakeLists.txt
@@ -158,6 +158,7 @@ set(VPPINFRA_HEADERS
tw_timer_template.c
tw_timer_template.h
types.h
+ atomics.h
unix.h
valgrind.h
valloc.h
diff --git a/src/vppinfra/atomics.h b/src/vppinfra/atomics.h
new file mode 100644
index 00000000000..8ddf13801df
--- /dev/null
+++ b/src/vppinfra/atomics.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2018 Cisco and/or its affiliates.
+ * Copyright (c) 2018 Arm Limited and/or its affiliates.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at:
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef included_clib_atomics_h
+#define included_clib_atomics_h
+
+/* Legacy __sync builtins */
+
+/* Full Barrier */
+#define clib_atomic_fetch_add(a, b) __sync_fetch_and_add(a, b)
+#define clib_atomic_fetch_sub(a, b) __sync_fetch_and_sub(a, b)
+#define clib_atomic_fetch_and(a, b) __sync_fetch_and_and(a, b)
+#define clib_atomic_fetch_xor(a, b) __sync_fetch_and_xor(a, b)
+#define clib_atomic_fetch_or(a, b) __sync_fetch_and_or(a, b)
+#define clib_atomic_fetch_nand(a, b) __sync_fetch_and_nand(a, b)
+
+#define clib_atomic_add_fetch(a, b) __sync_add_and_fetch(a, b)
+#define clib_atomic_sub_fetch(a, b) __sync_sub_and_fetch(a, b)
+#define clib_atomic_and_fetch(a, b) __sync_and_and_fetch(a, b)
+#define clib_atomic_xor_fetch(a, b) __sync_xor_and_fetch(a, b)
+#define clib_atomic_or_fetch(a, b) __sync_or_and_fetch(a, b)
+#define clib_atomic_nand_fetch(a, b) __sync_nand_and_fetch(a, b)
+
+#define clib_atomic_cmp_and_swap(addr,old,new) __sync_val_compare_and_swap(addr, old, new)
+#define clib_atomic_bool_cmp_and_swap(addr,old,new) __sync_bool_compare_and_swap(addr, old, new)
+
+/* Acquire Barrier */
+#define clib_atomic_test_and_set(a) __sync_lock_test_and_set(a, 1)
+/* Release Barrier */
+#define clib_atomic_release(a) __sync_lock_release(a)
+
+#endif /* included_clib_atomics_h */
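The test-and-set / release pair above is intended to be used exactly the way lock.h does below; here is a minimal spinlock sketch under that assumption (the demo_lock_t type and demo_* function names are illustrative, not part of the patch).

/* Minimal spinlock built on the new macros; names are hypothetical.
 * clib_atomic_test_and_set (a) expands to __sync_lock_test_and_set (a, 1)
 * (acquire barrier); clib_atomic_release (a) expands to
 * __sync_lock_release (a) (release barrier). */
typedef volatile u32 demo_lock_t;

static inline void
demo_lock (demo_lock_t * l)
{
  /* spin until the previous value was 0, i.e. this thread took the lock */
  while (clib_atomic_test_and_set (l))
    ;
}

static inline void
demo_unlock (demo_lock_t * l)
{
  /* store 0 with release semantics */
  clib_atomic_release (l);
}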
diff --git a/src/vppinfra/clib.h b/src/vppinfra/clib.h
index a6f88245d79..95dadd9c09d 100644
--- a/src/vppinfra/clib.h
+++ b/src/vppinfra/clib.h
@@ -46,6 +46,7 @@
#endif
#include <vppinfra/types.h>
+#include <vppinfra/atomics.h>
/* Global DEBUG flag. Setting this to 1 or 0 turns off
ASSERT (see vppinfra/error.h) & other debugging code. */
diff --git a/src/vppinfra/elog.c b/src/vppinfra/elog.c
index a86fadeaa39..c6902eb86ea 100644
--- a/src/vppinfra/elog.c
+++ b/src/vppinfra/elog.c
@@ -46,7 +46,7 @@ static inline void
elog_lock (elog_main_t * em)
{
if (PREDICT_FALSE (em->lock != 0))
- while (__sync_lock_test_and_set (em->lock, 1))
+ while (clib_atomic_test_and_set (em->lock))
;
}
diff --git a/src/vppinfra/elog.h b/src/vppinfra/elog.h
index d50c9a6500d..322c2c63ae7 100644
--- a/src/vppinfra/elog.h
+++ b/src/vppinfra/elog.h
@@ -313,7 +313,7 @@ elog_event_data_inline (elog_main_t * em,
ASSERT (is_pow2 (vec_len (em->event_ring)));
if (em->lock)
- ei = clib_smp_atomic_add (&em->n_total_events, 1);
+ ei = clib_atomic_fetch_add (&em->n_total_events, 1);
else
ei = em->n_total_events++;
diff --git a/src/vppinfra/lock.h b/src/vppinfra/lock.h
index dd79c40b7f2..4645378360d 100644
--- a/src/vppinfra/lock.h
+++ b/src/vppinfra/lock.h
@@ -73,7 +73,7 @@ clib_spinlock_free (clib_spinlock_t * p)
static_always_inline void
clib_spinlock_lock (clib_spinlock_t * p)
{
- while (__sync_lock_test_and_set (&(*p)->lock, 1))
+ while (clib_atomic_test_and_set (&(*p)->lock))
CLIB_PAUSE ();
CLIB_LOCK_DBG (p);
}
@@ -138,13 +138,13 @@ clib_rwlock_free (clib_rwlock_t * p)
always_inline void
clib_rwlock_reader_lock (clib_rwlock_t * p)
{
- while (__sync_lock_test_and_set (&(*p)->n_readers_lock, 1))
+ while (clib_atomic_test_and_set (&(*p)->n_readers_lock))
CLIB_PAUSE ();
(*p)->n_readers += 1;
if ((*p)->n_readers == 1)
{
- while (__sync_lock_test_and_set (&(*p)->writer_lock, 1))
+ while (clib_atomic_test_and_set (&(*p)->writer_lock))
CLIB_PAUSE ();
}
CLIB_MEMORY_BARRIER ();
@@ -159,7 +159,7 @@ clib_rwlock_reader_unlock (clib_rwlock_t * p)
ASSERT ((*p)->n_readers > 0);
CLIB_LOCK_DBG_CLEAR (p);
- while (__sync_lock_test_and_set (&(*p)->n_readers_lock, 1))
+ while (clib_atomic_test_and_set (&(*p)->n_readers_lock))
CLIB_PAUSE ();
(*p)->n_readers -= 1;
@@ -176,7 +176,7 @@ clib_rwlock_reader_unlock (clib_rwlock_t * p)
always_inline void
clib_rwlock_writer_lock (clib_rwlock_t * p)
{
- while (__sync_lock_test_and_set (&(*p)->writer_lock, 1))
+ while (clib_atomic_test_and_set (&(*p)->writer_lock))
CLIB_PAUSE ();
CLIB_LOCK_DBG (p);
}
diff --git a/src/vppinfra/maplog.h b/src/vppinfra/maplog.h
index f7d2f75a95b..ea6a835c7b5 100644
--- a/src/vppinfra/maplog.h
+++ b/src/vppinfra/maplog.h
@@ -137,7 +137,7 @@ clib_maplog_get_entry (clib_maplog_main_t * mm)
ASSERT (mm->flags & CLIB_MAPLOG_FLAG_INIT);
- my_record_index = __sync_fetch_and_add (&mm->next_record_index, 1);
+ my_record_index = clib_atomic_fetch_add (&mm->next_record_index, 1);
/* Time to unmap and create a new logfile? */
if (PREDICT_FALSE ((my_record_index & (mm->file_size_in_records - 1)) == 0))
diff --git a/src/vppinfra/mheap.c b/src/vppinfra/mheap.c
index 5b71873b9ed..0a62943e2f2 100644
--- a/src/vppinfra/mheap.c
+++ b/src/vppinfra/mheap.c
@@ -63,7 +63,7 @@ mheap_maybe_lock (void *v)
return;
}
- while (__sync_lock_test_and_set (&h->lock, 1))
+ while (clib_atomic_test_and_set (&h->lock))
;
h->owner_cpu = my_cpu;
diff --git a/src/vppinfra/smp.h b/src/vppinfra/smp.h
index e4ab66ad5c3..7146e51ac77 100644
--- a/src/vppinfra/smp.h
+++ b/src/vppinfra/smp.h
@@ -41,9 +41,7 @@
#include <vppinfra/cache.h>
#include <vppinfra/os.h> /* for os_panic */
-#define clib_smp_compare_and_swap(addr,new,old) __sync_val_compare_and_swap(addr,old,new)
#define clib_smp_swap(addr,new) __sync_lock_test_and_set(addr,new)
-#define clib_smp_atomic_add(addr,increment) __sync_fetch_and_add(addr,increment)
#if defined (i386) || defined (__x86_64__)
#define clib_smp_pause() do { asm volatile ("pause"); } while (0)
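One caller-visible detail in the smp.h hunk above: the removed clib_smp_compare_and_swap took its arguments as (addr, new, old), while the replacement clib_atomic_cmp_and_swap takes (addr, old, new), so migrating callers must swap the last two arguments. A hedged conversion sketch, with an illustrative shared word:

/* Illustrative migration; `word` and demo_try_increment are hypothetical. */
static inline u32
demo_try_increment (u32 * word)
{
  u32 observed = *word;
  /* before this patch:
   *   clib_smp_compare_and_swap (word, observed + 1, observed);
   * after: the old value comes before the new value; returns the prior contents */
  return clib_atomic_cmp_and_swap (word, observed, observed + 1);
}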