summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorIan Moffett <ian@osmora.org>2025-09-16 00:17:07 -0400
committerIan Moffett <ian@osmora.org>2025-09-16 00:17:07 -0400
commit83cdb493e5e74871548e20e2885416b38245dc75 (patch)
tree9b8b95a04c476da9c8e36ee8aeb93735927486db
parentfe60a37cfe072ec41663a69aefb02816af4067db (diff)
sys: Add sys/atomic.h
Signed-off-by: Ian Moffett <ian@osmora.org>
-rw-r--r--src/sys/include/sys/atomic.h131
1 files changed, 131 insertions, 0 deletions
diff --git a/src/sys/include/sys/atomic.h b/src/sys/include/sys/atomic.h
new file mode 100644
index 0000000..1ab4e9f
--- /dev/null
+++ b/src/sys/include/sys/atomic.h
@@ -0,0 +1,131 @@
+/*
+ * Copyright (c) 2025 Ian Marco Moffett and L5 engineers
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of the project nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef _SYS_ATOMIC_H_
+#define _SYS_ATOMIC_H_
+
+#if defined(_KERNEL)
+#include <sys/types.h>
+#else
+#include <stdint.h>
+#endif /* _KERNEL */
+
/*
 * Atomically add `v` to the value at `p` and return the
 * new (post-add) value.
 */
static inline unsigned long
atomic_add_long_nv(volatile unsigned long *p, unsigned long v)
{
    unsigned long nv;

    nv = __sync_add_and_fetch(p, v);
    return nv;
}
+
/*
 * Atomically add `v` to the value at `p` and return the
 * new (post-add) value.
 */
static inline unsigned int
atomic_add_int_nv(volatile unsigned int *p, unsigned int v)
{
    unsigned int nv;

    nv = __sync_add_and_fetch(p, v);
    return nv;
}
+
/*
 * Atomically add `v` to the 64-bit value at `p` and return the
 * new (post-add) value.
 *
 * Fixed: the return type and addend were previously `unsigned int`,
 * which silently truncated any result above 2^32 - 1 even though the
 * underlying atomic operates on the full 64-bit object.
 */
static inline uint64_t
atomic_add_64_nv(volatile uint64_t *p, uint64_t v)
{
    return __sync_add_and_fetch(p, v);
}
+
/*
 * Atomically subtract `v` from the value at `p` and return the
 * new (post-subtract) value.
 */
static inline unsigned long
atomic_sub_long_nv(volatile unsigned long *p, unsigned long v)
{
    unsigned long nv;

    nv = __sync_sub_and_fetch(p, v);
    return nv;
}
+
/*
 * Atomically subtract `v` from the value at `p` and return the
 * new (post-subtract) value.
 */
static inline unsigned int
atomic_sub_int_nv(volatile unsigned int *p, unsigned int v)
{
    unsigned int nv;

    nv = __sync_sub_and_fetch(p, v);
    return nv;
}
+
/*
 * Atomically subtract `v` from the 64-bit value at `p` and return
 * the new (post-subtract) value.
 *
 * Fixed: the return type and subtrahend were previously `unsigned int`,
 * which silently truncated any result above 2^32 - 1 even though the
 * underlying atomic operates on the full 64-bit object.
 */
static inline uint64_t
atomic_sub_64_nv(volatile uint64_t *p, uint64_t v)
{
    return __sync_sub_and_fetch(p, v);
}
+
/*
 * Atomically load and return the value at `p`.
 *
 * `v` is the __ATOMIC_* memory-order constant to load with
 * (e.g. __ATOMIC_SEQ_CST).
 */
static inline unsigned int
atomic_load_int_nv(volatile unsigned int *p, unsigned int v)
{
    unsigned int cur;

    cur = __atomic_load_n(p, v);
    return cur;
}
+
/*
 * Atomically load and return the `unsigned long` value at `p`.
 *
 * `v` is the __ATOMIC_* memory-order constant to load with
 * (e.g. __ATOMIC_SEQ_CST).
 *
 * Fixed: the return type was previously `unsigned int`, which
 * truncated loaded values above 2^32 - 1 on LP64 targets where
 * `unsigned long` is 64 bits.
 */
static inline unsigned long
atomic_load_long_nv(volatile unsigned long *p, unsigned int v)
{
    return __atomic_load_n(p, v);
}
+
/*
 * Atomically load and return the 64-bit value at `p`.
 *
 * `v` is the __ATOMIC_* memory-order constant to load with
 * (e.g. __ATOMIC_SEQ_CST).
 *
 * Fixed: the return type was previously `unsigned int`, which
 * truncated loaded values above 2^32 - 1.
 */
static inline uint64_t
atomic_load_64_nv(volatile uint64_t *p, unsigned int v)
{
    return __atomic_load_n(p, v);
}
+
/*
 * Atomically store `nv` to `*p`.
 *
 * `v` is the __ATOMIC_* memory-order constant to store with
 * (e.g. __ATOMIC_SEQ_CST).
 */
static inline void
atomic_store_int_nv(volatile unsigned int *p, int nv, unsigned int v)
{
    __atomic_store_n(p, nv, v);
}
+
/*
 * Atomically store `nv` to `*p`.
 *
 * `v` is the __ATOMIC_* memory-order constant to store with
 * (e.g. __ATOMIC_SEQ_CST).
 */
static inline void
atomic_store_long_nv(volatile unsigned long *p, long nv, unsigned int v)
{
    __atomic_store_n(p, nv, v);
}
+
/*
 * Atomically store the 64-bit value `nv` to `*p`.
 *
 * `v` is the __ATOMIC_* memory-order constant to store with
 * (e.g. __ATOMIC_SEQ_CST).
 *
 * Fixed: `nv` was previously declared `long`, which truncates the
 * stored value on ILP32 targets (where `long` is 32 bits) and
 * sign-mismatches the unsigned 64-bit destination; take `uint64_t`
 * to match the operand width.
 */
static inline void
atomic_store_64_nv(volatile uint64_t *p, uint64_t nv, unsigned int v)
{
    __atomic_store_n(p, nv, v);
}
+
/*
 * Convenience macros over the *_nv helpers above.  The load/store
 * wrappers pin the memory-order argument to __ATOMIC_SEQ_CST
 * (sequentially consistent), the strongest ordering.
 */

/* Atomic increment (and fetch) operations */
#define atomic_inc_long(P) atomic_add_long_nv((P), 1)
#define atomic_inc_int(P) atomic_add_int_nv((P), 1)
#define atomic_inc_64(P) atomic_add_64_nv((P), 1)

/* Atomic decrement (and fetch) operations */
#define atomic_dec_long(P) atomic_sub_long_nv((P), 1)
#define atomic_dec_int(P) atomic_sub_int_nv((P), 1)
#define atomic_dec_64(P) atomic_sub_64_nv((P), 1)

/* Atomic load operations (sequentially consistent) */
#define atomic_load_int(P) atomic_load_int_nv((P), __ATOMIC_SEQ_CST)
#define atomic_load_long(P) atomic_load_long_nv((P), __ATOMIC_SEQ_CST)
#define atomic_load_64(P) atomic_load_64_nv((P), __ATOMIC_SEQ_CST)

/* Atomic store operations (sequentially consistent) */
#define atomic_store_int(P, NV) atomic_store_int_nv((P), (NV), __ATOMIC_SEQ_CST)
#define atomic_store_long(P, NV) atomic_store_long_nv((P), (NV), __ATOMIC_SEQ_CST)
#define atomic_store_64(P, NV) atomic_store_64_nv((P), (NV), __ATOMIC_SEQ_CST)
+
+#endif /* !_SYS_ATOMIC_H_ */