#ifndef ARCH_H
#define ARCH_H

/* Pick the atomic header matching the compiling language. */
#ifdef __cplusplus
#include <atomic>
#else
#include <stdatomic.h>
#endif
#include "../lib/types.h"

/*
 * NOTE(review): the identifiers below are members of an architecture
 * enum whose opening declaration lies outside this chunk — confirm
 * against the full file.
 */
arch_hppa,
arch_mips,
arch_aarch64,
arch_loongarch64,
arch_riscv64,
arch_generic,
#define ARCH_CPU_CLOCK_WRAPS

/*
 * Thin wrappers over the C11 / C++11 atomic operations so the same
 * call sites work whether this header is compiled as C or as C++.
 * The C variants cast through an _Atomic-qualified pointer so they can
 * be used on plain (non-_Atomic-declared) objects.
 */
#ifdef __cplusplus
#define atomic_add(p, v) \
	std::atomic_fetch_add(p, (v))
#define atomic_sub(p, v) \
	std::atomic_fetch_sub(p, (v))
#define atomic_load_relaxed(p) \
	std::atomic_load_explicit(p, \
				  std::memory_order_relaxed)
#define atomic_load_acquire(p) \
	std::atomic_load_explicit(p, \
				  std::memory_order_acquire)
#define atomic_store_release(p, v) \
	std::atomic_store_explicit(p, (v), \
				   std::memory_order_release)
#else
#define atomic_add(p, v) \
	atomic_fetch_add((_Atomic typeof(*(p)) *)(p), (v))
#define atomic_sub(p, v) \
	atomic_fetch_sub((_Atomic typeof(*(p)) *)(p), (v))
#define atomic_load_relaxed(p) \
	atomic_load_explicit((_Atomic typeof(*(p)) *)(p), \
			     memory_order_relaxed)
#define atomic_load_acquire(p) \
	atomic_load_explicit((_Atomic typeof(*(p)) *)(p), \
			     memory_order_acquire)
#define atomic_store_release(p, v) \
	atomic_store_explicit((_Atomic typeof(*(p)) *)(p), (v), \
			      memory_order_release)
#endif
/* IWYU pragma: begin_exports */
#if defined(__i386__)
/*
 * NOTE(review): mapping __i386__ to the HPPA header looks suspicious —
 * an x86 header would be expected here; confirm against the full file
 * before relying on this branch.
 */
#include "arch-hppa.h"
#elif defined(__aarch64__)
#include "arch-aarch64.h"
#elif defined(__loongarch64)
#include "arch-loongarch64.h"
#elif defined(__riscv) && __riscv_xlen == 64
#include "arch-riscv64.h"
#else
#warning "Unknown architecture, attempting to use generic model."
#include "arch-generic.h"
#endif
/*
 * tsc_barrier - issue a full memory barrier via the GCC legacy
 * __sync_synchronize() builtin. Only compiled on non-x86_64 targets
 * where the build system has detected __sync support
 * (CONFIG_SYNC_SYNC); presumably x86_64 provides its own barrier in
 * its arch header — confirm against the full file.
 */
#if !defined(__x86_64__) && defined(CONFIG_SYNC_SYNC)
static inline void tsc_barrier(void)
{
	__sync_synchronize();
}
#endif

#include "../lib/ffz.h"
/* IWYU pragma: end_exports */