path: root/ruby_atomic.h
#ifndef INTERNAL_ATOMIC_H
#define INTERNAL_ATOMIC_H

#include "ruby/atomic.h"

/* Shim macros only: map the historical ATOMIC_* names onto the public
 * RUBY_ATOMIC_* API from ruby/atomic.h. */
#define ATOMIC_ADD(var, val) RUBY_ATOMIC_ADD(var, val)
#define ATOMIC_CAS(var, oldval, newval) RUBY_ATOMIC_CAS(var, oldval, newval)
#define ATOMIC_DEC(var) RUBY_ATOMIC_DEC(var)
#define ATOMIC_EXCHANGE(var, val) RUBY_ATOMIC_EXCHANGE(var, val)
#define ATOMIC_FETCH_ADD(var, val) RUBY_ATOMIC_FETCH_ADD(var, val)
#define ATOMIC_FETCH_SUB(var, val) RUBY_ATOMIC_FETCH_SUB(var, val)
#define ATOMIC_INC(var) RUBY_ATOMIC_INC(var)
#define ATOMIC_OR(var, val) RUBY_ATOMIC_OR(var, val)
#define ATOMIC_PTR_CAS(var, oldval, newval) RUBY_ATOMIC_PTR_CAS(var, oldval, newval)
#define ATOMIC_PTR_EXCHANGE(var, val) RUBY_ATOMIC_PTR_EXCHANGE(var, val)
#define ATOMIC_SET(var, val) RUBY_ATOMIC_SET(var, val)
#define ATOMIC_SIZE_ADD(var, val) RUBY_ATOMIC_SIZE_ADD(var, val)
#define ATOMIC_SIZE_CAS(var, oldval, newval) RUBY_ATOMIC_SIZE_CAS(var, oldval, newval)
#define ATOMIC_SIZE_DEC(var) RUBY_ATOMIC_SIZE_DEC(var)
#define ATOMIC_SIZE_EXCHANGE(var, val) RUBY_ATOMIC_SIZE_EXCHANGE(var, val)
#define ATOMIC_SIZE_INC(var) RUBY_ATOMIC_SIZE_INC(var)
#define ATOMIC_SIZE_SUB(var, val) RUBY_ATOMIC_SIZE_SUB(var, val)
#define ATOMIC_SUB(var, val) RUBY_ATOMIC_SUB(var, val)
#define ATOMIC_VALUE_CAS(var, oldval, val) RUBY_ATOMIC_VALUE_CAS(var, oldval, val)
#define ATOMIC_VALUE_EXCHANGE(var, val) RUBY_ATOMIC_VALUE_EXCHANGE(var, val)
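
/*
 * Usage sketch (illustrative only; `counter` here is a hypothetical
 * variable, not something defined by this header):
 *
 *     static rb_atomic_t counter;
 *     ATOMIC_INC(counter);                              // counter += 1, atomically
 *     rb_atomic_t prev = ATOMIC_FETCH_ADD(counter, 2);  // returns the old value
 *     ATOMIC_CAS(counter, prev + 2, 0);                 // swap to 0 if still prev + 2
 */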

/* Atomically loads *ptr with relaxed memory ordering: the read itself is
 * atomic, but no ordering of surrounding memory accesses is implied.
 * Falls back to a plain volatile read where GCC-style atomic builtins
 * are unavailable. */
static inline rb_atomic_t
rbimpl_atomic_load_relaxed(volatile rb_atomic_t *ptr)
{
#if defined(HAVE_GCC_ATOMIC_BUILTINS)
    return __atomic_load_n(ptr, __ATOMIC_RELAXED);
#else
    return *ptr;
#endif
}
#define ATOMIC_LOAD_RELAXED(var) rbimpl_atomic_load_relaxed(&(var))

/* A uint64_t forced to 8-byte alignment so that it can be accessed
 * atomically even on 32-bit targets. */
typedef RBIMPL_ALIGNAS(8) uint64_t rbimpl_atomic_uint64_t;

/* Atomically loads a 64-bit value with relaxed memory ordering, using the
 * best primitive the platform provides. */
static inline uint64_t
rbimpl_atomic_u64_load_relaxed(const volatile rbimpl_atomic_uint64_t *value)
{
#if defined(HAVE_GCC_ATOMIC_BUILTINS_64)
    return __atomic_load_n(value, __ATOMIC_RELAXED);
#elif defined(_WIN32)
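    /* Read via compare-and-swap: exchanging the current value for itself
     * returns the 64-bit contents atomically without modifying them. */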
    uint64_t val = *value;
    return InterlockedCompareExchange64(RBIMPL_CAST((uint64_t *)value), val, val);
#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
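    /* Same CAS-as-load technique, via Solaris <atomic.h>. */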
    uint64_t val = *value;
    return atomic_cas_64(value, val, val);
#else
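    /* Last-resort fallback: a plain volatile read, which is atomic only
     * where aligned 64-bit loads are a single instruction. */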
    return *value;
#endif
}
#define ATOMIC_U64_LOAD_RELAXED(var) rbimpl_atomic_u64_load_relaxed(&(var))
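
/*
 * Example (hypothetical `stat` variable; the rbimpl_atomic_uint64_t
 * typedef provides the 8-byte alignment this helper relies on):
 *
 *     static rbimpl_atomic_uint64_t stat;
 *     uint64_t snapshot = ATOMIC_U64_LOAD_RELAXED(stat);
 */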

/* Atomically stores a 64-bit value with relaxed memory ordering. */
static inline void
rbimpl_atomic_u64_set_relaxed(volatile rbimpl_atomic_uint64_t *address, uint64_t value)
{
#if defined(HAVE_GCC_ATOMIC_BUILTINS_64)
    __atomic_store_n(address, value, __ATOMIC_RELAXED);
#elif defined(_WIN32)
    InterlockedExchange64(address, value);
#elif defined(__sun) && defined(HAVE_ATOMIC_H) && (defined(_LP64) || defined(_I32LPx))
    atomic_swap_64(address, value);
#else
    *address = value;
#endif
}
#define ATOMIC_U64_SET_RELAXED(var, val) rbimpl_atomic_u64_set_relaxed(&(var), val)
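
/*
 * Example (continuing the hypothetical `stat` from above):
 *
 *     ATOMIC_U64_SET_RELAXED(stat, snapshot + 1);
 */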

#endif /* INTERNAL_ATOMIC_H */