AtomicImpl-c-api.h

#pragma once

#include <assert.h>
#include <stdint.h>

#ifdef __EMSCRIPTEN__
#include <emscripten/threading.h>
#endif
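
// Not part of the original header: ASSERT_ALIGNMENT and IL2CPP_SIZEOF_VOID_P are normally
// supplied by other il2cpp configuration headers. Minimal fallback definitions are sketched
// here (guarded so any real definitions win) purely to keep this excerpt self-contained.
#ifndef ASSERT_ALIGNMENT
#define ASSERT_ALIGNMENT(ptr, alignment) assert((((uintptr_t)(ptr)) % (alignment)) == 0 && "misaligned atomic operand")
#endif
#ifndef IL2CPP_SIZEOF_VOID_P
#define IL2CPP_SIZEOF_VOID_P sizeof(void*)
#endif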

// On ARM64 the result of each __sync intrinsic is followed by an explicit full barrier
// (__sync_synchronize); on other targets the intrinsic is used unchanged.
#ifdef IL2CPP_TARGET_ARM64
#define IL2CPP_BARRIER_PATCH(INTRIN) \
    ({ \
        __typeof__(INTRIN) atomic_ret__ = (INTRIN); \
        __sync_synchronize(); \
        atomic_ret__; \
    })
#else
#define IL2CPP_BARRIER_PATCH(INTRIN) (INTRIN)
#endif

inline int32_t UnityPalAdd(volatile int32_t* location1, int32_t value)
{
    ASSERT_ALIGNMENT(location1, 4);
    return IL2CPP_BARRIER_PATCH(__sync_add_and_fetch(location1, value));
}

#if IL2CPP_ENABLE_INTERLOCKED_64_REQUIRED_ALIGNMENT
inline int64_t UnityPalAdd64(volatile int64_t* location1, int64_t value)
{
    ASSERT_ALIGNMENT(location1, 8);
    return IL2CPP_BARRIER_PATCH(__sync_add_and_fetch(location1, value));
}
#endif

inline int32_t UnityPalIncrement(volatile int32_t* value)
{
    ASSERT_ALIGNMENT(value, 4);
    return IL2CPP_BARRIER_PATCH(__sync_add_and_fetch(value, 1));
}

#if IL2CPP_ENABLE_INTERLOCKED_64_REQUIRED_ALIGNMENT
inline int64_t UnityPalIncrement64(volatile int64_t* value)
{
    ASSERT_ALIGNMENT(value, 8);
    return IL2CPP_BARRIER_PATCH(__sync_add_and_fetch(value, 1));
}
#endif

inline int32_t UnityPalDecrement(volatile int32_t* value)
{
    ASSERT_ALIGNMENT(value, 4);
    return IL2CPP_BARRIER_PATCH(__sync_add_and_fetch(value, -1));
}

#if IL2CPP_ENABLE_INTERLOCKED_64_REQUIRED_ALIGNMENT
inline int64_t UnityPalDecrement64(volatile int64_t* value)
{
    ASSERT_ALIGNMENT(value, 8);
    return IL2CPP_BARRIER_PATCH(__sync_add_and_fetch(value, -1));
}
#endif

inline int32_t UnityPalCompareExchange(volatile int32_t* dest, int32_t exchange, int32_t comparand)
{
    ASSERT_ALIGNMENT(dest, 4);
    return IL2CPP_BARRIER_PATCH(__sync_val_compare_and_swap(dest, comparand, exchange));
}

inline int64_t UnityPalCompareExchange64(volatile int64_t* dest, int64_t exchange, int64_t comparand)
{
    ASSERT_ALIGNMENT(dest, 8);
#ifdef __EMSCRIPTEN__
    // emscripten_atomic_cas_u64 returns the value observed at *dest, so it can be returned
    // directly; re-reading *dest after a failed compare would race with other writers.
    return (int64_t)emscripten_atomic_cas_u64((void*)dest, comparand, exchange);
#else
    return IL2CPP_BARRIER_PATCH(__sync_val_compare_and_swap(dest, comparand, exchange));
#endif
}

inline void* UnityPalCompareExchangePointer(void* volatile* dest, void* exchange, void* comparand)
{
    ASSERT_ALIGNMENT(dest, IL2CPP_SIZEOF_VOID_P);
    return IL2CPP_BARRIER_PATCH(__sync_val_compare_and_swap(dest, comparand, exchange));
}

inline int32_t UnityPalExchange(volatile int32_t* dest, int32_t exchange)
{
    ASSERT_ALIGNMENT(dest, 4);
#ifdef __EMSCRIPTEN__
    return emscripten_atomic_exchange_u32((void*)dest, exchange);
#else
    // Emulate a full-barrier exchange with a compare-and-swap loop.
    int32_t prev;
    do
    {
        prev = *dest;
    }
    while (!IL2CPP_BARRIER_PATCH(__sync_bool_compare_and_swap(dest, prev, exchange)));
    return prev;
#endif
}

#if IL2CPP_ENABLE_INTERLOCKED_64_REQUIRED_ALIGNMENT
inline int64_t UnityPalExchange64(volatile int64_t* dest, int64_t exchange)
{
    ASSERT_ALIGNMENT(dest, 8);
#ifdef __EMSCRIPTEN__
    return emscripten_atomic_exchange_u64((void*)dest, exchange);
#else
    int64_t prev;
    do
    {
        prev = *dest;
    }
    while (!IL2CPP_BARRIER_PATCH(__sync_bool_compare_and_swap(dest, prev, exchange)));
    return prev;
#endif
}
#endif

inline void* UnityPalExchangePointer(void* volatile* dest, void* exchange)
{
    ASSERT_ALIGNMENT(dest, IL2CPP_SIZEOF_VOID_P);
#ifdef __EMSCRIPTEN__
    // Pointers are 32 bits wide here, so the pointer exchange maps onto the u32 exchange.
    // Round-tripping through uintptr_t avoids pointer/integer truncation warnings.
    return (void*)(uintptr_t)emscripten_atomic_exchange_u32((void*)dest, (uint32_t)(uintptr_t)exchange);
#else
    // Emulate a full-barrier pointer exchange with a compare-and-swap loop.
    void* prev;
    do
    {
        prev = *dest;
    }
    while (!IL2CPP_BARRIER_PATCH(__sync_bool_compare_and_swap(dest, prev, exchange)));
    return prev;
#endif
}

inline int64_t UnityPalRead64(volatile int64_t* addr)
{
    ASSERT_ALIGNMENT(addr, 8);
    // Adding zero turns the fetch-and-add intrinsic into an atomic 64-bit load with
    // full-barrier semantics, returning the value observed at *addr.
    return IL2CPP_BARRIER_PATCH(__sync_fetch_and_add(addr, 0));
}
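
// Illustrative only (not part of the original header): a caller-side sketch of how a 64-bit
// atomic store could be built on top of the primitives above, assuming no dedicated write
// primitive is available. The function name is a hypothetical example, not il2cpp API.
//
//     static void ExampleAtomicWrite64(volatile int64_t* addr, int64_t value)
//     {
//         int64_t old = UnityPalRead64(addr);
//         int64_t seen;
//         // Retry until the compare-and-swap observes the value we last read,
//         // i.e. no other thread wrote in between.
//         while ((seen = UnityPalCompareExchange64(addr, value, old)) != old)
//             old = seen;
//     }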