Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update libbson to 1.24.2 #47

Merged
merged 9 commits into from
Aug 1, 2023
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
67 changes: 55 additions & 12 deletions src/bson/bson-atomic.c
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ bson_memory_barrier (void)
static int8_t gEmulAtomicLock = 0;

static void
_lock_emul_atomic ()
_lock_emul_atomic (void)
{
int i;
if (bson_atomic_int8_compare_exchange_weak (
Expand All @@ -78,7 +78,7 @@ _lock_emul_atomic ()
}

static void
_unlock_emul_atomic ()
_unlock_emul_atomic (void)
{
int64_t rv = bson_atomic_int8_exchange (
&gEmulAtomicLock, 0, bson_memory_order_release);
Expand All @@ -91,6 +91,9 @@ _bson_emul_atomic_int64_fetch_add (volatile int64_t *p,
enum bson_memory_order _unused)
{
int64_t ret;

BSON_UNUSED (_unused);

_lock_emul_atomic ();
ret = *p;
*p += n;
Expand All @@ -104,6 +107,9 @@ _bson_emul_atomic_int64_exchange (volatile int64_t *p,
enum bson_memory_order _unused)
{
int64_t ret;

BSON_UNUSED (_unused);

_lock_emul_atomic ();
ret = *p;
*p = n;
Expand All @@ -118,6 +124,9 @@ _bson_emul_atomic_int64_compare_exchange_strong (volatile int64_t *p,
enum bson_memory_order _unused)
{
int64_t ret;

BSON_UNUSED (_unused);

_lock_emul_atomic ();
ret = *p;
if (ret == expect_value) {
Expand Down Expand Up @@ -145,6 +154,9 @@ _bson_emul_atomic_int32_fetch_add (volatile int32_t *p,
enum bson_memory_order _unused)
{
int32_t ret;

BSON_UNUSED (_unused);

_lock_emul_atomic ();
ret = *p;
*p += n;
Expand All @@ -158,6 +170,9 @@ _bson_emul_atomic_int32_exchange (volatile int32_t *p,
enum bson_memory_order _unused)
{
int32_t ret;

BSON_UNUSED (_unused);

_lock_emul_atomic ();
ret = *p;
*p = n;
Expand All @@ -172,6 +187,9 @@ _bson_emul_atomic_int32_compare_exchange_strong (volatile int32_t *p,
enum bson_memory_order _unused)
{
int32_t ret;

BSON_UNUSED (_unused);

_lock_emul_atomic ();
ret = *p;
if (ret == expect_value) {
Expand All @@ -195,10 +213,13 @@ _bson_emul_atomic_int32_compare_exchange_weak (volatile int32_t *p,

int
_bson_emul_atomic_int_fetch_add (volatile int *p,
int n,
enum bson_memory_order _unused)
int n,
enum bson_memory_order _unused)
{
int ret;

BSON_UNUSED (_unused);

_lock_emul_atomic ();
ret = *p;
*p += n;
Expand All @@ -208,10 +229,13 @@ _bson_emul_atomic_int_fetch_add (volatile int *p,

int
_bson_emul_atomic_int_exchange (volatile int *p,
int n,
enum bson_memory_order _unused)
int n,
enum bson_memory_order _unused)
{
int ret;

BSON_UNUSED (_unused);

_lock_emul_atomic ();
ret = *p;
*p = n;
Expand All @@ -221,11 +245,14 @@ _bson_emul_atomic_int_exchange (volatile int *p,

int
_bson_emul_atomic_int_compare_exchange_strong (volatile int *p,
int expect_value,
int new_value,
enum bson_memory_order _unused)
int expect_value,
int new_value,
enum bson_memory_order _unused)
{
int ret;

BSON_UNUSED (_unused);

_lock_emul_atomic ();
ret = *p;
if (ret == expect_value) {
Expand All @@ -237,11 +264,27 @@ _bson_emul_atomic_int_compare_exchange_strong (volatile int *p,

/* Weak compare-exchange for the emulated (lock-based) atomic path.
 * Because the emulation serializes every operation under a lock, a
 * "weak" exchange can never fail spuriously, so this simply forwards
 * to the strong variant with the same arguments. */
int
_bson_emul_atomic_int_compare_exchange_weak (volatile int *p,
                                             int expect_value,
                                             int new_value,
                                             enum bson_memory_order order)
{
   int actual;

   actual = _bson_emul_atomic_int_compare_exchange_strong (
      p, expect_value, new_value, order);
   return actual;
}

/* Emulated atomic pointer exchange: store n into *p and return the
 * pointer that was previously stored there.  Mutual exclusion comes
 * from the emulation spinlock, so the requested memory order is
 * irrelevant and deliberately ignored. */
void *
_bson_emul_atomic_ptr_exchange (void *volatile *p,
                                void *n,
                                enum bson_memory_order _unused)
{
   void *prev;

   BSON_UNUSED (_unused);

   _lock_emul_atomic ();
   prev = *p;
   *p = n;
   _unlock_emul_atomic ();

   return prev;
}
139 changes: 80 additions & 59 deletions src/bson/bson-atomic.h
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,7 @@ enum bson_memory_order {
#define MSVC_MEMORDER_SUFFIX(X)
#endif

#if defined(USE_LEGACY_GCC_ATOMICS) || \
(!defined(__clang__) && __GNUC__ == 4)
#if defined(USE_LEGACY_GCC_ATOMICS) || (!defined(__clang__) && __GNUC__ == 4) || defined(__xlC__)
#define BSON_USE_LEGACY_GCC_ATOMICS
#else
#undef BSON_USE_LEGACY_GCC_ATOMICS
Expand All @@ -62,6 +61,7 @@ enum bson_memory_order {
#ifdef BSON_USE_LEGACY_GCC_ATOMICS
#undef BSON_IF_GNU_LIKE
#define BSON_IF_GNU_LIKE(...)
#define BSON_IF_MSVC(...)
#define BSON_IF_GNU_LEGACY_ATOMICS(...) __VA_ARGS__
#else
#define BSON_IF_GNU_LEGACY_ATOMICS(...)
Expand All @@ -74,7 +74,14 @@ enum bson_memory_order {
#define BSON_EMULATE_INT
#endif

#define DEF_ATOMIC_OP(MSVC_Intrinsic, GNU_Intrinsic, GNU_Legacy_Intrinsic, Order, ...) \
/* CDRIVER-4264 Contrary to documentation, VS 2013 targeting x86 does not
* correctly/consistently provide _InterlockedPointerExchange. */
#if defined(_MSC_VER) && _MSC_VER < 1900 && defined(_M_IX86)
#define BSON_EMULATE_PTR
#endif

#define DEF_ATOMIC_OP( \
MSVC_Intrinsic, GNU_Intrinsic, GNU_Legacy_Intrinsic, Order, ...) \
do { \
switch (Order) { \
case bson_memory_order_acq_rel: \
Expand Down Expand Up @@ -127,47 +134,43 @@ enum bson_memory_order {
} while (0)


#define DEF_ATOMIC_CMPEXCH_STRONG( \
VCSuffix1, VCSuffix2, GNU_MemOrder, Ptr, ExpectActualVar, NewValue) \
do { \
BSON_IF_MSVC (ExpectActualVar = BSON_CONCAT3 ( \
_InterlockedCompareExchange, VCSuffix1, VCSuffix2) ( \
Ptr, NewValue, ExpectActualVar);) \
BSON_IF_GNU_LIKE ( \
(void) __atomic_compare_exchange_n (Ptr, \
&ExpectActualVar, \
NewValue, \
false, /* Not weak */ \
GNU_MemOrder, \
GNU_MemOrder);) \
BSON_IF_GNU_LEGACY_ATOMICS ( \
__typeof__ (ExpectActualVar) _val; \
_val = __sync_val_compare_and_swap (Ptr, \
ExpectActualVar, \
NewValue); \
ExpectActualVar = _val;) \
#define DEF_ATOMIC_CMPEXCH_STRONG( \
VCSuffix1, VCSuffix2, GNU_MemOrder, Ptr, ExpectActualVar, NewValue) \
do { \
BSON_IF_MSVC (ExpectActualVar = BSON_CONCAT3 ( \
_InterlockedCompareExchange, VCSuffix1, VCSuffix2) ( \
Ptr, NewValue, ExpectActualVar);) \
BSON_IF_GNU_LIKE ( \
(void) __atomic_compare_exchange_n (Ptr, \
&ExpectActualVar, \
NewValue, \
false, /* Not weak */ \
GNU_MemOrder, \
GNU_MemOrder);) \
BSON_IF_GNU_LEGACY_ATOMICS ( \
__typeof__ (ExpectActualVar) _val; \
_val = __sync_val_compare_and_swap (Ptr, ExpectActualVar, NewValue); \
ExpectActualVar = _val;) \
} while (0)


#define DEF_ATOMIC_CMPEXCH_WEAK( \
VCSuffix1, VCSuffix2, GNU_MemOrder, Ptr, ExpectActualVar, NewValue) \
do { \
BSON_IF_MSVC (ExpectActualVar = BSON_CONCAT3 ( \
_InterlockedCompareExchange, VCSuffix1, VCSuffix2) ( \
Ptr, NewValue, ExpectActualVar);) \
BSON_IF_GNU_LIKE ( \
(void) __atomic_compare_exchange_n (Ptr, \
&ExpectActualVar, \
NewValue, \
true, /* Yes weak */ \
GNU_MemOrder, \
GNU_MemOrder);) \
BSON_IF_GNU_LEGACY_ATOMICS ( \
__typeof__ (ExpectActualVar) _val; \
_val = __sync_val_compare_and_swap (Ptr, \
ExpectActualVar, \
NewValue); \
ExpectActualVar = _val;) \
#define DEF_ATOMIC_CMPEXCH_WEAK( \
VCSuffix1, VCSuffix2, GNU_MemOrder, Ptr, ExpectActualVar, NewValue) \
do { \
BSON_IF_MSVC (ExpectActualVar = BSON_CONCAT3 ( \
_InterlockedCompareExchange, VCSuffix1, VCSuffix2) ( \
Ptr, NewValue, ExpectActualVar);) \
BSON_IF_GNU_LIKE ( \
(void) __atomic_compare_exchange_n (Ptr, \
&ExpectActualVar, \
NewValue, \
true, /* Yes weak */ \
GNU_MemOrder, \
GNU_MemOrder);) \
BSON_IF_GNU_LEGACY_ATOMICS ( \
__typeof__ (ExpectActualVar) _val; \
_val = __sync_val_compare_and_swap (Ptr, ExpectActualVar, NewValue); \
ExpectActualVar = _val;) \
} while (0)


Expand Down Expand Up @@ -217,7 +220,10 @@ enum bson_memory_order {
default: \
BSON_UNREACHABLE ("Invalid bson_memory_order value"); \
}) \
BSON_IF_GNU_LEGACY_ATOMICS ({ __sync_synchronize (); return *a; }) \
BSON_IF_GNU_LEGACY_ATOMICS ({ \
__sync_synchronize (); \
return *a; \
}) \
} \
\
static BSON_INLINE Type bson_atomic_##NamePart##_exchange ( \
Expand Down Expand Up @@ -342,24 +348,34 @@ enum bson_memory_order {
#define DECL_ATOMIC_STDINT(Name, VCSuffix) \
DECL_ATOMIC_INTEGRAL (Name, Name##_t, VCSuffix)

#if defined(_MSC_VER) || defined (BSON_USE_LEGACY_GCC_ATOMICS)
/* MSVC expects precise types for their atomic intrinsics. */
DECL_ATOMIC_INTEGRAL (int8, char, 8);
#if defined(_MSC_VER) || defined(BSON_USE_LEGACY_GCC_ATOMICS)
/* MSVC and GCC require built-in types (not typedefs) for their atomic
* intrinsics. */
#if defined(_MSC_VER)
#define DECL_ATOMIC_INTEGRAL_INT8 char
#define DECL_ATOMIC_INTEGRAL_INT32 long
#define DECL_ATOMIC_INTEGRAL_INT long
#else
#define DECL_ATOMIC_INTEGRAL_INT8 signed char
#define DECL_ATOMIC_INTEGRAL_INT32 int
#define DECL_ATOMIC_INTEGRAL_INT int
#endif
DECL_ATOMIC_INTEGRAL (int8, DECL_ATOMIC_INTEGRAL_INT8, 8)
DECL_ATOMIC_INTEGRAL (int16, short, 16)
#if !defined (BSON_EMULATE_INT32)
DECL_ATOMIC_INTEGRAL (int32, long, )
#if !defined(BSON_EMULATE_INT32)
DECL_ATOMIC_INTEGRAL (int32, DECL_ATOMIC_INTEGRAL_INT32, )
#endif
#if !defined (BSON_EMULATE_INT)
DECL_ATOMIC_INTEGRAL (int, long, )
#if !defined(BSON_EMULATE_INT)
DECL_ATOMIC_INTEGRAL (int, DECL_ATOMIC_INTEGRAL_INT, )
#endif
#else
/* Other compilers that we support provide generic intrinsics */
DECL_ATOMIC_STDINT (int8, 8)
DECL_ATOMIC_STDINT (int16, 16)
#if !defined (BSON_EMULATE_INT32)
#if !defined(BSON_EMULATE_INT32)
DECL_ATOMIC_STDINT (int32, )
#endif
#if !defined (BSON_EMULATE_INT)
#if !defined(BSON_EMULATE_INT)
DECL_ATOMIC_INTEGRAL (int, int, )
#endif
#endif
Expand Down Expand Up @@ -424,6 +440,11 @@ _bson_emul_atomic_int_compare_exchange_weak (int volatile *val,
int new_value,
enum bson_memory_order);

BSON_EXPORT (void *)
_bson_emul_atomic_ptr_exchange (void *volatile *val,
void *v,
enum bson_memory_order);

BSON_EXPORT (void)
bson_thrd_yield (void);

Expand Down Expand Up @@ -599,8 +620,10 @@ bson_atomic_ptr_exchange (void *volatile *ptr,
void *new_value,
enum bson_memory_order ord)
{
#if defined(BSON_EMULATE_PTR)
return _bson_emul_atomic_ptr_exchange (ptr, new_value, ord);
#elif defined(BSON_USE_LEGACY_GCC_ATOMICS)
/* The older __sync_val_compare_and_swap also takes oldval */
#if defined(BSON_USE_LEGACY_GCC_ATOMICS)
DEF_ATOMIC_OP (_InterlockedExchangePointer,
,
__sync_val_compare_and_swap,
Expand All @@ -609,12 +632,8 @@ bson_atomic_ptr_exchange (void *volatile *ptr,
*ptr,
new_value);
#else
DEF_ATOMIC_OP (_InterlockedExchangePointer,
__atomic_exchange_n,
,
ord,
ptr,
new_value);
DEF_ATOMIC_OP (
_InterlockedExchangePointer, __atomic_exchange_n, , ord, ptr, new_value);
#endif
}

Expand Down Expand Up @@ -722,7 +741,7 @@ bson_atomic_ptr_fetch (void *volatile const *ptr, enum bson_memory_order ord)
* @brief Generate a full-fence memory barrier at the call site.
*/
static BSON_INLINE void
bson_atomic_thread_fence ()
bson_atomic_thread_fence (void)
{
BSON_IF_MSVC (MemoryBarrier ();)
BSON_IF_GNU_LIKE (__sync_synchronize ();)
Expand All @@ -745,6 +764,8 @@ BSON_EXPORT (int32_t) bson_atomic_int_add (volatile int32_t *p, int32_t n);
BSON_GNUC_DEPRECATED_FOR ("bson_atomic_int64_fetch_add")
BSON_EXPORT (int64_t) bson_atomic_int64_add (volatile int64_t *p, int64_t n);


#undef BSON_EMULATE_PTR
#undef BSON_EMULATE_INT32
#undef BSON_EMULATE_INT

Expand Down
Loading
Loading