Attention, une mise à jour du service GitLab va être effectuée le mardi 30 novembre entre 17h30 et 18h00. Cette mise à jour va générer une interruption du service dont nous ne maîtrisons pas complètement la durée, mais qui ne devrait pas excéder quelques minutes. Cette mise à jour intermédiaire en version 14.0.12 nous permettra de pouvoir rapidement mettre à votre disposition une version plus récente.

Commit 94ded299 authored by (no author), committed by Jens Gustedt
Browse files

white space and copyright


Signed-off-by: Jens Gustedt <Jens.Gustedt@inria.fr>
parent 3a8c8ea4
......@@ -978,26 +978,26 @@ p99_extension \
**/
#define atomic_exchange(OBJP, DESIRED)
#else
#define atomic_exchange(OBJP, DESIRED) \
p99_extension \
({ \
P99_MACRO_PVAR(p00_objp, (OBJP), volatile); \
typedef __typeof__(P00_AT(p00_objp)) p00_base_t; \
typedef __typeof__(P00_AX(p00_objp)) p00_ubase_t; \
register p00_base_t const p00_des = (DESIRED); \
register p00_ubase_t p00_ret = P99_INIT; \
if (!atomic_is_lock_free(p00_objp)) { \
P99_SPIN_EXCLUDE(&p00_objp->p00_lock) { \
p00_ret.p00_t = P00_AT(p00_objp); \
P00_AT(p00_objp) = p00_des; \
} \
} else { \
P99_IF_EMPTY(P99_ATOMIC_LOCK_FREE_TYPES) \
() \
(register p00_ubase_t const p00_desm = { .p00_t = p00_des }; \
#define atomic_exchange(OBJP, DESIRED) \
p99_extension \
({ \
P99_MACRO_PVAR(p00_objp, (OBJP), volatile); \
typedef __typeof__(P00_AT(p00_objp)) p00_base_t; \
typedef __typeof__(P00_AX(p00_objp)) p00_ubase_t; \
register p00_base_t const p00_des = (DESIRED); \
register p00_ubase_t p00_ret = P99_INIT; \
if (!atomic_is_lock_free(p00_objp)) { \
P99_SPIN_EXCLUDE(&p00_objp->p00_lock) { \
p00_ret.p00_t = P00_AT(p00_objp); \
P00_AT(p00_objp) = p00_des; \
} \
} else { \
P99_IF_EMPTY(P99_ATOMIC_LOCK_FREE_TYPES) \
() \
(register p00_ubase_t const p00_desm = { .p00_t = p00_des }; \
p00_ret.p00_m = p00_atomic_exchange_n(&P00_AM(p00_objp), p00_desm.p00_m)); \
} \
p00_ret.p00_t; \
} \
p00_ret.p00_t; \
})
#endif
......@@ -1009,23 +1009,23 @@ p99_extension \
**
** @see atomic_int
**/
#define atomic_load(OBJP) \
p99_extension \
({ \
P99_MACRO_PVAR(p00_objp, (OBJP), volatile); \
typedef __typeof__(P00_AT(p00_objp)) p00_base_t; \
typedef __typeof__(P00_AX(p00_objp)) p00_ubase_t; \
typedef __typeof__(P00_AM(p00_objp)) p00_mbase_t; \
register p00_ubase_t p00_ret = P99_INIT; \
if (!atomic_is_lock_free(p00_objp)) { \
P99_SPIN_EXCLUDE(&p00_objp->p00_lock) \
p00_ret.p00_t = P00_AT(p00_objp); \
} else { \
P99_IF_EMPTY(P99_ATOMIC_LOCK_FREE_TYPES) \
(P00_AT(p00_objp)) \
#define atomic_load(OBJP) \
p99_extension \
({ \
P99_MACRO_PVAR(p00_objp, (OBJP), volatile); \
typedef __typeof__(P00_AT(p00_objp)) p00_base_t; \
typedef __typeof__(P00_AX(p00_objp)) p00_ubase_t; \
typedef __typeof__(P00_AM(p00_objp)) p00_mbase_t; \
register p00_ubase_t p00_ret = P99_INIT; \
if (!atomic_is_lock_free(p00_objp)) { \
P99_SPIN_EXCLUDE(&p00_objp->p00_lock) \
p00_ret.p00_t = P00_AT(p00_objp); \
} else { \
P99_IF_EMPTY(P99_ATOMIC_LOCK_FREE_TYPES) \
(P00_AT(p00_objp)) \
(p00_ret.p00_m = p00_atomic_load_n(&P00_AM(p00_objp), memory_order_seq_cst)); \
} \
p00_ret.p00_t = p00_ret.p00_t; \
} \
p00_ret.p00_t = p00_ret.p00_t; \
})
#define P00_CVT(EXP) ((void const*)(((struct { void const volatile* a; }){ .a = (EXP) }).a))
......@@ -1046,36 +1046,36 @@ p99_extension \
**
** @see atomic_int
**/
#define atomic_compare_exchange_weak(OBJP, EXPECTED, DESIRED) \
#define atomic_compare_exchange_weak(OBJP, EXPECTED, DESIRED) \
p00_atomic_compare_exchange(true, OBJP, EXPECTED, DESIRED, memory_order_seq_cst, memory_order_seq_cst,)
#define atomic_compare_exchange_strong(OBJP, EXPECTED, DESIRED) \
#define atomic_compare_exchange_strong(OBJP, EXPECTED, DESIRED) \
p00_atomic_compare_exchange(false, OBJP, EXPECTED, DESIRED, memory_order_seq_cst, memory_order_seq_cst,)
#define p00_atomic_compare_exchange(WEAK, OBJP, EXPECTED, DESIRED, SUCC, FAIL, ...) \
p99_extension \
({ \
P99_MACRO_PVAR(p00_objp, (OBJP), volatile); \
typedef __typeof__(P00_AT(p00_objp)) p00_base_t; \
typedef __typeof__(P00_AX(p00_objp)) p00_ubase_t; \
typedef __typeof__(P00_AM(p00_objp)) p00_mbase_t; \
/* Both, *EXPECTED and DESIRED must be assignment compatible with the base type */ \
register p00_base_t volatile* const p00_exp = (EXPECTED); \
register p00_ubase_t const p00_des = { .p00_t = (DESIRED) }; \
register _Bool p00_ret = false; \
if (!atomic_is_lock_free(p00_objp)) { \
P99_SPIN_EXCLUDE(&p00_objp->p00_lock) { \
p00_ret = !memcmp(P00_CVT(p00_exp), P00_CVT(&P00_AT(p00_objp)), sizeof *p00_exp); \
if (p00_ret) P00_AT(p00_objp) = p00_des.p00_t; \
else *p00_exp = P00_AT(p00_objp); \
} \
} \
P99_IF_EMPTY(P99_ATOMIC_LOCK_FREE_TYPES) \
(else p00_ret = false;) \
#define p00_atomic_compare_exchange(WEAK, OBJP, EXPECTED, DESIRED, SUCC, FAIL, ...) \
p99_extension \
({ \
P99_MACRO_PVAR(p00_objp, (OBJP), volatile); \
typedef __typeof__(P00_AT(p00_objp)) p00_base_t; \
typedef __typeof__(P00_AX(p00_objp)) p00_ubase_t; \
typedef __typeof__(P00_AM(p00_objp)) p00_mbase_t; \
/* Both, *EXPECTED and DESIRED must be assignment compatible with the base type */ \
register p00_base_t volatile* const p00_exp = (EXPECTED); \
register p00_ubase_t const p00_des = { .p00_t = (DESIRED) }; \
register _Bool p00_ret = false; \
if (!atomic_is_lock_free(p00_objp)) { \
P99_SPIN_EXCLUDE(&p00_objp->p00_lock) { \
p00_ret = !memcmp(P00_CVT(p00_exp), P00_CVT(&P00_AT(p00_objp)), sizeof *p00_exp); \
if (p00_ret) P00_AT(p00_objp) = p00_des.p00_t; \
else *p00_exp = P00_AT(p00_objp); \
} \
} \
P99_IF_EMPTY(P99_ATOMIC_LOCK_FREE_TYPES) \
(else p00_ret = false;) \
(else p00_ret = p00_atomic_compare_exchange_n(&P00_AM(p00_objp), ((p00_mbase_t*)p00_exp), p00_des.p00_m, \
(WEAK), memory_order_seq_cst, memory_order_seq_cst); \
) \
p00_ret; \
(WEAK), memory_order_seq_cst, memory_order_seq_cst); \
) \
p00_ret; \
})
#ifdef P00_DOXYGEN
......@@ -1091,23 +1091,23 @@ p99_extension
**/
#define atomic_store(OBJP, DES)
#else
#define atomic_store(OBJP, DES) \
p99_extension \
({ \
P99_MACRO_PVAR(p00_objp, (OBJP), volatile); \
P99_MACRO_VAR(p00_desp, (DES)); \
typedef __typeof__(P00_AT(p00_objp)) p00_base_t; \
typedef __typeof__(P00_AX(p00_objp)) p00_ubase_t; \
typedef __typeof__(P00_AM(p00_objp)) p00_mbase_t; \
register p00_ubase_t const p00_des = { .p00_t = (p00_desp) }; \
if (!atomic_is_lock_free(p00_objp)) { \
P99_SPIN_EXCLUDE(&p00_objp->p00_lock) \
P00_AT(p00_objp) = p00_des.p00_t; \
} else { \
P99_IF_EMPTY(P99_ATOMIC_LOCK_FREE_TYPES) \
(P00_AT(p00_objp)) \
#define atomic_store(OBJP, DES) \
p99_extension \
({ \
P99_MACRO_PVAR(p00_objp, (OBJP), volatile); \
P99_MACRO_VAR(p00_desp, (DES)); \
typedef __typeof__(P00_AT(p00_objp)) p00_base_t; \
typedef __typeof__(P00_AX(p00_objp)) p00_ubase_t; \
typedef __typeof__(P00_AM(p00_objp)) p00_mbase_t; \
register p00_ubase_t const p00_des = { .p00_t = (p00_desp) }; \
if (!atomic_is_lock_free(p00_objp)) { \
P99_SPIN_EXCLUDE(&p00_objp->p00_lock) \
P00_AT(p00_objp) = p00_des.p00_t; \
} else { \
P99_IF_EMPTY(P99_ATOMIC_LOCK_FREE_TYPES) \
(P00_AT(p00_objp)) \
(p00_atomic_store_n(&P00_AM(p00_objp), p00_des.p00_m, memory_order_seq_cst)); \
} \
} \
})
#endif
......
......@@ -2,7 +2,7 @@
/* */
/* Except for parts copied from previous work and as explicitly stated below, */
/* the authors and copyright holders for this work are as follows: */
/* (C) copyright 2012 Jens Gustedt, INRIA, France */
/* (C) copyright 2012, 2014 Jens Gustedt, INRIA, France */
/* (C) copyright 2012 William Morris */
/* */
/* This file is free software; it is part of the P99 project. */
......@@ -301,18 +301,18 @@ void p00_mfence_internal(memory_order p00_ord) {
__asm__ __volatile__("dmb":::"memory");
}
#define p00_mfence(...) \
P99_IF_EMPTY(__VA_ARGS__) \
(p00_mfence_internal(memory_order_seq_cst)) \
#define p00_mfence(...) \
P99_IF_EMPTY(__VA_ARGS__) \
(p00_mfence_internal(memory_order_seq_cst)) \
(p00_mfence_internal(__VA_ARGS__))
#define p00_sync_lock_release(...) \
P99_IF_LT(P99_NARG(__VA_ARGS__), 2) \
(p00_sync_lock_release_internal(__VA_ARGS__)) \
(p00_sync_lock_release_internal(P99_ALLBUTLAST(__VA_ARGS__)))
#define p00_sync_lock_test_and_set(...) \
P99_IF_LT(P99_NARG(__VA_ARGS__), 2) \
(p00_sync_lock_test_and_set_internal(__VA_ARGS__)) \
#define p00_sync_lock_test_and_set(...) \
P99_IF_LT(P99_NARG(__VA_ARGS__), 2) \
(p00_sync_lock_test_and_set_internal(__VA_ARGS__)) \
(p00_sync_lock_test_and_set_internal(P99_ALLBUTLAST(__VA_ARGS__)))
p99_inline
......
......@@ -2,7 +2,7 @@
/* */
/* Except for parts copied from previous work and as explicitly stated below, */
/* the author and copyright holder for this work is */
/* (C) copyright 2014 Jens Gustedt, INRIA, France */
/* (C) copyright 2012, 2014 Jens Gustedt, INRIA, France */
/* */
/* This file is free software; it is part of the P99 project. */
/* You can redistribute it and/or modify it under the terms of the QPL as */
......@@ -30,112 +30,112 @@
**/
#define p00_atomic_clear(OBJ, ORD) \
p99_extension ({ \
p00_atomic_flag* p00_obj = (OBJ); \
__atomic_clear(p00_obj, (ORD)); \
#define p00_atomic_clear(OBJ, ORD) \
p99_extension ({ \
p00_atomic_flag* p00_obj = (OBJ); \
__atomic_clear(p00_obj, (ORD)); \
})
#define p00_atomic_test_and_set(OBJ, ORD) \
p99_extension ({ \
p00_atomic_flag* p00_obj = (OBJ); \
__atomic_test_and_set(p00_obj, (ORD)); \
#define p00_atomic_test_and_set(OBJ, ORD) \
p99_extension ({ \
p00_atomic_flag* p00_obj = (OBJ); \
__atomic_test_and_set(p00_obj, (ORD)); \
})
#define p00_mfence(...) \
P99_IF_EMPTY(__VA_ARGS__) \
(__atomic_thread_fence(__ATOMIC_SEQ_CST)) \
#define p00_mfence(...) \
P99_IF_EMPTY(__VA_ARGS__) \
(__atomic_thread_fence(__ATOMIC_SEQ_CST)) \
(__atomic_thread_fence(__VA_ARGS__))
#define p00_sync_lock_release(...) \
P99_IF_LT(P99_NARG(__VA_ARGS__), 2) \
(p00_atomic_clear(__VA_ARGS__, __ATOMIC_SEQ_CST)) \
#define p00_sync_lock_release(...) \
P99_IF_LT(P99_NARG(__VA_ARGS__), 2) \
(p00_atomic_clear(__VA_ARGS__, __ATOMIC_SEQ_CST)) \
(p00_atomic_clear(__VA_ARGS__))
#define p00_sync_lock_test_and_set(...) \
P99_IF_LT(P99_NARG(__VA_ARGS__), 2) \
(p00_atomic_test_and_set(__VA_ARGS__, __ATOMIC_SEQ_CST)) \
#define p00_sync_lock_test_and_set(...) \
P99_IF_LT(P99_NARG(__VA_ARGS__), 2) \
(p00_atomic_test_and_set(__VA_ARGS__, __ATOMIC_SEQ_CST)) \
(p00_atomic_test_and_set(__VA_ARGS__))
#define p00_atomic_compare_exchange_n_(PTR, EXP, DES, WEAK, SUC, FAI, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_PVAR(p00_exp, (EXP), volatile); \
P99_MACRO_VAR(p00_desp, (DES)); \
__typeof__(*(PTR)) p00_des = p00_desp; \
#define p00_atomic_compare_exchange_n_(PTR, EXP, DES, WEAK, SUC, FAI, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_PVAR(p00_exp, (EXP), volatile); \
P99_MACRO_VAR(p00_desp, (DES)); \
__typeof__(*(PTR)) p00_des = p00_desp; \
__atomic_compare_exchange_n(p00_ptr, (void*)p00_exp, p00_des, (WEAK), (SUC), (FAI)); \
})
#define p00_atomic_compare_exchange_n(...) \
P99_IF_EQ(P99_NARG(__VA_ARGS__), 3) \
#define p00_atomic_compare_exchange_n(...) \
P99_IF_EQ(P99_NARG(__VA_ARGS__), 3) \
(p00_atomic_compare_exchange_n_(__VA_ARGS__, 0, memory_order_seq_cst, memory_order_seq_cst)) \
(p00_atomic_compare_exchange_n_(__VA_ARGS__, memory_order_seq_cst, memory_order_seq_cst, ))
#define p00_atomic_exchange_n_(PTR, DES, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_VAR(p00_des, (DES)); \
P99_MACRO_VAR(p00_ord, (ORD), const); \
__typeof__(*p00_ptr) p00_ret; \
switch (p00_ord) { \
case __ATOMIC_RELAXED:; \
#define p00_atomic_exchange_n_(PTR, DES, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_VAR(p00_des, (DES)); \
P99_MACRO_VAR(p00_ord, (ORD), const); \
__typeof__(*p00_ptr) p00_ret; \
switch (p00_ord) { \
case __ATOMIC_RELAXED:; \
p00_ret = __atomic_exchange_n(p00_ptr, p00_des, __ATOMIC_RELAXED); \
break; \
case __ATOMIC_ACQUIRE:; \
break; \
case __ATOMIC_ACQUIRE:; \
p00_ret = __atomic_exchange_n(p00_ptr, p00_des, __ATOMIC_ACQUIRE); \
break; \
case __ATOMIC_RELEASE:; \
break; \
case __ATOMIC_RELEASE:; \
p00_ret = __atomic_exchange_n(p00_ptr, p00_des, __ATOMIC_RELEASE); \
break; \
case __ATOMIC_ACQ_REL:; \
break; \
case __ATOMIC_ACQ_REL:; \
p00_ret = __atomic_exchange_n(p00_ptr, p00_des, __ATOMIC_ACQ_REL); \
break; \
default: \
break; \
default: \
p00_ret = __atomic_exchange_n(p00_ptr, p00_des, __ATOMIC_SEQ_CST); \
} \
p00_ret = p00_ret; \
} \
p00_ret = p00_ret; \
})
#define p00_atomic_exchange_n(...) p00_atomic_exchange_n_(__VA_ARGS__, __ATOMIC_SEQ_CST,)
#define p00_atomic_load_n_(PTR, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_VAR(p00_ord, (ORD), const); \
__typeof__(*p00_ptr) p00_ret; \
switch (p00_ord) { \
case __ATOMIC_RELAXED:; \
p00_ret = __atomic_load_n(p00_ptr, __ATOMIC_RELAXED); \
break; \
case __ATOMIC_ACQUIRE:; \
p00_ret = __atomic_load_n(p00_ptr, __ATOMIC_ACQUIRE); \
break; \
case __ATOMIC_CONSUME:; \
p00_ret = __atomic_load_n(p00_ptr, __ATOMIC_CONSUME); \
break; \
default: \
p00_ret =__atomic_load_n(p00_ptr, __ATOMIC_SEQ_CST); \
} \
p00_ret = p00_ret; \
#define p00_atomic_load_n_(PTR, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_VAR(p00_ord, (ORD), const); \
__typeof__(*p00_ptr) p00_ret; \
switch (p00_ord) { \
case __ATOMIC_RELAXED:; \
p00_ret = __atomic_load_n(p00_ptr, __ATOMIC_RELAXED); \
break; \
case __ATOMIC_ACQUIRE:; \
p00_ret = __atomic_load_n(p00_ptr, __ATOMIC_ACQUIRE); \
break; \
case __ATOMIC_CONSUME:; \
p00_ret = __atomic_load_n(p00_ptr, __ATOMIC_CONSUME); \
break; \
default: \
p00_ret =__atomic_load_n(p00_ptr, __ATOMIC_SEQ_CST); \
} \
p00_ret = p00_ret; \
})
#define p00_atomic_load_n(...) p00_atomic_load_n_(__VA_ARGS__, __ATOMIC_SEQ_CST,)
#define p00_atomic_store_n_(PTR, DES, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_VAR(p00_des, (DES)); \
P99_MACRO_VAR(p00_ord, (ORD), const); \
switch (p00_ord) { \
case __ATOMIC_RELAXED:; \
__atomic_store_n(p00_ptr, p00_des, __ATOMIC_RELAXED); \
break; \
case __ATOMIC_RELEASE:; \
__atomic_store_n(p00_ptr, p00_des, __ATOMIC_RELEASE); \
break; \
default: \
__atomic_store_n(p00_ptr, p00_des, __ATOMIC_SEQ_CST); \
} \
#define p00_atomic_store_n_(PTR, DES, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_VAR(p00_des, (DES)); \
P99_MACRO_VAR(p00_ord, (ORD), const); \
switch (p00_ord) { \
case __ATOMIC_RELAXED:; \
__atomic_store_n(p00_ptr, p00_des, __ATOMIC_RELAXED); \
break; \
case __ATOMIC_RELEASE:; \
__atomic_store_n(p00_ptr, p00_des, __ATOMIC_RELEASE); \
break; \
default: \
__atomic_store_n(p00_ptr, p00_des, __ATOMIC_SEQ_CST); \
} \
})
#define p00_atomic_store_n(...) p00_atomic_store_n_(__VA_ARGS__, __ATOMIC_SEQ_CST,)
......
......@@ -26,113 +26,113 @@
** @c __sync builtins.
**/
#define p00_atomic_exchange_n_(PTR, DES, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_VAR(p00_des, (DES)); \
register __typeof__(*(PTR)) p00_ret = p00_des; \
for (;;) { \
#define p00_atomic_exchange_n_(PTR, DES, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_VAR(p00_des, (DES)); \
register __typeof__(*(PTR)) p00_ret = p00_des; \
for (;;) { \
register __typeof__(*(PTR)) p00_val = __sync_val_compare_and_swap(p00_ptr, p00_ret, p00_des); \
if (P99_LIKELY(p00_val == p00_ret)) break; \
p00_ret = p00_val; \
} \
p00_ret = p00_ret; \
if (P99_LIKELY(p00_val == p00_ret)) break; \
p00_ret = p00_val; \
} \
p00_ret = p00_ret; \
})
#define p00_atomic_exchange_n(...) p00_atomic_exchange_n_(__VA_ARGS__,,)
#define p00_mfence(...) __sync_synchronize()
#define p00_sync_lock_release_(OBJ, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_obj, (OBJ)); \
/* __sync_lock_release only has release consistency */ \
/* the fence must come before so nothing can be reordered after */ \
switch (ORD) { \
case memory_order_consume: ; \
case memory_order_acquire: ; \
case memory_order_acq_rel: ; \
case memory_order_seq_cst: ; \
p00_mfence(); \
} \
__sync_lock_release(p00_obj); \
#define p00_sync_lock_release_(OBJ, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_obj, (OBJ)); \
/* __sync_lock_release only has release consistency */ \
/* the fence must come before so nothing can be reordered after */ \
switch (ORD) { \
case memory_order_consume: ; \
case memory_order_acquire: ; \
case memory_order_acq_rel: ; \
case memory_order_seq_cst: ; \
p00_mfence(); \
} \
__sync_lock_release(p00_obj); \
})
#define p00_sync_lock_test_and_set_(OBJ, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_obj, (OBJ)); \
/* __sync_lock_test_and_set only has acquire consistency */ \
int ret = __sync_lock_test_and_set(p00_obj, 1); \
/* the fence must come after so nothing can be reordered before */ \
switch (ORD) { \
case memory_order_consume: ; \
case memory_order_release: ; \
case memory_order_acq_rel: ; \
case memory_order_seq_cst: ; \
p00_mfence(); \
} \
ret = ret; \
#define p00_sync_lock_test_and_set_(OBJ, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_obj, (OBJ)); \
/* __sync_lock_test_and_set only has acquire consistency */ \
int ret = __sync_lock_test_and_set(p00_obj, 1); \
/* the fence must come after so nothing can be reordered before */ \
switch (ORD) { \
case memory_order_consume: ; \
case memory_order_release: ; \
case memory_order_acq_rel: ; \
case memory_order_seq_cst: ; \
p00_mfence(); \
} \
ret = ret; \
})
#define p00_sync_lock_release(...) p00_sync_lock_release_(__VA_ARGS__, memory_order_seq_cst, )
#define p00_sync_lock_test_and_set(...) p00_sync_lock_test_and_set_(__VA_ARGS__, memory_order_seq_cst, )
#define p00_atomic_compare_exchange_n_(PTR, EXP, DES, WEAK, SUC, FAI, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_VAR(p00_exp, (EXP)); \
P99_MACRO_VAR(p00_des, (DES), const); \
#define p00_atomic_compare_exchange_n_(PTR, EXP, DES, WEAK, SUC, FAI, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_VAR(p00_exp, (EXP)); \
P99_MACRO_VAR(p00_des, (DES), const); \
__typeof__(*PTR) p00_val = __sync_val_compare_and_swap(p00_ptr, *p00_exp, p00_des); \
_Bool p00_ret = (*p00_exp == p00_val); \
if (!p00_ret) *p00_exp = p00_val; \
p00_ret = p00_ret; \
_Bool p00_ret = (*p00_exp == p00_val); \
if (!p00_ret) *p00_exp = p00_val; \
p00_ret = p00_ret; \
})
#define p00_atomic_compare_exchange_n(...) \
P99_IF_EQ(P99_NARG(__VA_ARGS__), 3) \
#define p00_atomic_compare_exchange_n(...) \
P99_IF_EQ(P99_NARG(__VA_ARGS__), 3) \
(p00_atomic_compare_exchange_n_(__VA_ARGS__, 0, memory_order_seq_cst, memory_order_seq_cst)) \
(p00_atomic_compare_exchange_n_(__VA_ARGS__, memory_order_seq_cst, memory_order_seq_cst, ))
#define p00_atomic_load_n_(PTR, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
/* Ensure synchronization with all stores before */ \
switch (ORD) { \
case memory_order_consume:; \
case memory_order_acquire:; \
case memory_order_acq_rel:; \
(void)__sync_bool_compare_and_swap(p00_ptr, 0, 0); \
break; \
case memory_order_seq_cst:; \
p00_mfence(); \
} \
/* now ensure the load of the correct value */ \
register __typeof__(*(PTR)) const p00_ret = *p00_ptr; \
/* Inhibit reordering with all code after */ \
P99_MARK("load boundary for " P99_STRINGIFY(PTR)); \
p00_ret; \
#define p00_atomic_load_n_(PTR, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
/* Ensure synchronization with all stores before */ \
switch (ORD) { \
case memory_order_consume:; \
case memory_order_acquire:; \
case memory_order_acq_rel:; \
(void)__sync_bool_compare_and_swap(p00_ptr, 0, 0); \
break; \
case memory_order_seq_cst:; \
p00_mfence(); \
} \
/* now ensure the load of the correct value */ \
register __typeof__(*(PTR)) const p00_ret = *p00_ptr; \
/* Inhibit reordering with all code after */ \
P99_MARK("load boundary for " P99_STRINGIFY(PTR)); \
p00_ret; \
})
#define p00_atomic_load_n(...) p00_atomic_load_n_(__VA_ARGS__,,)
#define p00_atomic_store_n_(PTR, DES, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_VAR(p00_des, (DES), const); \
/* Inhibit reordering with all code before */ \
P99_MARK("store boundary for " P99_STRINGIFY(PTR)); \
/* now ensure the store of the correct value */ \
*p00_ptr = p00_des; \
/* Ensure that store is visible for all code hereafter */ \
switch (ORD) { \
case memory_order_consume:; \
case memory_order_release:; \
case memory_order_acq_rel:; \
(void)__sync_bool_compare_and_swap(p00_ptr, p00_des, p00_des); \
break; \
case memory_order_seq_cst:; \
p00_mfence(); \
} \
#define p00_atomic_store_n_(PTR, DES, ORD, ...) \
p99_extension ({ \
P99_MACRO_PVAR(p00_ptr, (PTR), volatile); \
P99_MACRO_VAR(p00_des, (DES), const); \
/* Inhibit reordering with all code before */ \
P99_MARK("store boundary for " P99_STRINGIFY(PTR)); \
/* now ensure the store of the correct value */ \
*p00_ptr = p00_des; \
/* Ensure that store is visible for all code hereafter */ \
switch (ORD) { \
case memory_order_consume:; \
case memory_order_release:; \
case memory_order_acq_rel:; \
(void)__sync_bool_compare_and_swap(p00_ptr, p00_des, p00_des); \
break; \
case memory_order_seq_cst:; \
p00_mfence(); \
} \
})
#define p00_atomic_store_n(...) p00_atomic_store_n_(__VA_ARGS__, memory_order_seq_cst,)
......
......@@ -2,7 +2,7 @@
/* */
/* Except for parts copied from previous work and as explicitly stated below, */
/* the authors and copyright holders for this work are as follows: */
/* (C) copyright 2012-2013 Jens Gustedt, INRIA, France */
/* (C) copyright 2012-2014 Jens Gustedt, INRIA, France */
/* (C) copyright 2012 William Morris */
/* */