diff --git a/include/ck_pr.h b/include/ck_pr.h
index c3007f2..43a0b1c 100644
--- a/include/ck_pr.h
+++ b/include/ck_pr.h
@@ -76,6 +76,12 @@ CK_PR_FENCE_NOOP(load_depends)
  * Only stores to the same location have a global
  * ordering.
  */
+CK_PR_FENCE_EMIT(atomic)
+CK_PR_FENCE_EMIT(atomic_atomic)
+CK_PR_FENCE_EMIT(atomic_load)
+CK_PR_FENCE_EMIT(atomic_store)
+CK_PR_FENCE_EMIT(store_atomic)
+CK_PR_FENCE_EMIT(load_atomic)
 CK_PR_FENCE_EMIT(load_load)
 CK_PR_FENCE_EMIT(load_store)
 CK_PR_FENCE_EMIT(store_store)
@@ -88,6 +94,12 @@ CK_PR_FENCE_EMIT(memory)
  * Anything can be re-ordered with respect to stores.
  * Otherwise, loads are executed in-order.
  */
+CK_PR_FENCE_EMIT(atomic)
+CK_PR_FENCE_EMIT(atomic_atomic)
+CK_PR_FENCE_NOOP(atomic_load)
+CK_PR_FENCE_EMIT(atomic_store)
+CK_PR_FENCE_EMIT(store_atomic)
+CK_PR_FENCE_NOOP(load_atomic)
 CK_PR_FENCE_NOOP(load_load)
 CK_PR_FENCE_EMIT(load_store)
 CK_PR_FENCE_EMIT(store_store)
@@ -98,8 +110,14 @@ CK_PR_FENCE_EMIT(memory)
 #elif defined(CK_MD_TSO)
 /*
  * Only loads are re-ordered and only with respect to
- * prior stores.
+ * prior stores. Atomic operations are serializing.
  */
+CK_PR_FENCE_NOOP(atomic)
+CK_PR_FENCE_NOOP(atomic_atomic)
+CK_PR_FENCE_NOOP(atomic_load)
+CK_PR_FENCE_NOOP(atomic_store)
+CK_PR_FENCE_NOOP(store_atomic)
+CK_PR_FENCE_NOOP(load_atomic)
 CK_PR_FENCE_NOOP(load_load)
 CK_PR_FENCE_NOOP(load_store)
 CK_PR_FENCE_NOOP(store_store)
diff --git a/include/gcc/ck_pr.h b/include/gcc/ck_pr.h
index b88b177..196d267 100644
--- a/include/gcc/ck_pr.h
+++ b/include/gcc/ck_pr.h
@@ -126,6 +126,12 @@ ck_pr_fence_load_depends(void)
 		__sync_synchronize();			\
 	}
 
+CK_PR_FENCE(atomic)
+CK_PR_FENCE(atomic_atomic)
+CK_PR_FENCE(atomic_load)
+CK_PR_FENCE(atomic_store)
+CK_PR_FENCE(store_atomic)
+CK_PR_FENCE(load_atomic)
 CK_PR_FENCE(load)
 CK_PR_FENCE(load_load)
 CK_PR_FENCE(load_store)
diff --git a/include/gcc/ppc/ck_pr.h b/include/gcc/ppc/ck_pr.h
index 0b9796f..c82e217 100644
--- a/include/gcc/ppc/ck_pr.h
+++ b/include/gcc/ppc/ck_pr.h
@@ -67,7 +67,10 @@ ck_pr_stall(void)
 		__asm__ __volatile__(I ::: "memory");	\
 	}
 
-CK_PR_FENCE(load_depends, "")
+CK_PR_FENCE(atomic_store, "lwsync")
+CK_PR_FENCE(atomic_load, "sync")
+CK_PR_FENCE(store_atomic, "lwsync")
+CK_PR_FENCE(load_atomic, "lwsync")
 CK_PR_FENCE(store, "lwsync")
 CK_PR_FENCE(store_store, "lwsync")
 CK_PR_FENCE(store_load, "sync")
diff --git a/include/gcc/ppc64/ck_pr.h b/include/gcc/ppc64/ck_pr.h
index 0fb688a..457efda 100644
--- a/include/gcc/ppc64/ck_pr.h
+++ b/include/gcc/ppc64/ck_pr.h
@@ -70,7 +70,10 @@ ck_pr_stall(void)
  * These are derived from:
  * http://www.ibm.com/developerworks/systems/articles/powerpc.html
  */
-CK_PR_FENCE(load_depends, "")
+CK_PR_FENCE(atomic_store, "lwsync")
+CK_PR_FENCE(atomic_load, "sync")
+CK_PR_FENCE(store_atomic, "lwsync")
+CK_PR_FENCE(load_atomic, "lwsync")
 CK_PR_FENCE(store, "lwsync")
 CK_PR_FENCE(store_store, "lwsync")
 CK_PR_FENCE(store_load, "sync")
diff --git a/include/gcc/sparcv9/ck_pr.h b/include/gcc/sparcv9/ck_pr.h
index b92c751..29b9f9c 100644
--- a/include/gcc/sparcv9/ck_pr.h
+++ b/include/gcc/sparcv9/ck_pr.h
@@ -63,7 +63,14 @@ ck_pr_stall(void)
 		__asm__ __volatile__(I ::: "memory");	\
 	}
 
-CK_PR_FENCE(load_depends, "")
+/*
+ * Atomic operations are treated as both load and store
+ * operations on SPARCv9.
+ */
+CK_PR_FENCE(atomic_store, "membar #StoreStore")
+CK_PR_FENCE(atomic_load, "membar #StoreLoad")
+CK_PR_FENCE(store_atomic, "membar #StoreStore")
+CK_PR_FENCE(load_atomic, "membar #LoadStore")
 CK_PR_FENCE(store, "membar #StoreStore")
 CK_PR_FENCE(store_store, "membar #StoreStore")
 CK_PR_FENCE(store_load, "membar #StoreLoad")
diff --git a/include/gcc/x86/ck_pr.h b/include/gcc/x86/ck_pr.h
index 7c058db..eed49ba 100644
--- a/include/gcc/x86/ck_pr.h
+++ b/include/gcc/x86/ck_pr.h
@@ -70,6 +70,10 @@ ck_pr_stall(void)
 		__asm__ __volatile__(I ::: "memory");	\
 	}
 
+CK_PR_FENCE(atomic_store, "sfence")
+CK_PR_FENCE(atomic_load, "mfence")
+CK_PR_FENCE(store_atomic, "sfence")
+CK_PR_FENCE(load_atomic, "mfence")
 CK_PR_FENCE(load, "lfence")
 CK_PR_FENCE(load_load, "lfence")
 CK_PR_FENCE(load_store, "mfence")
diff --git a/include/gcc/x86_64/ck_pr.h b/include/gcc/x86_64/ck_pr.h
index 89b4238..b0813e4 100644
--- a/include/gcc/x86_64/ck_pr.h
+++ b/include/gcc/x86_64/ck_pr.h
@@ -69,6 +69,10 @@ ck_pr_stall(void)
 		__asm__ __volatile__(I ::: "memory");	\
 	}
 
+CK_PR_FENCE(atomic_store, "sfence")
+CK_PR_FENCE(atomic_load, "mfence")
+CK_PR_FENCE(store_atomic, "sfence")
+CK_PR_FENCE(load_atomic, "mfence")
 CK_PR_FENCE(load, "lfence")
 CK_PR_FENCE(load_load, "lfence")
 CK_PR_FENCE(load_store, "mfence")
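For reference, a minimal usage sketch of the new fence family (not part of the patch): a single-producer/single-consumer handoff in which the producer publishes a payload before an atomic fetch-and-add and the consumer claims the ticket with an atomic fetch-and-store before reading the payload. The ck_pr_fence_store_atomic() and ck_pr_fence_atomic_load() names are assumed to follow the existing ck_pr_fence_* convention generated by CK_PR_FENCE_EMIT; the slot/ticket variables and producer/consumer helpers are illustrative. On CK_MD_TSO targets both fences compile to no-ops, while the PPC port would emit lwsync for the store-to-atomic ordering and sync for the atomic-to-load ordering.

#include <ck_pr.h>

static unsigned int slot;	/* Payload being published (illustrative). */
static unsigned int ticket;	/* Non-zero once slot holds valid data. */

static void
producer(unsigned int value)
{

	/* Publish the payload. */
	ck_pr_store_uint(&slot, value);

	/* Order the store above before the atomic read-modify-write below. */
	ck_pr_fence_store_atomic();

	/* Advance the ticket with an atomic fetch-and-add. */
	ck_pr_faa_uint(&ticket, 1);
	return;
}

static unsigned int
consumer(void)
{

	/* Claim the ticket with an atomic fetch-and-store. */
	while (ck_pr_fas_uint(&ticket, 0) == 0)
		ck_pr_stall();

	/* Order the read-modify-write above before the load of the payload. */
	ck_pr_fence_atomic_load();
	return ck_pr_load_uint(&slot);
}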