AArch64 LSE support.

Add a new configure option, --enable-lse, which is only effective for
the AArch64 architecture. When used, most ck_pr_* atomics will use Large
System Extensions instructions as per the ARMv8.1 specification, rather
than LL/SC instruction pairs.
ck_pring
Alexey Kopytov 8 years ago
parent dae84bac06
commit 5f1be5dc83

15
configure vendored

@ -120,6 +120,7 @@ generate()
-e "s#@POINTER_PACK_ENABLE@#$POINTER_PACK_ENABLE#g" \
-e "s#@DISABLE_DOUBLE@#$DISABLE_DOUBLE#g" \
-e "s#@RTM_ENABLE@#$RTM_ENABLE#g" \
-e "s#@LSE_ENABLE@#$LSE_ENABLE#g" \
-e "s#@VMA_BITS@#$VMA_BITS_R#g" \
-e "s#@VMA_BITS_VALUE@#$VMA_BITS_VALUE_R#g" \
-e "s#@MM@#$MM#g" \
@ -158,6 +159,7 @@ generate_stdout()
echo " VMA_BITS = $VMA_BITS"
echo " MEMORY_MODEL = $MM"
echo " RTM = $RTM_ENABLE"
echo " LSE = $LSE_ENABLE"
echo
echo "Headers will be installed in $HEADERS"
echo "Libraries will be installed in $LIBRARY"
@ -193,6 +195,7 @@ for option; do
echo "The following options will affect generated code."
echo " --enable-pointer-packing Assumes address encoding is subset of pointer range"
echo " --enable-rtm Enable restricted transactional memory (power, x86_64)"
echo " --enable-lse Enable large system extensions (arm64)"
echo " --memory-model=N Specify memory model (currently tso, pso or rmo)"
echo " --vma-bits=N Specify valid number of VMA bits"
echo " --platform=N Force the platform type, instead of relying on autodetection"
@ -237,6 +240,9 @@ for option; do
--enable-rtm)
RTM_ENABLE_SET="CK_MD_RTM_ENABLE"
;;
--enable-lse)
LSE_ENABLE_SET="CK_MD_LSE_ENABLE"
;;
--cores=*)
CORES=$value
;;
@ -307,6 +313,7 @@ GZIP=${GZIP:-"gzip -c"}
POINTER_PACK_ENABLE=${POINTER_PACK_ENABLE:-"CK_MD_POINTER_PACK_DISABLE"}
DISABLE_DOUBLE=${DISABLE_DOUBLE:-"CK_PR_ENABLE_DOUBLE"}
RTM_ENABLE=${RTM_ENABLE_SET:-"CK_MD_RTM_DISABLE"}
LSE_ENABLE=${LSE_ENABLE_SET:-"CK_MD_LSE_DISABLE"}
VMA_BITS=${VMA_BITS:-"unknown"}
DCORES=2
@ -364,6 +371,7 @@ fi
case $PLATFORM in
"macppc"|"Power Macintosh"|"powerpc")
RTM_ENABLE="CK_MD_RTM_DISABLE"
LSE_ENABLE="CK_MD_LSE_DISABLE"
MM="${MM:-"CK_MD_RMO"}"
PLATFORM=ppc
ENVIRONMENT=32
@ -371,12 +379,14 @@ case $PLATFORM in
;;
"sun4u"|"sun4v"|"sparc64")
RTM_ENABLE="CK_MD_RTM_DISABLE"
LSE_ENABLE="CK_MD_LSE_DISABLE"
MM="${MM:-"CK_MD_TSO"}"
PLATFORM=sparcv9
ENVIRONMENT=64
LDFLAGS="-m64 $LDFLAGS"
;;
i386|i486|i586|i686|i586_i686|pentium*|athlon*|k5|k6|k6_2|k6_3)
LSE_ENABLE="CK_MD_LSE_DISABLE"
MM="${MM:-"CK_MD_TSO"}"
case $SYSTEM in
darwin)
@ -423,6 +433,7 @@ case $PLATFORM in
esac
;;
"amd64"|"x86_64")
LSE_ENABLE="CK_MD_LSE_DISABLE"
PLATFORM=x86_64
ENVIRONMENT=64
LDFLAGS="-m64 $LDFLAGS"
@ -430,6 +441,7 @@ case $PLATFORM in
;;
"i86pc")
RTM_ENABLE="CK_MD_RTM_DISABLE"
LSE_ENABLE="CK_MD_LSE_DISABLE"
MM="${MM:-"CK_MD_TSO"}"
if test -z "$ISA"; then ISA=`isainfo -n 2> /dev/null || echo i386` ; fi
case "$ISA" in
@ -447,6 +459,7 @@ case $PLATFORM in
;;
"ppc64"|"ppc64le")
RTM_ENABLE="CK_MD_RTM_DISABLE"
LSE_ENABLE="CK_MD_LSE_DISABLE"
MM="${MM:-"CK_MD_RMO"}"
PLATFORM=ppc64
ENVIRONMENT=64
@ -458,6 +471,7 @@ case $PLATFORM in
CFLAGS="$CFLAGS -march=armv7-a";
fi
RTM_ENABLE="CK_MD_RTM_DISABLE"
LSE_ENABLE="CK_MD_LSE_DISABLE"
MM="${MM:-"CK_MD_RMO"}"
PLATFORM=arm
ENVIRONMENT=32
@ -470,6 +484,7 @@ case $PLATFORM in
;;
*)
RTM_ENABLE="CK_MD_RTM_DISABLE"
LSE_ENABLE="CK_MD_LSE_DISABLE"
PLATFORM=
MM="${MM:-"CK_MD_RMO"}"
;;

@ -39,6 +39,10 @@
#define @RTM_ENABLE@
#endif /* @RTM_ENABLE@ */
#ifndef @LSE_ENABLE@
#define @LSE_ENABLE@
#endif /* @LSE_ENABLE@ */
#ifndef @POINTER_PACK_ENABLE@
#define @POINTER_PACK_ENABLE@
#endif /* @POINTER_PACK_ENABLE@ */

@ -178,174 +178,25 @@ CK_PR_STORE_S_64(double, double, "str")
#undef CK_PR_STORE
#undef CK_PR_STORE_64
CK_CC_INLINE static bool
ck_pr_cas_64_2_value(uint64_t target[2], uint64_t compare[2], uint64_t set[2], uint64_t value[2])
{
uint64_t tmp1, tmp2;
__asm__ __volatile__("1:"
"ldxp %0, %1, [%4];"
"mov %2, %0;"
"mov %3, %1;"
"eor %0, %0, %5;"
"eor %1, %1, %6;"
"orr %1, %0, %1;"
"mov %w0, #0;"
"cbnz %1, 2f;"
"stxp %w0, %7, %8, [%4];"
"cbnz %w0, 1b;"
"mov %w0, #1;"
"2:"
: "=&r" (tmp1), "=&r" (tmp2), "=&r" (value[0]), "=&r" (value[1])
: "r" (target), "r" (compare[0]), "r" (compare[1]), "r" (set[0]), "r" (set[1])
: "cc", "memory");
return (tmp1);
}
CK_CC_INLINE static bool
ck_pr_cas_ptr_2_value(void *target, void *compare, void *set, void *value)
{
return (ck_pr_cas_64_2_value(CK_CPP_CAST(uint64_t *, target),
CK_CPP_CAST(uint64_t *, compare),
CK_CPP_CAST(uint64_t *, set),
CK_CPP_CAST(uint64_t *, value)));
}
CK_CC_INLINE static bool
ck_pr_cas_64_2(uint64_t target[2], uint64_t compare[2], uint64_t set[2])
{
uint64_t tmp1, tmp2;
__asm__ __volatile__("1:"
"ldxp %0, %1, [%2];"
"eor %0, %0, %3;"
"eor %1, %1, %4;"
"orr %1, %0, %1;"
"mov %w0, #0;"
"cbnz %1, 2f;"
"stxp %w0, %5, %6, [%2];"
"cbnz %w0, 1b;"
"mov %w0, #1;"
"2:"
: "=&r" (tmp1), "=&r" (tmp2)
: "r" (target), "r" (compare[0]), "r" (compare[1]), "r" (set[0]), "r" (set[1])
: "cc", "memory");
return (tmp1);
}
CK_CC_INLINE static bool
ck_pr_cas_ptr_2(void *target, void *compare, void *set)
{
return (ck_pr_cas_64_2(CK_CPP_CAST(uint64_t *, target),
CK_CPP_CAST(uint64_t *, compare),
CK_CPP_CAST(uint64_t *, set)));
}
#define CK_PR_CAS(N, M, T, W, R) \
CK_CC_INLINE static bool \
ck_pr_cas_##N##_value(M *target, T compare, T set, M *value) \
{ \
T previous; \
T tmp; \
__asm__ __volatile__("1:" \
"ldxr" W " %" R "0, [%2];" \
"cmp %" R "0, %" R "4;" \
"b.ne 2f;" \
"stxr" W " %w1, %" R "3, [%2];" \
"cbnz %w1, 1b;" \
"2:" \
: "=&r" (previous), \
"=&r" (tmp) \
: "r" (target), \
"r" (set), \
"r" (compare) \
: "memory", "cc"); \
*(T *)value = previous; \
return (previous == compare); \
} \
CK_CC_INLINE static bool \
ck_pr_cas_##N(M *target, T compare, T set) \
{ \
T previous; \
T tmp; \
__asm__ __volatile__( \
"1:" \
"ldxr" W " %" R "0, [%2];" \
"cmp %" R "0, %" R "4;" \
"b.ne 2f;" \
"stxr" W " %w1, %" R "3, [%2];" \
"cbnz %w1, 1b;" \
"2:" \
: "=&r" (previous), \
"=&r" (tmp) \
: "r" (target), \
"r" (set), \
"r" (compare) \
: "memory", "cc"); \
return (previous == compare); \
}
CK_PR_CAS(ptr, void, void *, "", "")
#define CK_PR_CAS_S(N, M, W, R) CK_PR_CAS(N, M, M, W, R)
CK_PR_CAS_S(64, uint64_t, "", "")
#ifndef CK_PR_DISABLE_DOUBLE
CK_PR_CAS_S(double, double, "", "")
#ifdef CK_MD_LSE_ENABLE
#include "ck_pr_lse.h"
#else
#include "ck_pr_llsc.h"
#endif
CK_PR_CAS_S(32, uint32_t, "", "w")
CK_PR_CAS_S(uint, unsigned int, "", "w")
CK_PR_CAS_S(int, int, "", "w")
CK_PR_CAS_S(16, uint16_t, "h", "w")
CK_PR_CAS_S(8, uint8_t, "b", "w")
CK_PR_CAS_S(short, short, "h", "w")
CK_PR_CAS_S(char, char, "b", "w")
#undef CK_PR_CAS_S
#undef CK_PR_CAS
#define CK_PR_FAS(N, M, T, W, R) \
CK_CC_INLINE static T \
ck_pr_fas_##N(M *target, T v) \
{ \
T previous; \
T tmp; \
__asm__ __volatile__("1:" \
"ldxr" W " %" R "0, [%2];" \
"stxr" W " %w1, %" R "3, [%2];"\
"cbnz %w1, 1b;" \
: "=&r" (previous), \
"=&r" (tmp) \
: "r" (target), \
"r" (v) \
: "memory", "cc"); \
return (previous); \
}
CK_PR_FAS(64, uint64_t, uint64_t, "", "")
CK_PR_FAS(32, uint32_t, uint32_t, "", "w")
CK_PR_FAS(ptr, void, void *, "", "")
CK_PR_FAS(int, int, int, "", "w")
CK_PR_FAS(uint, unsigned int, unsigned int, "", "w")
CK_PR_FAS(16, uint16_t, uint16_t, "h", "w")
CK_PR_FAS(8, uint8_t, uint8_t, "b", "w")
CK_PR_FAS(short, short, short, "h", "w")
CK_PR_FAS(char, char, char, "b", "w")
#undef CK_PR_FAS
#define CK_PR_UNARY(O, N, M, T, I, W, R) \
/*
* ck_pr_neg_*() functions can only be implemented via LL/SC, as there are no
* LSE alternatives.
*/
#define CK_PR_NEG(N, M, T, W, R) \
CK_CC_INLINE static void \
ck_pr_##O##_##N(M *target) \
ck_pr_neg_##N(M *target) \
{ \
T previous = 0; \
T tmp = 0; \
__asm__ __volatile__("1:" \
"ldxr" W " %" R "0, [%2];" \
I ";" \
"neg %" R "0, %" R "0;" \
"stxr" W " %w1, %" R "0, [%2];" \
"cbnz %w1, 1b;" \
: "=&r" (previous), \
@ -355,148 +206,22 @@ CK_PR_FAS(char, char, char, "b", "w")
return; \
}
CK_PR_UNARY(inc, ptr, void, void *, "add %0, %0, #1", "", "")
CK_PR_UNARY(dec, ptr, void, void *, "sub %0, %0, #1", "", "")
CK_PR_UNARY(not, ptr, void, void *, "mvn %0, %0", "", "")
CK_PR_UNARY(neg, ptr, void, void *, "neg %0, %0", "", "")
CK_PR_UNARY(inc, 64, uint64_t, uint64_t, "add %0, %0, #1", "", "")
CK_PR_UNARY(dec, 64, uint64_t, uint64_t, "sub %0, %0, #1", "", "")
CK_PR_UNARY(not, 64, uint64_t, uint64_t, "mvn %0, %0", "", "")
CK_PR_UNARY(neg, 64, uint64_t, uint64_t, "neg %0, %0", "", "")
#define CK_PR_UNARY_S(S, T, W) \
CK_PR_UNARY(inc, S, T, T, "add %w0, %w0, #1", W, "w") \
CK_PR_UNARY(dec, S, T, T, "sub %w0, %w0, #1", W, "w") \
CK_PR_UNARY(not, S, T, T, "mvn %w0, %w0", W, "w") \
CK_PR_UNARY(neg, S, T, T, "neg %w0, %w0", W, "w") \
CK_PR_UNARY_S(32, uint32_t, "")
CK_PR_UNARY_S(uint, unsigned int, "")
CK_PR_UNARY_S(int, int, "")
CK_PR_UNARY_S(16, uint16_t, "h")
CK_PR_UNARY_S(8, uint8_t, "b")
CK_PR_UNARY_S(short, short, "h")
CK_PR_UNARY_S(char, char, "b")
#undef CK_PR_UNARY_S
#undef CK_PR_UNARY
#define CK_PR_BINARY(O, N, M, T, I, W, R) \
CK_CC_INLINE static void \
ck_pr_##O##_##N(M *target, T delta) \
{ \
T previous; \
T tmp; \
__asm__ __volatile__("1:" \
"ldxr" W " %" R "0, [%2];"\
I " %" R "0, %" R "0, %" R "3;" \
"stxr" W " %w1, %" R "0, [%2];" \
"cbnz %w1, 1b;" \
: "=&r" (previous), \
"=&r" (tmp) \
: "r" (target), \
"r" (delta) \
: "memory", "cc"); \
return; \
}
CK_PR_BINARY(and, ptr, void, uintptr_t, "and", "", "")
CK_PR_BINARY(add, ptr, void, uintptr_t, "add", "", "")
CK_PR_BINARY(or, ptr, void, uintptr_t, "orr", "", "")
CK_PR_BINARY(sub, ptr, void, uintptr_t, "sub", "", "")
CK_PR_BINARY(xor, ptr, void, uintptr_t, "eor", "", "")
CK_PR_BINARY(and, 64, uint64_t, uint64_t, "and", "", "")
CK_PR_BINARY(add, 64, uint64_t, uint64_t, "add", "", "")
CK_PR_BINARY(or, 64, uint64_t, uint64_t, "orr", "", "")
CK_PR_BINARY(sub, 64, uint64_t, uint64_t, "sub", "", "")
CK_PR_BINARY(xor, 64, uint64_t, uint64_t, "eor", "", "")
#define CK_PR_BINARY_S(S, T, W) \
CK_PR_BINARY(and, S, T, T, "and", W, "w") \
CK_PR_BINARY(add, S, T, T, "add", W, "w") \
CK_PR_BINARY(or, S, T, T, "orr", W, "w") \
CK_PR_BINARY(sub, S, T, T, "sub", W, "w") \
CK_PR_BINARY(xor, S, T, T, "eor", W, "w")
CK_PR_BINARY_S(32, uint32_t, "")
CK_PR_BINARY_S(uint, unsigned int, "")
CK_PR_BINARY_S(int, int, "")
CK_PR_BINARY_S(16, uint16_t, "h")
CK_PR_BINARY_S(8, uint8_t, "b")
CK_PR_BINARY_S(short, short, "h")
CK_PR_BINARY_S(char, char, "b")
#undef CK_PR_BINARY_S
#undef CK_PR_BINARY
CK_CC_INLINE static void *
ck_pr_faa_ptr(void *target, uintptr_t delta)
{
uintptr_t previous, r, tmp;
__asm__ __volatile__("1:"
"ldxr %0, [%3];"
"add %1, %4, %0;"
"stxr %w2, %1, [%3];"
"cbnz %w2, 1b;"
: "=&r" (previous),
"=&r" (r),
"=&r" (tmp)
: "r" (target),
"r" (delta)
: "memory", "cc");
return (void *)(previous);
}
CK_CC_INLINE static uint64_t
ck_pr_faa_64(uint64_t *target, uint64_t delta)
{
uint64_t previous, r, tmp;
CK_PR_NEG(ptr, void, void *, "", "")
CK_PR_NEG(64, uint64_t, uint64_t, "", "")
__asm__ __volatile__("1:"
"ldxr %0, [%3];"
"add %1, %4, %0;"
"stxr %w2, %1, [%3];"
"cbnz %w2, 1b;"
: "=&r" (previous),
"=&r" (r),
"=&r" (tmp)
: "r" (target),
"r" (delta)
: "memory", "cc");
return (previous);
}
#define CK_PR_FAA(S, T, W) \
CK_CC_INLINE static T \
ck_pr_faa_##S(T *target, T delta) \
{ \
T previous, r, tmp; \
__asm__ __volatile__("1:" \
"ldxr" W " %w0, [%3];" \
"add %w1, %w4, %w0;" \
"stxr" W " %w2, %w1, [%3];" \
"cbnz %w2, 1b;" \
: "=&r" (previous), \
"=&r" (r), \
"=&r" (tmp) \
: "r" (target), \
"r" (delta) \
: "memory", "cc"); \
return (previous); \
}
#define CK_PR_NEG_S(S, T, W) \
CK_PR_NEG(S, T, T, W, "w") \
CK_PR_FAA(32, uint32_t, "")
CK_PR_FAA(uint, unsigned int, "")
CK_PR_FAA(int, int, "")
CK_PR_FAA(16, uint16_t, "h")
CK_PR_FAA(8, uint8_t, "b")
CK_PR_FAA(short, short, "h")
CK_PR_FAA(char, char, "b")
CK_PR_NEG_S(32, uint32_t, "")
CK_PR_NEG_S(uint, unsigned int, "")
CK_PR_NEG_S(int, int, "")
CK_PR_NEG_S(16, uint16_t, "h")
CK_PR_NEG_S(8, uint8_t, "b")
CK_PR_NEG_S(short, short, "h")
CK_PR_NEG_S(char, char, "b")
#undef CK_PR_FAA
#undef CK_PR_NEG_S
#undef CK_PR_NEG
#endif /* CK_PR_AARCH64_H */

@ -0,0 +1,352 @@
/*
* Copyright 2009-2016 Samy Al Bahra.
* Copyright 2013-2016 Olivier Houchard.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#ifndef CK_PR_AARCH64_LLSC_H
#define CK_PR_AARCH64_LLSC_H
#ifndef CK_PR_H
#error Do not include this file directly, use ck_pr.h
#endif
/*
 * 128-bit (two-word) compare-and-swap via an LL/SC (ldxp/stxp) loop.
 * On return, value[] holds the two words that were observed in target[].
 * Returns true iff target[] matched compare[] and set[] was stored.
 */
CK_CC_INLINE static bool
ck_pr_cas_64_2_value(uint64_t target[2], uint64_t compare[2], uint64_t set[2], uint64_t value[2])
{
	uint64_t tmp1, tmp2;

	__asm__ __volatile__("1:"
			     "ldxp %0, %1, [%4];"	/* exclusive load of both words */
			     "mov %2, %0;"		/* capture observed words ... */
			     "mov %3, %1;"		/* ... into value[0]/value[1] */
			     "eor %0, %0, %5;"		/* diff word 0 against compare[0] */
			     "eor %1, %1, %6;"		/* diff word 1 against compare[1] */
			     "orr %1, %0, %1;"		/* nonzero iff any word differed */
			     "mov %w0, #0;"		/* preset result = failure */
			     "cbnz %1, 2f;"		/* mismatch: bail out */
			     "stxp %w0, %7, %8, [%4];"	/* try store; %w0 = 0 on success */
			     "cbnz %w0, 1b;"		/* exclusivity lost: retry */
			     "mov %w0, #1;"		/* stored: result = success */
			     "2:"
		: "=&r" (tmp1), "=&r" (tmp2), "=&r" (value[0]), "=&r" (value[1])
		: "r" (target), "r" (compare[0]), "r" (compare[1]), "r" (set[0]), "r" (set[1])
		: "cc", "memory");

	return (tmp1);
}
/*
 * Double-width (two pointer-sized words) compare-and-swap over untyped
 * memory, reporting the observed contents through *value; forwards to
 * the 64-bit pair implementation.
 */
CK_CC_INLINE static bool
ck_pr_cas_ptr_2_value(void *target, void *compare, void *set, void *value)
{
	uint64_t *t = CK_CPP_CAST(uint64_t *, target);
	uint64_t *c = CK_CPP_CAST(uint64_t *, compare);
	uint64_t *s = CK_CPP_CAST(uint64_t *, set);
	uint64_t *v = CK_CPP_CAST(uint64_t *, value);

	return (ck_pr_cas_64_2_value(t, c, s, v));
}
/*
 * 128-bit (two-word) compare-and-swap via an LL/SC (ldxp/stxp) loop;
 * same as ck_pr_cas_64_2_value() but does not report the observed words.
 * Returns true iff target[] matched compare[] and set[] was stored.
 */
CK_CC_INLINE static bool
ck_pr_cas_64_2(uint64_t target[2], uint64_t compare[2], uint64_t set[2])
{
	uint64_t tmp1, tmp2;

	__asm__ __volatile__("1:"
			     "ldxp %0, %1, [%2];"	/* exclusive load of both words */
			     "eor %0, %0, %3;"		/* diff word 0 against compare[0] */
			     "eor %1, %1, %4;"		/* diff word 1 against compare[1] */
			     "orr %1, %0, %1;"		/* nonzero iff any word differed */
			     "mov %w0, #0;"		/* preset result = failure */
			     "cbnz %1, 2f;"		/* mismatch: bail out */
			     "stxp %w0, %5, %6, [%2];"	/* try store; %w0 = 0 on success */
			     "cbnz %w0, 1b;"		/* exclusivity lost: retry */
			     "mov %w0, #1;"		/* stored: result = success */
			     "2:"
		: "=&r" (tmp1), "=&r" (tmp2)
		: "r" (target), "r" (compare[0]), "r" (compare[1]), "r" (set[0]), "r" (set[1])
		: "cc", "memory");

	return (tmp1);
}
/*
 * Double-width (two pointer-sized words) compare-and-swap over untyped
 * memory; forwards to the 64-bit pair implementation.
 */
CK_CC_INLINE static bool
ck_pr_cas_ptr_2(void *target, void *compare, void *set)
{
	uint64_t *t = CK_CPP_CAST(uint64_t *, target);
	uint64_t *c = CK_CPP_CAST(uint64_t *, compare);
	uint64_t *s = CK_CPP_CAST(uint64_t *, set);

	return (ck_pr_cas_64_2(t, c, s));
}
/*
 * Word and sub-word compare-and-swap via an LL/SC (ldxr/stxr) loop.
 * N names the ck_pr_cas_* suffix, M/T the memory and value types, W the
 * ldxr/stxr size suffix ("", "h" or "b") and R the register-width prefix
 * ("" for 64-bit x views, "w" for 32-bit w views).  The _value variant
 * additionally writes the observed value to *value.  Both return true
 * iff *target equaled compare and set was stored.
 */
#define CK_PR_CAS(N, M, T, W, R)					\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##N##_value(M *target, T compare, T set, M *value)	\
	{								\
		T previous;						\
		T tmp;							\
		__asm__ __volatile__("1:"				\
					"ldxr" W " %" R "0, [%2];"	\
					"cmp  %" R "0, %" R "4;"	\
					"b.ne 2f;"			\
					"stxr" W " %w1, %" R "3, [%2];"	\
					"cbnz %w1, 1b;"			\
					"2:"				\
			: "=&r" (previous),				\
			  "=&r" (tmp)					\
			: "r"   (target),				\
			  "r"   (set),					\
			  "r"   (compare)				\
			: "memory", "cc");				\
		*(T *)value = previous;					\
		return (previous == compare);				\
	}								\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##N(M *target, T compare, T set)			\
	{								\
		T previous;						\
		T tmp;							\
		__asm__ __volatile__(					\
				     "1:"				\
				     "ldxr" W " %" R "0, [%2];"		\
				     "cmp  %" R "0, %" R "4;"		\
				     "b.ne 2f;"				\
				     "stxr" W " %w1, %" R "3, [%2];"	\
				     "cbnz %w1, 1b;"			\
				     "2:"				\
			: "=&r" (previous),				\
			  "=&r" (tmp)					\
			: "r"   (target),				\
			  "r"   (set),					\
			  "r"   (compare)				\
			: "memory", "cc");				\
		return (previous == compare);				\
	}

CK_PR_CAS(ptr, void, void *, "", "")

#define CK_PR_CAS_S(N, M, W, R)	CK_PR_CAS(N, M, M, W, R)

/* 64-bit instantiations use the full x-register view. */
CK_PR_CAS_S(64, uint64_t, "", "")
#ifndef CK_PR_DISABLE_DOUBLE
CK_PR_CAS_S(double, double, "", "")
#endif
/* 32-bit and sub-word instantiations operate on w-register views. */
CK_PR_CAS_S(32, uint32_t, "", "w")
CK_PR_CAS_S(uint, unsigned int, "", "w")
CK_PR_CAS_S(int, int, "", "w")
CK_PR_CAS_S(16, uint16_t, "h", "w")
CK_PR_CAS_S(8, uint8_t, "b", "w")
CK_PR_CAS_S(short, short, "h", "w")
CK_PR_CAS_S(char, char, "b", "w")

#undef CK_PR_CAS_S
#undef CK_PR_CAS
/*
 * Atomic exchange (fetch-and-store) via an LL/SC loop: store v into
 * *target and return the value it replaced.  W selects the ldxr/stxr
 * size suffix ("", "h" or "b"); R the register-width prefix ("" or "w").
 */
#define CK_PR_FAS(N, M, T, W, R)					\
	CK_CC_INLINE static T						\
	ck_pr_fas_##N(M *target, T v)					\
	{								\
		T previous;						\
		T tmp;							\
		__asm__ __volatile__("1:"				\
					"ldxr" W " %" R "0, [%2];"	\
					"stxr" W " %w1, %" R "3, [%2];"	\
					"cbnz %w1, 1b;"			\
			: "=&r" (previous),				\
			  "=&r" (tmp)					\
			: "r"   (target),				\
			  "r"   (v)					\
			: "memory", "cc");				\
		return (previous);					\
	}

CK_PR_FAS(64, uint64_t, uint64_t, "", "")
CK_PR_FAS(32, uint32_t, uint32_t, "", "w")
CK_PR_FAS(ptr, void, void *, "", "")
CK_PR_FAS(int, int, int, "", "w")
CK_PR_FAS(uint, unsigned int, unsigned int, "", "w")
CK_PR_FAS(16, uint16_t, uint16_t, "h", "w")
CK_PR_FAS(8, uint8_t, uint8_t, "b", "w")
CK_PR_FAS(short, short, short, "h", "w")
CK_PR_FAS(char, char, char, "b", "w")

#undef CK_PR_FAS
/*
 * In-place unary read-modify-write (increment, decrement, bitwise not)
 * via an LL/SC loop.  I is the instruction that transforms the loaded
 * value held in register %0 before the store attempt; W and R are the
 * memory-size suffix and register-width prefix as in CK_PR_CAS.
 */
#define CK_PR_UNARY(O, N, M, T, I, W, R)				\
	CK_CC_INLINE static void					\
	ck_pr_##O##_##N(M *target)					\
	{								\
		T previous = 0;						\
		T tmp = 0;						\
		__asm__ __volatile__("1:"				\
					"ldxr" W " %" R "0, [%2];"	\
					I ";"				\
					"stxr" W " %w1, %" R "0, [%2];"	\
					"cbnz %w1, 1b;"			\
			: "=&r" (previous),				\
			  "=&r" (tmp)					\
			: "r"   (target)				\
			: "memory", "cc");				\
		return;							\
	}

CK_PR_UNARY(inc, ptr, void, void *, "add %0, %0, #1", "", "")
CK_PR_UNARY(dec, ptr, void, void *, "sub %0, %0, #1", "", "")
CK_PR_UNARY(not, ptr, void, void *, "mvn %0, %0", "", "")
CK_PR_UNARY(inc, 64, uint64_t, uint64_t, "add %0, %0, #1", "", "")
CK_PR_UNARY(dec, 64, uint64_t, uint64_t, "sub %0, %0, #1", "", "")
CK_PR_UNARY(not, 64, uint64_t, uint64_t, "mvn %0, %0", "", "")

/* Sub-word and 32-bit variants operate on the w-register view. */
#define CK_PR_UNARY_S(S, T, W)						\
	CK_PR_UNARY(inc, S, T, T, "add %w0, %w0, #1", W, "w")		\
	CK_PR_UNARY(dec, S, T, T, "sub %w0, %w0, #1", W, "w")		\
	CK_PR_UNARY(not, S, T, T, "mvn %w0, %w0", W, "w")		\

CK_PR_UNARY_S(32, uint32_t, "")
CK_PR_UNARY_S(uint, unsigned int, "")
CK_PR_UNARY_S(int, int, "")
CK_PR_UNARY_S(16, uint16_t, "h")
CK_PR_UNARY_S(8, uint8_t, "b")
CK_PR_UNARY_S(short, short, "h")
CK_PR_UNARY_S(char, char, "b")

#undef CK_PR_UNARY_S
#undef CK_PR_UNARY
/*
 * In-place binary read-modify-write, *target = *target OP delta, via an
 * LL/SC loop.  I names the AArch64 instruction ("and", "add", "orr",
 * "sub", "eor") applied to the loaded value; W and R are the size
 * suffix and register-width prefix as in CK_PR_CAS.
 */
#define CK_PR_BINARY(O, N, M, T, I, W, R)				\
	CK_CC_INLINE static void					\
	ck_pr_##O##_##N(M *target, T delta)				\
	{								\
		T previous;						\
		T tmp;							\
		__asm__ __volatile__("1:"				\
					"ldxr" W " %" R "0, [%2];"	\
					I " %" R "0, %" R "0, %" R "3;"	\
					"stxr" W " %w1, %" R "0, [%2];"	\
					"cbnz %w1, 1b;"			\
			: "=&r" (previous),				\
			  "=&r" (tmp)					\
			: "r"   (target),				\
			  "r"   (delta)					\
			: "memory", "cc");				\
		return;							\
	}

CK_PR_BINARY(and, ptr, void, uintptr_t, "and", "", "")
CK_PR_BINARY(add, ptr, void, uintptr_t, "add", "", "")
CK_PR_BINARY(or, ptr, void, uintptr_t, "orr", "", "")
CK_PR_BINARY(sub, ptr, void, uintptr_t, "sub", "", "")
CK_PR_BINARY(xor, ptr, void, uintptr_t, "eor", "", "")
CK_PR_BINARY(and, 64, uint64_t, uint64_t, "and", "", "")
CK_PR_BINARY(add, 64, uint64_t, uint64_t, "add", "", "")
CK_PR_BINARY(or, 64, uint64_t, uint64_t, "orr", "", "")
CK_PR_BINARY(sub, 64, uint64_t, uint64_t, "sub", "", "")
CK_PR_BINARY(xor, 64, uint64_t, uint64_t, "eor", "", "")

/* Sub-word and 32-bit variants operate on the w-register view. */
#define CK_PR_BINARY_S(S, T, W)						\
	CK_PR_BINARY(and, S, T, T, "and", W, "w")			\
	CK_PR_BINARY(add, S, T, T, "add", W, "w")			\
	CK_PR_BINARY(or, S, T, T, "orr", W, "w")			\
	CK_PR_BINARY(sub, S, T, T, "sub", W, "w")			\
	CK_PR_BINARY(xor, S, T, T, "eor", W, "w")

CK_PR_BINARY_S(32, uint32_t, "")
CK_PR_BINARY_S(uint, unsigned int, "")
CK_PR_BINARY_S(int, int, "")
CK_PR_BINARY_S(16, uint16_t, "h")
CK_PR_BINARY_S(8, uint8_t, "b")
CK_PR_BINARY_S(short, short, "h")
CK_PR_BINARY_S(char, char, "b")

#undef CK_PR_BINARY_S
#undef CK_PR_BINARY
/*
 * Fetch-and-add on a pointer-sized word via an LL/SC loop; returns the
 * value held in *target before delta was added.
 */
CK_CC_INLINE static void *
ck_pr_faa_ptr(void *target, uintptr_t delta)
{
	uintptr_t previous, r, tmp;

	__asm__ __volatile__("1:"
			     "ldxr %0, [%3];"		/* exclusive load of old value */
			     "add %1, %4, %0;"		/* r = old + delta */
			     "stxr %w2, %1, [%3];"	/* try store; %w2 = 0 on success */
			     "cbnz %w2, 1b;"		/* exclusivity lost: retry */
		: "=&r" (previous),
		  "=&r" (r),
		  "=&r" (tmp)
		: "r"   (target),
		  "r"   (delta)
		: "memory", "cc");

	return (void *)(previous);
}
/*
 * 64-bit fetch-and-add via an LL/SC loop; returns the value held in
 * *target before delta was added.
 */
CK_CC_INLINE static uint64_t
ck_pr_faa_64(uint64_t *target, uint64_t delta)
{
	uint64_t previous, r, tmp;

	__asm__ __volatile__("1:"
			     "ldxr %0, [%3];"		/* exclusive load of old value */
			     "add %1, %4, %0;"		/* r = old + delta */
			     "stxr %w2, %1, [%3];"	/* try store; %w2 = 0 on success */
			     "cbnz %w2, 1b;"		/* exclusivity lost: retry */
		: "=&r" (previous),
		  "=&r" (r),
		  "=&r" (tmp)
		: "r"   (target),
		  "r"   (delta)
		: "memory", "cc");

	return (previous);
}
/*
 * 32-bit and sub-word fetch-and-add via an LL/SC loop on w-register
 * views; W is the ldxr/stxr size suffix ("", "h" or "b").  Returns the
 * value held in *target before delta was added.
 */
#define CK_PR_FAA(S, T, W)						\
	CK_CC_INLINE static T						\
	ck_pr_faa_##S(T *target, T delta)				\
	{								\
		T previous, r, tmp;					\
		__asm__ __volatile__("1:"				\
					"ldxr" W " %w0, [%3];"		\
					"add %w1, %w4, %w0;"		\
					"stxr" W " %w2, %w1, [%3];"	\
					"cbnz %w2, 1b;"			\
			: "=&r" (previous),				\
			  "=&r" (r),					\
			  "=&r" (tmp)					\
			: "r"   (target),				\
			  "r"   (delta)					\
			: "memory", "cc");				\
		return (previous);					\
	}

CK_PR_FAA(32, uint32_t, "")
CK_PR_FAA(uint, unsigned int, "")
CK_PR_FAA(int, int, "")
CK_PR_FAA(16, uint16_t, "h")
CK_PR_FAA(8, uint8_t, "b")
CK_PR_FAA(short, short, "h")
CK_PR_FAA(char, char, "b")

#undef CK_PR_FAA
#endif /* CK_PR_AARCH64_LLSC_H */

@ -0,0 +1,298 @@
/*
* Copyright 2009-2016 Samy Al Bahra.
* Copyright 2013-2016 Olivier Houchard.
* Copyright 2016 Alexey Kopytov.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#ifndef CK_PR_AARCH64_LSE_H
#define CK_PR_AARCH64_LSE_H
#ifndef CK_PR_H
#error Do not include this file directly, use ck_pr.h
#endif
/*
 * 128-bit (two-word) compare-and-swap via the LSE casp instruction.
 * casp requires its compare and swap operands in consecutive even/odd
 * register pairs, hence the hard-pinned x0-x3.  On return, value[]
 * holds the two words that were observed in target[].  Returns true
 * iff target[] matched compare[] and set[] was stored, matching the
 * LL/SC implementation and the ck_pr_cas(3) contract.
 */
CK_CC_INLINE static bool
ck_pr_cas_64_2_value(uint64_t target[2], uint64_t compare[2], uint64_t set[2], uint64_t value[2])
{
	uint64_t tmp1;
	uint64_t tmp2;
	register uint64_t x0 __asm__ ("x0") = compare[0];
	register uint64_t x1 __asm__ ("x1") = compare[1];
	register uint64_t x2 __asm__ ("x2") = set[0];
	register uint64_t x3 __asm__ ("x3") = set[1];

	__asm__ __volatile__("casp %0, %1, %4, %5, [%6];"	/* x0/x1 receive observed words */
			     "eor %2, %0, %7;"			/* diff word 0 against compare[0] */
			     "eor %3, %1, %8;"			/* diff word 1 against compare[1] */
			     "orr %2, %2, %3;"			/* tmp1 == 0 iff the CAS succeeded */
		: "+&r" (x0), "+&r" (x1), "=&r" (tmp1), "=&r" (tmp2)
		: "r" (x2), "r" (x3), "r" (target), "r" (compare[0]), "r" (compare[1])
		: "memory");

	value[0] = x0;
	value[1] = x1;

	/*
	 * tmp1 is zero exactly when the observed words equaled compare[]
	 * (i.e. the swap took effect).  The original returned (!!tmp1),
	 * which reported failure as success and vice versa.
	 */
	return (tmp1 == 0);
}
/*
 * Double-width (two pointer-sized words) compare-and-swap over untyped
 * memory, reporting the observed contents through *value; forwards to
 * the 64-bit pair implementation.
 */
CK_CC_INLINE static bool
ck_pr_cas_ptr_2_value(void *target, void *compare, void *set, void *value)
{
	uint64_t *t = CK_CPP_CAST(uint64_t *, target);
	uint64_t *c = CK_CPP_CAST(uint64_t *, compare);
	uint64_t *s = CK_CPP_CAST(uint64_t *, set);
	uint64_t *v = CK_CPP_CAST(uint64_t *, value);

	return (ck_pr_cas_64_2_value(t, c, s, v));
}
/*
 * 128-bit (two-word) compare-and-swap via the LSE casp instruction;
 * same as ck_pr_cas_64_2_value() but does not report the observed
 * words.  casp requires its operands in consecutive even/odd register
 * pairs, hence the hard-pinned x0-x3.  Returns true iff target[]
 * matched compare[] and set[] was stored, matching the LL/SC
 * implementation and the ck_pr_cas(3) contract.
 */
CK_CC_INLINE static bool
ck_pr_cas_64_2(uint64_t target[2], uint64_t compare[2], uint64_t set[2])
{
	register uint64_t x0 __asm__ ("x0") = compare[0];
	register uint64_t x1 __asm__ ("x1") = compare[1];
	register uint64_t x2 __asm__ ("x2") = set[0];
	register uint64_t x3 __asm__ ("x3") = set[1];

	__asm__ __volatile__("casp %0, %1, %2, %3, [%4];"	/* x0/x1 receive observed words */
			     "eor %0, %0, %5;"			/* diff word 0 against compare[0] */
			     "eor %1, %1, %6;"			/* diff word 1 against compare[1] */
			     "orr %0, %0, %1;"			/* x0 == 0 iff the CAS succeeded */
		: "+&r" (x0), "+&r" (x1)
		: "r" (x2), "r" (x3), "r" (target), "r" (compare[0]), "r" (compare[1])
		: "memory");

	/*
	 * x0 is zero exactly when the observed words equaled compare[]
	 * (i.e. the swap took effect).  The original returned (!!x0),
	 * which reported failure as success and vice versa.
	 */
	return (x0 == 0);
}
/*
 * Double-width (two pointer-sized words) compare-and-swap over untyped
 * memory; forwards to the 64-bit pair implementation.
 */
CK_CC_INLINE static bool
ck_pr_cas_ptr_2(void *target, void *compare, void *set)
{
	uint64_t *t = CK_CPP_CAST(uint64_t *, target);
	uint64_t *c = CK_CPP_CAST(uint64_t *, compare);
	uint64_t *s = CK_CPP_CAST(uint64_t *, set);

	return (ck_pr_cas_64_2(t, c, s));
}
/*
 * Word and sub-word compare-and-swap via the LSE cas instruction.  cas
 * atomically compares *target with its first register operand and, on a
 * match, stores set; the value previously held by *target is left in
 * that first operand either way, so seeding it with compare and testing
 * equality afterwards yields the success result.  W selects the size
 * suffix ("", "h" or "b") and R the register-width prefix ("" or "w").
 * The _value variant also reports the observed value through *value.
 * Both return true iff the swap took place.
 */
#define CK_PR_CAS(N, M, T, W, R)					\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##N##_value(M *target, T compare, T set, M *value)	\
	{								\
		*(T *)value = compare;					\
		__asm__ __volatile__(					\
				     "cas" W " %" R "0, %" R "2, [%1];"	\
			: "+&r" (*(T *)value)				\
			: "r"   (target),				\
			  "r"   (set)					\
			: "memory");					\
		return (*(T *)value == compare);			\
	}								\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##N(M *target, T compare, T set)			\
	{								\
		T previous = compare;					\
		__asm__ __volatile__(					\
				     "cas" W " %" R "0, %" R "2, [%1];"	\
			: "+&r" (previous)				\
			: "r"   (target),				\
			  "r"   (set)					\
			: "memory");					\
		return (previous == compare);				\
	}

CK_PR_CAS(ptr, void, void *, "", "")

#define CK_PR_CAS_S(N, M, W, R)	CK_PR_CAS(N, M, M, W, R)

/* 64-bit instantiations use the full x-register view. */
CK_PR_CAS_S(64, uint64_t, "", "")
#ifndef CK_PR_DISABLE_DOUBLE
CK_PR_CAS_S(double, double, "", "")
#endif
/* 32-bit and sub-word instantiations operate on w-register views. */
CK_PR_CAS_S(32, uint32_t, "", "w")
CK_PR_CAS_S(uint, unsigned int, "", "w")
CK_PR_CAS_S(int, int, "", "w")
CK_PR_CAS_S(16, uint16_t, "h", "w")
CK_PR_CAS_S(8, uint8_t, "b", "w")
CK_PR_CAS_S(short, short, "h", "w")
CK_PR_CAS_S(char, char, "b", "w")

#undef CK_PR_CAS_S
#undef CK_PR_CAS
/*
 * Atomic exchange (fetch-and-store) via the LSE swp instruction: swp
 * stores v (%2) into *target and deposits the former contents into
 * previous (%0) in a single atomic operation — no retry loop needed.
 * W selects the size suffix; R the register-width prefix.
 */
#define CK_PR_FAS(N, M, T, W, R)					\
	CK_CC_INLINE static T						\
	ck_pr_fas_##N(M *target, T v)					\
	{								\
		T previous;						\
		__asm__ __volatile__(					\
				     "swp" W " %" R "2, %" R "0, [%1];"	\
			: "=&r" (previous)				\
			: "r"   (target),				\
			  "r"   (v)					\
			: "memory");					\
		return (previous);					\
	}

CK_PR_FAS(64, uint64_t, uint64_t, "", "")
CK_PR_FAS(32, uint32_t, uint32_t, "", "w")
CK_PR_FAS(ptr, void, void *, "", "")
CK_PR_FAS(int, int, int, "", "w")
CK_PR_FAS(uint, unsigned int, unsigned int, "", "w")
CK_PR_FAS(16, uint16_t, uint16_t, "h", "w")
CK_PR_FAS(8, uint8_t, uint8_t, "b", "w")
CK_PR_FAS(short, short, short, "h", "w")
CK_PR_FAS(char, char, char, "b", "w")

#undef CK_PR_FAS
/*
 * In-place unary operations built from LSE store-form atomics (stadd,
 * steor, ...), which discard the loaded value.  I loads the constant
 * operand into the fixed scratch register x0/w0 — hence the explicit
 * "x0" clobber — and S selects the atomic: inc and dec use stadd with
 * +1 and -1, not uses steor with all-ones.  R is the register name
 * prefix ("x" or "w") and W the size suffix for sub-word variants.
 */
#define CK_PR_UNARY(O, N, M, T, I, W, R, S)				\
	CK_CC_INLINE static void					\
	ck_pr_##O##_##N(M *target)					\
	{								\
		__asm__ __volatile__(I ";"				\
				     "st" S W " " R "0, [%0];"		\
			:						\
			: "r"   (target)				\
			: "x0", "memory");				\
		return;							\
	}

CK_PR_UNARY(inc, ptr, void, void *, "mov x0, 1", "", "x", "add")
CK_PR_UNARY(dec, ptr, void, void *, "mov x0, -1", "", "x", "add")
CK_PR_UNARY(not, ptr, void, void *, "mov x0, -1", "", "x", "eor")
CK_PR_UNARY(inc, 64, uint64_t, uint64_t, "mov x0, 1", "", "x", "add")
CK_PR_UNARY(dec, 64, uint64_t, uint64_t, "mov x0, -1", "", "x", "add")
CK_PR_UNARY(not, 64, uint64_t, uint64_t, "mov x0, -1", "", "x", "eor")

/* Sub-word and 32-bit variants build the operand in w0. */
#define CK_PR_UNARY_S(S, T, W)						\
	CK_PR_UNARY(inc, S, T, T, "mov w0, 1", W, "w", "add")		\
	CK_PR_UNARY(dec, S, T, T, "mov w0, -1", W, "w", "add")		\
	CK_PR_UNARY(not, S, T, T, "mov w0, -1", W, "w", "eor")		\

CK_PR_UNARY_S(32, uint32_t, "")
CK_PR_UNARY_S(uint, unsigned int, "")
CK_PR_UNARY_S(int, int, "")
CK_PR_UNARY_S(16, uint16_t, "h")
CK_PR_UNARY_S(8, uint8_t, "b")
CK_PR_UNARY_S(short, short, "h")
CK_PR_UNARY_S(char, char, "b")

#undef CK_PR_UNARY_S
#undef CK_PR_UNARY
/*
 * In-place binary operations, *target = *target OP delta, via LSE
 * store-form atomics (stadd/stclr/stset/steor), which discard the
 * loaded value.  LSE has no direct and/sub: and is expressed as stclr
 * of the complemented delta (target &= ~~delta) and sub as stadd of the
 * negated delta; I is the optional pre-instruction ("mvn"/"neg") that
 * rewrites delta accordingly, which is why delta is an in/out operand.
 */
#define CK_PR_BINARY(O, N, M, T, S, W, R, I)				\
	CK_CC_INLINE static void					\
	ck_pr_##O##_##N(M *target, T delta)				\
	{								\
		__asm__ __volatile__(I ";"				\
				     "st" S W " %" R "0, [%1];"		\
			: "+&r" (delta)					\
			: "r"   (target)				\
			: "memory");					\
		return;							\
	}

CK_PR_BINARY(and, ptr, void, uintptr_t, "clr", "", "", "mvn %0, %0")
CK_PR_BINARY(add, ptr, void, uintptr_t, "add", "", "", "")
CK_PR_BINARY(or, ptr, void, uintptr_t, "set", "", "", "")
CK_PR_BINARY(sub, ptr, void, uintptr_t, "add", "", "", "neg %0, %0")
CK_PR_BINARY(xor, ptr, void, uintptr_t, "eor", "", "", "")
CK_PR_BINARY(and, 64, uint64_t, uint64_t, "clr", "", "", "mvn %0, %0")
CK_PR_BINARY(add, 64, uint64_t, uint64_t, "add", "", "", "")
CK_PR_BINARY(or, 64, uint64_t, uint64_t, "set", "", "", "")
CK_PR_BINARY(sub, 64, uint64_t, uint64_t, "add", "", "", "neg %0, %0")
CK_PR_BINARY(xor, 64, uint64_t, uint64_t, "eor", "", "", "")

/* Sub-word and 32-bit variants pre-process delta in its w view. */
#define CK_PR_BINARY_S(S, T, W)						\
	CK_PR_BINARY(and, S, T, T, "clr", W, "w", "mvn %w0, %w0")	\
	CK_PR_BINARY(add, S, T, T, "add", W, "w", "")			\
	CK_PR_BINARY(or, S, T, T, "set", W, "w", "")			\
	CK_PR_BINARY(sub, S, T, T, "add", W, "w", "neg %w0, %w0")	\
	CK_PR_BINARY(xor, S, T, T, "eor", W, "w", "")

CK_PR_BINARY_S(32, uint32_t, "")
CK_PR_BINARY_S(uint, unsigned int, "")
CK_PR_BINARY_S(int, int, "")
CK_PR_BINARY_S(16, uint16_t, "h")
CK_PR_BINARY_S(8, uint8_t, "b")
CK_PR_BINARY_S(short, short, "h")
CK_PR_BINARY_S(char, char, "b")

#undef CK_PR_BINARY_S
#undef CK_PR_BINARY
/*
 * Fetch-and-add on a pointer-sized word via the LSE ldadd instruction,
 * which atomically adds delta (%2) to *target and returns the prior
 * value in %0 — no retry loop needed.
 */
CK_CC_INLINE static void *
ck_pr_faa_ptr(void *target, uintptr_t delta)
{
	uintptr_t previous;

	__asm__ __volatile__(
			     "ldadd %2, %0, [%1];"
		: "=r" (previous)
		: "r"  (target),
		  "r"  (delta)
		: "memory");

	return (void *)(previous);
}
/*
 * 64-bit fetch-and-add via the LSE ldadd instruction, which atomically
 * adds delta (%2) to *target and returns the prior value in %0.
 */
CK_CC_INLINE static uint64_t
ck_pr_faa_64(uint64_t *target, uint64_t delta)
{
	uint64_t previous;

	__asm__ __volatile__(
			     "ldadd %2, %0, [%1];"
		: "=r" (previous)
		: "r"  (target),
		  "r"  (delta)
		: "memory");

	return (previous);
}
/*
 * 32-bit and sub-word fetch-and-add via the LSE ldadd instruction on
 * w-register views; W is the size suffix ("", "h" or "b").  Returns
 * the value held in *target before delta was added.
 */
#define CK_PR_FAA(S, T, W)						\
	CK_CC_INLINE static T						\
	ck_pr_faa_##S(T *target, T delta)				\
	{								\
		T previous;						\
		__asm__ __volatile__(					\
				     "ldadd" W " %w2, %w0, [%1];"	\
			: "=r" (previous)				\
			: "r"  (target),				\
			  "r"  (delta)					\
			: "memory");					\
		return (previous);					\
	}

CK_PR_FAA(32, uint32_t, "")
CK_PR_FAA(uint, unsigned int, "")
CK_PR_FAA(int, int, "")
CK_PR_FAA(16, uint16_t, "h")
CK_PR_FAA(8, uint8_t, "b")
CK_PR_FAA(short, short, "h")
CK_PR_FAA(char, char, "b")

#undef CK_PR_FAA
#endif /* CK_PR_AARCH64_LSE_H */
Loading…
Cancel
Save