ck_pr/x86_32: Drop 64-bit operations. We just don't care enough about these right now.

The real issue is supporting PIC environments. This likely requires stand-alone
assembly blobs, something we don't want to deal with right now.
ck_pring
Samy Al Bahra 14 years ago
parent 3f87223d21
commit f6a2cb1b39

@ -29,36 +29,18 @@
#define CK_F_PR_BTS_PTR
#define CK_F_PR_BTS_UINT
#define CK_F_PR_CAS_16
#define CK_F_PR_CAS_16_4
#define CK_F_PR_CAS_16_4_VALUE
#define CK_F_PR_CAS_16_VALUE
#define CK_F_PR_CAS_32
#define CK_F_PR_CAS_32_2
#define CK_F_PR_CAS_32_2_VALUE
#define CK_F_PR_CAS_32_VALUE
#define CK_F_PR_CAS_64
#define CK_F_PR_CAS_64_1
#define CK_F_PR_CAS_64_1_VALUE
#define CK_F_PR_CAS_64_VALUE
#define CK_F_PR_CAS_8
#define CK_F_PR_CAS_8_8
#define CK_F_PR_CAS_8_8_VALUE
#define CK_F_PR_CAS_8_VALUE
#define CK_F_PR_CAS_CHAR
#define CK_F_PR_CAS_CHAR_8
#define CK_F_PR_CAS_CHAR_8_VALUE
#define CK_F_PR_CAS_CHAR_VALUE
#define CK_F_PR_CAS_INT
#define CK_F_PR_CAS_INT_2
#define CK_F_PR_CAS_INT_2_VALUE
#define CK_F_PR_CAS_INT_VALUE
#define CK_F_PR_CAS_PTR
#define CK_F_PR_CAS_PTR_2
#define CK_F_PR_CAS_PTR_2_VALUE
#define CK_F_PR_CAS_PTR_VALUE
#define CK_F_PR_CAS_UINT
#define CK_F_PR_CAS_UINT_2
#define CK_F_PR_CAS_UINT_2_VALUE
#define CK_F_PR_CAS_UINT_VALUE
#define CK_F_PR_DEC_16
#define CK_F_PR_DEC_16_ZERO
@ -111,20 +93,12 @@
#define CK_F_PR_INC_UINT
#define CK_F_PR_INC_UINT_ZERO
#define CK_F_PR_LOAD_16
#define CK_F_PR_LOAD_16_4
#define CK_F_PR_LOAD_32
#define CK_F_PR_LOAD_32_2
#define CK_F_PR_LOAD_64
#define CK_F_PR_LOAD_8
#define CK_F_PR_LOAD_8_8
#define CK_F_PR_LOAD_CHAR
#define CK_F_PR_LOAD_CHAR_8
#define CK_F_PR_LOAD_INT
#define CK_F_PR_LOAD_INT_2
#define CK_F_PR_LOAD_PTR
#define CK_F_PR_LOAD_PTR_2
#define CK_F_PR_LOAD_UINT
#define CK_F_PR_LOAD_UINT_2
#define CK_F_PR_NEG_16
#define CK_F_PR_NEG_16_ZERO
#define CK_F_PR_NEG_32
@ -156,7 +130,6 @@
#define CK_F_PR_STALL
#define CK_F_PR_STORE_16
#define CK_F_PR_STORE_32
#define CK_F_PR_STORE_64
#define CK_F_PR_STORE_8
#define CK_F_PR_STORE_CHAR
#define CK_F_PR_STORE_INT

@ -1,5 +1,5 @@
/*
* Copyright 2009, 2010 Samy Al Bahra.
* Copyright 2009-2011 Samy Al Bahra.
* Copyright 2011 Devon H. O'Dell <devon.odell@gmail.com>
* All rights reserved.
*
@ -117,24 +117,6 @@ CK_PR_FAS_S(8, uint8_t, "xchgb")
#undef CK_PR_FAS_S
#undef CK_PR_FAS
/*
* Atomic load-from-memory operations.
*/
/*
 * ck_pr_load_64: atomic 64-bit load for x86-32.  A single SSE movq
 * round-trips the quadword through %xmm0 into a local, so the load
 * cannot tear into two independent 32-bit reads.
 * NOTE(review): relies on movq to/from an XMM register (SSE2) and
 * clobbers %xmm0 -- confirm all supported targets guarantee SSE2.
 */
CK_CC_INLINE static uint64_t
ck_pr_load_64(uint64_t *target)
{
uint64_t r;
/* "+m" forces a genuine memory read; "memory" is a compiler barrier. */
__asm__ __volatile__("movq %0, %%xmm0;"
"movq %%xmm0, %1;"
: "+m" (*target),
"=m" (r)
:
: "memory", "%xmm0");
return (r);
}
#define CK_PR_LOAD(S, M, T, C, I) \
CK_CC_INLINE static T \
ck_pr_load_##S(M *target) \
@ -161,51 +143,6 @@ CK_PR_LOAD_S(8, uint8_t, "movb")
#undef CK_PR_LOAD_S
#undef CK_PR_LOAD
/*
 * ck_pr_load_32_2: copy the 64-bit pair (v[0], v[1]) into target with
 * a single movq, so both 32-bit words are transferred in one access.
 * NOTE(review): the "y" constraint places the source in an MMX
 * register, yet the clobber names %xmm0 and no emms is issued --
 * verify the MMX/x87 state handling; the uint64_t casts also alias
 * the uint32_t arrays, which strict-aliasing rules frown on.
 */
CK_CC_INLINE static void
ck_pr_load_32_2(uint32_t target[2], uint32_t v[2])
{
__asm__ __volatile__("movq %1, %0;"
: "=m" (*(uint64_t *)target)
: "y" (*(uint64_t *)v)
: "%xmm0", "memory");
return;
}
/*
 * Double-word load of a pointer pair.  Routes straight through
 * ck_pr_load_32_2, which moves the full 64 bits in one movq.
 */
CK_CC_INLINE static void
ck_pr_load_ptr_2(void *t, void *v)
{

	ck_pr_load_32_2((uint32_t *)t, (uint32_t *)v);
}
/*
 * Generate the remaining double-word load wrappers.  Each expansion
 * defines ck_pr_load_<SUF>_<N>, loading N elements of TYPE (64 bits
 * in all) by delegating to ck_pr_load_32_2.
 */
#define CK_PR_LOAD_2(SUF, N, TYPE)					\
	CK_CC_INLINE static void					\
	ck_pr_load_##SUF##_##N(TYPE t[2], TYPE v[2])			\
	{								\
		ck_pr_load_32_2((uint32_t *)t, (uint32_t *)v);		\
	}

CK_PR_LOAD_2(char, 8, char)
CK_PR_LOAD_2(int, 2, int)
CK_PR_LOAD_2(uint, 2, unsigned int)
CK_PR_LOAD_2(16, 4, uint16_t)
CK_PR_LOAD_2(8, 8, uint8_t)
#undef CK_PR_LOAD_2
/*
* Atomic store-to-memory operations.
*/
/*
 * ck_pr_store_64: atomic 64-bit store for x86-32.  The value is
 * placed in an MMX register ("y" constraint) and written with a
 * single movq, so the store cannot tear into two 32-bit writes.
 * NOTE(review): no emms is issued after MMX use -- confirm callers
 * tolerate the x87 state impact.
 */
CK_CC_INLINE static void
ck_pr_store_64(uint64_t *target, uint64_t val)
{
__asm__ __volatile__("movq %1, %0;"
: "+m" (*target)
: "y" (val)
: "memory");
}
#define CK_PR_STORE(S, M, T, C, I) \
CK_CC_INLINE static void \
ck_pr_store_##S(M *target, T v) \
@ -418,238 +355,6 @@ CK_PR_CAS_O_S(8, uint8_t, "b", "al")
#undef CK_PR_CAS_O_S
#undef CK_PR_CAS_O
/*
 * ck_pr_cas_64: atomic 64-bit compare-and-swap for x86-32 built on
 * lock cmpxchg8b.  The instruction compares edx:eax with *t and, on
 * match, stores ecx:ebx; setz latches ZF into z as the success flag.
 */
CK_CC_INLINE static bool
ck_pr_cas_64(uint64_t *t, uint64_t c, uint64_t s)
{
bool z;
/* Replacement value split into v[0] = low, v[1] = high word (x86 is little-endian). */
union {
uint64_t s;
uint32_t v[2];
} set;
/* Old value cmpxchg8b leaves in edx:eax; captured but unused. */
uint64_t A;
#ifdef __PIC__
/* Spill slot for %ebx, which holds the GOT pointer under PIC. */
uint32_t ebxt;
#endif
/* NOTE(review): an atomic helper fills a local union; plain assignment looks sufficient -- confirm. */
ck_pr_store_64(&set.s, s);
#ifdef __PIC__
/*
 * %ebx may not be clobbered directly under PIC, so save it to ebxt
 * (%6), load the low replacement word from memory (%5), and restore
 * it once cmpxchg8b retires.
 */
__asm__ __volatile__("movl %%ebx, %6;"
"movl %5, %%ebx;"
CK_PR_LOCK_PREFIX "cmpxchg8b %0; setz %2;"
"movl %6, %%ebx;"
#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 403
: "+m" (*(uint32_t *)t),
#else
/* NOTE(review): pre-4.3 GCC apparently rejects "+m" on this operand -- verify. */
: "=m" (*(uint32_t *)t),
#endif
"=A" (A),
"=q" (z)
: "A" (c),
"c" (set.v[1]),
"m" (set.v[0]),
"m" (ebxt)
: "memory", "cc");
#else
__asm__ __volatile__(CK_PR_LOCK_PREFIX "cmpxchg8b %0; setz %2;"
#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 403
: "+m" (*(uint32_t *)t),
#else
: "=m" (*(uint32_t *)t),
#endif
"=A" (A),
"=q" (z)
: "A" (c),
"b" (set.v[0]),
"c" (set.v[1])
: "memory", "cc");
#endif
return (bool)z;
}
/*
 * ck_pr_cas_64_value: as ck_pr_cas_64, but also stores the 64-bit
 * value witnessed in memory through v (eax -> val[0], edx -> val[1]).
 * NOTE(review): the "A" input overlaps the "=a"/"=d" outputs without
 * matching constraints; GCC accepts this pattern here but it is
 * formally questionable -- verify on all supported compilers.
 */
CK_CC_INLINE static bool
ck_pr_cas_64_value(uint64_t *t, uint64_t c, uint64_t s, uint64_t *v)
{
bool z;
/* Replacement value split into low (v[0]) / high (v[1]) words. */
union {
uint64_t s;
uint32_t v[2];
} set;
/* View of the result slot as two 32-bit words for the =a/=d outputs. */
uint32_t *val = (uint32_t *)v;
#ifdef __PIC__
/* Spill slot for %ebx (GOT pointer under PIC). */
uint32_t ebxt;
#endif
ck_pr_store_64(&set.s, s);
#ifdef __PIC__
/* Save %ebx to ebxt (%7), load the low replacement word (%6), restore after. */
__asm__ __volatile__("movl %%ebx, %7;"
"movl %6, %%ebx;"
CK_PR_LOCK_PREFIX "cmpxchg8b %0; setz %3;"
"movl %7, %%ebx;"
#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 403
: "+m" (*(uint32_t *)t),
#else
: "=m" (*(uint32_t *)t),
#endif
"=a" (val[0]),
"=d" (val[1]),
"=q" (z)
: "A" (c),
"c" (set.v[1]),
"m" (set.v[0]),
"m" (ebxt)
: "memory", "cc");
#else
__asm__ __volatile__(CK_PR_LOCK_PREFIX "cmpxchg8b %0; setz %3;"
#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 403
: "+m" (*(uint32_t *)t),
#else
: "=m" (*(uint32_t *)t),
#endif
"=a" (val[0]),
"=d" (val[1]),
"=q" (z)
: "A" (c),
"b" (set.v[0]),
"c" (set.v[1])
: "memory", "cc");
#endif
return (bool)z;
}
/*
 * ck_pr_cas_32_2: compare-and-swap over two adjacent 32-bit words
 * (64 bits total) via lock cmpxchg8b.  The comparand travels in
 * edx:eax, the replacement in ecx:ebx, and z latches ZF via setz.
 *
 * Fix: the PIC variant previously passed the pointer c itself as the
 * "A" (edx:eax) operand, making cmpxchg8b compare *t against the
 * comparand's address instead of its contents.  The comparand is now
 * a matching-constraint ("1") input composed from c[0] (low) and
 * c[1] (high), mirroring the non-PIC variant's "a"/"d" operands;
 * operand numbering in the template is unchanged.
 */
CK_CC_INLINE static bool
ck_pr_cas_32_2(uint32_t t[2], uint32_t c[2], uint32_t s[2])
{
	bool z;
	uint64_t A; /* old value cmpxchg8b leaves in edx:eax; unused */

#ifdef __PIC__
	/*
	 * %ebx holds the GOT pointer under PIC and may not be listed as
	 * a clobber, so it is spilled to ebxt (%6) and loaded with the
	 * low replacement word from memory (%5) around the exchange.
	 */
	uint32_t ebxt;

	__asm__ __volatile__("movl %%ebx, %6;"
			     "movl %5, %%ebx;"
			     CK_PR_LOCK_PREFIX "cmpxchg8b %0; setz %2;"
			     "movl %6, %%ebx;"
#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 403
				: "+m" (*(uint32_t *)t),
#else
				/* NOTE(review): pre-4.3 GCC workaround for "+m". */
				: "=m" (*(uint32_t *)t),
#endif
				  "=A" (A),
				  "=q" (z)
				: "1" (((uint64_t)c[1] << 32) | c[0]),
				  "c" (s[1]),
				  "m" (s[0]),
				  "m" (ebxt)
				: "memory", "cc");
#else
	__asm__ __volatile__(CK_PR_LOCK_PREFIX "cmpxchg8b %0; setz %2;"
#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 403
				: "+m" (*(uint32_t *)t),
#else
				: "=m" (*(uint32_t *)t),
#endif
				  "=A" (A),
				  "=q" (z)
				: "a" (c[0]),
				  "d" (c[1]),
				  "b" (s[0]),
				  "c" (s[1])
				: "memory", "cc");
#endif
	return (bool)z;
}
/*
 * CAS over a pair of pointer-sized (32-bit) words; thin adapter onto
 * ck_pr_cas_32_2.
 */
CK_CC_INLINE static bool
ck_pr_cas_ptr_2(void *t, void *c, void *s)
{

	return (ck_pr_cas_32_2((uint32_t *)t, (uint32_t *)c, (uint32_t *)s));
}
/*
 * ck_pr_cas_32_2_value: as ck_pr_cas_32_2, but the value witnessed in
 * memory is returned through v (eax -> v[0], edx -> v[1]).
 *
 * Fix: both variants previously passed the pointer `compare` itself
 * as the "A" (edx:eax) operand, so cmpxchg8b compared *target against
 * the comparand's address rather than its contents.  The comparand
 * words are now tied to the eax/edx outputs via matching constraints
 * "1"/"2"; the PIC template's spill operands were renumbered to match
 * (%6/%7 -> %7/%8).
 */
CK_CC_INLINE static bool
ck_pr_cas_32_2_value(uint32_t target[2], uint32_t compare[2], uint32_t set[2], uint32_t v[2])
{
	bool z;

#ifdef __PIC__
	/* Spill slot for %ebx, the GOT pointer under PIC. */
	uint32_t ebxt;

	__asm__ __volatile__("movl %%ebx, %8;"
			     "movl %7, %%ebx;"
			     CK_PR_LOCK_PREFIX "cmpxchg8b %0; setz %3;"
			     "movl %8, %%ebx;"
#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 403
				: "+m" (*(uint32_t *)target),
#else
				/* NOTE(review): pre-4.3 GCC workaround for "+m". */
				: "=m" (*(uint32_t *)target),
#endif
				  "=a" (v[0]),
				  "=d" (v[1]),
				  "=q" (z)
				: "1" (compare[0]),
				  "2" (compare[1]),
				  "c" (set[1]),
				  "m" (set[0]),
				  "m" (ebxt)
				: "memory", "cc");
#else
	__asm__ __volatile__(CK_PR_LOCK_PREFIX "cmpxchg8b %0; setz %3;"
#if (__GNUC__ * 100 + __GNUC_MINOR__) >= 403
				: "+m" (*(uint32_t *)target),
#else
				: "=m" (*(uint32_t *)target),
#endif
				  "=a" (v[0]),
				  "=d" (v[1]),
				  "=q" (z)
				: "1" (compare[0]),
				  "2" (compare[1]),
				  "b" (set[0]),
				  "c" (set[1])
				: "memory", "cc");
#endif
	return (bool)z;
}
/*
 * Value-returning variant of ck_pr_cas_ptr_2; the pair witnessed in
 * memory is written through v.
 */
CK_CC_INLINE static bool
ck_pr_cas_ptr_2_value(void *t, void *c, void *s, void *v)
{

	return (ck_pr_cas_32_2_value((uint32_t *)t, (uint32_t *)c,
	    (uint32_t *)s, (uint32_t *)v));
}
/*
 * Generate CAS wrappers for the remaining 64-bit-wide shapes: each
 * expansion defines ck_pr_cas_<SUF>_<N> and ck_pr_cas_<SUF>_<N>_value
 * over N elements of TYPE, both delegating to the cmpxchg8b-based
 * two-word primitives.
 */
#define CK_PR_CAS_V(SUF, N, TYPE)					\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##SUF##_##N(TYPE t[N], TYPE c[N], TYPE s[N])		\
	{								\
		return (ck_pr_cas_32_2((uint32_t *)t,			\
				       (uint32_t *)c,			\
				       (uint32_t *)s));			\
	}								\
	CK_CC_INLINE static bool					\
	ck_pr_cas_##SUF##_##N##_value(TYPE *t, TYPE c[N], TYPE s[N], TYPE *v) \
	{								\
		return (ck_pr_cas_32_2_value((uint32_t *)t,		\
					     (uint32_t *)c,		\
					     (uint32_t *)s,		\
					     (uint32_t *)v));		\
	}

CK_PR_CAS_V(char, 8, char)
CK_PR_CAS_V(int, 2, int)
CK_PR_CAS_V(uint, 2, unsigned int)
CK_PR_CAS_V(16, 4, uint16_t)
CK_PR_CAS_V(8, 8, uint8_t)
#undef CK_PR_CAS_V
/*
* Atomic bit test operations.
*/

Loading…
Cancel
Save