author    midipix <writeonce@midipix.org>  2018-08-09 04:15:25 -0400
committer midipix <writeonce@midipix.org>  2018-08-09 04:15:25 -0400
commit    f1a76632596dd69a233d09ed278467c5fed7b13d (patch)
tree      d2a42c766fb7414d29e84950ad32d54f1818caee /include/ntapi/bits/nt64/nt_atomic_x86_64_asm__gcc.h
parent    dc7e61e60fb6957717a978131c3dc2dca58cf44c (diff)
download  ntapi-f1a76632596dd69a233d09ed278467c5fed7b13d.tar.bz2
          ntapi-f1a76632596dd69a233d09ed278467c5fed7b13d.tar.xz
library: atomics: simplify and-or-xor interfaces.
Diffstat (limited to 'include/ntapi/bits/nt64/nt_atomic_x86_64_asm__gcc.h')
-rw-r--r--  include/ntapi/bits/nt64/nt_atomic_x86_64_asm__gcc.h  72
1 file changed, 18 insertions(+), 54 deletions(-)
diff --git a/include/ntapi/bits/nt64/nt_atomic_x86_64_asm__gcc.h b/include/ntapi/bits/nt64/nt_atomic_x86_64_asm__gcc.h
index 1e3f10d..2162b1c 100644
--- a/include/ntapi/bits/nt64/nt_atomic_x86_64_asm__gcc.h
+++ b/include/ntapi/bits/nt64/nt_atomic_x86_64_asm__gcc.h
@@ -300,156 +300,120 @@ static __inline__ int64_t at_locked_cas_64(
}
-static __inline__ intptr_t at_locked_and(
+static __inline__ void at_locked_and(
intptr_t volatile * dst,
intptr_t mask)
{
- intptr_t ret;
-
__asm__(
"lock;"
"andq %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ int32_t at_locked_and_32(
+static __inline__ void at_locked_and_32(
int32_t volatile * dst,
int32_t mask)
{
- int32_t ret;
-
__asm__(
"lock;"
"andl %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ int64_t at_locked_and_64(
+static __inline__ void at_locked_and_64(
int64_t volatile * dst,
int64_t mask)
{
- int64_t ret;
-
__asm__(
"lock;"
"andq %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ intptr_t at_locked_or(
+static __inline__ void at_locked_or(
intptr_t volatile * dst,
intptr_t mask)
{
- intptr_t ret;
-
__asm__(
"lock;"
"orq %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ int32_t at_locked_or_32(
+static __inline__ void at_locked_or_32(
int32_t volatile * dst,
int32_t mask)
{
- int32_t ret;
-
__asm__(
"lock;"
"orl %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ int64_t at_locked_or_64(
+static __inline__ void at_locked_or_64(
int64_t volatile * dst,
int64_t mask)
{
- int64_t ret;
-
__asm__(
"lock;"
"orq %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ intptr_t at_locked_xor(
+static __inline__ void at_locked_xor(
intptr_t volatile * dst,
intptr_t mask)
{
- intptr_t ret;
-
__asm__(
"lock;"
"xorq %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ int32_t at_locked_xor_32(
+static __inline__ void at_locked_xor_32(
int32_t volatile * dst,
int32_t mask)
{
- int32_t ret;
-
__asm__(
"lock;"
"xorl %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ int64_t at_locked_xor_64(
+static __inline__ void at_locked_xor_64(
int64_t volatile * dst,
int64_t mask)
{
- int64_t ret;
-
__asm__(
"lock;"
"xorq %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
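
For reference, a minimal usage sketch of the simplified void-returning interfaces follows. The stand-in helper mirrors the lock-prefixed inline asm of at_locked_and_32() above; the flag names, the helper name, and the main() driver are illustrative assumptions and not part of ntapi. Because these functions no longer return the previous value, a caller that still needs it should use one of the header's compare-and-swap primitives (e.g. at_locked_cas_64()) instead.

#include <stdint.h>

/* stand-in mirroring at_locked_and_32() from the header above: a
 * lock-prefixed andl on the destination; the "memory" clobber keeps the
 * compiler from caching or reordering accesses around the operation. */
static __inline__ void locked_and_32_sketch(
	int32_t volatile * dst,
	int32_t mask)
{
	__asm__(
		"lock;"
		"andl %1, %0"
		: "=m" (*dst)
		: "r" (mask)
		: "memory");
}

/* hypothetical flag bits, for illustration only */
#define FLAG_BUSY	0x1
#define FLAG_DIRTY	0x2

static int32_t volatile gflags = FLAG_BUSY | FLAG_DIRTY;

int main(void)
{
	/* atomically clear FLAG_BUSY; with the simplified interface the
	 * previous value is intentionally discarded. */
	locked_and_32_sketch(&gflags,~FLAG_BUSY);

	return (gflags == FLAG_DIRTY) ? 0 : 1;
}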