summaryrefslogtreecommitdiffhomepage
path: root/include/ntapi/bits/nt32/nt_atomic_i386_asm__gcc.h
diff options
context:
space:
mode:
Diffstat (limited to 'include/ntapi/bits/nt32/nt_atomic_i386_asm__gcc.h')
-rw-r--r--include/ntapi/bits/nt32/nt_atomic_i386_asm__gcc.h60
1 file changed, 15 insertions, 45 deletions
diff --git a/include/ntapi/bits/nt32/nt_atomic_i386_asm__gcc.h b/include/ntapi/bits/nt32/nt_atomic_i386_asm__gcc.h
index adf000f..002a62a 100644
--- a/include/ntapi/bits/nt32/nt_atomic_i386_asm__gcc.h
+++ b/include/ntapi/bits/nt32/nt_atomic_i386_asm__gcc.h
@@ -273,41 +273,33 @@ static __inline__ int64_t at_locked_cas_64(
}
-static __inline__ intptr_t at_locked_and(
+static __inline__ void at_locked_and(
intptr_t volatile * dst,
intptr_t mask)
{
- intptr_t ret;
-
__asm__(
"lock;"
"andl %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ int32_t at_locked_and_32(
+static __inline__ void at_locked_and_32(
int32_t volatile * dst,
int32_t mask)
{
- int32_t ret;
-
__asm__(
"lock;"
"andl %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ int64_t at_locked_and_64(
+static __inline__ void at_locked_and_64(
int64_t volatile * dst,
int64_t mask)
{
@@ -320,46 +312,36 @@ static __inline__ int64_t at_locked_and_64(
xchg = cmp & mask;
ret = at_locked_cas_64(dst,cmp,xchg);
} while (ret != cmp);
-
- return ret;
}
-static __inline__ intptr_t at_locked_or(
+static __inline__ void at_locked_or(
intptr_t volatile * dst,
intptr_t mask)
{
- intptr_t ret;
-
__asm__(
"lock;"
"orl %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ int32_t at_locked_or_32(
+static __inline__ void at_locked_or_32(
int32_t volatile * dst,
int32_t mask)
{
- int32_t ret;
-
__asm__(
"lock;"
"orl %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ int64_t at_locked_or_64(
+static __inline__ void at_locked_or_64(
int64_t volatile * dst,
int64_t mask)
{
@@ -372,46 +354,36 @@ static __inline__ int64_t at_locked_or_64(
xchg = cmp | mask;
ret = at_locked_cas_64(dst,cmp,xchg);
} while (ret != cmp);
-
- return ret;
}
-static __inline__ intptr_t at_locked_xor(
+static __inline__ void at_locked_xor(
intptr_t volatile * dst,
intptr_t mask)
{
- intptr_t ret;
-
__asm__(
"lock;"
"xorl %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ int32_t at_locked_xor_32(
+static __inline__ void at_locked_xor_32(
int32_t volatile * dst,
int32_t mask)
{
- int32_t ret;
-
__asm__(
"lock;"
"xorl %1, %0"
- : "=m" (*dst), "=a" (ret)
+ : "=m" (*dst)
: "r" (mask)
: "memory");
-
- return ret;
}
-static __inline__ int64_t at_locked_xor_64(
+static __inline__ void at_locked_xor_64(
int64_t volatile * dst,
int64_t mask)
{
@@ -424,8 +396,6 @@ static __inline__ int64_t at_locked_xor_64(
xchg = cmp ^ mask;
ret = at_locked_cas_64(dst,cmp,xchg);
} while (ret != cmp);
-
- return ret;
}