[PATCH] Fix x86_64 _spin_lock_irqsave()

From: Edward Falk
Date: Wed Aug 23 2006 - 22:54:25 EST


Add __raw_spin_lock_string_flags and __raw_spin_lock_flags() to asm-x86_64/spinlock.h so that _spin_lock_irqsave() has the same semantics on x86_64 as it does on i386: if the saved flags show that interrupts were enabled at irqsave time, the CPU does *not* sit with interrupts disabled while it is waiting for a contended lock.

This fix is courtesy of Michael Davidson (change 2012926 by md@md-test on 2006/01/23 18:28:12).

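For readers who don't want to decode the AT&T asm below, here is a minimal C sketch of the loop that __raw_spin_lock_string_flags implements. try_decb() is a hypothetical stand-in for the atomic "lock ; decb" (true means the byte stayed non-negative, i.e. we got the lock), and 0x200 is the IF bit in the caller's saved EFLAGS:

/*
 * Illustrative sketch only -- the real implementation is the inline
 * asm added by the patch.  try_decb() is a hypothetical helper for
 * "lock ; decb": atomically decrement the lock byte and return true
 * if the result is non-negative (i.e. the lock was acquired).
 */
static inline void sketch_spin_lock_flags(raw_spinlock_t *lock,
					  unsigned long flags)
{
	while (!try_decb(&lock->slock)) {	/* 1: lock ; decb / js 2f */
		if (flags & 0x200)		/* 2: IF set at irqsave time? */
			local_irq_enable();	/* sti */
		do
			cpu_relax();		/* 3: rep;nop */
		while ((signed char)lock->slock <= 0);	/* cmpb $0,%0 / jle 3b */
		local_irq_disable();		/* cli */
	}					/* jmp 1b: retry the decb */
}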

diff -uprN linux-2.6.17/include/asm-x86_64/spinlock.h 2012926/include/asm-x86_64/spinlock.h
--- linux-2.6.17/include/asm-x86_64/spinlock.h 2006-06-17 18:49:35.000000000 -0700
+++ 2012926/include/asm-x86_64/spinlock.h 2006-07-12 16:09:50.000000000 -0700
@@ -32,6 +32,23 @@
"jmp 1b\n" \
LOCK_SECTION_END

+#define __raw_spin_lock_string_flags \
+ "\n1:\t" \
+ "lock ; decb %0\n\t" \
+ "js 2f\n\t" \
+ LOCK_SECTION_START("") \
+ "2:\t" \
+ "test $0x200, %1\n\t" \
+ "jz 3f\n\t" \
+ "sti\n\t" \
+ "3:\t" \
+ "rep;nop\n\t" \
+ "cmpb $0, %0\n\t" \
+ "jle 3b\n\t" \
+ "cli\n\t" \
+ "jmp 1b\n" \
+ LOCK_SECTION_END
+
#define __raw_spin_unlock_string \
"movl $1,%0" \
:"=m" (lock->slock) : : "memory"
@@ -43,8 +60,6 @@ static inline void __raw_spin_lock(raw_s
:"=m" (lock->slock) : : "memory");
}

-#define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
-
static inline int __raw_spin_trylock(raw_spinlock_t *lock)
{
int oldval;
@@ -57,6 +72,13 @@ static inline int __raw_spin_trylock(raw
 	return oldval > 0;
 }
 
+static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
+{
+	__asm__ __volatile__(
+		__raw_spin_lock_string_flags
+		:"=m" (lock->slock) : "r" (flags) : "memory");
+}
+
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
 	__asm__ __volatile__(
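For context, the generic caller in kernel/spinlock.c saves and disables interrupts *before* taking the lock, then hands the saved flags down so the arch code can transiently re-enable them while it spins. Paraphrased from 2.6.17 (debug/preempt variants omitted; check the tree for the exact plumbing):

unsigned long __lockfunc _spin_lock_irqsave(spinlock_t *lock)
{
	unsigned long flags;

	local_irq_save(flags);		/* save IF state, disable irqs */
	preempt_disable();
	/* on SMP this ends up in the arch's __raw_spin_lock_flags() */
	_raw_spin_lock_flags(lock, &flags);
	return flags;
}

Without the arch hook added above, x86_64 fell back to plain __raw_spin_lock() and spun with interrupts disabled for the entire wait.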