
Merge branch 'upstream' of git://ftp.linux-mips.org/pub/scm/upstream-linus

* 'upstream' of git://ftp.linux-mips.org/pub/scm/upstream-linus:
  [MIPS] Terminally fix local_{dec,sub}_if_positive
  [MIPS] Type proof reimplementation of cmpxchg.
  [MIPS] pg-r4k.c: Fix a typo in an R4600 v2 erratum workaround
Linus Torvalds, 18 years ago
parent
commit c7659e2c13
4 files changed, 110 insertions(+), 329 deletions(-)
  1. arch/mips/mm/pg-r4k.c (+1 -1)
  2. include/asm-mips/cmpxchg.h (+107 -0)
  3. include/asm-mips/local.h (+1 -68)
  4. include/asm-mips/system.h (+1 -260)

+ 1 - 1
arch/mips/mm/pg-r4k.c

@@ -209,7 +209,7 @@ static inline void build_cdex_p(void)
 	}
 
 	if (R4600_V2_HIT_CACHEOP_WAR && cpu_is_r4600_v2_x())
-		build_insn_word(0x3c01a000);	/* lui     $at, 0xa000  */
+		build_insn_word(0x8c200000);	/* lw      $zero, ($at) */
 
 	mi.c_format.opcode     = cache_op;
 	mi.c_format.rs         = 4;		/* $a0 */
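The hunk above swaps a single emitted instruction word in the R4600 v2 cache-op erratum workaround: the old word encoded only the address-setup lui, while the replacement encodes the dummy load that the comment next to it describes. A quick standalone check of both encodings (illustrative user-space C, not kernel code; it assumes the standard MIPS I-type layout opcode[31:26], rs[25:21], rt[20:16], imm[15:0]):

#include <stdio.h>

/* Decode a MIPS I-type word and print a rough disassembly for the two
 * opcodes that appear in the hunk above (lui and lw). */
static void decode_itype(unsigned int insn)
{
	unsigned int op  = insn >> 26;          /* major opcode     */
	unsigned int rs  = (insn >> 21) & 0x1f; /* base register    */
	unsigned int rt  = (insn >> 16) & 0x1f; /* target register  */
	unsigned int imm = insn & 0xffff;       /* 16-bit immediate */

	if (op == 0x0f)                         /* lui rt, imm      */
		printf("0x%08x: lui $%u, 0x%04x\n", insn, rt, imm);
	else if (op == 0x23)                    /* lw rt, imm(rs)   */
		printf("0x%08x: lw  $%u, 0x%x($%u)\n", insn, rt, imm, rs);
	else
		printf("0x%08x: opcode 0x%02x not handled here\n", insn, op);
}

int main(void)
{
	decode_itype(0x3c01a000);	/* removed word: lui $1 ($at), 0xa000 */
	decode_itype(0x8c200000);	/* new word: lw $0 ($zero), 0($1)     */
	return 0;
}

This prints lui $1, 0xa000 for the removed word and lw $0, 0x0($1) for its replacement, matching the /* lw $zero, ($at) */ comment in the diff.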

+ 107 - 0
include/asm-mips/cmpxchg.h

@@ -0,0 +1,107 @@
+/*
+ * This file is subject to the terms and conditions of the GNU General Public
+ * License.  See the file "COPYING" in the main directory of this archive
+ * for more details.
+ *
+ * Copyright (C) 2003, 06, 07 by Ralf Baechle (ralf@linux-mips.org)
+ */
+#ifndef __ASM_CMPXCHG_H
+#define __ASM_CMPXCHG_H
+
+#include <linux/irqflags.h>
+
+#define __HAVE_ARCH_CMPXCHG 1
+
+#define __cmpxchg_asm(ld, st, m, old, new)				\
+({									\
+	__typeof(*(m)) __ret;						\
+									\
+	if (cpu_has_llsc && R10000_LLSC_WAR) {				\
+		__asm__ __volatile__(					\
+		"	.set	push				\n"	\
+		"	.set	noat				\n"	\
+		"	.set	mips3				\n"	\
+		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
+		"	bne	%0, %z3, 2f			\n"	\
+		"	.set	mips0				\n"	\
+		"	move	$1, %z4				\n"	\
+		"	.set	mips3				\n"	\
+		"	" st "	$1, %1				\n"	\
+		"	beqzl	$1, 1b				\n"	\
+		"2:						\n"	\
+		"	.set	pop				\n"	\
+		: "=&r" (__ret), "=R" (*m)				\
+		: "R" (*m), "Jr" (old), "Jr" (new)			\
+		: "memory");						\
+	} else if (cpu_has_llsc) {					\
+		__asm__ __volatile__(					\
+		"	.set	push				\n"	\
+		"	.set	noat				\n"	\
+		"	.set	mips3				\n"	\
+		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
+		"	bne	%0, %z3, 2f			\n"	\
+		"	.set	mips0				\n"	\
+		"	move	$1, %z4				\n"	\
+		"	.set	mips3				\n"	\
+		"	" st "	$1, %1				\n"	\
+		"	beqz	$1, 3f				\n"	\
+		"2:						\n"	\
+		"	.subsection 2				\n"	\
+		"3:	b	1b				\n"	\
+		"	.previous				\n"	\
+		"	.set	pop				\n"	\
+		: "=&r" (__ret), "=R" (*m)				\
+		: "R" (*m), "Jr" (old), "Jr" (new)			\
+		: "memory");						\
+	} else {							\
+		unsigned long __flags;					\
+									\
+		raw_local_irq_save(__flags);				\
+		__ret = *m;						\
+		if (__ret == old)					\
+			*m = new;					\
+		raw_local_irq_restore(__flags);				\
+	}								\
+									\
+	__ret;								\
+})
+
+/*
+ * This function doesn't exist, so you'll get a linker error
+ * if something tries to do an invalid cmpxchg().
+ */
+extern void __cmpxchg_called_with_bad_pointer(void);
+
+#define __cmpxchg(ptr,old,new,barrier)					\
+({									\
+	__typeof__(ptr) __ptr = (ptr);					\
+	__typeof__(*(ptr)) __old = (old);				\
+	__typeof__(*(ptr)) __new = (new);				\
+	__typeof__(*(ptr)) __res = 0;					\
+									\
+	barrier;							\
+									\
+	switch (sizeof(*(__ptr))) {					\
+	case 4:								\
+		__res = __cmpxchg_asm("ll", "sc", __ptr, __old, __new);	\
+		break;							\
+	case 8:								\
+		if (sizeof(long) == 8) {				\
+			__res = __cmpxchg_asm("lld", "scd", __ptr,	\
+					   __old, __new);		\
+			break;						\
+		}							\
+	default:							\
+		__cmpxchg_called_with_bad_pointer();			\
+		break;							\
+	}								\
+									\
+	barrier;							\
+									\
+	__res;								\
+})
+
+#define cmpxchg(ptr, old, new)		__cmpxchg(ptr, old, new, smp_llsc_mb())
+#define cmpxchg_local(ptr, old, new)	__cmpxchg(ptr, old, new,)
+
+#endif /* __ASM_CMPXCHG_H */
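The new header replaces the four open-coded compare-and-swap routines removed from system.h below with a single size-dispatching macro, exposed as cmpxchg() (with smp_llsc_mb() barriers) and cmpxchg_local() (without). Callers typically wrap it in a read/compute/compare-exchange retry loop. A user-space C11 analogue of that pattern, not kernel code: atomic_compare_exchange_weak stands in for cmpxchg(), and add_unless() is a made-up helper for illustration.

#include <stdatomic.h>
#include <stdio.h>

/* Add 'delta' to *v unless *v currently equals 'forbidden'.  The store only
 * happens if *v is still the value we read; otherwise we reread and retry,
 * which mirrors the old/new contract of __cmpxchg_asm above. */
static int add_unless(atomic_int *v, int delta, int forbidden)
{
	int old = atomic_load(v);

	while (old != forbidden) {
		/* on failure, 'old' is refreshed with the current value */
		if (atomic_compare_exchange_weak(v, &old, old + delta))
			return 1;	/* swapped in old + delta */
	}
	return 0;			/* hit 'forbidden', gave up */
}

int main(void)
{
	atomic_int counter = 3;

	add_unless(&counter, 1, 0);
	printf("counter = %d\n", atomic_load(&counter));	/* 4 */
	return 0;
}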

+ 1 - 68
include/asm-mips/local.h

@@ -4,6 +4,7 @@
 #include <linux/percpu.h>
 #include <linux/bitops.h>
 #include <asm/atomic.h>
+#include <asm/cmpxchg.h>
 #include <asm/war.h>
 
 typedef struct
@@ -114,68 +115,6 @@ static __inline__ long local_sub_return(long i, local_t * l)
 	return result;
 }
 
-/*
- * local_sub_if_positive - conditionally subtract integer from atomic variable
- * @i: integer value to subtract
- * @l: pointer of type local_t
- *
- * Atomically test @l and subtract @i if @l is greater or equal than @i.
- * The function returns the old value of @l minus @i.
- */
-static __inline__ long local_sub_if_positive(long i, local_t * l)
-{
-	unsigned long result;
-
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	mips3					\n"
-		"1:"	__LL	"%1, %2		# local_sub_if_positive\n"
-		"	dsubu	%0, %1, %3				\n"
-		"	bltz	%0, 1f					\n"
-			__SC	"%0, %2					\n"
-		"	.set	noreorder				\n"
-		"	beqzl	%0, 1b					\n"
-		"	 dsubu	%0, %1, %3				\n"
-		"	.set	reorder					\n"
-		"1:							\n"
-		"	.set	mips0					\n"
-		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
-		: "Ir" (i), "m" (l->a.counter)
-		: "memory");
-	} else if (cpu_has_llsc) {
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	mips3					\n"
-		"1:"	__LL	"%1, %2		# local_sub_if_positive\n"
-		"	dsubu	%0, %1, %3				\n"
-		"	bltz	%0, 1f					\n"
-			__SC	"%0, %2					\n"
-		"	.set	noreorder				\n"
-		"	beqz	%0, 1b					\n"
-		"	 dsubu	%0, %1, %3				\n"
-		"	.set	reorder					\n"
-		"1:							\n"
-		"	.set	mips0					\n"
-		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
-		: "Ir" (i), "m" (l->a.counter)
-		: "memory");
-	} else {
-		unsigned long flags;
-
-		local_irq_save(flags);
-		result = l->a.counter;
-		result -= i;
-		if (result >= 0)
-			l->a.counter = result;
-		local_irq_restore(flags);
-	}
-
-	return result;
-}
-
 #define local_cmpxchg(l, o, n) \
 	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))
 #define local_xchg(l, n) (xchg_local(&((l)->a.counter),(n)))
@@ -233,12 +172,6 @@ static __inline__ long local_sub_if_positive(long i, local_t * l)
  */
 #define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)
 
-/*
- * local_dec_if_positive - decrement by 1 if old value positive
- * @l: pointer of type local_t
- */
-#define local_dec_if_positive(l)	local_sub_if_positive(1, l)
-
 /*
  * local_add_negative - add and test if negative
  * @l: pointer of type local_t
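These hunks drop the MIPS-private local_sub_if_positive()/local_dec_if_positive() primitives together with their hand-written LL/SC loops, and pull the shared <asm/cmpxchg.h> into local.h instead. Should the same semantics ever be needed again, they can be built on a compare-exchange retry loop; the sketch below is a user-space C11 analogue of that idea (sub_if_positive() is a hypothetical helper, not something this commit adds).

#include <stdatomic.h>
#include <stdio.h>

/* Subtract 'i' from *l only if the result stays non-negative; always return
 * old - i, mirroring the documented behaviour of the removed
 * local_sub_if_positive(). */
static long sub_if_positive(atomic_long *l, long i)
{
	long old = atomic_load(l);
	long new;

	do {
		new = old - i;
		if (new < 0)		/* would go negative: do not store */
			return new;
	} while (!atomic_compare_exchange_weak(l, &old, new));

	return new;
}

int main(void)
{
	atomic_long v = 2;

	printf("%ld\n", sub_if_positive(&v, 1));	/* 1, stored      */
	printf("%ld\n", sub_if_positive(&v, 5));	/* -4, not stored */
	printf("%ld\n", atomic_load(&v));		/* still 1        */
	return 0;
}

A dec_if_positive equivalent would then simply be sub_if_positive(l, 1).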

+ 1 - 260
include/asm-mips/system.h

@@ -17,6 +17,7 @@
 
 #include <asm/addrspace.h>
 #include <asm/barrier.h>
+#include <asm/cmpxchg.h>
 #include <asm/cpu-features.h>
 #include <asm/dsp.h>
 #include <asm/war.h>
@@ -194,266 +195,6 @@ static inline unsigned long __xchg(unsigned long x, volatile void * ptr, int siz
 
 #define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
 
-#define __HAVE_ARCH_CMPXCHG 1
-
-static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
-	unsigned long new)
-{
-	__u32 retval;
-
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	noat					\n"
-		"	.set	mips3					\n"
-		"1:	ll	%0, %2			# __cmpxchg_u32	\n"
-		"	bne	%0, %z3, 2f				\n"
-		"	.set	mips0					\n"
-		"	move	$1, %z4					\n"
-		"	.set	mips3					\n"
-		"	sc	$1, %1					\n"
-		"	beqzl	$1, 1b					\n"
-		"2:							\n"
-		"	.set	pop					\n"
-		: "=&r" (retval), "=R" (*m)
-		: "R" (*m), "Jr" (old), "Jr" (new)
-		: "memory");
-	} else if (cpu_has_llsc) {
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	noat					\n"
-		"	.set	mips3					\n"
-		"1:	ll	%0, %2			# __cmpxchg_u32	\n"
-		"	bne	%0, %z3, 2f				\n"
-		"	.set	mips0					\n"
-		"	move	$1, %z4					\n"
-		"	.set	mips3					\n"
-		"	sc	$1, %1					\n"
-		"	beqz	$1, 3f					\n"
-		"2:							\n"
-		"	.subsection 2					\n"
-		"3:	b	1b					\n"
-		"	.previous					\n"
-		"	.set	pop					\n"
-		: "=&r" (retval), "=R" (*m)
-		: "R" (*m), "Jr" (old), "Jr" (new)
-		: "memory");
-	} else {
-		unsigned long flags;
-
-		raw_local_irq_save(flags);
-		retval = *m;
-		if (retval == old)
-			*m = new;
-		raw_local_irq_restore(flags);	/* implies memory barrier  */
-	}
-
-	smp_llsc_mb();
-
-	return retval;
-}
-
-static inline unsigned long __cmpxchg_u32_local(volatile int * m,
-	unsigned long old, unsigned long new)
-{
-	__u32 retval;
-
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	noat					\n"
-		"	.set	mips3					\n"
-		"1:	ll	%0, %2			# __cmpxchg_u32	\n"
-		"	bne	%0, %z3, 2f				\n"
-		"	.set	mips0					\n"
-		"	move	$1, %z4					\n"
-		"	.set	mips3					\n"
-		"	sc	$1, %1					\n"
-		"	beqzl	$1, 1b					\n"
-		"2:							\n"
-		"	.set	pop					\n"
-		: "=&r" (retval), "=R" (*m)
-		: "R" (*m), "Jr" (old), "Jr" (new)
-		: "memory");
-	} else if (cpu_has_llsc) {
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	noat					\n"
-		"	.set	mips3					\n"
-		"1:	ll	%0, %2			# __cmpxchg_u32	\n"
-		"	bne	%0, %z3, 2f				\n"
-		"	.set	mips0					\n"
-		"	move	$1, %z4					\n"
-		"	.set	mips3					\n"
-		"	sc	$1, %1					\n"
-		"	beqz	$1, 1b					\n"
-		"2:							\n"
-		"	.set	pop					\n"
-		: "=&r" (retval), "=R" (*m)
-		: "R" (*m), "Jr" (old), "Jr" (new)
-		: "memory");
-	} else {
-		unsigned long flags;
-
-		local_irq_save(flags);
-		retval = *m;
-		if (retval == old)
-			*m = new;
-		local_irq_restore(flags);	/* implies memory barrier  */
-	}
-
-	return retval;
-}
-
-#ifdef CONFIG_64BIT
-static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
-	unsigned long new)
-{
-	__u64 retval;
-
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	noat					\n"
-		"	.set	mips3					\n"
-		"1:	lld	%0, %2			# __cmpxchg_u64	\n"
-		"	bne	%0, %z3, 2f				\n"
-		"	move	$1, %z4					\n"
-		"	scd	$1, %1					\n"
-		"	beqzl	$1, 1b					\n"
-		"2:							\n"
-		"	.set	pop					\n"
-		: "=&r" (retval), "=R" (*m)
-		: "R" (*m), "Jr" (old), "Jr" (new)
-		: "memory");
-	} else if (cpu_has_llsc) {
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	noat					\n"
-		"	.set	mips3					\n"
-		"1:	lld	%0, %2			# __cmpxchg_u64	\n"
-		"	bne	%0, %z3, 2f				\n"
-		"	move	$1, %z4					\n"
-		"	scd	$1, %1					\n"
-		"	beqz	$1, 3f					\n"
-		"2:							\n"
-		"	.subsection 2					\n"
-		"3:	b	1b					\n"
-		"	.previous					\n"
-		"	.set	pop					\n"
-		: "=&r" (retval), "=R" (*m)
-		: "R" (*m), "Jr" (old), "Jr" (new)
-		: "memory");
-	} else {
-		unsigned long flags;
-
-		raw_local_irq_save(flags);
-		retval = *m;
-		if (retval == old)
-			*m = new;
-		raw_local_irq_restore(flags);	/* implies memory barrier  */
-	}
-
-	smp_llsc_mb();
-
-	return retval;
-}
-
-static inline unsigned long __cmpxchg_u64_local(volatile int * m,
-	unsigned long old, unsigned long new)
-{
-	__u64 retval;
-
-	if (cpu_has_llsc && R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	noat					\n"
-		"	.set	mips3					\n"
-		"1:	lld	%0, %2			# __cmpxchg_u64	\n"
-		"	bne	%0, %z3, 2f				\n"
-		"	move	$1, %z4					\n"
-		"	scd	$1, %1					\n"
-		"	beqzl	$1, 1b					\n"
-		"2:							\n"
-		"	.set	pop					\n"
-		: "=&r" (retval), "=R" (*m)
-		: "R" (*m), "Jr" (old), "Jr" (new)
-		: "memory");
-	} else if (cpu_has_llsc) {
-		__asm__ __volatile__(
-		"	.set	push					\n"
-		"	.set	noat					\n"
-		"	.set	mips3					\n"
-		"1:	lld	%0, %2			# __cmpxchg_u64	\n"
-		"	bne	%0, %z3, 2f				\n"
-		"	move	$1, %z4					\n"
-		"	scd	$1, %1					\n"
-		"	beqz	$1, 1b					\n"
-		"2:							\n"
-		"	.set	pop					\n"
-		: "=&r" (retval), "=R" (*m)
-		: "R" (*m), "Jr" (old), "Jr" (new)
-		: "memory");
-	} else {
-		unsigned long flags;
-
-		local_irq_save(flags);
-		retval = *m;
-		if (retval == old)
-			*m = new;
-		local_irq_restore(flags);	/* implies memory barrier  */
-	}
-
-	return retval;
-}
-
-#else
-extern unsigned long __cmpxchg_u64_unsupported_on_32bit_kernels(
-	volatile int * m, unsigned long old, unsigned long new);
-#define __cmpxchg_u64 __cmpxchg_u64_unsupported_on_32bit_kernels
-extern unsigned long __cmpxchg_u64_local_unsupported_on_32bit_kernels(
-	volatile int * m, unsigned long old, unsigned long new);
-#define __cmpxchg_u64_local __cmpxchg_u64_local_unsupported_on_32bit_kernels
-#endif
-
-/* This function doesn't exist, so you'll get a linker error
-   if something tries to do an invalid cmpxchg().  */
-extern void __cmpxchg_called_with_bad_pointer(void);
-
-static inline unsigned long __cmpxchg(volatile void * ptr, unsigned long old,
-	unsigned long new, int size)
-{
-	switch (size) {
-	case 4:
-		return __cmpxchg_u32(ptr, old, new);
-	case 8:
-		return __cmpxchg_u64(ptr, old, new);
-	}
-	__cmpxchg_called_with_bad_pointer();
-	return old;
-}
-
-static inline unsigned long __cmpxchg_local(volatile void * ptr,
-	unsigned long old, unsigned long new, int size)
-{
-	switch (size) {
-	case 4:
-		return __cmpxchg_u32_local(ptr, old, new);
-	case 8:
-		return __cmpxchg_u64_local(ptr, old, new);
-	}
-	__cmpxchg_called_with_bad_pointer();
-	return old;
-}
-
-#define cmpxchg(ptr,old,new) \
-	((__typeof__(*(ptr)))__cmpxchg((ptr), \
-		(unsigned long)(old), (unsigned long)(new),sizeof(*(ptr))))
-
-#define cmpxchg_local(ptr,old,new) \
-	((__typeof__(*(ptr)))__cmpxchg_local((ptr), \
-		(unsigned long)(old), (unsigned long)(new),sizeof(*(ptr))))
-
 extern void set_handler (unsigned long offset, void *addr, unsigned long len);
 extern void set_uncached_handler (unsigned long offset, void *addr, unsigned long len);
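With the open-coded __cmpxchg_u32/__cmpxchg_u64 helpers and their _local variants gone, system.h now just includes <asm/cmpxchg.h>, whose macro keeps the pointee type via __typeof__ instead of flattening old and new to unsigned long, which is presumably the point of the "type proof reimplementation" in the commit message. The difference can be illustrated with a small, non-atomic stand-in (GCC/Clang statement expressions and __typeof__; a sketch of the typing pattern only, not the kernel macro):

#include <stdio.h>

/* Old style: every value is squeezed through unsigned long, as in the
 * removed __cmpxchg() above, so the pointee type is invisible here. */
static unsigned long cmpxchg_untyped(volatile void *ptr, unsigned long old,
				     unsigned long new)
{
	unsigned int *p = (unsigned int *)ptr;	/* pretend sizeof == 4 */
	unsigned long ret = *p;

	if (ret == old)
		*p = (unsigned int)new;		/* silent truncation point */
	return ret;
}

/* New style: a non-atomic stand-in that preserves __typeof__(*(ptr)) end to
 * end, the way the new __cmpxchg() macro does. */
#define cmpxchg_typed(ptr, o, n)				\
({								\
	__typeof__(*(ptr)) __old = (o);				\
	__typeof__(*(ptr)) __new = (n);				\
	__typeof__(*(ptr)) __ret = *(ptr);			\
								\
	if (__ret == __old)					\
		*(ptr) = __new;					\
	__ret;							\
})

int main(void)
{
	unsigned int word = 0x12345678;

	cmpxchg_untyped(&word, 0x12345678, 0xdeadbeef);
	printf("untyped: 0x%08x\n", word);	/* 0xdeadbeef */

	cmpxchg_typed(&word, 0xdeadbeefu, 0xcafef00du);
	printf("typed:   0x%08x\n", word);	/* 0xcafef00d */
	return 0;
}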