MIPS: Make local_irq_disable macro safe for non-MIPSr2

For non-MIPSr2 processors, the local_irq_disable macro contains an mfc0/mtc0
pair with instructions in between.  With preemption enabled, this sequence
may get preempted and end up writing a stale value of CP0_STATUS back with
the mtc0 instruction.  This commit avoids that scenario by incrementing
the preempt count before the mfc0 and decrementing it after the mtc0.
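
For illustration, a minimal C-level sketch of the same idea, in the spirit
of what e97c5b6098 already did for the inline functions in <asm/irqflags.h>.
The function name is made up for the example, and a plain preempt_disable()/
preempt_enable() pair stands in for the raw TI_PRE_COUNT adjustment that the
assembler macro below performs:

  #include <linux/preempt.h>
  #include <asm/mipsregs.h>
  #include <asm/hazards.h>

  /* Illustrative only; not the actual kernel implementation. */
  static inline void example_local_irq_disable(void)
  {
  	unsigned long status;

  	preempt_disable();		/* don't migrate between mfc0 and mtc0 */

  	status = read_c0_status();	/* mfc0	\reg, CP0_STATUS	*/
  	status &= ~ST0_IE;		/* ori/xori clear the IE bit	*/
  	write_c0_status(status);	/* mtc0	\reg, CP0_STATUS	*/
  	irq_disable_hazard();		/* settle the CP0 write hazard	*/

  	preempt_enable();		/* the asm macro only decrements
  					   TI_PRE_COUNT, without the
  					   reschedule check		*/
  }

If preemption struck between the read and the write without the count held,
another task could change CP0_STATUS and the write-back would clobber it
with the stale copy.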

[ralf@linux-mips.org: This patch is sorting out the part that were missed
by e97c5b6098 [MIPS: Make irqflags.h functions preempt-safe for non-mipsr2
cpus.]  I also re-enabled the inclusion of <asm/asm-offsets.h> at the top
of <asm/asmmacro.h>].

Signed-off-by: Jim Quinlan <jim2101024@gmail.com>
Cc: linux-mips@linux-mips.org
Cc: cernekee@gmail.com
Patchwork: https://patchwork.linux-mips.org/patch/6164/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
diff --git a/arch/mips/include/asm/asmmacro.h b/arch/mips/include/asm/asmmacro.h
index 69a9a22..4225e99 100644
--- a/arch/mips/include/asm/asmmacro.h
+++ b/arch/mips/include/asm/asmmacro.h
@@ -9,6 +9,7 @@
 #define _ASM_ASMMACRO_H
 
 #include <asm/hazards.h>
+#include <asm/asm-offsets.h>
 
 #ifdef CONFIG_32BIT
 #include <asm/asmmacro-32.h>
@@ -54,11 +55,21 @@
 	.endm
 
 	.macro	local_irq_disable reg=t0
+#ifdef CONFIG_PREEMPT
+	lw      \reg, TI_PRE_COUNT($28)
+	addi    \reg, \reg, 1
+	sw      \reg, TI_PRE_COUNT($28)
+#endif
 	mfc0	\reg, CP0_STATUS
 	ori	\reg, \reg, 1
 	xori	\reg, \reg, 1
 	mtc0	\reg, CP0_STATUS
 	irq_disable_hazard
+#ifdef CONFIG_PREEMPT
+	lw      \reg, TI_PRE_COUNT($28)
+	addi    \reg, \reg, -1
+	sw      \reg, TI_PRE_COUNT($28)
+#endif
 	.endm
 #endif /* CONFIG_MIPS_MT_SMTC */