author     Milton Miller <miltonm@bga.com>                      2009-04-29 20:58:01 +0000
committer  Benjamin Herrenschmidt <benh@kernel.crashing.org>    2009-05-21 15:44:21 +1000
commit     af20aeb1a3292ae7ecfc492a4dc059e35465e016 (patch)
tree       8462db2d11f395468b20f43d882337ad0bea9683 /arch/powerpc/include/asm/feature-fixups.h
parent     powerpc: Move VSX load/stores into ppc-opcode.h (diff)
download   linux-dev-af20aeb1a3292ae7ecfc492a4dc059e35465e016.tar.xz
           linux-dev-af20aeb1a3292ae7ecfc492a4dc059e35465e016.zip
powerpc: Enable MMU feature sections for inline asm
This adds the ability to do MMU feature sections for inline asm.

Signed-off-by: Milton Miller <miltonm@bga.com>
Signed-off-by: Michael Neuling <mikey@neuling.org>
Signed-off-by: Benjamin Herrenschmidt <benh@kernel.crashing.org>
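As a rough illustration of the kind of use this enables (not taken from the patch; the function name example_sync() and the feature bit MMU_FTR_EXAMPLE below are made up), C code can now wrap two alternative instruction sequences in an MMU feature section and let the boot-time fixup code pick one, with the feature mask passed as an "i" operand:

    /* Hypothetical sketch; assumes <asm/feature-fixups.h> and <asm/mmu.h>
     * are in scope.  Keep the isync when the (made-up) MMU_FTR_EXAMPLE
     * bit is set; otherwise the fixup code patches in the "nop"
     * alternative.  Both alternatives are kept the same length so the
     * replacement fits over the default section.
     */
    static inline void example_sync(void)
    {
    	asm volatile(ASM_MMU_FTR_IFSET("isync", "nop", %0)
    		     : : "i" (MMU_FTR_EXAMPLE) : "memory");
    }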
Diffstat (limited to 'arch/powerpc/include/asm/feature-fixups.h')
-rw-r--r--  arch/powerpc/include/asm/feature-fixups.h  25
1 file changed, 20 insertions, 5 deletions
diff --git a/arch/powerpc/include/asm/feature-fixups.h b/arch/powerpc/include/asm/feature-fixups.h
index e4094a5cb05b..cbd4dfa4bce2 100644
--- a/arch/powerpc/include/asm/feature-fixups.h
+++ b/arch/powerpc/include/asm/feature-fixups.h
@@ -8,8 +8,6 @@
* 2 of the License, or (at your option) any later version.
*/
-#ifdef __ASSEMBLY__
-
/*
* Feature section common macros
*
@@ -23,10 +21,12 @@
/* 64 bits kernel, 32 bits code (ie. vdso32) */
#define FTR_ENTRY_LONG .llong
#define FTR_ENTRY_OFFSET .long 0xffffffff; .long
+#elif defined(CONFIG_PPC64)
+#define FTR_ENTRY_LONG .llong
+#define FTR_ENTRY_OFFSET .llong
#else
-/* 64 bit kernel 64 bit code, or 32 bit kernel 32 bit code */
-#define FTR_ENTRY_LONG PPC_LONG
-#define FTR_ENTRY_OFFSET PPC_LONG
+#define FTR_ENTRY_LONG .long
+#define FTR_ENTRY_OFFSET .long
#endif
#define START_FTR_SECTION(label) label##1:
@@ -141,6 +141,21 @@ label##5: \
#define ALT_FW_FTR_SECTION_END_IFCLR(msk) \
ALT_FW_FTR_SECTION_END_NESTED_IFCLR(msk, 97)
+#ifndef __ASSEMBLY__
+
+#define ASM_MMU_FTR_IF(section_if, section_else, msk, val) \
+ stringify_in_c(BEGIN_MMU_FTR_SECTION) \
+ section_if "; " \
+ stringify_in_c(MMU_FTR_SECTION_ELSE) \
+ section_else "; " \
+ stringify_in_c(ALT_MMU_FTR_SECTION_END((msk), (val)))
+
+#define ASM_MMU_FTR_IFSET(section_if, section_else, msk) \
+ ASM_MMU_FTR_IF(section_if, section_else, (msk), (msk))
+
+#define ASM_MMU_FTR_IFCLR(section_if, section_else, msk) \
+ ASM_MMU_FTR_IF(section_if, section_else, (msk), 0)
+
#endif /* __ASSEMBLY__ */
/* LWSYNC feature sections */