author     Andy Whitcroft <apw@shadowen.org>                       2007-02-10 01:43:14 -0800
committer  Linus Torvalds <torvalds@woody.linux-foundation.org>    2007-02-11 10:51:19 -0800
commit     bd8029b66069d29fd02c304599411ca9bb7fa38c (patch)
tree       2b4088813f060afc40c00ac668c2c3d183e156ad    /include/linux/mm.h
parent     [PATCH] Set CONFIG_ZONE_DMA for arches with GENERIC_ISA_DMA (diff)
[PATCH] zoneid: fix up calculations for ZONEID_PGSHIFT
Currently, if we have a non-zero ZONES_SHIFT we assume we can rely on it as the bottom edge of the ZONEID; if not, we use NODES_PGOFF as the right-hand end of either NODES _or_ SECTION.  The latter is more luck than judgement and would be incorrect if we reordered the SECTION,NODE,ZONE options in the fields space.  What we really want is the lower of the right-hand ends of the two fields we are using (either NODE,ZONE or SECTION,ZONE).  Codify that explicitly.  As always, allow for there being no bits in either of the fields, such as might be the case on a non-NUMA machine with only ZONE_NORMAL.

I have checked that the compiler is still able to constant-fold all of this away correctly.

Signed-off-by: Andy Whitcroft <apw@shadowen.org>
Acked-by: Christoph Lameter <clameter@sgi.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
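For illustration, here is a minimal standalone sketch of the new arithmetic, assuming a 64-bit page->flags word, the NODE,ZONE layout (NODE_NOT_IN_PAGEFLAGS undefined), and hypothetical example widths; FLAGS_BITS, SECTIONS_WIDTH, NODES_WIDTH and ZONES_WIDTH are made-up values, and the widths stand in for the kernel's *_SHIFT configuration constants.

#include <assert.h>
#include <stdio.h>

#define FLAGS_BITS	64	/* assumed width of page->flags */
#define SECTIONS_WIDTH	0	/* hypothetical: no section bits in flags */
#define NODES_WIDTH	6	/* hypothetical node id width */
#define ZONES_WIDTH	2	/* hypothetical zone index width */

/* Fields are packed at the top of flags: | SECTION | NODE | ZONE | ... | */
#define SECTIONS_PGOFF	(FLAGS_BITS - SECTIONS_WIDTH)
#define NODES_PGOFF	(SECTIONS_PGOFF - NODES_WIDTH)
#define ZONES_PGOFF	(NODES_PGOFF - ZONES_WIDTH)

/* The NODE,ZONE variant of the patch, with widths standing in for shifts. */
#define ZONEID_SHIFT	(NODES_WIDTH + ZONES_WIDTH)
#define ZONEID_PGOFF	((NODES_PGOFF < ZONES_PGOFF) ? NODES_PGOFF : ZONES_PGOFF)
#define ZONEID_PGSHIFT	(ZONEID_PGOFF * (ZONEID_SHIFT != 0))
#define ZONEID_MASK	((1UL << ZONEID_SHIFT) - 1)

int main(void)
{
	unsigned long flags = 0;
	unsigned long node = 3, zone = 1;

	/* Pack the two fields the way the kernel packs page->flags. */
	flags |= node << NODES_PGOFF;
	flags |= zone << ZONES_PGOFF;

	/* page_zone_id() equivalent: read NODE and ZONE in one go. */
	unsigned long id = (flags >> ZONEID_PGSHIFT) & ZONEID_MASK;

	/*
	 * With ZONES_WIDTH > 0, ZONEID_PGOFF is ZONES_PGOFF, the lower
	 * right-hand edge of the two fields, so the id is NODE:ZONE.
	 */
	assert(id == ((node << ZONES_WIDTH) | zone));
	printf("ZONEID_PGSHIFT=%d id=%#lx\n", ZONEID_PGSHIFT, id);

	/*
	 * If both widths were zero, ZONEID_SHIFT would be 0, making both
	 * ZONEID_PGSHIFT and ZONEID_MASK fold to 0 at compile time.
	 */
	return 0;
}

Taking the minimum of the two PGOFF values means the result stays the right-hand edge of the combined field even if the SECTION, NODE and ZONE fields were reordered, which is the point of the change.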
Diffstat (limited to 'include/linux/mm.h')
-rw-r--r--    include/linux/mm.h    11
1 file changed, 5 insertions(+), 6 deletions(-)
diff --git a/include/linux/mm.h b/include/linux/mm.h
index bb793a4c8e9e..26adfcc0d61a 100644
--- a/include/linux/mm.h
+++ b/include/linux/mm.h
@@ -437,15 +437,15 @@ static inline compound_page_dtor *get_compound_page_dtor(struct page *page)
/* NODE:ZONE or SECTION:ZONE is used to ID a zone for the buddy allcator */
#ifdef NODE_NOT_IN_PAGEFLAGS
#define ZONEID_SHIFT (SECTIONS_SHIFT + ZONES_SHIFT)
+#define ZONEID_PGOFF ((SECTIONS_PGOFF < ZONES_PGOFF)? \
+ SECTIONS_PGOFF : ZONES_PGOFF)
#else
#define ZONEID_SHIFT (NODES_SHIFT + ZONES_SHIFT)
+#define ZONEID_PGOFF ((NODES_PGOFF < ZONES_PGOFF)? \
+ NODES_PGOFF : ZONES_PGOFF)
#endif
-#if ZONES_WIDTH > 0
-#define ZONEID_PGSHIFT ZONES_PGSHIFT
-#else
-#define ZONEID_PGSHIFT NODES_PGOFF
-#endif
+#define ZONEID_PGSHIFT (ZONEID_PGOFF * (ZONEID_SHIFT != 0))
#if SECTIONS_WIDTH+NODES_WIDTH+ZONES_WIDTH > FLAGS_RESERVED
#error SECTIONS_WIDTH+NODES_WIDTH+ZONES_WIDTH > FLAGS_RESERVED
@@ -471,7 +471,6 @@ static inline enum zone_type page_zonenum(struct page *page)
*/
static inline int page_zone_id(struct page *page)
{
- BUILD_BUG_ON(ZONEID_PGSHIFT == 0 && ZONEID_MASK);
return (page->flags >> ZONEID_PGSHIFT) & ZONEID_MASK;
}