This patch removes TLB related codes in nommu mode. Signed-off-by: Hyok S. Choi <hyok.choi@samsung.com> Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
@@ -19,6 +19,14 @@
#include <asm/cacheflush.h>
#include <asm/tlbflush.h>
+
+#ifndef CONFIG_MMU
+#include <linux/pagemap.h>
+#include <asm-generic/tlb.h>
+#else /* !CONFIG_MMU */
#include <asm/pgalloc.h>
/*
@@ -82,4 +90,5 @@ tlb_end_vma(struct mmu_gather *tlb, struct vm_area_struct *vma)
#define tlb_migrate_finish(mm) do { } while (0)
+#endif /* CONFIG_MMU */
#endif
@@ -11,6 +11,13 @@
#define _ASMARM_TLBFLUSH_H
#include <linux/config.h>
+#define tlb_flush(tlb) ((void) tlb)
+#else /* CONFIG_MMU */
#include <asm/glue.h>
#define TLB_V3_PAGE (1 << 0)
@@ -423,4 +430,6 @@ extern void update_mmu_cache(struct vm_area_struct *vma, unsigned long addr, pte