|
@@ -8,7 +8,6 @@
|
|
|
#include <linux/spinlock.h>
|
|
|
#include <asm/mmu.h>
|
|
|
#include <asm/cputable.h>
|
|
|
-#include <asm-generic/mm_hooks.h>
|
|
|
#include <asm/cputhreads.h>
|
|
|
|
|
|
/*
|
|
@@ -127,5 +126,27 @@ static inline void enter_lazy_tlb(struct mm_struct *mm,
|
|
|
#endif
|
|
|
}
|
|
|
|
|
|
+/*
+ * Hook invoked when an mm is duplicated (fork).  powerpc needs no
+ * architecture-specific work here; the stub replaces the generic
+ * asm-generic/mm_hooks.h version removed above.
+ */
+static inline void arch_dup_mmap(struct mm_struct *oldmm,
|
|
|
+				 struct mm_struct *mm)
|
|
|
+{
|
|
|
+}
|
|
|
+
|
|
|
+/* Hook invoked when an mm is torn down; nothing to do on powerpc. */
+static inline void arch_exit_mmap(struct mm_struct *mm)
|
|
|
+{
|
|
|
+}
|
|
|
+
|
|
|
+/*
+ * Hook invoked on munmap.  If the VDSO base address falls inside the
+ * half-open range [start, end) being unmapped, forget it so the kernel
+ * no longer treats that (now stale) address as the VDSO.
+ */
+static inline void arch_unmap(struct mm_struct *mm,
|
|
|
+			      struct vm_area_struct *vma,
|
|
|
+			      unsigned long start, unsigned long end)
|
|
|
+{
|
|
|
+	if (start <= mm->context.vdso_base && mm->context.vdso_base < end)
|
|
|
+		mm->context.vdso_base = 0;
|
|
|
+}
|
|
|
+
|
|
|
+/* Hook invoked when a new mm is set up for exec; nothing to do on powerpc. */
+static inline void arch_bprm_mm_init(struct mm_struct *mm,
|
|
|
+				     struct vm_area_struct *vma)
|
|
|
+{
|
|
|
+}
|
|
|
+
|
|
|
#endif /* __KERNEL__ */
|
|
|
#endif /* __ASM_POWERPC_MMU_CONTEXT_H */
|