1 #ifndef _SPARC64_TLBFLUSH_H
2 #define _SPARC64_TLBFLUSH_H
4 #include <asm/mmu_context.h>
/* TSB flush operations. */

#define TLB_BATCH_NR	192

/*
 * Per-CPU batch of pending user TLB flushes.  Addresses are queued
 * here while in lazy MMU mode and drained by flush_tlb_pending().
 *
 * NOTE(review): the struct declaration was truncated in this copy and
 * only the vaddrs[] member survived; the remaining members are
 * restored from the upstream sparc64 header — confirm against
 * arch/sparc/include/asm/tlbflush_64.h before merging.
 */
struct tlb_batch {
	struct mm_struct *mm;		/* address space the batch belongs to */
	unsigned long tlb_nr;		/* number of queued vaddrs */
	unsigned long active;		/* batching currently enabled */
	unsigned long vaddrs[TLB_BATCH_NR];
};
/* Flush TSB entries covering a kernel virtual address range. */
extern void flush_tsb_kernel_range(unsigned long start, unsigned long end);
/* Flush TSB entries for every address queued in a user flush batch. */
extern void flush_tsb_user(struct tlb_batch *tb);
/* Flush the TSB entry for a single user virtual address. */
extern void flush_tsb_user_page(struct mm_struct *mm, unsigned long vaddr);
/* TLB flush operations. */

/*
 * These three generic hooks are intentionally empty on sparc64: user
 * TLB invalidations are gathered into the per-CPU tlb_batch during
 * the lazy MMU window and issued later by flush_tlb_pending().
 *
 * NOTE(review): bodies and part of flush_tlb_page()'s parameter list
 * were truncated in this copy; restored as the empty stubs from the
 * upstream header.
 */
static inline void flush_tlb_mm(struct mm_struct *mm)
{
}

static inline void flush_tlb_page(struct vm_area_struct *vma,
				  unsigned long vaddr)
{
}

static inline void flush_tlb_range(struct vm_area_struct *vma,
				   unsigned long start, unsigned long end)
{
}
#define __HAVE_ARCH_ENTER_LAZY_MMU_MODE

/* Issue all user TLB flushes queued in the current CPU's tlb_batch. */
extern void flush_tlb_pending(void);
extern void arch_enter_lazy_mmu_mode(void);
extern void arch_leave_lazy_mmu_mode(void);
/* Nothing to do mid-window: flushes drain on leaving lazy MMU mode. */
#define arch_flush_lazy_mmu_mode()	do {} while (0)
45 extern void __flush_tlb_all(void);
46 extern void __flush_tlb_page(unsigned long context, unsigned long vaddr);
47 extern void __flush_tlb_kernel_range(unsigned long start, unsigned long end);
51 #define flush_tlb_kernel_range(start,end) \
52 do { flush_tsb_kernel_range(start,end); \
53 __flush_tlb_kernel_range(start,end); \
56 static inline void global_flush_tlb_page(struct mm_struct *mm, unsigned long vaddr)
58 __flush_tlb_page(CTX_HWBITS(mm->context), vaddr);
61 #else /* CONFIG_SMP */
extern void smp_flush_tlb_kernel_range(unsigned long start, unsigned long end);
extern void smp_flush_tlb_page(struct mm_struct *mm, unsigned long vaddr);

/*
 * SMP: flush the TSB locally, then broadcast the hardware TLB flush
 * to all CPUs.  (The do/while terminator was truncated in this copy
 * and is restored here.)
 */
#define flush_tlb_kernel_range(start, end) \
do {	flush_tsb_kernel_range(start,end); \
	smp_flush_tlb_kernel_range(start, end); \
} while (0)

/* SMP: page flush must be visible on every CPU running this mm. */
#define global_flush_tlb_page(mm, vaddr) \
	smp_flush_tlb_page(mm, vaddr)
74 #endif /* ! CONFIG_SMP */
76 #endif /* _SPARC64_TLBFLUSH_H */