Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | #ifndef _ASM_M32R_SMP_H |
2 | #define _ASM_M32R_SMP_H | |
3 | ||
1da177e4 LT |
4 | #ifdef CONFIG_SMP |
5 | #ifndef __ASSEMBLY__ | |
6 | ||
7 | #include <linux/cpumask.h> | |
8 | #include <linux/spinlock.h> | |
9 | #include <linux/threads.h> | |
10 | #include <asm/m32r.h> | |
11 | ||
/*
 * Bitmask of *physical* CPU ids (as opposed to kernel logical CPU
 * numbers).  Apparently modelled on the x86 APIC physid_mask code —
 * TODO confirm.  One unsigned long is enough for every physical id
 * this port supports.
 */
#define PHYSID_ARRAY_SIZE	1

struct physid_mask
{
	unsigned long mask[PHYSID_ARRAY_SIZE];
};

typedef struct physid_mask physid_mask_t;
20 | ||
/* Single-bit operations on a physid_mask_t (use the kernel's bitops). */
#define physid_set(physid, map)			set_bit(physid, (map).mask)
#define physid_clear(physid, map)		clear_bit(physid, (map).mask)
#define physid_isset(physid, map)		test_bit(physid, (map).mask)
#define physid_test_and_set(physid, map)	test_and_set_bit(physid, (map).mask)

/*
 * Whole-mask operations, implemented on top of the generic bitmap API.
 *
 * NOTE(review): MAX_APICS is referenced here but is not defined in this
 * header, and nothing visible shows <asm/m32r.h> providing it — these
 * macros will fail to compile if ever expanded.  TODO confirm where
 * MAX_APICS is supposed to come from (it looks copied from x86).
 */
#define physids_and(dst, src1, src2)	bitmap_and((dst).mask, (src1).mask, (src2).mask, MAX_APICS)
#define physids_or(dst, src1, src2)	bitmap_or((dst).mask, (src1).mask, (src2).mask, MAX_APICS)
#define physids_clear(map)		bitmap_zero((map).mask, MAX_APICS)
#define physids_complement(dst, src)	bitmap_complement((dst).mask,(src).mask, MAX_APICS)
#define physids_empty(map)		bitmap_empty((map).mask, MAX_APICS)
#define physids_equal(map1, map2)	bitmap_equal((map1).mask, (map2).mask, MAX_APICS)
#define physids_weight(map)		bitmap_weight((map).mask, MAX_APICS)
#define physids_shift_right(d, s, n)	bitmap_shift_right((d).mask, (s).mask, n, MAX_APICS)
#define physids_shift_left(d, s, n)	bitmap_shift_left((d).mask, (s).mask, n, MAX_APICS)

/* Collapse a mask to its first word (valid since PHYSID_ARRAY_SIZE == 1). */
#define physids_coerce(map)		((map).mask[0])

/*
 * Build a physid_mask_t from a raw word of bits.  GCC statement
 * expression; PHYSID_MASK_NONE is defined further down, which is fine
 * because macro bodies are only expanded at the point of use.
 */
#define physids_promote(physids)					\
	({								\
		physid_mask_t __physid_mask = PHYSID_MASK_NONE;		\
		__physid_mask.mask[0] = physids;			\
		__physid_mask;						\
	})

/* Build a physid_mask_t with exactly one physical id set. */
#define physid_mask_of_physid(physid)					\
	({								\
		physid_mask_t __physid_mask = PHYSID_MASK_NONE;		\
		physid_set(physid, __physid_mask);			\
		__physid_mask;						\
	})

/* All-ones / all-zeroes initializers for a physid_mask_t. */
#define PHYSID_MASK_ALL		{ {[0 ... PHYSID_ARRAY_SIZE-1] = ~0UL} }
#define PHYSID_MASK_NONE	{ {[0 ... PHYSID_ARRAY_SIZE-1] = 0UL} }

/* Mask of physical CPU ids present in the system; defined in arch code. */
extern physid_mask_t phys_cpu_present_map;
55 | ||
/*
 * Some lowlevel functions might want to know about
 * the real CPU ID <-> CPU # mapping.
 */
/* Per-logical-CPU physical id table; presumably filled at boot — TODO confirm. */
extern volatile int cpu_2_physid[NR_CPUS];
/* Logical CPU number -> physical CPU id (no bounds check on cpu_id). */
#define cpu_to_physid(cpu_id)	cpu_2_physid[cpu_id]

/* Kernel CPU number is cached in thread_info, so no hardware access needed. */
#define raw_smp_processor_id()	(current_thread_info()->cpu)

/* SMP bring-up state masks; defined in arch SMP boot code. */
extern cpumask_t cpu_callout_map;
extern cpumask_t cpu_possible_map;
extern cpumask_t cpu_present_map;
68 | |
69 | static __inline__ int hard_smp_processor_id(void) | |
70 | { | |
71 | return (int)*(volatile long *)M32R_CPUID_PORTL; | |
72 | } | |
73 | ||
/*
 * Map a kernel CPU number to its logical CPU number.  On M32R the two
 * numbering schemes coincide, so this is the identity mapping.
 */
static __inline__ int cpu_logical_map(int cpu)
{
	int logical = cpu;

	return logical;
}
78 | ||
/*
 * Map a logical CPU number back to the kernel CPU number.  Identity on
 * M32R — the inverse of cpu_logical_map() above.
 */
static __inline__ int cpu_number_map(int cpu)
{
	int number = cpu;

	return number;
}
83 | ||
/*
 * Number of CPUs that have been "called out" to boot, i.e. the
 * population count of cpu_callout_map.
 */
static __inline__ unsigned int num_booting_cpus(void)
{
	return cpus_weight(cpu_callout_map);
}
88 | ||
/* Broadcast the timer tick to the other CPUs. */
extern void smp_send_timer(void);
/* Send an IPI to the CPUs in the mask, addressed by physical id;
 * the two int arguments' meanings are not visible here — see the
 * definition in arch SMP code. */
extern unsigned long send_IPI_mask_phys(cpumask_t, int, int);

/* Cross-CPU function-call IPIs used by the generic smp_call_function code. */
extern void arch_send_call_function_single_ipi(int cpu);
extern void arch_send_call_function_ipi(cpumask_t mask);
94 | ||
1da177e4 LT |
#endif	/* not __ASSEMBLY__ */

#define NO_PROC_ID (0xff)	/* No processor magic marker */

#define PROC_CHANGE_PENALTY	(15)	/* Schedule penalty */

/*
 * M32R-mp IPI
 *
 * Each IPI number is expressed as an offset from the first IPI
 * interrupt line (M32R_IRQ_IPIn - M32R_IRQ_IPI0 == n), so the values
 * stay correct regardless of the absolute IRQ base.
 */
#define RESCHEDULE_IPI		(M32R_IRQ_IPI0-M32R_IRQ_IPI0)
#define INVALIDATE_TLB_IPI	(M32R_IRQ_IPI1-M32R_IRQ_IPI0)
#define CALL_FUNCTION_IPI	(M32R_IRQ_IPI2-M32R_IRQ_IPI0)
#define LOCAL_TIMER_IPI		(M32R_IRQ_IPI3-M32R_IRQ_IPI0)
#define INVALIDATE_CACHE_IPI	(M32R_IRQ_IPI4-M32R_IRQ_IPI0)
#define CPU_BOOT_IPI		(M32R_IRQ_IPI5-M32R_IRQ_IPI0)
#define CALL_FUNC_SINGLE_IPI	(M32R_IRQ_IPI6-M32R_IRQ_IPI0)

#define IPI_SHIFT	(0)
/* 8 IPI slots total; only 0..6 are assigned above — IPI7 presumably
 * reserved, TODO confirm. */
#define NR_IPIS	(8)

#else	/* CONFIG_SMP */

/* Uniprocessor build: the only CPU always has physical id 0. */
#define hard_smp_processor_id()	0

#endif	/* CONFIG_SMP */
1da177e4 LT |
120 | |
121 | #endif /* _ASM_M32R_SMP_H */ |