]>
Commit | Line | Data |
---|---|---|
e6d11017 JR |
1 | Use specific machine level instructions for mb() for new |
2 | processors (P3,P4,Athlon). | |
3 | Author: Zwane Mwaikambo <zwane@linux.realnet.co.sz> | |
4 | ||
5 | --- linux-2.5.19/arch/i386/config.in.orig Mon Jun 3 10:33:18 2002 | |
6 | +++ linux-2.5.19/arch/i386/config.in Mon Jun 10 08:12:02 2002 | |
7 | @@ -111,6 +111,7 @@ | |
8 | define_bool CONFIG_X86_PGE y | |
9 | define_bool CONFIG_X86_USE_PPRO_CHECKSUM y | |
10 | define_bool CONFIG_X86_F00F_WORKS_OK y | |
11 | + define_bool CONFIG_X86_SFENCE y | |
12 | fi | |
13 | if [ "$CONFIG_MPENTIUM4" = "y" ]; then | |
14 | define_int CONFIG_X86_L1_CACHE_SHIFT 7 | |
15 | @@ -119,6 +120,9 @@ | |
16 | define_bool CONFIG_X86_PGE y | |
17 | define_bool CONFIG_X86_USE_PPRO_CHECKSUM y | |
18 | define_bool CONFIG_X86_F00F_WORKS_OK y | |
19 | + define_bool CONFIG_X86_SFENCE y | |
20 | + define_bool CONFIG_X86_LFENCE y | |
21 | + define_bool CONFIG_X86_MFENCE y | |
22 | fi | |
23 | if [ "$CONFIG_MK6" = "y" ]; then | |
24 | define_int CONFIG_X86_L1_CACHE_SHIFT 5 | |
25 | @@ -134,6 +138,7 @@ | |
26 | define_bool CONFIG_X86_PGE y | |
27 | define_bool CONFIG_X86_USE_PPRO_CHECKSUM y | |
28 | define_bool CONFIG_X86_F00F_WORKS_OK y | |
29 | + define_bool CONFIG_X86_SFENCE y | |
30 | fi | |
31 | if [ "$CONFIG_MELAN" = "y" ]; then | |
32 | define_int CONFIG_X86_L1_CACHE_SHIFT 4 | |
33 | --- linux-2.5.19/include/asm-i386/system.h.orig Mon Jun 10 08:10:55 2002 | |
34 | +++ linux-2.5.19/include/asm-i386/system.h Mon Jun 10 08:11:04 2002 | |
35 | @@ -290,16 +290,33 @@ | |
36 | * | |
37 | * Some non intel clones support out of order store. wmb() ceases to be a | |
38 | * nop for these. | |
39 | + * | |
40 | + * Pentium III introduced the SFENCE instruction for serialising all store | |
41 | + * operations, Pentium IV further introduced LFENCE and MFENCE for load and | |
42 | + * memory barriers respectively. | |
43 | */ | |
44 | - | |
45 | + | |
46 | +#ifdef CONFIG_X86_MFENCE | |
47 | +#define mb() __asm__ __volatile__ ("mfence": : :"memory") | |
48 | +#else | |
49 | #define mb() __asm__ __volatile__ ("lock; addl $0,0(%%esp)": : :"memory") | |
50 | +#endif | |
51 | + | |
52 | +#ifdef CONFIG_X86_LFENCE | |
53 | +#define rmb() __asm__ __volatile__ ("lfence": : :"memory") | |
54 | +#else | |
55 | #define rmb() mb() | |
56 | +#endif | |
57 | ||
58 | +#ifdef CONFIG_X86_SFENCE | |
59 | +#define wmb() __asm__ __volatile__ ("sfence": : :"memory") | |
60 | +#else | |
61 | #ifdef CONFIG_X86_OOSTORE | |
62 | #define wmb() __asm__ __volatile__ ("lock; addl $0,0(%%esp)": : :"memory") | |
63 | #else | |
64 | #define wmb() __asm__ __volatile__ ("": : :"memory") | |
65 | #endif | |
66 | +#endif /* CONFIG_X86_SFENCE */ | |
67 | ||
68 | #ifdef CONFIG_SMP | |
69 | #define smp_mb() mb() |