Force inlining of some functions not inlined by gcc3 due to size.

Author: Denis Vlasenko

diff -urN linux-2.5.45.orig/include/linux/compiler.h linux-2.5.45fix/include/linux/compiler.h
--- linux-2.5.45.orig/include/linux/compiler.h	Wed Oct 30 22:43:05 2002
+++ linux-2.5.45fix/include/linux/compiler.h	Sun Nov  3 15:19:20 2002
@@ -13,4 +13,12 @@
 #define likely(x)	__builtin_expect((x),1)
 #define unlikely(x)	__builtin_expect((x),0)
 
+/* GCC 3 (and probably earlier, I'm not sure) can be told to always inline
+   a function. */
+#if __GNUC__ < 3
+#define force_inline inline
+#else
+#define force_inline inline __attribute__ ((always_inline))
+#endif
+
 #endif /* __LINUX_COMPILER_H */
diff -urN linux-2.5.45.orig/include/asm-i386/string.h linux-2.5.45fix/include/asm-i386/string.h
--- linux-2.5.45.orig/include/asm-i386/string.h	Wed Oct 30 22:43:46 2002
+++ linux-2.5.45fix/include/asm-i386/string.h	Sun Nov  3 15:58:08 2002
@@ -3,6 +3,7 @@
 
 #ifdef __KERNEL__
 #include <linux/config.h>
+#include <linux/compiler.h>
 /*
  * On a 486 or Pentium, we are better off not using the
  * byte string operations. But on a 386 or a PPro the
@@ -218,7 +219,7 @@
  * This looks horribly ugly, but the compiler can optimize it totally,
  * as the count is constant.
  */
-static inline void * __constant_memcpy(void * to, const void * from, size_t n)
+static force_inline void * __constant_memcpy(void * to, const void * from, size_t n)
 {
 	switch (n) {
 		case 0:
@@ -453,7 +454,7 @@
 * This looks horribly ugly, but the compiler can optimize it totally,
 * as we by now know that both pattern and count is constant..
 */
-static inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
+static force_inline void * __constant_c_and_count_memset(void * s, unsigned long pattern, size_t count)
 {
 	switch (count) {
 		case 0:
--- linux/include/asm-i386/uaccess.h.orig	2002-11-13 01:27:42.000000000 +0100
+++ linux/include/asm-i386/uaccess.h	2002-11-13 01:30:04.000000000 +0100
@@ -543,7 +543,7 @@
 unsigned long __generic_copy_to_user(void *, const void *, unsigned long);
 unsigned long __generic_copy_from_user(void *, const void *, unsigned long);
 
-static inline unsigned long
+static force_inline unsigned long
 __constant_copy_to_user(void *to, const void *from, unsigned long n)
 {
 	prefetch(from);
@@ -552,7 +552,7 @@
 	return n;
 }
 
-static inline unsigned long
+static force_inline unsigned long
 __constant_copy_from_user(void *to, const void *from, unsigned long n)
 {
 	if (access_ok(VERIFY_READ, from, n))
@@ -562,14 +562,14 @@
 	return n;
 }
 
-static inline unsigned long
+static force_inline unsigned long
 __constant_copy_to_user_nocheck(void *to, const void *from, unsigned long n)
 {
 	__constant_copy_user(to,from,n);
 	return n;
 }
 
-static inline unsigned long
+static force_inline unsigned long
 __constant_copy_from_user_nocheck(void *to, const void *from, unsigned long n)
 {
 	__constant_copy_user_zeroing(to,from,n);
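
For reference, a minimal standalone sketch (not part of the patch) of what
force_inline buys us. The file name and the helper function below are
hypothetical; only the macro itself is taken from the patch. The point: plain
`inline` is just a hint, and gcc3's size heuristics can still emit a large
helper out of line, so every "constant-size" memcpy/memset degenerates into a
real call. always_inline overrides that heuristic.

/* force_inline_demo.c - illustrative only.
 * Build with: gcc -O2 -c force_inline_demo.c
 * Then run `nm force_inline_demo.o`: with plain `inline`, gcc3 may keep an
 * out-of-line copy of big_helper and call it; with always_inline the body
 * is expanded at the call site unconditionally.
 */

/* The same macro the patch adds to <linux/compiler.h>: */
#if __GNUC__ < 3
#define force_inline inline
#else
#define force_inline inline __attribute__ ((always_inline))
#endif

/* A hypothetical helper, big enough that gcc3's inliner might normally
 * refuse to inline it under a plain `inline` hint. */
static force_inline unsigned long
big_helper(const unsigned char *buf, unsigned long len)
{
	unsigned long sum = 0;
	while (len--)
		sum = (sum << 5) - sum + *buf++;	/* cheap rolling hash */
	return sum;
}

unsigned long caller(const unsigned char *p)
{
	/* Constant length: once the body is forced inline, the compiler can
	 * optimize it against the known count, which is exactly what
	 * __constant_memcpy and friends rely on. */
	return big_helper(p, 64);
}

The __GNUC__ < 3 guard simply degrades to a plain inline hint on older
compilers, where the attribute may not exist, so the macro is safe to use
unconditionally in common headers.
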