gcc3-ssp.patch — ProPolice stack-smashing protector (SSP) patch for gcc 3.4.3
Source: packages/gcc3.git at git.pld-linux.org (package later renamed to gcc3.4).
8136aaa0 1diff -uNr gcc-3.4.3.orig/gcc/calls.c gcc-3.4.3/gcc/calls.c
2--- gcc-3.4.3.orig/gcc/calls.c 2004-06-24 09:26:50.000000000 +0200
3+++ gcc-3.4.3/gcc/calls.c 2004-11-24 18:35:31.000000000 +0100
4@@ -2321,8 +2321,12 @@
5 {
6 /* For variable-sized objects, we must be called with a target
7 specified. If we were to allocate space on the stack here,
8- we would have no way of knowing when to free it. */
9- rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
10+ we would have no way of knowing when to free it.
11+
12+	 This is the temporary for a function's returned structure; it is
13+	 not a character array needing stack protection, so it is marked
14+	 by passing 5 as the KEEP argument.  */
15+ rtx d = assign_temp (TREE_TYPE (exp), 5, 1, 1);
16
17 mark_temp_addr_taken (d);
18 structure_value_addr = XEXP (d, 0);
19diff -uNr gcc-3.4.3.orig/gcc/c-cppbuiltin.c gcc-3.4.3/gcc/c-cppbuiltin.c
20--- gcc-3.4.3.orig/gcc/c-cppbuiltin.c 2004-03-04 11:24:54.000000000 +0100
21+++ gcc-3.4.3/gcc/c-cppbuiltin.c 2004-11-24 18:35:31.000000000 +0100
22@@ -408,6 +408,12 @@
23 if (c_dialect_objc () && flag_next_runtime)
24 cpp_define (pfile, "__NEXT_RUNTIME__");
25
26+ /* Make the choice of the stack protector runtime visible to source code. */
27+ if (flag_propolice_protection)
28+ cpp_define (pfile, "__SSP__=1");
29+ if (flag_stack_protection)
30+ cpp_define (pfile, "__SSP_ALL__=2");
31+
32 /* A straightforward target hook doesn't work, because of problems
33 linking that hook's body when part of non-C front ends. */
34 # define preprocessing_asm_p() (cpp_get_options (pfile)->lang == CLK_ASM)
35diff -uNr gcc-3.4.3.orig/gcc/combine.c gcc-3.4.3/gcc/combine.c
36--- gcc-3.4.3.orig/gcc/combine.c 2004-10-13 01:35:29.000000000 +0200
37+++ gcc-3.4.3/gcc/combine.c 2004-11-24 18:35:31.000000000 +0100
38@@ -1401,6 +1401,10 @@
39 && ! fixed_regs[REGNO (dest)]
40 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
41 return 1;
42+  /* Never combine loads and stores that protect an argument; such
43+     SET insns are marked volatile.  */
44+ if (SET_VOLATILE_P (set))
45+ return 1;
46
47 return 0;
48 }
49@@ -3781,7 +3785,20 @@
50 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
51 rtx inner_op1 = XEXP (x, 1);
52 rtx inner;
53-
54+
55+#ifndef FRAME_GROWS_DOWNWARD
56+ /* For the case where the frame grows upward,
57+ the stack protector keeps the offset of the frame pointer
58+ positive integer. */
59+ if (flag_propolice_protection
60+ && code == PLUS
61+ && other == frame_pointer_rtx
62+ && GET_CODE (inner_op0) == CONST_INT
63+ && GET_CODE (inner_op1) == CONST_INT
64+ && INTVAL (inner_op0) > 0
65+ && INTVAL (inner_op0) + INTVAL (inner_op1) <= 0)
66+ return x;
67+#endif
68 /* Make sure we pass the constant operand if any as the second
69 one if this is a commutative operation. */
70 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
71@@ -4146,6 +4163,13 @@
72 they are now checked elsewhere. */
73 if (GET_CODE (XEXP (x, 0)) == PLUS
74 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
75+#ifndef FRAME_GROWS_DOWNWARD
76+ /* The stack protector keeps the addressing style of a local variable
77+ to be able to change its stack position. */
78+ if (! (flag_propolice_protection
79+ && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx
80+ && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
81+#endif
82 return gen_binary (PLUS, mode,
83 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
84 XEXP (x, 1)),
85@@ -4273,8 +4297,14 @@
86 }
87
88 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
89- integers. */
90- if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
91+ integers.
92+
93+ The stack protector keeps the addressing style of
94+ a local variable. */
95+ if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)
96+ && (! (flag_propolice_protection
97+ && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
98+ && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)))
99 return gen_binary (MINUS, mode,
100 gen_binary (MINUS, mode, XEXP (x, 0),
101 XEXP (XEXP (x, 1), 0)),
102diff -uNr gcc-3.4.3.orig/gcc/common.opt gcc-3.4.3/gcc/common.opt
103--- gcc-3.4.3.orig/gcc/common.opt 2004-11-24 18:04:19.000000000 +0100
104+++ gcc-3.4.3/gcc/common.opt 2004-11-24 18:35:31.000000000 +0100
105@@ -152,6 +152,10 @@
106 Common
107 Warn when a variable is unused
108
109+Wstack-protector
110+Common
111+Warn when not issuing stack smashing protection for some reason
112+
113 aux-info
114 Common Separate
115 -aux-info <file> Emit declaration information into <file>
116@@ -743,6 +747,14 @@
117 Common
118 Put zero initialized data in the bss section
119
120+fstack-protector
121+Common
122+Enables stack protection
123+
124+fstack-protector-all
125+Common
126+Enables stack protection of every function
127+
128 g
129 Common JoinedOrMissing
130 Generate debug information in default format
131diff -uNr gcc-3.4.3.orig/gcc/config/arm/arm.md gcc-3.4.3/gcc/config/arm/arm.md
132--- gcc-3.4.3.orig/gcc/config/arm/arm.md 2004-08-25 17:46:19.000000000 +0200
133+++ gcc-3.4.3/gcc/config/arm/arm.md 2004-11-24 18:35:31.000000000 +0100
134@@ -3840,7 +3840,13 @@
135 (match_operand:DI 1 "general_operand" ""))]
136 "TARGET_EITHER"
137 "
138- if (TARGET_THUMB)
139+ if (TARGET_ARM)
140+ {
141+ /* Everything except mem = const or mem = mem can be done easily */
142+ if (GET_CODE (operands[0]) == MEM)
143+ operands[1] = force_reg (DImode, operands[1]);
144+ }
145+ else /* TARGET_THUMB.... */
146 {
147 if (!no_new_pseudos)
148 {
149diff -uNr gcc-3.4.3.orig/gcc/config/t-linux gcc-3.4.3/gcc/config/t-linux
150--- gcc-3.4.3.orig/gcc/config/t-linux 2003-09-23 20:55:57.000000000 +0200
151+++ gcc-3.4.3/gcc/config/t-linux 2004-11-24 18:35:31.000000000 +0100
152@@ -1,7 +1,7 @@
153 # Compile crtbeginS.o and crtendS.o with pic.
154 CRTSTUFF_T_CFLAGS_S = $(CRTSTUFF_T_CFLAGS) -fPIC
155 # Compile libgcc2.a with pic.
156-TARGET_LIBGCC2_CFLAGS = -fPIC
157+TARGET_LIBGCC2_CFLAGS = -fPIC -DHAVE_SYSLOG
158
159 # Override t-slibgcc-elf-ver to export some libgcc symbols with
160 # the symbol versions that glibc used.
161diff -uNr gcc-3.4.3.orig/gcc/configure gcc-3.4.3/gcc/configure
162--- gcc-3.4.3.orig/gcc/configure 2004-11-05 05:14:05.000000000 +0100
163+++ gcc-3.4.3/gcc/configure 2004-11-24 18:44:13.000000000 +0100
164@@ -4809,6 +4809,9 @@
165 fi;
166
167
168+ENABLESSP=""
169+
170+
171 # -------------------------
172 # Checks for other programs
173 # -------------------------
174@@ -13036,6 +13039,7 @@
175 s,@TARGET_SYSTEM_ROOT_DEFINE@,$TARGET_SYSTEM_ROOT_DEFINE,;t t
176 s,@CROSS_SYSTEM_HEADER_DIR@,$CROSS_SYSTEM_HEADER_DIR,;t t
177 s,@onestep@,$onestep,;t t
178+s,@ENABLESSP@,$ENABLESSP,;t t
179 s,@SET_MAKE@,$SET_MAKE,;t t
180 s,@AWK@,$AWK,;t t
181 s,@LN@,$LN,;t t
182diff -uNr gcc-3.4.3.orig/gcc/configure.ac gcc-3.4.3/gcc/configure.ac
183--- gcc-3.4.3.orig/gcc/configure.ac 2004-11-24 18:04:19.000000000 +0100
184+++ gcc-3.4.3/gcc/configure.ac 2004-11-24 18:46:57.000000000 +0100
185@@ -613,6 +613,9 @@
186 [onestep=""])
187 AC_SUBST(onestep)
188
189+ENABLESSP=""
190+AC_SUBST(ENABLESSP)
191+
192 # -------------------------
193 # Checks for other programs
194 # -------------------------
195diff -uNr gcc-3.4.3.orig/gcc/cse.c gcc-3.4.3/gcc/cse.c
196--- gcc-3.4.3.orig/gcc/cse.c 2004-10-26 20:05:42.000000000 +0200
197+++ gcc-3.4.3/gcc/cse.c 2004-11-24 18:35:31.000000000 +0100
198@@ -4212,7 +4212,14 @@
199
200 if (new_const == 0)
201 break;
202-
203+#ifndef FRAME_GROWS_DOWNWARD
204+ if (flag_propolice_protection
205+ && GET_CODE (y) == PLUS
206+ && XEXP (y, 0) == frame_pointer_rtx
207+ && INTVAL (inner_const) > 0
208+ && INTVAL (new_const) <= 0)
209+ break;
210+#endif
211 /* If we are associating shift operations, don't let this
212 produce a shift of the size of the object or larger.
213 This could occur when we follow a sign-extend by a right
214@@ -4744,6 +4751,14 @@
215 if (SET_DEST (x) == pc_rtx
216 && GET_CODE (SET_SRC (x)) == LABEL_REF)
217 ;
218+      /* Cut the register propagation of a stack-protected argument.  */
219+ else if (SET_VOLATILE_P (x)) {
220+ rtx x1 = SET_DEST (x);
221+ if (GET_CODE (x1) == SUBREG && GET_CODE (SUBREG_REG (x1)) == REG)
222+ x1 = SUBREG_REG (x1);
223+ if (! REGNO_QTY_VALID_P(REGNO (x1)))
224+ make_new_qty (REGNO (x1), GET_MODE (x1));
225+ }
226
227 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
228 The hard function value register is used only once, to copy to
229diff -uNr gcc-3.4.3.orig/gcc/doc/invoke.texi gcc-3.4.3/gcc/doc/invoke.texi
230--- gcc-3.4.3.orig/gcc/doc/invoke.texi 2004-11-24 18:04:19.000000000 +0100
231+++ gcc-3.4.3/gcc/doc/invoke.texi 2004-11-24 18:35:32.000000000 +0100
232@@ -228,7 +228,7 @@
233 -Wno-multichar -Wnonnull -Wpacked -Wpadded @gol
234 -Wparentheses -Wpointer-arith -Wredundant-decls @gol
235 -Wreturn-type -Wsequence-point -Wshadow @gol
236--Wsign-compare -Wstrict-aliasing @gol
237+-Wsign-compare -Wstack-protector -Wstrict-aliasing @gol
238 -Wswitch -Wswitch-default -Wswitch-enum @gol
239 -Wsystem-headers -Wtrigraphs -Wundef -Wuninitialized @gol
240 -Wunknown-pragmas -Wunreachable-code @gol
241@@ -673,6 +673,7 @@
242 -fshort-double -fshort-wchar @gol
243 -fverbose-asm -fpack-struct -fstack-check @gol
244 -fstack-limit-register=@var{reg} -fstack-limit-symbol=@var{sym} @gol
245+-fstack-protector -fstack-protector-all @gol
246 -fargument-alias -fargument-noalias @gol
247 -fargument-noalias-global -fleading-underscore @gol
248 -ftls-model=@var{model} @gol
249@@ -3006,6 +3007,10 @@
250 complex; GCC will refuse to optimize programs when the optimization
251 itself is likely to take inordinate amounts of time.
252
253+@item -Wstack-protector
254+@opindex Wstack-protector
255+Warn when not issuing stack smashing protection for some reason.
256+
257 @item -Werror
258 @opindex Werror
259 Make all warnings into errors.
260@@ -11202,6 +11207,24 @@
261 @option{-Wl,--defsym,__stack_limit=0x7ffe0000} to enforce a stack limit
262 of 128KB@. Note that this may only work with the GNU linker.
263
264+@item -fstack-protector
265+@itemx -fstack-protector-all
266+@opindex fstack-protector
267+@opindex fstack-protector-all
268+@opindex fno-stack-protector
269+Generate code to protect an application from a stack smashing
270+attack. The features are (1) the insertion of a random value next to
271+the frame pointer to detect corruption of the stack, (2) the reordering
272+of local variables to place buffers after pointers to avoid the
273+corruption of pointers that could be used to further corrupt arbitrary
274+memory locations, (3) the copying of pointers in function arguments to
275+an area preceding local variable buffers to prevent the corruption of
276+pointers that could be used to further corrupt arbitrary memory
277+locations, and (4) the omission of instrumentation code from some
278+functions to decrease the performance overhead. If the integrity
279+is broken, the program is aborted. If stack-protector-all is
280+specified, instrumentation code is generated for every function.
281+
282 @cindex aliasing of parameters
283 @cindex parameters, aliased
284 @item -fargument-alias
285diff -uNr gcc-3.4.3.orig/gcc/explow.c gcc-3.4.3/gcc/explow.c
286--- gcc-3.4.3.orig/gcc/explow.c 2004-04-03 01:05:26.000000000 +0200
287+++ gcc-3.4.3/gcc/explow.c 2004-11-24 18:35:31.000000000 +0100
288@@ -84,7 +84,8 @@
289 rtx tem;
290 int all_constant = 0;
291
292- if (c == 0)
293+ if (c == 0
294+ && ! (flag_propolice_protection && x == virtual_stack_vars_rtx))
295 return x;
296
297 restart:
298@@ -185,7 +186,10 @@
299 break;
300 }
301
302- if (c != 0)
303+ /* For the use of stack protection, keep the frame and offset pattern
304+ even if the offset is zero. */
305+ if (c != 0
306+ || (flag_propolice_protection && x == virtual_stack_vars_rtx))
307 x = gen_rtx_PLUS (mode, x, GEN_INT (c));
308
309 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
310@@ -474,6 +478,26 @@
311 if (memory_address_p (mode, oldx))
312 goto win2;
313
314+ /* The stack protector keeps the addressing style of a local variable.
315+ LEGITIMIZE_ADDRESS changes the addressing to the machine-dependent
316+ style, so the protector split the frame address to a register using
317+ force_reg. */
318+ if (flag_propolice_protection)
319+ {
320+#define FRAMEADDR_P(X) (GET_CODE (X) == PLUS \
321+ && XEXP (X, 0) == virtual_stack_vars_rtx \
322+ && GET_CODE (XEXP (X, 1)) == CONST_INT)
323+ rtx y;
324+ if (FRAMEADDR_P (x))
325+ goto win;
326+ for (y = x; y != 0 && GET_CODE (y) == PLUS; y = XEXP (y, 0))
327+ {
328+ if (FRAMEADDR_P (XEXP (y, 0)))
329+ XEXP (y, 0) = force_reg (GET_MODE (XEXP (y, 0)), XEXP (y, 0));
330+ if (FRAMEADDR_P (XEXP (y, 1)))
331+ XEXP (y, 1) = force_reg (GET_MODE (XEXP (y, 1)), XEXP (y, 1));
332+ }
333+ }
334 /* Perform machine-dependent transformations on X
335 in certain cases. This is not necessary since the code
336 below can handle all possible cases, but machine-dependent
337diff -uNr gcc-3.4.3.orig/gcc/expr.c gcc-3.4.3/gcc/expr.c
338--- gcc-3.4.3.orig/gcc/expr.c 2004-05-27 21:35:17.000000000 +0200
339+++ gcc-3.4.3/gcc/expr.c 2004-11-24 18:35:31.000000000 +0100
340@@ -48,6 +48,7 @@
341 #include "intl.h"
342 #include "tm_p.h"
343 #include "target.h"
344+#include "protector.h"
345
346 /* Decide whether a function's arguments should be processed
347 from first to last or from last to first.
348@@ -1060,7 +1061,11 @@
349
350 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
351 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
352- stpcpy. */
353+ stpcpy.
354+
355+ When the stack protector is used at the reverse move, it starts the move
356+ instruction from the address within the region of a variable.
357+ So it eliminates the first address decrement instruction. */
358
359 rtx
360 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
361@@ -1123,6 +1128,8 @@
362
363 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
364 {
365+ if (flag_propolice_protection)
366+ len = len - GET_MODE_SIZE (mode);
367 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
368 data.autinc_from = 1;
369 data.explicit_inc_from = -1;
370@@ -1137,6 +1144,8 @@
371 data.from_addr = copy_addr_to_reg (from_addr);
372 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
373 {
374+ if (flag_propolice_protection)
375+ len = len - GET_MODE_SIZE (mode);
376 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
377 data.autinc_to = 1;
378 data.explicit_inc_to = -1;
379@@ -1280,11 +1289,15 @@
380 from1 = adjust_address (data->from, mode, data->offset);
381
382 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
383- emit_insn (gen_add2_insn (data->to_addr,
384- GEN_INT (-(HOST_WIDE_INT)size)));
385+ /* The stack protector skips the first address decrement instruction
386+ at the reverse move. */
387+ if (!flag_propolice_protection || data->explicit_inc_to < -1)
388+ emit_insn (gen_add2_insn (data->to_addr,
389+ GEN_INT (-(HOST_WIDE_INT)size)));
390 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
391- emit_insn (gen_add2_insn (data->from_addr,
392- GEN_INT (-(HOST_WIDE_INT)size)));
393+ if (!flag_propolice_protection || data->explicit_inc_from < -1)
394+ emit_insn (gen_add2_insn (data->from_addr,
395+ GEN_INT (-(HOST_WIDE_INT)size)));
396
397 if (data->to)
398 emit_insn ((*genfun) (to1, from1));
399@@ -2475,7 +2488,12 @@
400
401 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
402 {
403- data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
404+ int len = data->len;
405+ /* The stack protector starts the store instruction from
406+ the address within the region of a variable. */
407+ if (flag_propolice_protection)
408+ len -= GET_MODE_SIZE (mode);
409+ data->to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
410 data->autinc_to = 1;
411 data->explicit_inc_to = -1;
412 }
413@@ -2544,8 +2562,11 @@
414 to1 = adjust_address (data->to, mode, data->offset);
415
416 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
417- emit_insn (gen_add2_insn (data->to_addr,
418- GEN_INT (-(HOST_WIDE_INT) size)));
419+ /* The stack protector skips the first address decrement instruction
420+ at the reverse store. */
421+ if (!flag_propolice_protection || data->explicit_inc_to < -1)
422+ emit_insn (gen_add2_insn (data->to_addr,
423+ GEN_INT (-(HOST_WIDE_INT) size)));
424
425 cst = (*data->constfun) (data->constfundata, data->offset, mode);
426 emit_insn ((*genfun) (to1, cst));
427@@ -5701,7 +5722,9 @@
428 && GET_CODE (XEXP (value, 0)) == PLUS
429 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
430 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
431- && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
432+ && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER
433+ && (!flag_propolice_protection
434+ || XEXP (XEXP (value, 0), 0) != virtual_stack_vars_rtx))
435 {
436 rtx temp = expand_simple_binop (GET_MODE (value), code,
437 XEXP (XEXP (value, 0), 0), op2,
438diff -uNr gcc-3.4.3.orig/gcc/flags.h gcc-3.4.3/gcc/flags.h
439--- gcc-3.4.3.orig/gcc/flags.h 2004-11-24 18:04:19.000000000 +0100
440+++ gcc-3.4.3/gcc/flags.h 2004-11-24 18:35:31.492689688 +0100
441@@ -210,6 +210,10 @@
442
443 extern bool warn_strict_aliasing;
444
445+/* Warn when not issuing stack smashing protection for some reason. */
446+
447+extern bool warn_stack_protector;
448+
449 /* Nonzero if generating code to do profiling. */
450
451 extern int profile_flag;
452@@ -795,4 +799,12 @@
453 #define HONOR_SIGN_DEPENDENT_ROUNDING(MODE) \
454 (MODE_HAS_SIGN_DEPENDENT_ROUNDING (MODE) && flag_rounding_math)
455
456+/* Nonzero means use propolice as a stack protection method. */
457+
458+extern int flag_propolice_protection;
459+
460+/* Nonzero means use a stack protection method for every function. */
461+
462+extern int flag_stack_protection;
463+
464 #endif /* ! GCC_FLAGS_H */
465diff -uNr gcc-3.4.3.orig/gcc/function.c gcc-3.4.3/gcc/function.c
466--- gcc-3.4.3.orig/gcc/function.c 2004-10-14 01:18:13.000000000 +0200
467+++ gcc-3.4.3/gcc/function.c 2004-11-24 18:35:31.542682088 +0100
468@@ -63,6 +63,7 @@
469 #include "integrate.h"
470 #include "langhooks.h"
471 #include "target.h"
472+#include "protector.h"
473
474 #ifndef TRAMPOLINE_ALIGNMENT
475 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
476@@ -155,6 +156,10 @@
477 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
478 in this function. */
479 static GTY(()) varray_type sibcall_epilogue;
480+
481+/* Current boundary mark for character arrays. */
482+static int temp_boundary_mark = 0;
483+
484 \f
485 /* In order to evaluate some expressions, such as function calls returning
486 structures in memory, we need to temporarily allocate stack locations.
487@@ -208,6 +213,8 @@
488 /* The size of the slot, including extra space for alignment. This
489 info is for combine_temp_slots. */
490 HOST_WIDE_INT full_size;
491+ /* Boundary mark of a character array and the others. This info is for propolice. */
492+ int boundary_mark;
493 };
494 \f
495 /* This structure is used to record MEMs or pseudos used to replace VAR, any
496@@ -638,6 +645,7 @@
497 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
498 if we are to allocate something at an inner level to be treated as
499 a variable in the block (e.g., a SAVE_EXPR).
500+   KEEP is 5 if we allocate a place for a returned structure.
501
502 TYPE is the type that will be used for the stack slot. */
503
504@@ -648,6 +656,8 @@
505 unsigned int align;
506 struct temp_slot *p, *best_p = 0;
507 rtx slot;
508+ int char_array = (flag_propolice_protection
509+ && keep == 1 && search_string_def (type));
510
511 /* If SIZE is -1 it means that somebody tried to allocate a temporary
512 of a variable size. */
513@@ -673,7 +683,8 @@
514 && ! p->in_use
515 && objects_must_conflict_p (p->type, type)
516 && (best_p == 0 || best_p->size > p->size
517- || (best_p->size == p->size && best_p->align > p->align)))
518+ || (best_p->size == p->size && best_p->align > p->align))
519+ && (! char_array || p->boundary_mark != 0))
520 {
521 if (p->align == align && p->size == size)
522 {
523@@ -708,6 +719,7 @@
524 p->address = 0;
525 p->rtl_expr = 0;
526 p->type = best_p->type;
527+ p->boundary_mark = best_p->boundary_mark;
528 p->next = temp_slots;
529 temp_slots = p;
530
531@@ -768,6 +780,7 @@
532 p->full_size = frame_offset - frame_offset_old;
533 #endif
534 p->address = 0;
535+ p->boundary_mark = char_array ? ++temp_boundary_mark : 0;
536 p->next = temp_slots;
537 temp_slots = p;
538 }
539@@ -932,14 +945,16 @@
540 int delete_q = 0;
541 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
542 {
543- if (p->base_offset + p->full_size == q->base_offset)
544+ if (p->base_offset + p->full_size == q->base_offset &&
545+ p->boundary_mark == q->boundary_mark)
546 {
547 /* Q comes after P; combine Q into P. */
548 p->size += q->size;
549 p->full_size += q->full_size;
550 delete_q = 1;
551 }
552- else if (q->base_offset + q->full_size == p->base_offset)
553+ else if (q->base_offset + q->full_size == p->base_offset &&
554+ p->boundary_mark == q->boundary_mark)
555 {
556 /* P comes after Q; combine P into Q. */
557 q->size += p->size;
558@@ -1449,7 +1464,9 @@
559 }
560
561 if (new == 0)
562- new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
563+ new = function ?
564+ assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func)
565+ : assign_stack_local_for_pseudo_reg (decl_mode, GET_MODE_SIZE (decl_mode), 0);
566
567 PUT_CODE (reg, MEM);
568 PUT_MODE (reg, decl_mode);
569@@ -3937,10 +3954,13 @@
570 }
571
572 /* Otherwise copy the new constant into a register and replace
573- constant with that register. */
574+ constant with that register.
575+ At the use of stack protection, stop to replace the frame
576+ offset with a register. */
577 temp = gen_reg_rtx (Pmode);
578 XEXP (x, 0) = new;
579- if (validate_change (object, &XEXP (x, 1), temp, 0))
580+ if (validate_change (object, &XEXP (x, 1), temp, 0)
581+ && !flag_propolice_protection)
582 emit_insn_before (gen_move_insn (temp, new_offset), object);
583 else
584 {
585diff -uNr gcc-3.4.3.orig/gcc/gcse.c gcc-3.4.3/gcc/gcse.c
586--- gcc-3.4.3.orig/gcc/gcse.c 2004-10-30 20:02:53.000000000 +0200
587+++ gcc-3.4.3/gcc/gcse.c 2004-11-24 18:35:31.583675856 +0100
588@@ -4176,9 +4176,13 @@
589 continue;
590
591 /* Find an assignment that sets reg_used and is available
592- at the start of the block. */
593+ at the start of the block.
594+
595+ Skip the copy propagation not to eliminate the register that is
596+ the duplicated pointer of a function argument. It is used for
597+ the function argument protection. */
598 set = find_avail_set (regno, insn);
599- if (! set)
600+ if (! set || SET_VOLATILE_P (set->expr))
601 continue;
602
603 pat = set->expr;
604diff -uNr gcc-3.4.3.orig/gcc/integrate.c gcc-3.4.3/gcc/integrate.c
605--- gcc-3.4.3.orig/gcc/integrate.c 2004-01-24 00:36:00.000000000 +0100
606+++ gcc-3.4.3/gcc/integrate.c 2004-11-24 18:35:31.603672816 +0100
607@@ -393,6 +393,11 @@
608 /* These args would always appear unused, if not for this. */
609 TREE_USED (copy) = 1;
610
611+ /* The inlined variable is marked as INLINE not to change the location
612+ by stack protector. */
613+ if (flag_propolice_protection && TREE_CODE (copy) == VAR_DECL)
614+ DECL_COPIED (copy) = 1;
615+
616 /* Set the context for the new declaration. */
617 if (!DECL_CONTEXT (decl))
618 /* Globals stay global. */
619@@ -1970,6 +1975,12 @@
620
621 seq = get_insns ();
622 end_sequence ();
623+#ifdef ARGS_GROWS_DOWNWARD
624+ /* Mark this pointer as the top of the argument
625+ block. The pointer minus one is in the block. */
626+ if (flag_propolice_protection && GET_CODE (seq) == SET)
627+ RTX_INTEGRATED_P (SET_SRC (seq)) = 1;
628+#endif
629 emit_insn_after (seq, map->insns_at_start);
630 return temp;
631 }
632diff -uNr gcc-3.4.3.orig/gcc/libgcc2.c gcc-3.4.3/gcc/libgcc2.c
633--- gcc-3.4.3.orig/gcc/libgcc2.c 2004-09-26 22:47:14.000000000 +0200
634+++ gcc-3.4.3/gcc/libgcc2.c 2004-11-24 18:35:31.627669168 +0100
635@@ -1678,3 +1678,124 @@
636 #endif /* no INIT_SECTION_ASM_OP and not CTOR_LISTS_DEFINED_EXTERNALLY */
637 #endif /* L_ctors */
638
639+\f
640+#ifdef L_stack_smash_handler
641+#ifndef _LIBC_PROVIDES_SSP_
642+#include <stdio.h>
643+#include <string.h>
644+#include <fcntl.h>
645+#include <unistd.h>
646+
647+#ifdef _POSIX_SOURCE
648+#include <signal.h>
649+#endif
650+
651+#if defined(HAVE_SYSLOG)
652+#include <sys/types.h>
653+#include <sys/socket.h>
654+#include <sys/un.h>
655+
656+#include <sys/syslog.h>
657+#ifndef _PATH_LOG
658+#define _PATH_LOG "/dev/log"
659+#endif
660+#endif
661+
662+long __guard[8] = {0, 0, 0, 0, 0, 0, 0, 0};
663+static void __guard_setup (void) __attribute__ ((constructor));
664+
665+static void
666+__guard_setup (void)
667+{
668+ int fd;
669+ if (__guard[0] != 0)
670+ return;
671+ fd = open ("/dev/urandom", 0);
672+ if (fd != -1) {
673+ ssize_t size = read (fd, (char*)&__guard, sizeof(__guard));
674+ close (fd) ;
675+ if (size == sizeof(__guard))
676+ return;
677+ }
678+ /* If a random generator can't be used, the protector switches the guard
679+ to the "terminator canary". */
680+ ((char*)__guard)[0] = 0;
681+ ((char*)__guard)[1] = 0;
682+ ((char*)__guard)[2] = '\n';
683+ ((char*)__guard)[3] = 255;
684+}
685+
686+extern void __stack_smash_handler (char func[], ATTRIBUTE_UNUSED int damaged);
687+void
688+__stack_smash_handler (char func[], ATTRIBUTE_UNUSED int damaged)
689+{
690+#if defined (__GNU_LIBRARY__)
691+ extern char * __progname;
692+#endif
693+ const char message[] = ": stack smashing attack in function ";
694+ int bufsz = 256, len;
695+ char buf[bufsz];
696+#if defined(HAVE_SYSLOG)
697+ int log_file;
698+ struct sockaddr_un sys_log_addr; /* AF_UNIX address of local logger. */
699+#endif
700+#ifdef _POSIX_SOURCE
701+ {
702+ sigset_t mask;
703+ sigfillset (&mask);
704+ /* Block all signal handlers except SIGABRT. */
705+ sigdelset (&mask, SIGABRT);
706+ sigprocmask (SIG_BLOCK, &mask, NULL);
707+ }
708+#endif
709+
710+ /* send LOG_CRIT. */
711+ strcpy (buf, "<2>"); len=3;
712+#if defined (__GNU_LIBRARY__)
713+ strncat (buf, __progname, bufsz - len - 1);
714+ len = strlen (buf);
715+#endif
716+ if (bufsz > len)
717+ {
718+ strncat (buf, message, bufsz - len - 1);
719+ len = strlen (buf);
720+ }
721+ if (bufsz > len)
722+ {
723+ strncat (buf, func, bufsz - len - 1);
724+ len = strlen (buf);
725+ }
726+
727+ /* Print error message. */
728+ write (STDERR_FILENO, buf + 3, len - 3);
729+#if defined(HAVE_SYSLOG)
730+ if ((log_file = socket (AF_UNIX, SOCK_DGRAM, 0)) != -1)
731+ {
732+
733+ /* Send "found" message to the "/dev/log" path. */
734+ sys_log_addr.sun_family = AF_UNIX;
735+ (void)strncpy (sys_log_addr.sun_path, _PATH_LOG,
736+ sizeof (sys_log_addr.sun_path) - 1);
737+ sys_log_addr.sun_path[sizeof (sys_log_addr.sun_path) - 1] = '\0';
738+ sendto(log_file, buf, len, 0, (struct sockaddr *)&sys_log_addr,
739+ sizeof (sys_log_addr));
740+ }
741+#endif
742+
743+#ifdef _POSIX_SOURCE
744+ {
745+ /* Make sure the default handler is associated with SIGABRT. */
746+ struct sigaction sa;
747+
748+ memset (&sa, 0, sizeof(struct sigaction));
749+ sigfillset (&sa.sa_mask); /* Block all signals. */
750+ sa.sa_flags = 0;
751+ sa.sa_handler = SIG_DFL;
752+ sigaction (SIGABRT, &sa, NULL);
753+ (void)kill (getpid(), SIGABRT);
754+ }
755+#endif
756+ _exit (127);
757+}
758+#endif /* _LIBC_PROVIDES_SSP_ */
759+#endif /* L_stack_smash_handler */
760diff -uNr gcc-3.4.3.orig/gcc/libgcc-std.ver gcc-3.4.3/gcc/libgcc-std.ver
761--- gcc-3.4.3.orig/gcc/libgcc-std.ver 2004-09-01 21:14:33.000000000 +0200
762+++ gcc-3.4.3/gcc/libgcc-std.ver 2004-11-24 18:35:31.620670232 +0100
763@@ -174,6 +174,12 @@
764 _Unwind_SjLj_RaiseException
765 _Unwind_SjLj_ForcedUnwind
766 _Unwind_SjLj_Resume
767+
768+%if !defined(_LIBC_PROVIDES_SSP_)
769+ # stack smash handler symbols
770+ __guard
771+ __stack_smash_handler
772+%endif
773 }
774
775 %inherit GCC_3.3 GCC_3.0
776diff -uNr gcc-3.4.3.orig/gcc/loop.c gcc-3.4.3/gcc/loop.c
777--- gcc-3.4.3.orig/gcc/loop.c 2004-07-13 17:29:08.000000000 +0200
778+++ gcc-3.4.3/gcc/loop.c 2004-11-24 18:35:31.680661112 +0100
779@@ -6514,6 +6514,14 @@
780 if (GET_CODE (*mult_val) == USE)
781 *mult_val = XEXP (*mult_val, 0);
782
783+#ifndef FRAME_GROWS_DOWNWARD
784+ if (flag_propolice_protection
785+ && GET_CODE (*add_val) == PLUS
786+ && (XEXP (*add_val, 0) == frame_pointer_rtx
787+ || XEXP (*add_val, 1) == frame_pointer_rtx))
788+ return 0;
789+#endif
790+
791 if (is_addr)
792 *pbenefit += address_cost (orig_x, addr_mode) - reg_address_cost;
793 else
794diff -uNr gcc-3.4.3.orig/gcc/Makefile.in gcc-3.4.3/gcc/Makefile.in
795--- gcc-3.4.3.orig/gcc/Makefile.in 2004-11-24 18:04:18.000000000 +0100
796+++ gcc-3.4.3/gcc/Makefile.in 2004-11-24 18:35:31.038758696 +0100
797@@ -867,7 +867,7 @@
798 sibcall.o simplify-rtx.o sreal.o stmt.o stor-layout.o stringpool.o \
799 targhooks.o timevar.o toplev.o tracer.o tree.o tree-dump.o unroll.o \
800 varasm.o varray.o version.o vmsdbgout.o xcoffout.o alloc-pool.o \
801- et-forest.o cfghooks.o bt-load.o pretty-print.o $(GGC) web.o
802+ et-forest.o cfghooks.o bt-load.o pretty-print.o $(GGC) web.o protector.o
803
804 OBJS-md = $(out_object_file)
805 OBJS-archive = $(EXTRA_OBJS) $(host_hook_obj) hashtable.o tree-inline.o \
806@@ -1549,7 +1549,7 @@
807 langhooks.h insn-flags.h cfglayout.h real.h cfgloop.h \
808 hosthooks.h $(LANGHOOKS_DEF_H) cgraph.h $(COVERAGE_H) alloc-pool.h
809 $(CC) $(ALL_CFLAGS) $(ALL_CPPFLAGS) $(INCLUDES) \
810- -DTARGET_NAME=\"$(target_noncanonical)\" \
811+ -DTARGET_NAME=\"$(target_noncanonical)\" @ENABLESSP@ \
812 -c $(srcdir)/toplev.c $(OUTPUT_OPTION)
813 main.o : main.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) toplev.h
814
815@@ -1852,6 +1852,10 @@
816 params.o : params.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(PARAMS_H) toplev.h
817 hooks.o: hooks.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(HOOKS_H)
818 pretty-print.o: $(CONFIG_H) $(SYSTEM_H) pretty-print.c $(PRETTY_PRINT_H)
819+protector.o : protector.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
820+ flags.h function.h $(EXPR_H) $(OPTABS_H) $(REGS_H) toplev.h hard-reg-set.h \
821+ insn-config.h insn-flags.h $(RECOG_H) output.h toplev.h except.h reload.h \
822+ $(TM_P_H) conditions.h $(INSN_ATTR_H) real.h protector.h
823
824 $(out_object_file): $(out_file) $(CONFIG_H) coretypes.h $(TM_H) $(TREE_H) $(GGC_H) \
825 $(RTL_H) $(REGS_H) hard-reg-set.h real.h insn-config.h conditions.h \
826diff -uNr gcc-3.4.3.orig/gcc/mklibgcc.in gcc-3.4.3/gcc/mklibgcc.in
827--- gcc-3.4.3.orig/gcc/mklibgcc.in 2004-10-18 18:00:43.000000000 +0200
828+++ gcc-3.4.3/gcc/mklibgcc.in 2004-11-24 18:35:31.699658224 +0100
829@@ -57,7 +57,7 @@
830 _enable_execute_stack _trampoline __main _absvsi2 _absvdi2 _addvsi3
831 _addvdi3 _subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors
832 _ffssi2 _ffsdi2 _clz _clzsi2 _clzdi2 _ctzsi2 _ctzdi2 _popcount_tab
833- _popcountsi2 _popcountdi2 _paritysi2 _paritydi2'
834+ _popcountsi2 _popcountdi2 _paritysi2 _paritydi2 _stack_smash_handler'
835
836 # Disable SHLIB_LINK if shared libgcc not enabled.
837 if [ "@enable_shared@" = "no" ]; then
838diff -uNr gcc-3.4.3.orig/gcc/optabs.c gcc-3.4.3/gcc/optabs.c
839--- gcc-3.4.3.orig/gcc/optabs.c 2004-03-03 01:45:01.000000000 +0100
840+++ gcc-3.4.3/gcc/optabs.c 2004-11-24 18:35:31.739652144 +0100
841@@ -678,6 +678,27 @@
842 if (target)
843 target = protect_from_queue (target, 1);
844
845+ /* Keep the frame and offset pattern at the use of stack protection. */
846+ if (flag_propolice_protection
847+ && binoptab->code == PLUS
848+ && op0 == virtual_stack_vars_rtx
849+ && GET_CODE(op1) == CONST_INT)
850+ {
851+ int icode = (int) binoptab->handlers[(int) mode].insn_code;
852+ if (target)
853+ temp = target;
854+ else
855+ temp = gen_reg_rtx (mode);
856+
857+ if (! (*insn_data[icode].operand[0].predicate) (temp, mode)
858+ || GET_CODE (temp) != REG)
859+ temp = gen_reg_rtx (mode);
860+
861+ emit_insn (gen_rtx_SET (VOIDmode, temp,
862+ gen_rtx_PLUS (GET_MODE (op0), op0, op1)));
863+ return temp;
864+ }
865+
866 if (flag_force_mem)
867 {
868 /* Load duplicate non-volatile operands once. */
869diff -uNr gcc-3.4.3.orig/gcc/opts.c gcc-3.4.3/gcc/opts.c
870--- gcc-3.4.3.orig/gcc/opts.c 2004-11-24 18:04:19.000000000 +0100
871+++ gcc-3.4.3/gcc/opts.c 2004-11-24 18:35:31.762648648 +0100
872@@ -125,6 +125,9 @@
873 bool warn_unused_variable;
874 bool warn_unused_value;
875
876+/* Warn when not issuing stack smashing protection for some reason */
877+bool warn_stack_protector;
878+
879 /* Hack for cooperation between set_Wunused and set_Wextra. */
880 static bool maybe_warn_unused_parameter;
881
882@@ -804,6 +807,10 @@
883 warn_unused_variable = value;
884 break;
885
886+ case OPT_Wstack_protector:
887+ warn_stack_protector = value;
888+ break;
889+
890 case OPT_aux_info:
891 case OPT_aux_info_:
892 aux_info_file_name = arg;
893@@ -1367,6 +1374,14 @@
894 stack_limit_rtx = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (arg));
895 break;
896
897+ case OPT_fstack_protector:
898+ flag_propolice_protection = value;
899+ break;
900+
901+ case OPT_fstack_protector_all:
902+ flag_stack_protection = value;
903+ break;
904+
905 case OPT_fstrength_reduce:
906 flag_strength_reduce = value;
907 break;
908diff -uNr gcc-3.4.3.orig/gcc/protector.c gcc-3.4.3/gcc/protector.c
909--- gcc-3.4.3.orig/gcc/protector.c 1970-01-01 01:00:00.000000000 +0100
910+++ gcc-3.4.3/gcc/protector.c 2004-09-02 11:36:11.000000000 +0200
911@@ -0,0 +1,2730 @@
912+/* RTL buffer overflow protection function for GNU C compiler
913+ Copyright (C) 2003 Free Software Foundation, Inc.
914+
915+This file is part of GCC.
916+
917+GCC is free software; you can redistribute it and/or modify it under
918+the terms of the GNU General Public License as published by the Free
919+Software Foundation; either version 2, or (at your option) any later
920+version.
921+
922+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
923+WARRANTY; without even the implied warranty of MERCHANTABILITY or
924+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
925+for more details.
926+
927+You should have received a copy of the GNU General Public License
928+along with GCC; see the file COPYING. If not, write to the Free
929+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
930+02111-1307, USA. */
931+
932+/* This file contains several memory arrangement functions to protect
933+ the return address and the frame pointer of the stack
934+ from a stack-smashing attack. It also
935+ provides the function that protects pointer variables. */
936+
937+#include "config.h"
938+#include "system.h"
939+#include "coretypes.h"
940+#include "tm.h"
941+#include "machmode.h"
942+#include "real.h"
943+#include "rtl.h"
944+#include "tree.h"
945+#include "regs.h"
946+#include "flags.h"
947+#include "insn-config.h"
948+#include "insn-flags.h"
949+#include "expr.h"
950+#include "output.h"
951+#include "recog.h"
952+#include "hard-reg-set.h"
953+#include "except.h"
954+#include "function.h"
955+#include "toplev.h"
956+#include "tm_p.h"
957+#include "conditions.h"
958+#include "insn-attr.h"
959+#include "optabs.h"
960+#include "reload.h"
961+#include "protector.h"
962+
963+
964+/* Round a value to the lowest integer less than it that is a multiple of
965+ the required alignment. Avoid using division in case the value is
966+ negative. Assume the alignment is a power of two. */
967+#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
968+
969+/* Similar, but round to the next highest integer that meets the
970+ alignment. */
971+#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
972+
973+
974+/* Nonzero if function being compiled can define string buffers that may be
975+ damaged by the stack-smash attack. */
976+static int current_function_defines_vulnerable_string;
977+static int current_function_defines_short_string;
978+static int current_function_has_variable_string;
979+static int current_function_defines_vsized_array;
980+static int current_function_is_inlinable;
981+
982+/* Nonzero if search_string_def finds the variable which contains an array. */
983+static int is_array;
984+
985+/* Nonzero if search_string_def finds a byte-pointer variable,
986+ which may be assigned to alloca output. */
987+static int may_have_alloca_pointer;
988+
989+static rtx guard_area, _guard;
990+static rtx function_first_insn, prologue_insert_point;
991+
992+/* Offset to end of swept area for gathering character arrays. */
993+static HOST_WIDE_INT sweep_frame_offset;
994+
995+/* Offset to end of allocated area for instantiating pseudo registers. */
996+static HOST_WIDE_INT push_allocated_offset = 0;
997+
998+/* Offset to end of assigned area for instantiating pseudo registers. */
999+static HOST_WIDE_INT push_frame_offset = 0;
1000+
1001+/* Set to 1 after cse_not_expected becomes nonzero. it is used to identify
1002+ which stage assign_stack_local_for_pseudo_reg is called from. */
1003+static int saved_cse_not_expected = 0;
1004+
1005+static int search_string_from_argsandvars (int);
1006+static int search_string_from_local_vars (tree);
1007+static int search_pointer_def (tree);
1008+static int search_func_pointer (tree);
1009+static int check_used_flag (rtx);
1010+static void reset_used_flags_for_insns (rtx);
1011+static void reset_used_flags_for_decls (tree);
1012+static void reset_used_flags_of_plus (rtx);
1013+static void rtl_prologue (rtx);
1014+static void rtl_epilogue (rtx);
1015+static void arrange_var_order (tree);
1016+static void copy_args_for_protection (void);
1017+static void sweep_string_variable (rtx, HOST_WIDE_INT);
1018+static void sweep_string_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
1019+static void sweep_string_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
1020+static void sweep_string_use_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
1021+static void sweep_string_in_operand (rtx, rtx *, HOST_WIDE_INT, HOST_WIDE_INT);
1022+static void move_arg_location (rtx, rtx, rtx, HOST_WIDE_INT);
1023+static void change_arg_use_of_insns (rtx, rtx, rtx *, HOST_WIDE_INT);
1024+static void change_arg_use_in_operand (rtx, rtx, rtx, rtx *, HOST_WIDE_INT);
1025+static void validate_insns_of_varrefs (rtx);
1026+static void validate_operand_of_varrefs (rtx, rtx *);
1027+
1028+/* Specify which size of buffers should be protected from a stack smashing
1029+ attack. Because small buffers are not used in situations which may
1030+ overflow buffer, the default size sets to the size of 64 bit register. */
1031+#ifndef SUSPICIOUS_BUF_SIZE
1032+#define SUSPICIOUS_BUF_SIZE 8
1033+#endif
1034+
1035+#define AUTO_BASEPTR(X) \
1036+ (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
1037+#define AUTO_OFFSET(X) \
1038+ (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
1039+#undef PARM_PASSED_IN_MEMORY
1040+#define PARM_PASSED_IN_MEMORY(PARM) \
1041+ (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
1042+#define TREE_VISITED(NODE) ((NODE)->common.unused_0)
1043+
1044+/* Argument values for calling search_string_from_argsandvars. */
1045+#define CALL_FROM_PREPARE_STACK_PROTECTION 0
1046+#define CALL_FROM_PUSH_FRAME 1
1047+
1048+
1049+/* Prepare several stack protection instruments for the current function
1050+ if the function has an array as a local variable, which may be vulnerable
1051+ from a stack smashing attack, and it is not inlinable.
1052+
1053+ The overall steps are as follows;
1054+ (1)search an array,
1055+ (2)insert guard_area on the stack,
1056+ (3)duplicate pointer arguments into local variables, and
1057+ (4)arrange the location of local variables. */
1058+void
1059+prepare_stack_protection (int inlinable)
1060+{
1061+ tree blocks = DECL_INITIAL (current_function_decl);
1062+ current_function_is_inlinable = inlinable && !flag_no_inline;
1063+ push_frame_offset = push_allocated_offset = 0;
1064+ saved_cse_not_expected = 0;
1065+
1066+ /* Skip the protection if the function has no block
1067+ or it is an inline function. */
1068+ if (current_function_is_inlinable)
1069+ validate_insns_of_varrefs (get_insns ());
1070+ if (! blocks || current_function_is_inlinable)
1071+ return;
1072+
1073+ current_function_defines_vulnerable_string
1074+ = search_string_from_argsandvars (CALL_FROM_PREPARE_STACK_PROTECTION);
1075+
1076+ if (current_function_defines_vulnerable_string
1077+ || flag_stack_protection)
1078+ {
1079+ function_first_insn = get_insns ();
1080+
1081+ if (current_function_contains_functions)
1082+ {
1083+ if (warn_stack_protector)
1084+ warning ("not protecting function: it contains functions");
1085+ return;
1086+ }
1087+
1088+ /* Initialize recognition, indicating that volatile is OK. */
1089+ init_recog ();
1090+
1091+ sweep_frame_offset = 0;
1092+
1093+#ifdef STACK_GROWS_DOWNWARD
1094+ /* frame_offset: offset to end of allocated area of stack frame.
1095+ It is defined in the function.c. */
1096+
1097+ /* the location must be before buffers. */
1098+ guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
1099+ PUT_MODE (guard_area, GUARD_m);
1100+ MEM_VOLATILE_P (guard_area) = 1;
1101+
1102+#ifndef FRAME_GROWS_DOWNWARD
1103+ sweep_frame_offset = frame_offset;
1104+#endif
1105+
1106+ /* For making room for guard value, scan all insns and fix the offset
1107+ address of the variable that is based on frame pointer.
1108+ Scan all declarations of variables and fix the offset address
1109+ of the variable that is based on the frame pointer. */
1110+ sweep_string_variable (guard_area, UNITS_PER_GUARD);
1111+
1112+
1113+ /* the location of guard area moves to the beginning of stack frame. */
1114+ if (AUTO_OFFSET(XEXP (guard_area, 0)))
1115+ XEXP (XEXP (guard_area, 0), 1)
1116+ = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
1117+
1118+
1119+ /* Insert prologue rtl instructions. */
1120+ rtl_prologue (function_first_insn);
1121+
1122+ if (! current_function_has_variable_string)
1123+ {
1124+ /* Generate argument saving instruction. */
1125+ copy_args_for_protection ();
1126+
1127+#ifndef FRAME_GROWS_DOWNWARD
1128+ /* If frame grows upward, character arrays for protecting args
1129+ may copy to the top of the guard variable.
1130+ So sweep the guard variable again. */
1131+ sweep_frame_offset = CEIL_ROUND (frame_offset,
1132+ BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1133+ sweep_string_variable (guard_area, UNITS_PER_GUARD);
1134+#endif
1135+ }
1136+ /* Variable can't be protected from the overflow of variable length
1137+ buffer. But variable reordering is still effective against
1138+ the overflow of fixed size character arrays. */
1139+ else if (warn_stack_protector)
1140+ warning ("not protecting variables: it has a variable length buffer");
1141+#endif
1142+#ifndef FRAME_GROWS_DOWNWARD
1143+ if (STARTING_FRAME_OFFSET == 0)
1144+ {
1145+ /* This part may be only for alpha. */
1146+ push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1147+ assign_stack_local (BLKmode, push_allocated_offset, -1);
1148+ sweep_frame_offset = frame_offset;
1149+ sweep_string_variable (const0_rtx, -push_allocated_offset);
1150+ sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
1151+ }
1152+#endif
1153+
1154+ /* Arrange the order of local variables. */
1155+ arrange_var_order (blocks);
1156+
1157+#ifdef STACK_GROWS_DOWNWARD
1158+ /* Insert epilogue rtl instructions. */
1159+ rtl_epilogue (get_last_insn ());
1160+#endif
1161+ init_recog_no_volatile ();
1162+ }
1163+ else if (current_function_defines_short_string
1164+ && warn_stack_protector)
1165+ warning ("not protecting function: buffer is less than %d bytes long",
1166+ SUSPICIOUS_BUF_SIZE);
1167+}
1168+
1169+/*
1170+ Search string from arguments and local variables.
1171+ caller: CALL_FROM_PREPARE_STACK_PROTECTION (0)
1172+ CALL_FROM_PUSH_FRAME (1)
1173+*/
1174+static int
1175+search_string_from_argsandvars (int caller)
1176+{
1177+ tree blocks, parms;
1178+ int string_p;
1179+
1180+  /* Saves the latest search result as cached information. */
1181+ static tree __latest_search_decl = 0;
1182+ static int __latest_search_result = FALSE;
1183+
1184+ if (__latest_search_decl == current_function_decl)
1185+ return __latest_search_result;
1186+ else
1187+ if (caller == CALL_FROM_PUSH_FRAME)
1188+ return FALSE;
1189+
1190+ __latest_search_decl = current_function_decl;
1191+ __latest_search_result = TRUE;
1192+
1193+ current_function_defines_short_string = FALSE;
1194+ current_function_has_variable_string = FALSE;
1195+ current_function_defines_vsized_array = FALSE;
1196+ may_have_alloca_pointer = FALSE;
1197+
1198+ /* Search a string variable from local variables. */
1199+ blocks = DECL_INITIAL (current_function_decl);
1200+ string_p = search_string_from_local_vars (blocks);
1201+
1202+ if (! current_function_defines_vsized_array
1203+ && may_have_alloca_pointer
1204+ && current_function_calls_alloca)
1205+ {
1206+ current_function_has_variable_string = TRUE;
1207+ return TRUE;
1208+ }
1209+
1210+ if (string_p)
1211+ return TRUE;
1212+
1213+#ifdef STACK_GROWS_DOWNWARD
1214+ /* Search a string variable from arguments. */
1215+ parms = DECL_ARGUMENTS (current_function_decl);
1216+
1217+ for (; parms; parms = TREE_CHAIN (parms))
1218+ if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1219+ {
1220+ if (PARM_PASSED_IN_MEMORY (parms))
1221+ {
1222+ string_p = search_string_def (TREE_TYPE(parms));
1223+ if (string_p)
1224+ return TRUE;
1225+ }
1226+ }
1227+#endif
1228+
1229+ __latest_search_result = FALSE;
1230+ return FALSE;
1231+}
1232+
1233+
1234+/* Search string from local variables in the specified scope. */
1235+static int
1236+search_string_from_local_vars (tree block)
1237+{
1238+ tree types;
1239+ int found = FALSE;
1240+
1241+ while (block && TREE_CODE(block)==BLOCK)
1242+ {
1243+ for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
1244+ {
1245+ /* Skip the declaration that refers an external variable. */
1246+ /* name: types.decl.name.identifier.id */
1247+ if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
1248+ && TREE_CODE (types) == VAR_DECL
1249+ && ! DECL_ARTIFICIAL (types)
1250+ && DECL_RTL_SET_P (types)
1251+ && GET_CODE (DECL_RTL (types)) == MEM
1252+
1253+ && search_string_def (TREE_TYPE (types)))
1254+ {
1255+ rtx home = DECL_RTL (types);
1256+
1257+ if (GET_CODE (home) == MEM
1258+ && (GET_CODE (XEXP (home, 0)) == MEM
1259+ || (GET_CODE (XEXP (home, 0)) == REG
1260+ && XEXP (home, 0) != virtual_stack_vars_rtx
1261+ && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
1262+ && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
1263+#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1264+ && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
1265+#endif
1266+ )))
1267+ /* If the value is indirect by memory or by a register
1268+ that isn't the frame pointer then it means the object is
1269+ variable-sized and address through
1270+ that register or stack slot.
1271+ The protection has no way to hide pointer variables
1272+		 behind the array, so all we can do is preserve
1273+		 the order of variables and arguments.  */
1274+ {
1275+ current_function_has_variable_string = TRUE;
1276+ }
1277+
1278+ /* Found character array. */
1279+ found = TRUE;
1280+ }
1281+ }
1282+
1283+ if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
1284+ {
1285+ found = TRUE;
1286+ }
1287+
1288+ block = BLOCK_CHAIN (block);
1289+ }
1290+
1291+ return found;
1292+}
1293+
1294+
1295+/* Search a character array from the specified type tree. */
1296+int
1297+search_string_def (tree type)
1298+{
1299+ tree tem;
1300+
1301+ if (! type)
1302+ return FALSE;
1303+
1304+ switch (TREE_CODE (type))
1305+ {
1306+ case ARRAY_TYPE:
1307+ /* Check if the array is a variable-sized array. */
1308+ if (TYPE_DOMAIN (type) == 0
1309+ || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1310+ && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
1311+ current_function_defines_vsized_array = TRUE;
1312+
1313+ /* Check if the array is related to char array. */
1314+ if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
1315+ || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
1316+ || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
1317+ {
1318+ /* Check if the string is a variable string. */
1319+ if (TYPE_DOMAIN (type) == 0
1320+ || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1321+ && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
1322+ return TRUE;
1323+
1324+ /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE. */
1325+ if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1326+ && (TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1
1327+ >= SUSPICIOUS_BUF_SIZE))
1328+ return TRUE;
1329+
1330+ current_function_defines_short_string = TRUE;
1331+ }
1332+
1333+      /* To protect every function, sweep all arrays to the frame top. */
1334+ is_array = TRUE;
1335+
1336+ return search_string_def(TREE_TYPE(type));
1337+
1338+ case UNION_TYPE:
1339+ case QUAL_UNION_TYPE:
1340+ case RECORD_TYPE:
1341+ /* Check if each field has character arrays. */
1342+ for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1343+ {
1344+ /* Omit here local type decls until we know how to support them. */
1345+ if ((TREE_CODE (tem) == TYPE_DECL)
1346+ || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
1347+ continue;
1348+
1349+ if (search_string_def(TREE_TYPE(tem)))
1350+ return TRUE;
1351+ }
1352+ break;
1353+
1354+ case POINTER_TYPE:
1355+ /* Check if pointer variables, which may be a pointer assigned
1356+ by alloca function call, are declared. */
1357+ if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
1358+ || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
1359+ || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
1360+ may_have_alloca_pointer = TRUE;
1361+ break;
1362+
1363+ case REFERENCE_TYPE:
1364+ case OFFSET_TYPE:
1365+ default:
1366+ break;
1367+ }
1368+
1369+ return FALSE;
1370+}
1371+
1372+
1373+/* Examine whether the input contains frame pointer addressing. */
1374+int
1375+contains_fp (rtx op)
1376+{
1377+ enum rtx_code code;
1378+ rtx x;
1379+ int i, j;
1380+ const char *fmt;
1381+
1382+ x = op;
1383+ if (x == 0)
1384+ return FALSE;
1385+
1386+ code = GET_CODE (x);
1387+
1388+ switch (code)
1389+ {
1390+ case CONST_INT:
1391+ case CONST_DOUBLE:
1392+ case CONST:
1393+ case SYMBOL_REF:
1394+ case CODE_LABEL:
1395+ case REG:
1396+ case ADDRESSOF:
1397+ return FALSE;
1398+
1399+ case MEM:
1400+ /* This case is not generated at the stack protection.
1401+ see plus_constant_wide and simplify_plus_minus function. */
1402+ if (XEXP (x, 0) == virtual_stack_vars_rtx)
1403+ abort ();
1404+
1405+ case PLUS:
1406+ if (XEXP (x, 0) == virtual_stack_vars_rtx
1407+ && GET_CODE (XEXP (x, 1)) == CONST_INT)
1408+ return TRUE;
1409+
1410+ default:
1411+ break;
1412+ }
1413+
1414+ /* Scan all subexpressions. */
1415+ fmt = GET_RTX_FORMAT (code);
1416+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1417+ if (*fmt == 'e')
1418+ {
1419+ if (contains_fp (XEXP (x, i)))
1420+ return TRUE;
1421+ }
1422+ else if (*fmt == 'E')
1423+ for (j = 0; j < XVECLEN (x, i); j++)
1424+ if (contains_fp (XVECEXP (x, i, j)))
1425+ return TRUE;
1426+
1427+ return FALSE;
1428+}
1429+
1430+
1431+/* Examine whether the input contains any pointer. */
1432+static int
1433+search_pointer_def (tree type)
1434+{
1435+ tree tem;
1436+
1437+ if (! type)
1438+ return FALSE;
1439+
1440+ switch (TREE_CODE (type))
1441+ {
1442+ case UNION_TYPE:
1443+ case QUAL_UNION_TYPE:
1444+ case RECORD_TYPE:
1445+ /* Check if each field has a pointer. */
1446+ for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1447+ {
1448+ if ((TREE_CODE (tem) == TYPE_DECL)
1449+ || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
1450+ continue;
1451+
1452+ if (search_pointer_def (TREE_TYPE(tem)))
1453+ return TRUE;
1454+ }
1455+ break;
1456+
1457+ case ARRAY_TYPE:
1458+ return search_pointer_def (TREE_TYPE(type));
1459+
1460+ case POINTER_TYPE:
1461+ case REFERENCE_TYPE:
1462+ case OFFSET_TYPE:
1463+ if (TYPE_READONLY (TREE_TYPE (type)))
1464+ {
1465+ /* If this pointer contains function pointer,
1466+ it should be protected. */
1467+ return search_func_pointer (TREE_TYPE (type));
1468+ }
1469+ return TRUE;
1470+
1471+ default:
1472+ break;
1473+ }
1474+
1475+ return FALSE;
1476+}
1477+
1478+
1479+/* Examine whether the input contains function pointer. */
1480+static int
1481+search_func_pointer (tree type)
1482+{
1483+ tree tem;
1484+
1485+ if (! type)
1486+ return FALSE;
1487+
1488+ switch (TREE_CODE (type))
1489+ {
1490+ case UNION_TYPE:
1491+ case QUAL_UNION_TYPE:
1492+ case RECORD_TYPE:
1493+ if (! TREE_VISITED (type))
1494+ {
1495+ /* Mark the type as having been visited already. */
1496+ TREE_VISITED (type) = 1;
1497+
1498+ /* Check if each field has a function pointer. */
1499+ for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1500+ {
1501+ if (TREE_CODE (tem) == FIELD_DECL
1502+ && search_func_pointer (TREE_TYPE(tem)))
1503+ {
1504+ TREE_VISITED (type) = 0;
1505+ return TRUE;
1506+ }
1507+ }
1508+
1509+ TREE_VISITED (type) = 0;
1510+ }
1511+ break;
1512+
1513+ case ARRAY_TYPE:
1514+ return search_func_pointer (TREE_TYPE(type));
1515+
1516+ case POINTER_TYPE:
1517+ case REFERENCE_TYPE:
1518+ case OFFSET_TYPE:
1519+ if (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
1520+ return TRUE;
1521+ return search_func_pointer (TREE_TYPE(type));
1522+
1523+ default:
1524+ break;
1525+ }
1526+
1527+ return FALSE;
1528+}
1529+
1530+
1531+/* Check whether the specified rtx contains PLUS rtx with used flag. */
1532+static int
1533+check_used_flag (rtx x)
1534+{
1535+ register int i, j;
1536+ register enum rtx_code code;
1537+ register const char *format_ptr;
1538+
1539+ if (x == 0)
1540+ return FALSE;
1541+
1542+ code = GET_CODE (x);
1543+
1544+ switch (code)
1545+ {
1546+ case REG:
1547+ case QUEUED:
1548+ case CONST_INT:
1549+ case CONST_DOUBLE:
1550+ case SYMBOL_REF:
1551+ case CODE_LABEL:
1552+ case PC:
1553+ case CC0:
1554+ return FALSE;
1555+
1556+ case PLUS:
1557+ if (x->used)
1558+ return TRUE;
1559+
1560+ default:
1561+ break;
1562+ }
1563+
1564+ format_ptr = GET_RTX_FORMAT (code);
1565+ for (i = 0; i < GET_RTX_LENGTH (code); i++)
1566+ {
1567+ switch (*format_ptr++)
1568+ {
1569+ case 'e':
1570+ if (check_used_flag (XEXP (x, i)))
1571+ return TRUE;
1572+ break;
1573+
1574+ case 'E':
1575+ for (j = 0; j < XVECLEN (x, i); j++)
1576+ if (check_used_flag (XVECEXP (x, i, j)))
1577+ return TRUE;
1578+ break;
1579+ }
1580+ }
1581+
1582+ return FALSE;
1583+}
1584+
1585+
1586+/* Reset the used flag of every insn after the specified insn. */
1587+static void
1588+reset_used_flags_for_insns (rtx insn)
1589+{
1590+ int i, j;
1591+ enum rtx_code code;
1592+ const char *format_ptr;
1593+
1594+ for (; insn; insn = NEXT_INSN (insn))
1595+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1596+ || GET_CODE (insn) == CALL_INSN)
1597+ {
1598+ code = GET_CODE (insn);
1599+ insn->used = 0;
1600+ format_ptr = GET_RTX_FORMAT (code);
1601+
1602+ for (i = 0; i < GET_RTX_LENGTH (code); i++)
1603+ {
1604+ switch (*format_ptr++)
1605+ {
1606+ case 'e':
1607+ reset_used_flags_of_plus (XEXP (insn, i));
1608+ break;
1609+
1610+ case 'E':
1611+ for (j = 0; j < XVECLEN (insn, i); j++)
1612+ reset_used_flags_of_plus (XVECEXP (insn, i, j));
1613+ break;
1614+ }
1615+ }
1616+ }
1617+}
1618+
1619+
1620+/* Reset used flag of every variables in the specified block. */
1621+static void
1622+reset_used_flags_for_decls (tree block)
1623+{
1624+ tree types;
1625+ rtx home;
1626+
1627+ while (block && TREE_CODE(block)==BLOCK)
1628+ {
1629+ types = BLOCK_VARS(block);
1630+
1631+ for (types= BLOCK_VARS(block); types; types = TREE_CHAIN(types))
1632+ {
1633+ /* Skip the declaration that refers an external variable and
1634+         also skip a global variable. */
1635+ if (! DECL_EXTERNAL (types))
1636+ {
1637+ if (! DECL_RTL_SET_P (types))
1638+ continue;
1639+ home = DECL_RTL (types);
1640+
1641+ if (GET_CODE (home) == MEM
1642+ && GET_CODE (XEXP (home, 0)) == PLUS
1643+ && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
1644+ {
1645+ XEXP (home, 0)->used = 0;
1646+ }
1647+ }
1648+ }
1649+
1650+ reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
1651+
1652+ block = BLOCK_CHAIN (block);
1653+ }
1654+}
1655+
1656+
1657+/* Reset the used flag of every PLUS rtx derived from the specified rtx. */
1658+static void
1659+reset_used_flags_of_plus (rtx x)
1660+{
1661+ int i, j;
1662+ enum rtx_code code;
1663+ const char *format_ptr;
1664+
1665+ if (x == 0)
1666+ return;
1667+
1668+ code = GET_CODE (x);
1669+
1670+ switch (code)
1671+ {
1672+ /* These types may be freely shared so we needn't do any resetting
1673+ for them. */
1674+ case REG:
1675+ case QUEUED:
1676+ case CONST_INT:
1677+ case CONST_DOUBLE:
1678+ case SYMBOL_REF:
1679+ case CODE_LABEL:
1680+ case PC:
1681+ case CC0:
1682+ return;
1683+
1684+ case INSN:
1685+ case JUMP_INSN:
1686+ case CALL_INSN:
1687+ case NOTE:
1688+ case LABEL_REF:
1689+ case BARRIER:
1690+ /* The chain of insns is not being copied. */
1691+ return;
1692+
1693+ case PLUS:
1694+ x->used = 0;
1695+ break;
1696+
1697+ case CALL_PLACEHOLDER:
1698+ reset_used_flags_for_insns (XEXP (x, 0));
1699+ reset_used_flags_for_insns (XEXP (x, 1));
1700+ reset_used_flags_for_insns (XEXP (x, 2));
1701+ break;
1702+
1703+ default:
1704+ break;
1705+ }
1706+
1707+ format_ptr = GET_RTX_FORMAT (code);
1708+ for (i = 0; i < GET_RTX_LENGTH (code); i++)
1709+ {
1710+ switch (*format_ptr++)
1711+ {
1712+ case 'e':
1713+ reset_used_flags_of_plus (XEXP (x, i));
1714+ break;
1715+
1716+ case 'E':
1717+ for (j = 0; j < XVECLEN (x, i); j++)
1718+ reset_used_flags_of_plus (XVECEXP (x, i, j));
1719+ break;
1720+ }
1721+ }
1722+}
1723+
1724+
1725+/* Generate the prologue insns of the protector into the specified insn. */
1726+static void
1727+rtl_prologue (rtx insn)
1728+{
1729+#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
1730+#undef HAS_INIT_SECTION
1731+#define HAS_INIT_SECTION
1732+#endif
1733+
1734+ rtx _val;
1735+
1736+ for (; insn; insn = NEXT_INSN (insn))
1737+ if (GET_CODE (insn) == NOTE
1738+ && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1739+ break;
1740+
1741+#if !defined (HAS_INIT_SECTION)
1742+ /* If this function is `main', skip a call to `__main'
1743+ to run guard instruments after global initializers, etc. */
1744+ if (DECL_NAME (current_function_decl)
1745+ && MAIN_NAME_P (DECL_NAME (current_function_decl))
1746+ && DECL_CONTEXT (current_function_decl) == NULL_TREE)
1747+ {
1748+ rtx fbinsn = insn;
1749+ for (; insn; insn = NEXT_INSN (insn))
1750+ if (GET_CODE (insn) == NOTE
1751+ && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
1752+ break;
1753+ if (insn == 0)
1754+ insn = fbinsn;
1755+ }
1756+#endif
1757+
1758+ /* Mark the next insn of FUNCTION_BEG insn. */
1759+ prologue_insert_point = NEXT_INSN (insn);
1760+
1761+ start_sequence ();
1762+
1763+ _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
1764+ emit_move_insn ( guard_area, _guard);
1765+
1766+ _val = get_insns ();
1767+ end_sequence ();
1768+
1769+ emit_insn_before (_val, prologue_insert_point);
1770+}
1771+
1772+
1773+/* Generate the epilogue insns of the protector into the specified insn. */
1774+static void
1775+rtl_epilogue (rtx insn)
1776+{
1777+ rtx if_false_label;
1778+ rtx _val;
1779+ rtx funcname;
1780+ tree funcstr;
1781+ int flag_have_return = FALSE;
1782+
1783+ start_sequence ();
1784+
1785+#ifdef HAVE_return
1786+ if (HAVE_return)
1787+ {
1788+ rtx insn;
1789+ return_label = gen_label_rtx ();
1790+
1791+ for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
1792+ if (GET_CODE (insn) == JUMP_INSN
1793+ && GET_CODE (PATTERN (insn)) == RETURN
1794+ && GET_MODE (PATTERN (insn)) == VOIDmode)
1795+ {
1796+ rtx pat = gen_rtx_SET (VOIDmode,
1797+ pc_rtx,
1798+ gen_rtx_LABEL_REF (VOIDmode,
1799+ return_label));
1800+ PATTERN (insn) = pat;
1801+ flag_have_return = TRUE;
1802+ }
1803+
1804+
1805+ emit_label (return_label);
1806+ }
1807+#endif
1808+
1809+ /* if (guard_area != _guard) */
1810+ compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX);
1811+
1812+ if_false_label = gen_label_rtx (); /* { */
1813+ emit_jump_insn ( gen_beq(if_false_label));
1814+
1815+ /* generate string for the current function name */
1816+ funcstr = build_string (strlen(current_function_name ())+1,
1817+ current_function_name ());
1818+ TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);
1819+ funcname = output_constant_def (funcstr, 1);
1820+
1821+ emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__stack_smash_handler"),
1822+ 0, VOIDmode, 2,
1823+ XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
1824+
1825+ /* generate RTL to return from the current function */
1826+
1827+ emit_barrier (); /* } */
1828+ emit_label (if_false_label);
1829+
1830+ /* generate RTL to return from the current function */
1831+ if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
1832+ use_return_register ();
1833+
1834+#ifdef HAVE_return
1835+ if (HAVE_return && flag_have_return)
1836+ {
1837+ emit_jump_insn (gen_return ());
1838+ emit_barrier ();
1839+ }
1840+#endif
1841+
1842+ _val = get_insns ();
1843+ end_sequence ();
1844+
1845+ emit_insn_after (_val, insn);
1846+}
1847+
1848+
1849+/* For every variable which type is character array, moves its location
1850+ in the stack frame to the sweep_frame_offset position. */
1851+static void
1852+arrange_var_order (tree block)
1853+{
1854+ tree types;
1855+ HOST_WIDE_INT offset;
1856+
1857+ while (block && TREE_CODE(block)==BLOCK)
1858+ {
1859+      /* Arrange the location of character arrays in depth-first order. */
1860+ arrange_var_order (BLOCK_SUBBLOCKS (block));
1861+
1862+ for (types = BLOCK_VARS (block); types; types = TREE_CHAIN(types))
1863+ {
1864+ /* Skip the declaration that refers an external variable. */
1865+ if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
1866+ && TREE_CODE (types) == VAR_DECL
1867+ && ! DECL_ARTIFICIAL (types)
1868+ && DECL_RTL_SET_P (types)
1869+ && GET_CODE (DECL_RTL (types)) == MEM
1870+ && GET_MODE (DECL_RTL (types)) == BLKmode
1871+
1872+ && (is_array=0,
1873+ search_string_def (TREE_TYPE (types))
1874+ || (! current_function_defines_vulnerable_string && is_array)))
1875+ {
1876+ rtx home = DECL_RTL (types);
1877+
1878+ if (!(GET_CODE (home) == MEM
1879+ && (GET_CODE (XEXP (home, 0)) == MEM
1880+ || (GET_CODE (XEXP (home, 0)) == REG
1881+ && XEXP (home, 0) != virtual_stack_vars_rtx
1882+ && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
1883+ && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
1884+#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1885+ && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
1886+#endif
1887+ ))))
1888+ {
1889+ /* Found a string variable. */
1890+ HOST_WIDE_INT var_size =
1891+ ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
1892+ / BITS_PER_UNIT);
1893+
1894+ /* Confirmed it is BLKmode. */
1895+ int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1896+ var_size = CEIL_ROUND (var_size, alignment);
1897+
1898+ /* Skip the variable if it is top of the region
1899+ specified by sweep_frame_offset. */
1900+ offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
1901+ if (offset == sweep_frame_offset - var_size)
1902+ sweep_frame_offset -= var_size;
1903+
1904+ else if (offset < sweep_frame_offset - var_size)
1905+ sweep_string_variable (DECL_RTL (types), var_size);
1906+ }
1907+ }
1908+ }
1909+
1910+ block = BLOCK_CHAIN (block);
1911+ }
1912+}
1913+
1914+
1915+/* To protect every pointer argument and move character arrays in the argument,
1916+ Copy those variables to the top of the stack frame and move the location of
1917+   character arrays to the position of sweep_frame_offset.  */
1918+static void
1919+copy_args_for_protection (void)
1920+{
1921+ tree parms = DECL_ARGUMENTS (current_function_decl);
1922+ rtx temp_rtx;
1923+
1924+ parms = DECL_ARGUMENTS (current_function_decl);
1925+ for (; parms; parms = TREE_CHAIN (parms))
1926+ if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1927+ {
1928+ if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1929+ {
1930+ int string_p;
1931+ rtx seq;
1932+
1933+ string_p = search_string_def (TREE_TYPE(parms));
1934+
1935+ /* Check if it is a candidate to move. */
1936+ if (string_p || search_pointer_def (TREE_TYPE (parms)))
1937+ {
1938+ int arg_size
1939+ = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
1940+ / BITS_PER_UNIT);
1941+ tree passed_type = DECL_ARG_TYPE (parms);
1942+ tree nominal_type = TREE_TYPE (parms);
1943+
1944+ start_sequence ();
1945+
1946+ if (GET_CODE (DECL_RTL (parms)) == REG)
1947+ {
1948+ rtx safe = 0;
1949+
1950+ change_arg_use_of_insns (prologue_insert_point,
1951+ DECL_RTL (parms), &safe, 0);
1952+ if (safe)
1953+ {
1954+ /* Generate codes for copying the content. */
1955+ rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
1956+
1957+ /* Avoid register elimination in gcse.c. */
1958+ PATTERN (movinsn)->volatil = 1;
1959+
1960+ /* Save debugger info. */
1961+ SET_DECL_RTL (parms, safe);
1962+ }
1963+ }
1964+ else if (GET_CODE (DECL_RTL (parms)) == MEM
1965+ && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
1966+ {
1967+ rtx movinsn;
1968+ rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
1969+
1970+ /* Generate codes for copying the content. */
1971+ movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
1972+ /* Avoid register elimination in gcse.c. */
1973+ PATTERN (movinsn)->volatil = 1;
1974+
1975+ /* Change the addressof information to the newly
1976+ allocated pseudo register. */
1977+ emit_move_insn (DECL_RTL (parms), safe);
1978+
1979+ /* Save debugger info. */
1980+ SET_DECL_RTL (parms, safe);
1981+ }
1982+
1983+ /* See if the frontend wants to pass this by invisible
1984+ reference. */
1985+ else if (passed_type != nominal_type
1986+ && POINTER_TYPE_P (passed_type)
1987+ && TREE_TYPE (passed_type) == nominal_type)
1988+ {
1989+ rtx safe = 0, orig = XEXP (DECL_RTL (parms), 0);
1990+
1991+ change_arg_use_of_insns (prologue_insert_point,
1992+ orig, &safe, 0);
1993+ if (safe)
1994+ {
1995+ /* Generate codes for copying the content. */
1996+ rtx movinsn = emit_move_insn (safe, orig);
1997+
1998+ /* Avoid register elimination in gcse.c */
1999+ PATTERN (movinsn)->volatil = 1;
2000+
2001+ /* Save debugger info. */
2002+ SET_DECL_RTL (parms, safe);
2003+ }
2004+ }
2005+
2006+ else
2007+ {
2008+ /* Declare temporary local variable for parms. */
2009+ temp_rtx
2010+ = assign_stack_local (DECL_MODE (parms), arg_size,
2011+ DECL_MODE (parms) == BLKmode ?
2012+ -1 : 0);
2013+
2014+ MEM_IN_STRUCT_P (temp_rtx)
2015+ = AGGREGATE_TYPE_P (TREE_TYPE (parms));
2016+ set_mem_alias_set (temp_rtx, get_alias_set (parms));
2017+
2018+ /* Generate codes for copying the content. */
2019+ store_expr (parms, temp_rtx, 0);
2020+
2021+ /* Change the reference for each instructions. */
2022+ move_arg_location (prologue_insert_point, DECL_RTL (parms),
2023+ temp_rtx, arg_size);
2024+
2025+ /* Change the location of parms variable. */
2026+ SET_DECL_RTL (parms, temp_rtx);
2027+ }
2028+
2029+ seq = get_insns ();
2030+ end_sequence ();
2031+ emit_insn_before (seq, prologue_insert_point);
2032+
2033+#ifdef FRAME_GROWS_DOWNWARD
2034+ /* Process the string argument. */
2035+ if (string_p && DECL_MODE (parms) == BLKmode)
2036+ {
2037+ int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2038+ arg_size = CEIL_ROUND (arg_size, alignment);
2039+
2040+ /* Change the reference for each instructions. */
2041+ sweep_string_variable (DECL_RTL (parms), arg_size);
2042+ }
2043+#endif
2044+ }
2045+ }
2046+ }
2047+}
2048+
2049+
2050+/* Sweep a string variable to the position of sweep_frame_offset in the
2051+ stack frame, that is a last position of string variables. */
2052+static void
2053+sweep_string_variable (rtx sweep_var, HOST_WIDE_INT var_size)
2054+{
2055+ HOST_WIDE_INT sweep_offset;
2056+
2057+ switch (GET_CODE (sweep_var))
2058+ {
2059+ case MEM:
2060+ if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
2061+ && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
2062+ return;
2063+ sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
2064+ break;
2065+ case CONST_INT:
2066+ sweep_offset = INTVAL (sweep_var);
2067+ break;
2068+ default:
2069+ abort ();
2070+ }
2071+
2072+ /* Scan all declarations of variables and fix the offset address of
2073+ the variable based on the frame pointer. */
2074+ sweep_string_in_decls (DECL_INITIAL (current_function_decl),
2075+ sweep_offset, var_size);
2076+
2077+ /* Scan all argument variable and fix the offset address based on
2078+ the frame pointer. */
2079+ sweep_string_in_args (DECL_ARGUMENTS (current_function_decl),
2080+ sweep_offset, var_size);
2081+
2082+ /* For making room for sweep variable, scan all insns and
2083+ fix the offset address of the variable that is based on frame pointer. */
2084+ sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
2085+
2086+
2087+ /* Clear all the USED bits in operands of all insns and declarations of
2088+ local variables. */
2089+ reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
2090+ reset_used_flags_for_insns (function_first_insn);
2091+
2092+ sweep_frame_offset -= var_size;
2093+}
2094+
2095+
2096+
2097+/* Move an argument to the local variable addressed by frame_offset. */
2098+static void
2099+move_arg_location (rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size)
2100+{
2101+ /* For making room for sweep variable, scan all insns and
2102+ fix the offset address of the variable that is based on frame pointer. */
2103+ change_arg_use_of_insns (insn, orig, &new, var_size);
2104+
2105+
2106+ /* Clear all the USED bits in operands of all insns and declarations
2107+ of local variables. */
2108+ reset_used_flags_for_insns (insn);
2109+}
2110+
2111+
2112+/* Sweep character arrays declared as local variable. */
2113+static void
2114+sweep_string_in_decls (tree block, HOST_WIDE_INT sweep_offset,
2115+ HOST_WIDE_INT sweep_size)
2116+{
2117+ tree types;
2118+ HOST_WIDE_INT offset;
2119+ rtx home;
2120+
2121+ while (block && TREE_CODE(block)==BLOCK)
2122+ {
2123+ for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
2124+ {
2125+ /* Skip the declaration that refers an external variable and
2126+ also skip an global variable. */
2127+ if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
2128+
2129+ if (! DECL_RTL_SET_P (types))
2130+ continue;
2131+
2132+ home = DECL_RTL (types);
2133+
2134+ /* Process for static local variable. */
2135+ if (GET_CODE (home) == MEM
2136+ && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
2137+ continue;
2138+
2139+ if (GET_CODE (home) == MEM
2140+ && XEXP (home, 0) == virtual_stack_vars_rtx)
2141+ {
2142+ offset = 0;
2143+
2144+ /* the operand related to the sweep variable. */
2145+ if (sweep_offset <= offset
2146+ && offset < sweep_offset + sweep_size)
2147+ {
2148+ offset = sweep_frame_offset - sweep_size - sweep_offset;
2149+
2150+ XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
2151+ offset);
2152+ XEXP (home, 0)->used = 1;
2153+ }
2154+ else if (sweep_offset <= offset
2155+ && offset < sweep_frame_offset)
2156+ {
2157+ /* the rest of variables under sweep_frame_offset,
2158+ shift the location. */
2159+ XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
2160+ -sweep_size);
2161+ XEXP (home, 0)->used = 1;
2162+ }
2163+ }
2164+
2165+ if (GET_CODE (home) == MEM
2166+ && GET_CODE (XEXP (home, 0)) == MEM)
2167+ {
2168+ /* Process for dynamically allocated array. */
2169+ home = XEXP (home, 0);
2170+ }
2171+
2172+ if (GET_CODE (home) == MEM
2173+ && GET_CODE (XEXP (home, 0)) == PLUS
2174+ && XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
2175+ && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
2176+ {
2177+ if (! XEXP (home, 0)->used)
2178+ {
2179+ offset = AUTO_OFFSET(XEXP (home, 0));
2180+
2181+ /* the operand related to the sweep variable. */
2182+ if (sweep_offset <= offset
2183+ && offset < sweep_offset + sweep_size)
2184+ {
2185+
2186+ offset
2187+ += sweep_frame_offset - sweep_size - sweep_offset;
2188+ XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2189+ offset);
2190+
2191+ /* mark */
2192+ XEXP (home, 0)->used = 1;
2193+ }
2194+ else if (sweep_offset <= offset
2195+ && offset < sweep_frame_offset)
2196+ {
2197+ /* the rest of variables under sweep_frame_offset,
2198+ so shift the location. */
2199+
2200+ XEXP (XEXP (home, 0), 1)
2201+ = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
2202+
2203+ /* mark */
2204+ XEXP (home, 0)->used = 1;
2205+ }
2206+ }
2207+ }
2208+ }
2209+ }
2210+
2211+ sweep_string_in_decls (BLOCK_SUBBLOCKS (block),
2212+ sweep_offset, sweep_size);
2213+
2214+ block = BLOCK_CHAIN (block);
2215+ }
2216+}
2217+
2218+
2219+/* Sweep character arrays declared as argument. */
2220+static void
2221+sweep_string_in_args (tree parms, HOST_WIDE_INT sweep_offset,
2222+ HOST_WIDE_INT sweep_size)
2223+{
2224+ rtx home;
2225+ HOST_WIDE_INT offset;
2226+
2227+ for (; parms; parms = TREE_CHAIN (parms))
2228+ if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
2229+ {
2230+ if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
2231+ {
2232+ home = DECL_INCOMING_RTL (parms);
2233+
2234+ if (XEXP (home, 0)->used)
2235+ continue;
2236+
2237+ offset = AUTO_OFFSET(XEXP (home, 0));
2238+
2239+ /* the operand related to the sweep variable. */
2240+ if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
2241+ {
2242+ if (sweep_offset <= offset
2243+ && offset < sweep_offset + sweep_size)
2244+ {
2245+ offset += sweep_frame_offset - sweep_size - sweep_offset;
2246+ XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2247+ offset);
2248+
2249+ /* mark */
2250+ XEXP (home, 0)->used = 1;
2251+ }
2252+ else if (sweep_offset <= offset
2253+ && offset < sweep_frame_offset)
2254+ {
2255+ /* the rest of variables under sweep_frame_offset,
2256+ shift the location. */
2257+ XEXP (XEXP (home, 0), 1)
2258+ = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
2259+
2260+ /* mark */
2261+ XEXP (home, 0)->used = 1;
2262+ }
2263+ }
2264+ }
2265+ }
2266+}
2267+
2268+
2269+/* Set to 1 when the instruction contains virtual registers. */
2270+static int has_virtual_reg;
2271+
2272+/* Sweep the specified character array for every insns. The array starts from
2273+ the sweep_offset and its size is sweep_size. */
2274+static void
2275+sweep_string_use_of_insns (rtx insn, HOST_WIDE_INT sweep_offset,
2276+ HOST_WIDE_INT sweep_size)
2277+{
2278+ for (; insn; insn = NEXT_INSN (insn))
2279+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2280+ || GET_CODE (insn) == CALL_INSN)
2281+ {
2282+ has_virtual_reg = FALSE;
2283+ sweep_string_in_operand (insn, &PATTERN (insn),
2284+ sweep_offset, sweep_size);
2285+ sweep_string_in_operand (insn, &REG_NOTES (insn),
2286+ sweep_offset, sweep_size);
2287+ }
2288+}
2289+
2290+
2291+/* Sweep the specified character array, which starts from the sweep_offset and
2292+ its size is sweep_size.
2293+
2294+ When a pointer is given,
2295+ if it points the address higher than the array, it stays.
2296+ if it points the address inside the array, it changes to point inside
2297+ the sweeped array.
2298+ if it points the address lower than the array, it shifts higher address by
2299+ the sweep_size. */
2300+static void
2301+sweep_string_in_operand (rtx insn, rtx *loc,
2302+ HOST_WIDE_INT sweep_offset, HOST_WIDE_INT sweep_size)
2303+{
2304+ rtx x = *loc;
2305+ enum rtx_code code;
2306+ int i, j, k = 0;
2307+ HOST_WIDE_INT offset;
2308+ const char *fmt;
2309+
2310+ if (x == 0)
2311+ return;
2312+
2313+ code = GET_CODE (x);
2314+
2315+ switch (code)
2316+ {
2317+ case CONST_INT:
2318+ case CONST_DOUBLE:
2319+ case CONST:
2320+ case SYMBOL_REF:
2321+ case CODE_LABEL:
2322+ case PC:
2323+ case CC0:
2324+ case ASM_INPUT:
2325+ case ADDR_VEC:
2326+ case ADDR_DIFF_VEC:
2327+ case RETURN:
2328+ case ADDRESSOF:
2329+ return;
2330+
2331+ case REG:
2332+ if (x == virtual_incoming_args_rtx
2333+ || x == virtual_stack_vars_rtx
2334+ || x == virtual_stack_dynamic_rtx
2335+ || x == virtual_outgoing_args_rtx
2336+ || x == virtual_cfa_rtx)
2337+ has_virtual_reg = TRUE;
2338+ return;
2339+
2340+ case SET:
2341+ /*
2342+ skip setjmp setup insn and setjmp restore insn
2343+ Example:
2344+ (set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
2345+ (set (virtual_stack_vars_rtx) (REG))
2346+ */
2347+ if (GET_CODE (XEXP (x, 0)) == MEM
2348+ && XEXP (x, 1) == virtual_stack_vars_rtx)
2349+ return;
2350+ if (XEXP (x, 0) == virtual_stack_vars_rtx
2351+ && GET_CODE (XEXP (x, 1)) == REG)
2352+ return;
2353+ break;
2354+
2355+ case PLUS:
2356+ /* Handle typical case of frame register plus constant. */
2357+ if (XEXP (x, 0) == virtual_stack_vars_rtx
2358+ && GET_CODE (XEXP (x, 1)) == CONST_INT)
2359+ {
2360+ if (x->used)
2361+ goto single_use_of_virtual_reg;
2362+
2363+ offset = AUTO_OFFSET(x);
2364+
2365+ /* When arguments grow downward, the virtual incoming
2366+ args pointer points to the top of the argument block,
2367+ so block is identified by the pointer - 1.
2368+ The flag is set at the copy_rtx_and_substitute in integrate.c */
2369+ if (RTX_INTEGRATED_P (x))
2370+ k = -1;
2371+
2372+ /* the operand related to the sweep variable. */
2373+ if (sweep_offset <= offset + k
2374+ && offset + k < sweep_offset + sweep_size)
2375+ {
2376+ offset += sweep_frame_offset - sweep_size - sweep_offset;
2377+
2378+ XEXP (x, 0) = virtual_stack_vars_rtx;
2379+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2380+ x->used = 1;
2381+ }
2382+ else if (sweep_offset <= offset + k
2383+ && offset + k < sweep_frame_offset)
2384+ {
2385+ /* the rest of variables under sweep_frame_offset,
2386+ shift the location. */
2387+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
2388+ x->used = 1;
2389+ }
2390+
2391+ single_use_of_virtual_reg:
2392+ if (has_virtual_reg) {
2393+ /* excerpt from insn_invalid_p in recog.c */
2394+ int icode = recog_memoized (insn);
2395+
2396+ if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
2397+ {
2398+ rtx temp, seq;
2399+
2400+ start_sequence ();
2401+ temp = force_operand (x, NULL_RTX);
2402+ seq = get_insns ();
2403+ end_sequence ();
2404+
2405+ emit_insn_before (seq, insn);
2406+ if (! validate_change (insn, loc, temp, 0)
2407+ && !validate_replace_rtx (x, temp, insn))
2408+ fatal_insn ("sweep_string_in_operand", insn);
2409+ }
2410+ }
2411+
2412+ has_virtual_reg = TRUE;
2413+ return;
2414+ }
2415+
2416+#ifdef FRAME_GROWS_DOWNWARD
2417+ /* Alert the case of frame register plus constant given by reg. */
2418+ else if (XEXP (x, 0) == virtual_stack_vars_rtx
2419+ && GET_CODE (XEXP (x, 1)) == REG)
2420+ fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
2421+#endif
2422+
2423+ /*
2424+ process further subtree:
2425+ Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2426+ (const_int 5))
2427+ */
2428+ break;
2429+
2430+ case CALL_PLACEHOLDER:
2431+ for (i = 0; i < 3; i++)
2432+ {
2433+ rtx seq = XEXP (x, i);
2434+ if (seq)
2435+ {
2436+ push_to_sequence (seq);
2437+ sweep_string_use_of_insns (XEXP (x, i),
2438+ sweep_offset, sweep_size);
2439+ XEXP (x, i) = get_insns ();
2440+ end_sequence ();
2441+ }
2442+ }
2443+ break;
2444+
2445+ default:
2446+ break;
2447+ }
2448+
2449+ /* Scan all subexpressions. */
2450+ fmt = GET_RTX_FORMAT (code);
2451+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2452+ if (*fmt == 'e')
2453+ {
2454+ /*
2455+ virtual_stack_vars_rtx without offset
2456+ Example:
2457+ (set (reg:SI xx) (reg:SI 78))
2458+ (set (reg:SI xx) (MEM (reg:SI 78)))
2459+ */
2460+ if (XEXP (x, i) == virtual_stack_vars_rtx)
2461+ fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
2462+ sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
2463+ }
2464+ else if (*fmt == 'E')
2465+ for (j = 0; j < XVECLEN (x, i); j++)
2466+ sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
2467+}
2468+
2469+
2470+/* Change the use of an argument to the use of the duplicated variable for
2471+ every insns, The variable is addressed by new rtx. */
2472+static void
2473+change_arg_use_of_insns (rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size)
2474+{
2475+ for (; insn; insn = NEXT_INSN (insn))
2476+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2477+ || GET_CODE (insn) == CALL_INSN)
2478+ {
2479+ rtx seq;
2480+
2481+ start_sequence ();
2482+ change_arg_use_in_operand (insn, PATTERN (insn), orig, new, size);
2483+
2484+ seq = get_insns ();
2485+ end_sequence ();
2486+ emit_insn_before (seq, insn);
2487+
2488+ /* load_multiple insn from virtual_incoming_args_rtx have several
2489+ load insns. If every insn change the load address of arg
2490+ to frame region, those insns are moved before the PARALLEL insn
2491+ and remove the PARALLEL insn. */
2492+ if (GET_CODE (PATTERN (insn)) == PARALLEL
2493+ && XVECLEN (PATTERN (insn), 0) == 0)
2494+ delete_insn (insn);
2495+ }
2496+}
2497+
2498+
2499+/* Change the use of an argument to the use of the duplicated variable for
2500+ every rtx derived from the x. */
2501+static void
2502+change_arg_use_in_operand (rtx insn, rtx x, rtx orig, rtx *new, HOST_WIDE_INT size)
2503+{
2504+ enum rtx_code code;
2505+ int i, j;
2506+ HOST_WIDE_INT offset;
2507+ const char *fmt;
2508+
2509+ if (x == 0)
2510+ return;
2511+
2512+ code = GET_CODE (x);
2513+
2514+ switch (code)
2515+ {
2516+ case CONST_INT:
2517+ case CONST_DOUBLE:
2518+ case CONST:
2519+ case SYMBOL_REF:
2520+ case CODE_LABEL:
2521+ case PC:
2522+ case CC0:
2523+ case ASM_INPUT:
2524+ case ADDR_VEC:
2525+ case ADDR_DIFF_VEC:
2526+ case RETURN:
2527+ case REG:
2528+ case ADDRESSOF:
2529+ return;
2530+
2531+ case MEM:
2532+ /* Handle special case of MEM (incoming_args). */
2533+ if (GET_CODE (orig) == MEM
2534+ && XEXP (x, 0) == virtual_incoming_args_rtx)
2535+ {
2536+ offset = 0;
2537+
2538+ /* the operand related to the sweep variable. */
2539+ if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2540+ offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
2541+
2542+ offset = AUTO_OFFSET(XEXP (*new, 0))
2543+ + (offset - AUTO_OFFSET(XEXP (orig, 0)));
2544+
2545+ XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
2546+ XEXP (x, 0)->used = 1;
2547+
2548+ return;
2549+ }
2550+ }
2551+ break;
2552+
2553+ case PLUS:
2554+ /* Handle special case of frame register plus constant. */
2555+ if (GET_CODE (orig) == MEM
2556+ && XEXP (x, 0) == virtual_incoming_args_rtx
2557+ && GET_CODE (XEXP (x, 1)) == CONST_INT
2558+ && ! x->used)
2559+ {
2560+ offset = AUTO_OFFSET(x);
2561+
2562+ /* the operand related to the sweep variable. */
2563+ if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2564+ offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
2565+ {
2566+
2567+ offset = (AUTO_OFFSET(XEXP (*new, 0))
2568+ + (offset - AUTO_OFFSET(XEXP (orig, 0))));
2569+
2570+ XEXP (x, 0) = virtual_stack_vars_rtx;
2571+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2572+ x->used = 1;
2573+
2574+ return;
2575+ }
2576+
2577+ /*
2578+ process further subtree:
2579+ Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2580+ (const_int 5))
2581+ */
2582+ }
2583+ break;
2584+
2585+ case SET:
2586+ /* Handle special case of "set (REG or MEM) (incoming_args)".
2587+	 It means that the address of the 1st argument is stored. */
2588+ if (GET_CODE (orig) == MEM
2589+ && XEXP (x, 1) == virtual_incoming_args_rtx)
2590+ {
2591+ offset = 0;
2592+
2593+ /* the operand related to the sweep variable. */
2594+ if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2595+ offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
2596+ {
2597+ offset = (AUTO_OFFSET(XEXP (*new, 0))
2598+ + (offset - AUTO_OFFSET(XEXP (orig, 0))));
2599+
2600+ XEXP (x, 1) = force_operand (plus_constant (virtual_stack_vars_rtx,
2601+ offset), NULL_RTX);
2602+ XEXP (x, 1)->used = 1;
2603+
2604+ return;
2605+ }
2606+ }
2607+ break;
2608+
2609+ case CALL_PLACEHOLDER:
2610+ for (i = 0; i < 3; i++)
2611+ {
2612+ rtx seq = XEXP (x, i);
2613+ if (seq)
2614+ {
2615+ push_to_sequence (seq);
2616+ change_arg_use_of_insns (XEXP (x, i), orig, new, size);
2617+ XEXP (x, i) = get_insns ();
2618+ end_sequence ();
2619+ }
2620+ }
2621+ break;
2622+
2623+ case PARALLEL:
2624+ for (j = 0; j < XVECLEN (x, 0); j++)
2625+ {
2626+ change_arg_use_in_operand (insn, XVECEXP (x, 0, j), orig, new, size);
2627+ }
2628+ if (recog_memoized (insn) < 0)
2629+ {
2630+ for (i = 0, j = 0; j < XVECLEN (x, 0); j++)
2631+ {
2632+ /* if parallel insn has a insn used virtual_incoming_args_rtx,
2633+ the insn is removed from this PARALLEL insn. */
2634+ if (check_used_flag (XVECEXP (x, 0, j)))
2635+ {
2636+ emit_insn (XVECEXP (x, 0, j));
2637+ XVECEXP (x, 0, j) = NULL;
2638+ }
2639+ else
2640+ XVECEXP (x, 0, i++) = XVECEXP (x, 0, j);
2641+ }
2642+ PUT_NUM_ELEM (XVEC (x, 0), i);
2643+ }
2644+ return;
2645+
2646+ default:
2647+ break;
2648+ }
2649+
2650+ /* Scan all subexpressions. */
2651+ fmt = GET_RTX_FORMAT (code);
2652+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2653+ if (*fmt == 'e')
2654+ {
2655+ if (XEXP (x, i) == orig)
2656+ {
2657+ if (*new == 0)
2658+ *new = gen_reg_rtx (GET_MODE (orig));
2659+ XEXP (x, i) = *new;
2660+ continue;
2661+ }
2662+ change_arg_use_in_operand (insn, XEXP (x, i), orig, new, size);
2663+ }
2664+ else if (*fmt == 'E')
2665+ for (j = 0; j < XVECLEN (x, i); j++)
2666+ {
2667+ if (XVECEXP (x, i, j) == orig)
2668+ {
2669+ if (*new == 0)
2670+ *new = gen_reg_rtx (GET_MODE (orig));
2671+ XVECEXP (x, i, j) = *new;
2672+ continue;
2673+ }
2674+ change_arg_use_in_operand (insn, XVECEXP (x, i, j), orig, new, size);
2675+ }
2676+}
2677+
2678+
2679+/* Validate every instructions from the specified instruction.
2680+
2681+ The stack protector prohibits to generate machine specific frame addressing
2682+ for the first rtl generation. The prepare_stack_protection must convert
2683+ machine independent frame addressing to machine specific frame addressing,
2684+ so instructions for inline functions, which skip the conversion of
2685+ the stack protection, validate every instructions. */
2686+static void
2687+validate_insns_of_varrefs (rtx insn)
2688+{
2689+ rtx next;
2690+
2691+ /* Initialize recognition, indicating that volatile is OK. */
2692+ init_recog ();
2693+
2694+ for (; insn; insn = next)
2695+ {
2696+ next = NEXT_INSN (insn);
2697+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2698+ || GET_CODE (insn) == CALL_INSN)
2699+ {
2700+ /* excerpt from insn_invalid_p in recog.c */
2701+ int icode = recog_memoized (insn);
2702+
2703+ if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
2704+ validate_operand_of_varrefs (insn, &PATTERN (insn));
2705+ }
2706+ }
2707+
2708+ init_recog_no_volatile ();
2709+}
2710+
2711+
2712+/* Validate frame addressing of the rtx and convert it to machine specific one. */
2713+static void
2714+validate_operand_of_varrefs (rtx insn, rtx *loc)
2715+{
2716+ enum rtx_code code;
2717+ rtx x, temp, seq;
2718+ int i, j;
2719+ const char *fmt;
2720+
2721+ x = *loc;
2722+ if (x == 0)
2723+ return;
2724+
2725+ code = GET_CODE (x);
2726+
2727+ switch (code)
2728+ {
2729+ case USE:
2730+ case CONST_INT:
2731+ case CONST_DOUBLE:
2732+ case CONST:
2733+ case SYMBOL_REF:
2734+ case CODE_LABEL:
2735+ case PC:
2736+ case CC0:
2737+ case ASM_INPUT:
2738+ case ADDR_VEC:
2739+ case ADDR_DIFF_VEC:
2740+ case RETURN:
2741+ case REG:
2742+ case ADDRESSOF:
2743+ return;
2744+
2745+ case PLUS:
2746+ /* validate insn of frame register plus constant. */
2747+ if (GET_CODE (x) == PLUS
2748+ && XEXP (x, 0) == virtual_stack_vars_rtx
2749+ && GET_CODE (XEXP (x, 1)) == CONST_INT)
2750+ {
2751+ start_sequence ();
2752+
2753+ { /* excerpt from expand_binop in optabs.c */
2754+ optab binoptab = add_optab;
2755+ enum machine_mode mode = GET_MODE (x);
2756+ int icode = (int) binoptab->handlers[(int) mode].insn_code;
2757+ enum machine_mode mode1 = insn_data[icode].operand[2].mode;
2758+ rtx pat;
2759+ rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
2760+ temp = gen_reg_rtx (mode);
2761+
2762+ /* Now, if insn's predicates don't allow offset operands,
2763+ put them into pseudo regs. */
2764+
2765+ if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
2766+ && mode1 != VOIDmode)
2767+ xop1 = copy_to_mode_reg (mode1, xop1);
2768+
2769+ pat = GEN_FCN (icode) (temp, xop0, xop1);
2770+ if (pat)
2771+ emit_insn (pat);
2772+ else
2773+ abort (); /* there must be add_optab handler. */
2774+ }
2775+ seq = get_insns ();
2776+ end_sequence ();
2777+
2778+ emit_insn_before (seq, insn);
2779+ if (! validate_change (insn, loc, temp, 0))
2780+ abort ();
2781+ return;
2782+ }
2783+ break;
2784+
2785+
2786+ case CALL_PLACEHOLDER:
2787+ for (i = 0; i < 3; i++)
2788+ {
2789+ rtx seq = XEXP (x, i);
2790+ if (seq)
2791+ {
2792+ push_to_sequence (seq);
2793+ validate_insns_of_varrefs (XEXP (x, i));
2794+ XEXP (x, i) = get_insns ();
2795+ end_sequence ();
2796+ }
2797+ }
2798+ break;
2799+
2800+ default:
2801+ break;
2802+ }
2803+
2804+ /* Scan all subexpressions. */
2805+ fmt = GET_RTX_FORMAT (code);
2806+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2807+ if (*fmt == 'e')
2808+ validate_operand_of_varrefs (insn, &XEXP (x, i));
2809+ else if (*fmt == 'E')
2810+ for (j = 0; j < XVECLEN (x, i); j++)
2811+ validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
2812+}
2813+
2814+
2815+
2816+/* Return size that is not allocated for stack frame. It will be allocated
2817+ to modify the home of pseudo registers called from global_alloc. */
2818+HOST_WIDE_INT
2819+get_frame_free_size (void)
2820+{
2821+ if (! flag_propolice_protection)
2822+ return 0;
2823+
2824+ return push_allocated_offset - push_frame_offset;
2825+}
2826+
2827+
2828+/* The following codes are invoked after the instantiation of pseudo registers.
2829+
2830+   Reorder local variables to place a pseudo register after buffers to avoid
2831+ the corruption of local variables that could be used to further corrupt
2832+ arbitrary memory locations. */
2833+#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2834+static void push_frame (HOST_WIDE_INT, HOST_WIDE_INT);
2835+static void push_frame_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
2836+static void push_frame_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
2837+static void push_frame_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
2838+static void push_frame_in_operand (rtx, rtx, HOST_WIDE_INT, HOST_WIDE_INT);
2839+static void push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT, HOST_WIDE_INT);
2840+static void push_frame_of_reg_equiv_constant (HOST_WIDE_INT, HOST_WIDE_INT);
2841+static void reset_used_flags_for_push_frame (void);
2842+static int check_out_of_frame_access (rtx, HOST_WIDE_INT);
2843+static int check_out_of_frame_access_in_operand (rtx, HOST_WIDE_INT);
2844+#endif
2845+
2846+
2847+/* Assign stack local at the stage of register allocator. If a pseudo reg is
2848+ spilled out from such an allocation, it is allocated on the stack.
2849+   The protector keeps the location in a lower stack region than the location of
2850+   swept arrays. */
2851+rtx
2852+assign_stack_local_for_pseudo_reg (enum machine_mode mode,
2853+ HOST_WIDE_INT size, int align)
2854+{
2855+#if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
2856+ return assign_stack_local (mode, size, align);
2857+#else
2858+ tree blocks = DECL_INITIAL (current_function_decl);
2859+ rtx new;
2860+ HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
2861+ int first_call_from_purge_addressof, first_call_from_global_alloc;
2862+
2863+ if (! flag_propolice_protection
2864+ || size == 0
2865+ || ! blocks
2866+ || current_function_is_inlinable
2867+ || ! search_string_from_argsandvars (CALL_FROM_PUSH_FRAME)
2868+ || current_function_contains_functions)
2869+ return assign_stack_local (mode, size, align);
2870+
2871+ first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
2872+ first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
2873+ saved_cse_not_expected = cse_not_expected;
2874+
2875+ starting_frame = ((STARTING_FRAME_OFFSET)
2876+ ? STARTING_FRAME_OFFSET : BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2877+ units_per_push = MAX (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2878+ GET_MODE_SIZE (mode));
2879+
2880+ if (first_call_from_purge_addressof)
2881+ {
2882+ push_frame_offset = push_allocated_offset;
2883+ if (check_out_of_frame_access (get_insns (), starting_frame))
2884+ {
2885+ /* After the purge_addressof stage, there may be an instruction which
2886+ have the pointer less than the starting_frame.
2887+	     if there is an access below frame, push dummy region to separate
2888+ the address of instantiated variables. */
2889+ push_frame (GET_MODE_SIZE (DImode), 0);
2890+ assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2891+ }
2892+ }
2893+
2894+ if (first_call_from_global_alloc)
2895+ {
2896+ push_frame_offset = push_allocated_offset = 0;
2897+ if (check_out_of_frame_access (get_insns (), starting_frame))
2898+ {
2899+ if (STARTING_FRAME_OFFSET)
2900+ {
2901+ /* if there is an access below frame, push dummy region
2902+		 to separate the address of instantiated variables. */
2903+ push_frame (GET_MODE_SIZE (DImode), 0);
2904+ assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2905+ }
2906+ else
2907+ push_allocated_offset = starting_frame;
2908+ }
2909+ }
2910+
2911+ saved_frame_offset = frame_offset;
2912+ frame_offset = push_frame_offset;
2913+
2914+ new = assign_stack_local (mode, size, align);
2915+
2916+ push_frame_offset = frame_offset;
2917+ frame_offset = saved_frame_offset;
2918+
2919+ if (push_frame_offset > push_allocated_offset)
2920+ {
2921+ push_frame (units_per_push,
2922+ push_allocated_offset + STARTING_FRAME_OFFSET);
2923+
2924+ assign_stack_local (BLKmode, units_per_push, -1);
2925+ push_allocated_offset += units_per_push;
2926+ }
2927+
2928+ /* At the second call from global alloc, alpha push frame and assign
2929+ a local variable to the top of the stack. */
2930+ if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
2931+ push_frame_offset = push_allocated_offset = 0;
2932+
2933+ return new;
2934+#endif
2935+}
2936+
2937+
2938+#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2939+
2940+/* Push frame information for instantiating pseudo register at the top of stack.
2941+ This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is
2942+ not defined.
2943+
2944+ It is called by purge_addressof function and global_alloc (or reload)
2945+ function. */
2946+static void
2947+push_frame (HOST_WIDE_INT var_size, HOST_WIDE_INT boundary)
2948+{
2949+ reset_used_flags_for_push_frame();
2950+
2951+ /* Scan all declarations of variables and fix the offset address of
2952+ the variable based on the frame pointer. */
2953+ push_frame_in_decls (DECL_INITIAL (current_function_decl),
2954+ var_size, boundary);
2955+
2956+ /* Scan all argument variable and fix the offset address based on
2957+ the frame pointer. */
2958+ push_frame_in_args (DECL_ARGUMENTS (current_function_decl),
2959+ var_size, boundary);
2960+
2961+ /* Scan all operands of all insns and fix the offset address
2962+ based on the frame pointer. */
2963+ push_frame_of_insns (get_insns (), var_size, boundary);
2964+
2965+ /* Scan all reg_equiv_memory_loc and reg_equiv_constant. */
2966+ push_frame_of_reg_equiv_memory_loc (var_size, boundary);
2967+ push_frame_of_reg_equiv_constant (var_size, boundary);
2968+
2969+ reset_used_flags_for_push_frame();
2970+}
2971+
2972+
2973+/* Reset used flag of every insns, reg_equiv_memory_loc,
2974+ and reg_equiv_constant. */
2975+static void
2976+reset_used_flags_for_push_frame(void)
2977+{
2978+ int i;
2979+ extern rtx *reg_equiv_memory_loc;
2980+ extern rtx *reg_equiv_constant;
2981+
2982+ /* Clear all the USED bits in operands of all insns and declarations of
2983+ local vars. */
2984+ reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
2985+ reset_used_flags_for_insns (get_insns ());
2986+
2987+
2988+ /* The following codes are processed if the push_frame is called from
2989+ global_alloc (or reload) function. */
2990+ if (reg_equiv_memory_loc == 0)
2991+ return;
2992+
2993+ for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2994+ if (reg_equiv_memory_loc[i])
2995+ {
2996+ rtx x = reg_equiv_memory_loc[i];
2997+
2998+ if (GET_CODE (x) == MEM
2999+ && GET_CODE (XEXP (x, 0)) == PLUS
3000+ && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
3001+ {
3002+ /* reset */
3003+ XEXP (x, 0)->used = 0;
3004+ }
3005+ }
3006+
3007+
3008+ if (reg_equiv_constant == 0)
3009+ return;
3010+
3011+ for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
3012+ if (reg_equiv_constant[i])
3013+ {
3014+ rtx x = reg_equiv_constant[i];
3015+
3016+ if (GET_CODE (x) == PLUS
3017+ && AUTO_BASEPTR (x) == frame_pointer_rtx)
3018+ {
3019+ /* reset */
3020+ x->used = 0;
3021+ }
3022+ }
3023+}
3024+
3025+
3026+/* Push every variables declared as a local variable and make a room for
3027+ instantiated register. */
3028+static void
3029+push_frame_in_decls (tree block, HOST_WIDE_INT push_size,
3030+ HOST_WIDE_INT boundary)
3031+{
3032+ tree types;
3033+ HOST_WIDE_INT offset;
3034+ rtx home;
3035+
3036+ while (block && TREE_CODE(block)==BLOCK)
3037+ {
3038+ for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
3039+ {
3040+ /* Skip the declaration that refers an external variable and
3041+ also skip an global variable. */
3042+ if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
3043+ {
3044+ if (! DECL_RTL_SET_P (types))
3045+ continue;
3046+
3047+ home = DECL_RTL (types);
3048+
3049+ /* Process for static local variable. */
3050+ if (GET_CODE (home) == MEM
3051+ && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
3052+ continue;
3053+
3054+ if (GET_CODE (home) == MEM
3055+ && GET_CODE (XEXP (home, 0)) == REG)
3056+ {
3057+ if (XEXP (home, 0) != frame_pointer_rtx
3058+ || boundary != 0)
3059+ continue;
3060+
3061+ XEXP (home, 0) = plus_constant (frame_pointer_rtx,
3062+ push_size);
3063+
3064+ /* mark */
3065+ XEXP (home, 0)->used = 1;
3066+ }
3067+
3068+ if (GET_CODE (home) == MEM
3069+ && GET_CODE (XEXP (home, 0)) == MEM)
3070+ {
3071+ /* Process for dynamically allocated array. */
3072+ home = XEXP (home, 0);
3073+ }
3074+
3075+ if (GET_CODE (home) == MEM
3076+ && GET_CODE (XEXP (home, 0)) == PLUS
3077+ && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
3078+ {
3079+ offset = AUTO_OFFSET(XEXP (home, 0));
3080+
3081+ if (! XEXP (home, 0)->used
3082+ && offset >= boundary)
3083+ {
3084+ offset += push_size;
3085+ XEXP (XEXP (home, 0), 1)
3086+ = gen_rtx_CONST_INT (VOIDmode, offset);
3087+
3088+ /* mark */
3089+ XEXP (home, 0)->used = 1;
3090+ }
3091+ }
3092+ }
3093+ }
3094+
3095+ push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
3096+ block = BLOCK_CHAIN (block);
3097+ }
3098+}
3099+
3100+
3101+/* Push every variables declared as an argument and make a room for
3102+ instantiated register. */
3103+static void
3104+push_frame_in_args (tree parms, HOST_WIDE_INT push_size,
3105+ HOST_WIDE_INT boundary)
3106+{
3107+ rtx home;
3108+ HOST_WIDE_INT offset;
3109+
3110+ for (; parms; parms = TREE_CHAIN (parms))
3111+ if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
3112+ {
3113+ if (PARM_PASSED_IN_MEMORY (parms))
3114+ {
3115+ home = DECL_INCOMING_RTL (parms);
3116+ offset = AUTO_OFFSET(XEXP (home, 0));
3117+
3118+ if (XEXP (home, 0)->used || offset < boundary)
3119+ continue;
3120+
3121+ /* the operand related to the sweep variable. */
3122+ if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
3123+ {
3124+ if (XEXP (home, 0) == frame_pointer_rtx)
3125+ XEXP (home, 0) = plus_constant (frame_pointer_rtx,
3126+ push_size);
3127+ else {
3128+ offset += push_size;
3129+ XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
3130+ offset);
3131+ }
3132+
3133+ /* mark */
3134+ XEXP (home, 0)->used = 1;
3135+ }
3136+ }
3137+ }
3138+}
3139+
3140+
3141+/* Set to 1 when the instruction has the reference to be pushed. */
3142+static int insn_pushed;
3143+
3144+/* Tables of equivalent registers with frame pointer. */
3145+static int *fp_equiv = 0;
3146+
3147+
3148+/* Push the frame region to make a room for allocated local variable. */
3149+static void
3150+push_frame_of_insns (rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
3151+{
3152+ /* init fp_equiv */
3153+ fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
3154+
3155+ for (; insn; insn = NEXT_INSN (insn))
3156+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3157+ || GET_CODE (insn) == CALL_INSN)
3158+ {
3159+ rtx last;
3160+
3161+ insn_pushed = FALSE;
3162+
3163+ /* Push frame in INSN operation. */
3164+ push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
3165+
3166+ /* Push frame in NOTE. */
3167+ push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
3168+
3169+ /* Push frame in CALL EXPR_LIST. */
3170+ if (GET_CODE (insn) == CALL_INSN)
3171+ push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn),
3172+ push_size, boundary);
3173+
3174+ /* Pushed frame addressing style may not be machine specific one.
3175+ so the instruction should be converted to use the machine specific
3176+ frame addressing. */
3177+ if (insn_pushed
3178+ && (last = try_split (PATTERN (insn), insn, 1)) != insn)
3179+ {
3180+ rtx first = NEXT_INSN (insn);
3181+ rtx trial = NEXT_INSN (first);
3182+ rtx pattern = PATTERN (trial);
3183+ rtx set;
3184+
3185+ /* Update REG_EQUIV info to the first splitted insn. */
3186+ if ((set = single_set (insn))
3187+ && find_reg_note (insn, REG_EQUIV, SET_SRC (set))
3188+ && GET_CODE (PATTERN (first)) == SET)
3189+ {
3190+ REG_NOTES (first)
3191+ = gen_rtx_EXPR_LIST (REG_EQUIV,
3192+ SET_SRC (PATTERN (first)),
3193+ REG_NOTES (first));
3194+ }
3195+
3196+ /* copy the first insn of splitted insns to the original insn and
3197+ delete the first insn,
3198+ because the original insn is pointed from records:
3199+ insn_chain, reg_equiv_init, used for global_alloc. */
3200+ if (cse_not_expected)
3201+ {
3202+ add_insn_before (insn, first);
3203+
3204+ /* Copy the various flags, and other information. */
3205+ memcpy (insn, first, sizeof (struct rtx_def) - sizeof (rtunion));
3206+ PATTERN (insn) = PATTERN (first);
3207+ INSN_CODE (insn) = INSN_CODE (first);
3208+ LOG_LINKS (insn) = LOG_LINKS (first);
3209+ REG_NOTES (insn) = REG_NOTES (first);
3210+
3211+ /* then remove the first insn of splitted insns. */
3212+ remove_insn (first);
3213+ INSN_DELETED_P (first) = 1;
3214+ }
3215+
3216+ if (GET_CODE (pattern) == SET
3217+ && GET_CODE (XEXP (pattern, 0)) == REG
3218+ && GET_CODE (XEXP (pattern, 1)) == PLUS
3219+ && XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
3220+ && GET_CODE (XEXP (XEXP (pattern, 1), 1)) == CONST_INT)
3221+ {
3222+ rtx offset = XEXP (XEXP (pattern, 1), 1);
3223+ fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
3224+
3225+ delete_insn (trial);
3226+ }
3227+
3228+ insn = last;
3229+ }
3230+ }
3231+
3232+ /* Clean up. */
3233+ free (fp_equiv);
3234+}
3235+
3236+
3237+/* Push the frame region by changing the operand that points the frame. */
3238+static void
3239+push_frame_in_operand (rtx insn, rtx orig,
3240+ HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
3241+{
3242+ rtx x = orig;
3243+ enum rtx_code code;
3244+ int i, j;
3245+ HOST_WIDE_INT offset;
3246+ const char *fmt;
3247+
3248+ if (x == 0)
3249+ return;
3250+
3251+ code = GET_CODE (x);
3252+
3253+ switch (code)
3254+ {
3255+ case CONST_INT:
3256+ case CONST_DOUBLE:
3257+ case CONST:
3258+ case SYMBOL_REF:
3259+ case CODE_LABEL:
3260+ case PC:
3261+ case CC0:
3262+ case ASM_INPUT:
3263+ case ADDR_VEC:
3264+ case ADDR_DIFF_VEC:
3265+ case RETURN:
3266+ case REG:
3267+ case ADDRESSOF:
3268+ case USE:
3269+ return;
3270+
3271+ case SET:
3272+ /*
3273+ Skip setjmp setup insn and setjmp restore insn
3274+ alpha case:
3275+ (set (MEM (reg:SI xx)) (frame_pointer_rtx)))
3276+ (set (frame_pointer_rtx) (REG))
3277+ */
3278+ if (GET_CODE (XEXP (x, 0)) == MEM
3279+ && XEXP (x, 1) == frame_pointer_rtx)
3280+ return;
3281+ if (XEXP (x, 0) == frame_pointer_rtx
3282+ && GET_CODE (XEXP (x, 1)) == REG)
3283+ return;
3284+
3285+ /*
3286+ powerpc case: restores setjmp address
3287+ (set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
3288+ or
3289+ (set (reg) (plus frame_pointer_rtx const_int -n))
3290+ (set (frame_pointer_rtx) (reg))
3291+ */
3292+ if (GET_CODE (XEXP (x, 0)) == REG
3293+ && GET_CODE (XEXP (x, 1)) == PLUS
3294+ && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
3295+ && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3296+ && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
3297+ {
3298+ x = XEXP (x, 1);
3299+ offset = AUTO_OFFSET(x);
3300+ if (x->used || -offset < boundary)
3301+ return;
3302+
3303+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
3304+ x->used = 1; insn_pushed = TRUE;
3305+ return;
3306+ }
3307+
3308+ /* Reset fp_equiv register. */
3309+ else if (GET_CODE (XEXP (x, 0)) == REG
3310+ && fp_equiv[REGNO (XEXP (x, 0))])
3311+ fp_equiv[REGNO (XEXP (x, 0))] = 0;
3312+
3313+ /* Propagete fp_equiv register. */
3314+ else if (GET_CODE (XEXP (x, 0)) == REG
3315+ && GET_CODE (XEXP (x, 1)) == REG
3316+ && fp_equiv[REGNO (XEXP (x, 1))])
3317+ if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
3318+ || reg_renumber[REGNO (XEXP (x, 0))] > 0)
3319+ fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
3320+ break;
3321+
3322+ case MEM:
3323+ if (XEXP (x, 0) == frame_pointer_rtx
3324+ && boundary == 0)
3325+ {
3326+ XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
3327+ XEXP (x, 0)->used = 1; insn_pushed = TRUE;
3328+ return;
3329+ }
3330+ break;
3331+
3332+ case PLUS:
3333+ /* Handle special case of frame register plus constant. */
3334+ if (GET_CODE (XEXP (x, 1)) == CONST_INT
3335+ && XEXP (x, 0) == frame_pointer_rtx)
3336+ {
3337+ offset = AUTO_OFFSET(x);
3338+
3339+ if (x->used || offset < boundary)
3340+ return;
3341+
3342+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
3343+ x->used = 1; insn_pushed = TRUE;
3344+
3345+ return;
3346+ }
3347+ /*
3348+ Handle alpha case:
3349+ (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
3350+ */
3351+ if (GET_CODE (XEXP (x, 1)) == CONST_INT
3352+ && GET_CODE (XEXP (x, 0)) == SUBREG
3353+ && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
3354+ {
3355+ offset = AUTO_OFFSET(x);
3356+
3357+ if (x->used || offset < boundary)
3358+ return;
3359+
3360+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
3361+ x->used = 1; insn_pushed = TRUE;
3362+
3363+ return;
3364+ }
3365+ /*
3366+ Handle powerpc case:
3367+ (set (reg x) (plus fp const))
3368+ (set (.....) (... (plus (reg x) (const B))))
3369+ */
3370+ else if (GET_CODE (XEXP (x, 1)) == CONST_INT
3371+ && GET_CODE (XEXP (x, 0)) == REG
3372+ && fp_equiv[REGNO (XEXP (x, 0))])
3373+ {
3374+ offset = AUTO_OFFSET(x);
3375+
3376+ if (x->used)
3377+ return;
3378+
3379+ offset += fp_equiv[REGNO (XEXP (x, 0))];
3380+
3381+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
3382+ x->used = 1; insn_pushed = TRUE;
3383+
3384+ return;
3385+ }
3386+ /*
3387+ Handle special case of frame register plus reg (constant).
3388+ (set (reg x) (const B))
3389+ (set (....) (...(plus fp (reg x))))
3390+ */
3391+ else if (XEXP (x, 0) == frame_pointer_rtx
3392+ && GET_CODE (XEXP (x, 1)) == REG
3393+ && PREV_INSN (insn)
3394+ && PATTERN (PREV_INSN (insn))
3395+ && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
3396+ && GET_CODE (SET_SRC (PATTERN (PREV_INSN (insn)))) == CONST_INT)
3397+ {
3398+ offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
3399+
3400+ if (x->used || offset < boundary)
3401+ return;
3402+
3403+ SET_SRC (PATTERN (PREV_INSN (insn)))
3404+ = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
3405+ x->used = 1;
3406+ XEXP (x, 1)->used = 1;
3407+
3408+ return;
3409+ }
3410+ /*
3411+ Handle special case of frame register plus reg (used).
3412+ The register already have a pushed offset, just mark this frame
3413+ addressing.
3414+ */
3415+ else if (XEXP (x, 0) == frame_pointer_rtx
3416+ && XEXP (x, 1)->used)
3417+ {
3418+ x->used = 1;
3419+ return;
3420+ }
3421+ /*
3422+ Process further subtree:
3423+ Example: (plus:SI (mem/s:SI (plus:SI (FP) (const_int 8)))
3424+ (const_int 5))
3425+ */
3426+ break;
3427+
3428+ case CALL_PLACEHOLDER:
3429+ push_frame_of_insns (XEXP (x, 0), push_size, boundary);
3430+ push_frame_of_insns (XEXP (x, 1), push_size, boundary);
3431+ push_frame_of_insns (XEXP (x, 2), push_size, boundary);
3432+ break;
3433+
3434+ default:
3435+ break;
3436+ }
3437+
3438+ /* Scan all subexpressions. */
3439+ fmt = GET_RTX_FORMAT (code);
3440+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3441+ if (*fmt == 'e')
3442+ {
3443+ if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
3444+ fatal_insn ("push_frame_in_operand", insn);
3445+ push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
3446+ }
3447+ else if (*fmt == 'E')
3448+ for (j = 0; j < XVECLEN (x, i); j++)
3449+ push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
3450+}
3451+
3452+
3453+/* Change the location pointed in reg_equiv_memory_loc. */
3454+static void
3455+push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT push_size,
3456+ HOST_WIDE_INT boundary)
3457+{
3458+ int i;
3459+ extern rtx *reg_equiv_memory_loc;
3460+
3461+ /* This function is processed if the push_frame is called from
3462+ global_alloc (or reload) function. */
3463+ if (reg_equiv_memory_loc == 0)
3464+ return;
3465+
3466+ for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
3467+ if (reg_equiv_memory_loc[i])
3468+ {
3469+ rtx x = reg_equiv_memory_loc[i];
3470+ int offset;
3471+
3472+ if (GET_CODE (x) == MEM
3473+ && GET_CODE (XEXP (x, 0)) == PLUS
3474+ && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
3475+ {
3476+ offset = AUTO_OFFSET(XEXP (x, 0));
3477+
3478+ if (! XEXP (x, 0)->used
3479+ && offset >= boundary)
3480+ {
3481+ offset += push_size;
3482+ XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
3483+
3484+ /* mark */
3485+ XEXP (x, 0)->used = 1;
3486+ }
3487+ }
3488+ else if (GET_CODE (x) == MEM
3489+ && XEXP (x, 0) == frame_pointer_rtx
3490+ && boundary == 0)
3491+ {
3492+ XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
3493+ XEXP (x, 0)->used = 1; insn_pushed = TRUE;
3494+ }
3495+ }
3496+}
3497+
3498+
3499+/* Change the location pointed in reg_equiv_constant. */
3500+static void
3501+push_frame_of_reg_equiv_constant (HOST_WIDE_INT push_size,
3502+ HOST_WIDE_INT boundary)
3503+{
3504+ int i;
3505+ extern rtx *reg_equiv_constant;
3506+
3507+ /* This function is processed if the push_frame is called from
3508+ global_alloc (or reload) function. */
3509+ if (reg_equiv_constant == 0)
3510+ return;
3511+
3512+ for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
3513+ if (reg_equiv_constant[i])
3514+ {
3515+ rtx x = reg_equiv_constant[i];
3516+ int offset;
3517+
3518+ if (GET_CODE (x) == PLUS
3519+ && XEXP (x, 0) == frame_pointer_rtx)
3520+ {
3521+ offset = AUTO_OFFSET(x);
3522+
3523+ if (! x->used
3524+ && offset >= boundary)
3525+ {
3526+ offset += push_size;
3527+ XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
3528+
3529+ /* mark */
3530+ x->used = 1;
3531+ }
3532+ }
3533+ else if (x == frame_pointer_rtx
3534+ && boundary == 0)
3535+ {
3536+ reg_equiv_constant[i]
3537+ = plus_constant (frame_pointer_rtx, push_size);
3538+ reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
3539+ }
3540+ }
3541+}
3542+
3543+
3544+/* Check every instructions if insn's memory reference is out of frame. */
3545+static int
3546+check_out_of_frame_access (rtx insn, HOST_WIDE_INT boundary)
3547+{
3548+ for (; insn; insn = NEXT_INSN (insn))
3549+ if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3550+ || GET_CODE (insn) == CALL_INSN)
3551+ {
3552+ if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
3553+ return TRUE;
3554+ }
3555+ return FALSE;
3556+}
3557+
3558+
3559+/* Check every operands if the reference is out of frame. */
3560+static int
3561+check_out_of_frame_access_in_operand (rtx orig, HOST_WIDE_INT boundary)
3562+{
3563+ rtx x = orig;
3564+ enum rtx_code code;
3565+ int i, j;
3566+ const char *fmt;
3567+
3568+ if (x == 0)
3569+ return FALSE;
3570+
3571+ code = GET_CODE (x);
3572+
3573+ switch (code)
3574+ {
3575+ case CONST_INT:
3576+ case CONST_DOUBLE:
3577+ case CONST:
3578+ case SYMBOL_REF:
3579+ case CODE_LABEL:
3580+ case PC:
3581+ case CC0:
3582+ case ASM_INPUT:
3583+ case ADDR_VEC:
3584+ case ADDR_DIFF_VEC:
3585+ case RETURN:
3586+ case REG:
3587+ case ADDRESSOF:
3588+ return FALSE;
3589+
3590+ case MEM:
3591+ if (XEXP (x, 0) == frame_pointer_rtx)
3592+ if (0 < boundary)
3593+ return TRUE;
3594+ break;
3595+
3596+ case PLUS:
3597+ /* Handle special case of frame register plus constant. */
3598+ if (GET_CODE (XEXP (x, 1)) == CONST_INT
3599+ && XEXP (x, 0) == frame_pointer_rtx)
3600+ {
3601+ if (0 <= AUTO_OFFSET(x)
3602+ && AUTO_OFFSET(x) < boundary)
3603+ return TRUE;
3604+ return FALSE;
3605+ }
3606+ /*
3607+ Process further subtree:
3608+ Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
3609+ (const_int 5))
3610+ */
3611+ break;
3612+
3613+ case CALL_PLACEHOLDER:
3614+ if (check_out_of_frame_access (XEXP (x, 0), boundary))
3615+ return TRUE;
3616+ if (check_out_of_frame_access (XEXP (x, 1), boundary))
3617+ return TRUE;
3618+ if (check_out_of_frame_access (XEXP (x, 2), boundary))
3619+ return TRUE;
3620+ break;
3621+
3622+ default:
3623+ break;
3624+ }
3625+
3626+ /* Scan all subexpressions. */
3627+ fmt = GET_RTX_FORMAT (code);
3628+ for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3629+ if (*fmt == 'e')
3630+ {
3631+ if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
3632+ return TRUE;
3633+ }
3634+ else if (*fmt == 'E')
3635+ for (j = 0; j < XVECLEN (x, i); j++)
3636+ if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))
3637+ return TRUE;
3638+
3639+ return FALSE;
3640+}
3641+#endif
3642diff -uNr gcc-3.4.3.orig/gcc/protector.h gcc-3.4.3/gcc/protector.h
3643--- gcc-3.4.3.orig/gcc/protector.h 1970-01-01 01:00:00.000000000 +0100
3644+++ gcc-3.4.3/gcc/protector.h 2004-01-20 03:01:39.000000000 +0100
3645@@ -0,0 +1,55 @@
3646+/* RTL buffer overflow protection function for GNU C compiler
3647+ Copyright (C) 2003 Free Software Foundation, Inc.
3648+
3649+This file is part of GCC.
3650+
3651+GCC is free software; you can redistribute it and/or modify it under
3652+the terms of the GNU General Public License as published by the Free
3653+Software Foundation; either version 2, or (at your option) any later
3654+version.
3655+
3656+GCC is distributed in the hope that it will be useful, but WITHOUT ANY
3657+WARRANTY; without even the implied warranty of MERCHANTABILITY or
3658+FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
3659+for more details.
3660+
3661+You should have received a copy of the GNU General Public License
3662+along with GCC; see the file COPYING. If not, write to the Free
3663+Software Foundation, 59 Temple Place - Suite 330, Boston, MA
3664+02111-1307, USA. */
3665+
3666+
3667+/* Declare GUARD variable. */
3668+#define GUARD_m Pmode
3669+#define UNITS_PER_GUARD \
3670+ MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT, GET_MODE_SIZE (GUARD_m))
3671+
3672+#ifndef L_stack_smash_handler
3673+
3674+/* Insert a guard variable before a character buffer and change the order
3675+ of pointer variables, character buffers and pointer arguments. */
3676+
3677+extern void prepare_stack_protection (int);
3678+
3679+#ifdef TREE_CODE
3680+/* Search a character array from the specified type tree. */
3681+
3682+extern int search_string_def (tree);
3683+#endif
3684+
3685+/* Examine whether the input contains frame pointer addressing. */
3686+
3687+extern int contains_fp (rtx);
3688+
3689+/* Return size that is not allocated for stack frame. It will be allocated
3690+ to modify the home of pseudo registers called from global_alloc. */
3691+
3692+extern HOST_WIDE_INT get_frame_free_size (void);
3693+
3694+/* Allocate a local variable in the stack area before character buffers
3695+ to avoid the corruption of it. */
3696+
3697+extern rtx assign_stack_local_for_pseudo_reg (enum machine_mode,
3698+ HOST_WIDE_INT, int);
3699+
3700+#endif
3701diff -uNr gcc-3.4.3.orig/gcc/reload1.c gcc-3.4.3/gcc/reload1.c
3702--- gcc-3.4.3.orig/gcc/reload1.c 2004-05-02 14:37:17.000000000 +0200
3703+++ gcc-3.4.3/gcc/reload1.c 2004-11-24 18:35:31.812641048 +0100
3704@@ -43,6 +43,7 @@
3705 #include "toplev.h"
3706 #include "except.h"
3707 #include "tree.h"
3708+#include "protector.h"
3709
3710 /* This file contains the reload pass of the compiler, which is
3711 run after register allocation has been done. It checks that
3712@@ -891,7 +892,7 @@
3713 if (cfun->stack_alignment_needed)
3714 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
3715
3716- starting_frame_size = get_frame_size ();
3717+ starting_frame_size = get_frame_size () - get_frame_free_size ();
3718
3719 set_initial_elim_offsets ();
3720 set_initial_label_offsets ();
3721@@ -955,7 +956,7 @@
3722 setup_save_areas ();
3723
3724 /* If we allocated another stack slot, redo elimination bookkeeping. */
3725- if (starting_frame_size != get_frame_size ())
3726+ if (starting_frame_size != get_frame_size () - get_frame_free_size ())
3727 continue;
3728
3729 if (caller_save_needed)
3730@@ -974,7 +975,7 @@
3731
3732 /* If we allocated any new memory locations, make another pass
3733 since it might have changed elimination offsets. */
3734- if (starting_frame_size != get_frame_size ())
3735+ if (starting_frame_size != get_frame_size () - get_frame_free_size ())
3736 something_changed = 1;
3737
3738 {
3739@@ -1066,11 +1067,11 @@
3740 if (insns_need_reload != 0 || something_needs_elimination
3741 || something_needs_operands_changed)
3742 {
3743- HOST_WIDE_INT old_frame_size = get_frame_size ();
3744+ HOST_WIDE_INT old_frame_size = get_frame_size () - get_frame_free_size ();
3745
3746 reload_as_needed (global);
3747
3748- if (old_frame_size != get_frame_size ())
3749+ if (old_frame_size != get_frame_size () - get_frame_free_size ())
3750 abort ();
3751
3752 if (num_eliminable)
3753@@ -1957,8 +1958,10 @@
3754 inherent space, and no less total space, then the previous slot. */
3755 if (from_reg == -1)
3756 {
3757- /* No known place to spill from => no slot to reuse. */
3758- x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
3759+ /* No known place to spill from => no slot to reuse.
3760+ For the stack protection, an allocated slot should be placed in
3761+ the safe region from the stack smaching attack. */
3762+ x = assign_stack_local_for_pseudo_reg (GET_MODE (regno_reg_rtx[i]), total_size,
3763 inherent_size == total_size ? 0 : -1);
3764 if (BYTES_BIG_ENDIAN)
3765 /* Cancel the big-endian correction done in assign_stack_local.
3766diff -uNr gcc-3.4.3.orig/gcc/rtl.h gcc-3.4.3/gcc/rtl.h
3767--- gcc-3.4.3.orig/gcc/rtl.h 2004-10-13 01:35:32.000000000 +0200
3768+++ gcc-3.4.3/gcc/rtl.h 2004-11-24 18:35:31.830638312 +0100
3769@@ -473,6 +473,18 @@
3770 __FUNCTION__); \
3771 _rtx; })
3772
3773+#define RTL_FLAG_CHECK9(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8, C9) \
3774+ __extension__ \
3775+({ rtx const _rtx = (RTX); \
3776+ if (GET_CODE(_rtx) != C1 && GET_CODE(_rtx) != C2 \
3777+ && GET_CODE(_rtx) != C3 && GET_CODE(_rtx) != C4 \
3778+ && GET_CODE(_rtx) != C5 && GET_CODE(_rtx) != C6 \
3779+ && GET_CODE(_rtx) != C7 && GET_CODE(_rtx) != C8 \
3780+ && GET_CODE(_rtx) != C9) \
3781+ rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__, \
3782+ __FUNCTION__); \
3783+ _rtx; })
3784+
3785 extern void rtl_check_failed_flag (const char *, rtx, const char *,
3786 int, const char *)
3787 ATTRIBUTE_NORETURN
3788@@ -488,6 +500,7 @@
3789 #define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6) (RTX)
3790 #define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7) (RTX)
3791 #define RTL_FLAG_CHECK8(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8) (RTX)
3792+#define RTL_FLAG_CHECK9(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8, C9) (RTX)
3793 #endif
3794
3795 #define CLEAR_RTX_FLAGS(RTX) \
3796@@ -583,9 +596,9 @@
3797 #define LOG_LINKS(INSN) XEXP(INSN, 7)
3798
3799 #define RTX_INTEGRATED_P(RTX) \
3800- (RTL_FLAG_CHECK8("RTX_INTEGRATED_P", (RTX), INSN, CALL_INSN, \
3801+ (RTL_FLAG_CHECK9("RTX_INTEGRATED_P", (RTX), INSN, CALL_INSN, \
3802 JUMP_INSN, INSN_LIST, BARRIER, CODE_LABEL, CONST, \
3803- NOTE)->integrated)
3804+ PLUS, NOTE)->integrated)
3805 #define RTX_UNCHANGING_P(RTX) \
3806 (RTL_FLAG_CHECK3("RTX_UNCHANGING_P", (RTX), REG, MEM, CONCAT)->unchanging)
3807 #define RTX_FRAME_RELATED_P(RTX) \
3808@@ -1125,6 +1138,10 @@
3809 (RTL_FLAG_CHECK3("MEM_VOLATILE_P", (RTX), MEM, ASM_OPERANDS, \
3810 ASM_INPUT)->volatil)
3811
3812+/* 1 if RTX is an SET rtx that is not eliminated for the stack protection. */
3813+#define SET_VOLATILE_P(RTX) \
3814+ (RTL_FLAG_CHECK1("SET_VOLATILE_P", (RTX), SET)->volatil)
3815+
3816 /* 1 if RTX is a mem that refers to an aggregate, either to the
3817 aggregate itself of to a field of the aggregate. If zero, RTX may
3818 or may not be such a reference. */
3819diff -uNr gcc-3.4.3.orig/gcc/simplify-rtx.c gcc-3.4.3/gcc/simplify-rtx.c
3820--- gcc-3.4.3.orig/gcc/simplify-rtx.c 2004-10-10 23:53:35.000000000 +0200
3821+++ gcc-3.4.3/gcc/simplify-rtx.c 2004-11-24 18:35:31.858634056 +0100
3822@@ -2287,6 +2287,7 @@
3823 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts;
3824 int first, changed;
3825 int i, j;
3826+ HOST_WIDE_INT fp_offset = 0;
3827
3828 memset (ops, 0, sizeof ops);
3829
3830@@ -2312,6 +2313,10 @@
3831 switch (this_code)
3832 {
3833 case PLUS:
3834+ if (flag_propolice_protection
3835+ && XEXP (this_op, 0) == virtual_stack_vars_rtx
3836+ && GET_CODE (XEXP (this_op, 1)) == CONST_INT)
3837+ fp_offset = INTVAL (XEXP (this_op, 1));
3838 case MINUS:
3839 if (n_ops == 7)
3840 return NULL_RTX;
3841@@ -2473,11 +2478,24 @@
3842 && GET_CODE (ops[n_ops - 1].op) == CONST_INT
3843 && CONSTANT_P (ops[n_ops - 2].op))
3844 {
3845- rtx value = ops[n_ops - 1].op;
3846- if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
3847- value = neg_const_int (mode, value);
3848- ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
3849- n_ops--;
3850+ if (!flag_propolice_protection)
3851+ {
3852+ rtx value = ops[n_ops - 1].op;
3853+ if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
3854+ value = neg_const_int (mode, value);
3855+ ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
3856+ n_ops--;
3857+ }
3858+ /* The stack protector keeps the addressing style of a local variable,
3859+ so it doesn't use neg_const_int function not to change
3860+ the offset value. */
3861+ else {
3862+ HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
3863+ if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
3864+ value = -value;
3865+ ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, value);
3866+ n_ops--;
3867+ }
3868 }
3869
3870 /* Count the number of CONSTs that we generated. */
3871@@ -2495,6 +2513,59 @@
3872 || (n_ops + n_consts == input_ops && n_consts <= input_consts)))
3873 return NULL_RTX;
3874
3875+ if (flag_propolice_protection)
3876+ {
3877+ /* keep the addressing style of local variables
3878+ as (plus (virtual_stack_vars_rtx) (CONST_int x)).
3879+ For the case array[r-1],
3880+ converts from (+ (+VFP c1) (+r -1)) to (SET R (+VFP c1)) (+ R (+r -1)).
3881+
3882+ This loop finds ops[i] which is the register for the frame
3883+ addressing, Then, makes the frame addressing using the register and
3884+ the constant of ops[n_ops - 1]. */
3885+ for (i = 0; i < n_ops; i++)
3886+#ifdef FRAME_GROWS_DOWNWARD
3887+ if (ops[i].op == virtual_stack_vars_rtx)
3888+#else
3889+ if (ops[i].op == virtual_stack_vars_rtx
3890+ || ops[i].op == frame_pointer_rtx)
3891+#endif
3892+ {
3893+ if (GET_CODE (ops[n_ops - 1].op) == CONST_INT)
3894+ {
3895+ HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
3896+ if (value >= fp_offset)
3897+ {
3898+ ops[i].op = plus_constant (ops[i].op, value);
3899+ n_ops--;
3900+ }
3901+ else
3902+ {
3903+ if (!force
3904+ && (n_ops + 1 + n_consts > input_ops
3905+ || (n_ops + 1 + n_consts == input_ops
3906+ && n_consts <= input_consts)))
3907+ return NULL_RTX;
3908+ ops[n_ops - 1].op = GEN_INT (value-fp_offset);
3909+ ops[i].op = plus_constant (ops[i].op, fp_offset);
3910+ }
3911+ }
3912+ /* keep the following address pattern;
3913+ (1) buf[BUFSIZE] is the first assigned variable.
3914+ (+ (+ fp -BUFSIZE) BUFSIZE)
3915+ (2) ((+ (+ fp 1) r) -1). */
3916+ else if (fp_offset != 0)
3917+ return NULL_RTX;
3918+ /* keep the (+ fp 0) pattern for the following case;
3919+ (1) buf[i]: i: REG, buf: (+ fp 0) in !FRAME_GROWS_DOWNWARD
3920+ (2) argument: the address is (+ fp 0). */
3921+ else if (fp_offset == 0)
3922+ return NULL_RTX;
3923+
3924+ break;
3925+ }
3926+ }
3927+
3928 /* Put a non-negated operand first, if possible. */
3929
3930 for (i = 0; i < n_ops && ops[i].neg; i++)
3931diff -uNr gcc-3.4.3.orig/gcc/testsuite/gcc.dg/ssp-warn.c gcc-3.4.3/gcc/testsuite/gcc.dg/ssp-warn.c
3932--- gcc-3.4.3.orig/gcc/testsuite/gcc.dg/ssp-warn.c 1970-01-01 01:00:00.000000000 +0100
3933+++ gcc-3.4.3/gcc/testsuite/gcc.dg/ssp-warn.c 2003-11-21 09:41:19.000000000 +0100
3934@@ -0,0 +1,32 @@
3935+/* { dg-do compile } */
3936+/* { dg-options "-fstack-protector" } */
3937+void
3938+test1()
3939+{
3940+ void intest1(int *a)
3941+ {
3942+ *a ++;
3943+ }
3944+
3945+ char buf[80];
3946+
3947+ buf[0] = 0;
3948+} /* { dg-bogus "not protecting function: it contains functions" } */
3949+
3950+void
3951+test2(int n)
3952+{
3953+ char buf[80];
3954+ char vbuf[n];
3955+
3956+ buf[0] = 0;
3957+ vbuf[0] = 0;
3958+} /* { dg-bogus "not protecting variables: it has a variable length buffer" } */
3959+
3960+void
3961+test3()
3962+{
3963+ char buf[5];
3964+
3965+ buf[0] = 0;
3966+} /* { dg-bogus "not protecting function: buffer is less than 8 bytes long" } */
3967diff -uNr gcc-3.4.3.orig/gcc/testsuite/gcc.misc-tests/ssp-execute1.c gcc-3.4.3/gcc/testsuite/gcc.misc-tests/ssp-execute1.c
3968--- gcc-3.4.3.orig/gcc/testsuite/gcc.misc-tests/ssp-execute1.c 1970-01-01 01:00:00.000000000 +0100
3969+++ gcc-3.4.3/gcc/testsuite/gcc.misc-tests/ssp-execute1.c 2004-02-16 06:15:39.000000000 +0100
3970@@ -0,0 +1,54 @@
3971+/* Test location changes of character array. */
3972+
3973+void
3974+test(int i)
3975+{
3976+ int ibuf1[10];
3977+ char buf[50];
3978+ int ibuf2[10];
3979+ char buf2[50000];
3980+ int ibuf3[10];
3981+ char *p;
3982+
3983+ /* c1: the frame offset of buf[0]
3984+ c2: the frame offset of buf2[0]
3985+ */
3986+ p= &buf[0]; *p=1; /* expected rtl: (+ fp -c1) */
3987+ if (*p != buf[0])
3988+ abort();
3989+ p= &buf[5]; *p=2; /* expected rtl: (+ fp -c1+5) */
3990+ if (*p != buf[5])
3991+ abort();
3992+ p= &buf[-1]; *p=3; /* expected rtl: (+ (+ fp -c1) -1) */
3993+ if (*p != buf[-1])
3994+ abort();
3995+ p= &buf[49]; *p=4; /* expected rtl: (+ fp -c1+49) */
3996+ if (*p != buf[49])
3997+ abort();
3998+ p = &buf[i+5]; *p=5; /* expected rtl: (+ (+ fp -c1) (+ i 5)) */
3999+ if (*p != buf[i+5])
4000+ abort ();
4001+ p = buf - 1; *p=6; /* expected rtl: (+ (+ fp -c1) -1) */
4002+ if (*p != buf[-1])
4003+ abort ();
4004+ p = 1 + buf; *p=7; /* expected rtl: (+ (+ fp -c1) 1) */
4005+ if (*p != buf[1])
4006+ abort ();
4007+ p = &buf[1] - 1; *p=8; /* expected rtl: (+ (+ fp -c1+1) -1) */
4008+ if (*p != buf[0])
4009+ abort ();
4010+
4011+ /* test big offset which is greater than the max value of signed 16 bit integer. */
4012+ p = &buf2[45555]; *p=9; /* expected rtl: (+ fp -c2+45555) */
4013+ if (*p != buf2[45555])
4014+ abort ();
4015+}
4016+
4017+int main()
4018+{
4019+ test(10);
4020+ exit(0);
4021+}
4022+
4023+
4024+
4025diff -uNr gcc-3.4.3.orig/gcc/testsuite/gcc.misc-tests/ssp-execute2.c gcc-3.4.3/gcc/testsuite/gcc.misc-tests/ssp-execute2.c
4026--- gcc-3.4.3.orig/gcc/testsuite/gcc.misc-tests/ssp-execute2.c 1970-01-01 01:00:00.000000000 +0100
4027+++ gcc-3.4.3/gcc/testsuite/gcc.misc-tests/ssp-execute2.c 2003-11-22 09:44:33.000000000 +0100
4028@@ -0,0 +1,49 @@
4029+void
4030+test(int i, char *j, int k)
4031+{
4032+ int a[10];
4033+ char b;
4034+ int c;
4035+ long *d;
4036+ char buf[50];
4037+ long e[10];
4038+ int n;
4039+
4040+ a[0] = 4;
4041+ b = 5;
4042+ c = 6;
4043+ d = (long*)7;
4044+ e[0] = 8;
4045+
4046+ /* overflow buffer */
4047+ for (n = 0; n < 120; n++)
4048+ buf[n] = 0;
4049+
4050+ if (j == 0 || *j != 2)
4051+ abort ();
4052+ if (a[0] == 0)
4053+ abort ();
4054+ if (b == 0)
4055+ abort ();
4056+ if (c == 0)
4057+ abort ();
4058+ if (d == 0)
4059+ abort ();
4060+ if (e[0] == 0)
4061+ abort ();
4062+
4063+ exit (0);
4064+}
4065+
4066+int main()
4067+{
4068+ int i, k;
4069+ int j[40];
4070+ i = 1;
4071+ j[39] = 2;
4072+ k = 3;
4073+ test(i, &j[39], k);
4074+}
4075+
4076+
4077+
4078diff -uNr gcc-3.4.3.orig/gcc/testsuite/gcc.misc-tests/ssp-execute.exp gcc-3.4.3/gcc/testsuite/gcc.misc-tests/ssp-execute.exp
4079--- gcc-3.4.3.orig/gcc/testsuite/gcc.misc-tests/ssp-execute.exp 1970-01-01 01:00:00.000000000 +0100
4080+++ gcc-3.4.3/gcc/testsuite/gcc.misc-tests/ssp-execute.exp 2004-06-02 13:23:36.000000000 +0200
4081@@ -0,0 +1,35 @@
4082+# Copyright (C) 2003, 2004 Free Software Foundation, Inc.
4083+
4084+# This program is free software; you can redistribute it and/or modify
4085+# it under the terms of the GNU General Public License as published by
4086+# the Free Software Foundation; either version 2 of the License, or
4087+# (at your option) any later version.
4088+#
4089+# This program is distributed in the hope that it will be useful,
4090+# but WITHOUT ANY WARRANTY; without even the implied warranty of
4091+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
4092+# GNU General Public License for more details.
4093+#
4094+# You should have received a copy of the GNU General Public License
4095+# along with this program; if not, write to the Free Software
4096+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
4097+
4098+if $tracelevel then {
4099+ strace $tracelevel
4100+}
4101+
4102+# Load support procs.
4103+load_lib c-torture.exp
4104+
4105+#
4106+# main test loop
4107+#
4108+
4109+foreach src [lsort [glob -nocomplain $srcdir/$subdir/ssp-execute*.c]] {
4110+ # If we're only testing specific files and this isn't one of them, skip it.
4111+ if ![runtest_file_p $runtests $src] then {
4112+ continue
4113+ }
4114+
4115+ c-torture-execute $src -fstack-protector
4116+}
4117diff -uNr gcc-3.4.3.orig/gcc/toplev.c gcc-3.4.3/gcc/toplev.c
4118--- gcc-3.4.3.orig/gcc/toplev.c 2004-07-26 16:42:11.000000000 +0200
4119+++ gcc-3.4.3/gcc/toplev.c 2004-11-24 18:35:31.000000000 +0100
4120@@ -79,6 +79,7 @@
4121 #include "coverage.h"
4122 #include "value-prof.h"
4123 #include "alloc-pool.h"
4124+#include "protector.h"
4125
4126 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
4127 #include "dwarf2out.h"
4128@@ -97,6 +98,10 @@
4129 declarations for e.g. AIX 4.x. */
4130 #endif
4131
4132+#ifdef STACK_PROTECTOR
4133+#include "protector.h"
4134+#endif
4135+
4136 #ifndef HAVE_conditional_execution
4137 #define HAVE_conditional_execution 0
4138 #endif
4139@@ -979,6 +984,15 @@
4140 minimum function alignment. Zero means no alignment is forced. */
4141 int force_align_functions_log;
4142
4143+#if defined(STACK_PROTECTOR) && defined(STACK_GROWS_DOWNWARD)
4144+/* Nonzero means use propolice as a stack protection method.  */
4145+int flag_propolice_protection = 1;
4146+int flag_stack_protection = 0;
4147+#else
4148+int flag_propolice_protection = 0;
4149+int flag_stack_protection = 0;
4150+#endif
4151+
4152 typedef struct
4153 {
4154 const char *const string;
4155@@ -1154,7 +1168,9 @@
4156 {"mem-report", &mem_report, 1 },
4157 { "trapv", &flag_trapv, 1 },
4158 { "wrapv", &flag_wrapv, 1 },
4159- { "new-ra", &flag_new_regalloc, 1 }
4160+ { "new-ra", &flag_new_regalloc, 1 },
4161+ {"stack-protector", &flag_propolice_protection, 1 },
4162+ {"stack-protector-all", &flag_stack_protection, 1 }
4163 };
4164
4165 /* Here is a table, controlled by the tm.h file, listing each -m switch
4166@@ -2686,6 +2702,9 @@
4167
4168 insns = get_insns ();
4169
4170+ if (flag_propolice_protection)
4171+ prepare_stack_protection (inlinable);
4172+
4173 /* Dump the rtl code if we are dumping rtl. */
4174
4175 if (open_dump_file (DFI_rtl, decl))
4176@@ -4483,6 +4502,12 @@
4177 /* The presence of IEEE signaling NaNs, implies all math can trap. */
4178 if (flag_signaling_nans)
4179 flag_trapping_math = 1;
4180+
4181+   /* This combination produces optimized frame addressing and causes
4182+      an internal compilation error at prepare_stack_protection,
4183+      so don't allow it.  */
4184+ if (flag_stack_protection && !flag_propolice_protection)
4185+ flag_propolice_protection = TRUE;
4186 }
4187
4188 /* Initialize the compiler back end. */
4189diff -uNr gcc-3.4.3.orig/gcc/tree.h gcc-3.4.3/gcc/tree.h
4190--- gcc-3.4.3.orig/gcc/tree.h 2004-11-24 18:04:19.000000000 +0100
4191+++ gcc-3.4.3/gcc/tree.h 2004-11-24 18:35:31.000000000 +0100
4192@@ -1489,6 +1489,10 @@
4193 where it is called. */
4194 #define DECL_INLINE(NODE) (FUNCTION_DECL_CHECK (NODE)->decl.inline_flag)
4195
4196+/* In a VAR_DECL, nonzero if the declaration is copied for inlining.
4197+ The stack protector should keep its location in the stack. */
4198+#define DECL_COPIED(NODE) (VAR_DECL_CHECK (NODE)->decl.inline_flag)
4199+
4200 /* Nonzero in a FUNCTION_DECL means that this function was declared inline,
4201 such as via the `inline' keyword in C/C++. This flag controls the linkage
4202 semantics of 'inline'; whether or not the function is inlined is
This page took 0.700182 seconds and 4 git commands to generate.