1 diff -uNr gcc-3.4.3.orig/gcc/calls.c gcc-3.4.3/gcc/calls.c
2 --- gcc-3.4.3.orig/gcc/calls.c 2004-06-24 09:26:50.000000000 +0200
3 +++ gcc-3.4.3/gcc/calls.c 2004-11-24 18:35:31.000000000 +0100
6 /* For variable-sized objects, we must be called with a target
7 specified. If we were to allocate space on the stack here,
8 - we would have no way of knowing when to free it. */
9 - rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
10 + we would have no way of knowing when to free it.
12 + This is the return object of a structure-returning function, not
13 + a character array that needs stack protection, so it is marked
14 + by setting the KEEP argument to 5. */
15 + rtx d = assign_temp (TREE_TYPE (exp), 5, 1, 1);
17 mark_temp_addr_taken (d);
18 structure_value_addr = XEXP (d, 0);
19 diff -uNr gcc-3.4.3.orig/gcc/c-cppbuiltin.c gcc-3.4.3/gcc/c-cppbuiltin.c
20 --- gcc-3.4.3.orig/gcc/c-cppbuiltin.c 2004-03-04 11:24:54.000000000 +0100
21 +++ gcc-3.4.3/gcc/c-cppbuiltin.c 2004-11-24 18:35:31.000000000 +0100
23 if (c_dialect_objc () && flag_next_runtime)
24 cpp_define (pfile, "__NEXT_RUNTIME__");
26 + /* Make the choice of the stack protector runtime visible to source code. */
27 + if (flag_propolice_protection)
28 + cpp_define (pfile, "__SSP__=1");
29 + if (flag_stack_protection)
30 + cpp_define (pfile, "__SSP_ALL__=2");
32 /* A straightforward target hook doesn't work, because of problems
33 linking that hook's body when part of non-C front ends. */
34 # define preprocessing_asm_p() (cpp_get_options (pfile)->lang == CLK_ASM)
35 diff -uNr gcc-3.4.3.orig/gcc/combine.c gcc-3.4.3/gcc/combine.c
36 --- gcc-3.4.3.orig/gcc/combine.c 2004-10-13 01:35:29.000000000 +0200
37 +++ gcc-3.4.3/gcc/combine.c 2004-11-24 18:35:31.000000000 +0100
38 @@ -1401,6 +1401,10 @@
39 && ! fixed_regs[REGNO (dest)]
40 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
42 + /* Never combine loads and stores protecting argument that use set insn
43 + with used flag on. */
44 + if (SET_VOLATILE_P (set))
49 @@ -3781,7 +3785,20 @@
50 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
51 rtx inner_op1 = XEXP (x, 1);
55 +#ifndef FRAME_GROWS_DOWNWARD
56 + /* For the case where the frame grows upward,
57 + the stack protector keeps the offset of the frame pointer
58 + positive integer. */
59 + if (flag_propolice_protection
61 + && other == frame_pointer_rtx
62 + && GET_CODE (inner_op0) == CONST_INT
63 + && GET_CODE (inner_op1) == CONST_INT
64 + && INTVAL (inner_op0) > 0
65 + && INTVAL (inner_op0) + INTVAL (inner_op1) <= 0)
68 /* Make sure we pass the constant operand if any as the second
69 one if this is a commutative operation. */
70 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
71 @@ -4146,6 +4163,13 @@
72 they are now checked elsewhere. */
73 if (GET_CODE (XEXP (x, 0)) == PLUS
74 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
75 +#ifndef FRAME_GROWS_DOWNWARD
76 + /* The stack protector keeps the addressing style of a local variable
77 + to be able to change its stack position. */
78 + if (! (flag_propolice_protection
79 + && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx
80 + && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
82 return gen_binary (PLUS, mode,
83 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
85 @@ -4273,8 +4297,14 @@
88 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
90 - if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
93 + The stack protector keeps the addressing style of
94 + a local variable. */
95 + if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)
96 + && (! (flag_propolice_protection
97 + && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
98 + && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)))
99 return gen_binary (MINUS, mode,
100 gen_binary (MINUS, mode, XEXP (x, 0),
101 XEXP (XEXP (x, 1), 0)),
102 diff -uNr gcc-3.4.3.orig/gcc/common.opt gcc-3.4.3/gcc/common.opt
103 --- gcc-3.4.3.orig/gcc/common.opt 2004-11-24 18:04:19.000000000 +0100
104 +++ gcc-3.4.3/gcc/common.opt 2004-11-24 18:35:31.000000000 +0100
107 Warn when a variable is unused
111 +Warn when not issuing stack smashing protection for some reason
115 -aux-info <file> Emit declaration information into <file>
118 Put zero initialized data in the bss section
122 +Enables stack protection
124 +fstack-protector-all
126 +Enables stack protection of every function
129 Common JoinedOrMissing
130 Generate debug information in default format
131 diff -uNr gcc-3.4.3.orig/gcc/config/arm/arm.md gcc-3.4.3/gcc/config/arm/arm.md
132 --- gcc-3.4.3.orig/gcc/config/arm/arm.md 2004-08-25 17:46:19.000000000 +0200
133 +++ gcc-3.4.3/gcc/config/arm/arm.md 2004-11-24 18:35:31.000000000 +0100
134 @@ -3840,7 +3840,13 @@
135 (match_operand:DI 1 "general_operand" ""))]
141 + /* Everything except mem = const or mem = mem can be done easily */
142 + if (GET_CODE (operands[0]) == MEM)
143 + operands[1] = force_reg (DImode, operands[1]);
145 + else /* TARGET_THUMB.... */
149 diff -uNr gcc-3.4.3.orig/gcc/config/t-linux gcc-3.4.3/gcc/config/t-linux
150 --- gcc-3.4.3.orig/gcc/config/t-linux 2003-09-23 20:55:57.000000000 +0200
151 +++ gcc-3.4.3/gcc/config/t-linux 2004-11-24 18:35:31.000000000 +0100
153 # Compile crtbeginS.o and crtendS.o with pic.
154 CRTSTUFF_T_CFLAGS_S = $(CRTSTUFF_T_CFLAGS) -fPIC
155 # Compile libgcc2.a with pic.
156 -TARGET_LIBGCC2_CFLAGS = -fPIC
157 +TARGET_LIBGCC2_CFLAGS = -fPIC -DHAVE_SYSLOG
159 # Override t-slibgcc-elf-ver to export some libgcc symbols with
160 # the symbol versions that glibc used.
161 diff -uNr gcc-3.4.3.orig/gcc/configure gcc-3.4.3/gcc/configure
162 --- gcc-3.4.3.orig/gcc/configure 2004-11-05 05:14:05.000000000 +0100
163 +++ gcc-3.4.3/gcc/configure 2004-11-24 18:44:13.000000000 +0100
164 @@ -4809,6 +4809,9 @@
171 # -------------------------
172 # Checks for other programs
173 # -------------------------
174 @@ -13036,6 +13039,7 @@
175 s,@TARGET_SYSTEM_ROOT_DEFINE@,$TARGET_SYSTEM_ROOT_DEFINE,;t t
176 s,@CROSS_SYSTEM_HEADER_DIR@,$CROSS_SYSTEM_HEADER_DIR,;t t
177 s,@onestep@,$onestep,;t t
178 +s,@ENABLESSP@,$ENABLESSP,;t t
179 s,@SET_MAKE@,$SET_MAKE,;t t
182 diff -uNr gcc-3.4.3.orig/gcc/configure.ac gcc-3.4.3/gcc/configure.ac
183 --- gcc-3.4.3.orig/gcc/configure.ac 2004-11-24 18:04:19.000000000 +0100
184 +++ gcc-3.4.3/gcc/configure.ac 2004-11-24 18:46:57.000000000 +0100
192 # -------------------------
193 # Checks for other programs
194 # -------------------------
195 diff -uNr gcc-3.4.3.orig/gcc/cse.c gcc-3.4.3/gcc/cse.c
196 --- gcc-3.4.3.orig/gcc/cse.c 2004-10-26 20:05:42.000000000 +0200
197 +++ gcc-3.4.3/gcc/cse.c 2004-11-24 18:35:31.000000000 +0100
198 @@ -4212,7 +4212,14 @@
203 +#ifndef FRAME_GROWS_DOWNWARD
204 + if (flag_propolice_protection
205 + && GET_CODE (y) == PLUS
206 + && XEXP (y, 0) == frame_pointer_rtx
207 + && INTVAL (inner_const) > 0
208 + && INTVAL (new_const) <= 0)
211 /* If we are associating shift operations, don't let this
212 produce a shift of the size of the object or larger.
213 This could occur when we follow a sign-extend by a right
214 @@ -4744,6 +4751,14 @@
215 if (SET_DEST (x) == pc_rtx
216 && GET_CODE (SET_SRC (x)) == LABEL_REF)
218 + /* cut the reg propagation of stack-protected argument. */
219 + else if (SET_VOLATILE_P (x)) {
220 + rtx x1 = SET_DEST (x);
221 + if (GET_CODE (x1) == SUBREG && GET_CODE (SUBREG_REG (x1)) == REG)
222 + x1 = SUBREG_REG (x1);
223 + if (! REGNO_QTY_VALID_P(REGNO (x1)))
224 + make_new_qty (REGNO (x1), GET_MODE (x1));
227 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
228 The hard function value register is used only once, to copy to
229 diff -uNr gcc-3.4.3.orig/gcc/doc/invoke.texi gcc-3.4.3/gcc/doc/invoke.texi
230 --- gcc-3.4.3.orig/gcc/doc/invoke.texi 2004-11-24 18:04:19.000000000 +0100
231 +++ gcc-3.4.3/gcc/doc/invoke.texi 2004-11-24 18:35:32.000000000 +0100
233 -Wno-multichar -Wnonnull -Wpacked -Wpadded @gol
234 -Wparentheses -Wpointer-arith -Wredundant-decls @gol
235 -Wreturn-type -Wsequence-point -Wshadow @gol
236 --Wsign-compare -Wstrict-aliasing @gol
237 +-Wsign-compare -Wstack-protector -Wstrict-aliasing @gol
238 -Wswitch -Wswitch-default -Wswitch-enum @gol
239 -Wsystem-headers -Wtrigraphs -Wundef -Wuninitialized @gol
240 -Wunknown-pragmas -Wunreachable-code @gol
242 -fshort-double -fshort-wchar @gol
243 -fverbose-asm -fpack-struct -fstack-check @gol
244 -fstack-limit-register=@var{reg} -fstack-limit-symbol=@var{sym} @gol
245 +-fstack-protector -fstack-protector-all @gol
246 -fargument-alias -fargument-noalias @gol
247 -fargument-noalias-global -fleading-underscore @gol
248 -ftls-model=@var{model} @gol
249 @@ -3006,6 +3007,10 @@
250 complex; GCC will refuse to optimize programs when the optimization
251 itself is likely to take inordinate amounts of time.
253 +@item -Wstack-protector
254 +@opindex Wstack-protector
255 +Warn when not issuing stack smashing protection for some reason.
259 Make all warnings into errors.
260 @@ -11202,6 +11207,24 @@
261 @option{-Wl,--defsym,__stack_limit=0x7ffe0000} to enforce a stack limit
262 of 128KB@. Note that this may only work with the GNU linker.
264 +@item -fstack-protector
265 +@item -fstack-protector-all
266 +@opindex fstack-protector
267 +@opindex fstack-protector-all
268 +@opindex fno-stack-protector
269 +Generate code to protect an application from a stack smashing
270 +attack. The features are (1) the insertion of random value next to the
271 +frame pointer to detect the integrity of the stack, (2) the reordering
272 +of local variables to place buffers after pointers to avoid the
273 +corruption of pointers that could be used to further corrupt arbitrary
274 +memory locations, (3) the copying of pointers in function arguments to
275 +an area preceding local variable buffers to prevent the corruption of
276 +pointers that could be used to further corrupt arbitrary memory
277 +locations, and the (4) omission of instrumentation code from some
278 +functions to decrease the performance overhead. If the integrity
279 +check fails, the program is aborted. If stack-protector-all is
280 +specified, instrumentation code is generated for every function.
282 @cindex aliasing of parameters
283 @cindex parameters, aliased
284 @item -fargument-alias
285 diff -uNr gcc-3.4.3.orig/gcc/explow.c gcc-3.4.3/gcc/explow.c
286 --- gcc-3.4.3.orig/gcc/explow.c 2004-04-03 01:05:26.000000000 +0200
287 +++ gcc-3.4.3/gcc/explow.c 2004-11-24 18:35:31.000000000 +0100
290 int all_constant = 0;
294 + && ! (flag_propolice_protection && x == virtual_stack_vars_rtx))
303 + /* For the use of stack protection, keep the frame and offset pattern
304 + even if the offset is zero. */
306 + || (flag_propolice_protection && x == virtual_stack_vars_rtx))
307 x = gen_rtx_PLUS (mode, x, GEN_INT (c));
309 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
311 if (memory_address_p (mode, oldx))
314 + /* The stack protector keeps the addressing style of a local variable.
315 + LEGITIMIZE_ADDRESS changes the addressing to the machine-dependent
316 + style, so the protector split the frame address to a register using
318 + if (flag_propolice_protection)
320 +#define FRAMEADDR_P(X) (GET_CODE (X) == PLUS \
321 + && XEXP (X, 0) == virtual_stack_vars_rtx \
322 + && GET_CODE (XEXP (X, 1)) == CONST_INT)
324 + if (FRAMEADDR_P (x))
326 + for (y = x; y != 0 && GET_CODE (y) == PLUS; y = XEXP (y, 0))
328 + if (FRAMEADDR_P (XEXP (y, 0)))
329 + XEXP (y, 0) = force_reg (GET_MODE (XEXP (y, 0)), XEXP (y, 0));
330 + if (FRAMEADDR_P (XEXP (y, 1)))
331 + XEXP (y, 1) = force_reg (GET_MODE (XEXP (y, 1)), XEXP (y, 1));
334 /* Perform machine-dependent transformations on X
335 in certain cases. This is not necessary since the code
336 below can handle all possible cases, but machine-dependent
337 diff -uNr gcc-3.4.3.orig/gcc/expr.c gcc-3.4.3/gcc/expr.c
338 --- gcc-3.4.3.orig/gcc/expr.c 2004-05-27 21:35:17.000000000 +0200
339 +++ gcc-3.4.3/gcc/expr.c 2004-11-24 18:35:31.000000000 +0100
344 +#include "protector.h"
346 /* Decide whether a function's arguments should be processed
347 from first to last or from last to first.
348 @@ -1060,7 +1061,11 @@
350 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
351 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
355 + When the stack protector is used, a reverse move starts from an
356 + address that lies within the variable's region, so the first
357 + address-decrement instruction is eliminated. */
360 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
361 @@ -1123,6 +1128,8 @@
363 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
365 + if (flag_propolice_protection)
366 + len = len - GET_MODE_SIZE (mode);
367 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
368 data.autinc_from = 1;
369 data.explicit_inc_from = -1;
370 @@ -1137,6 +1144,8 @@
371 data.from_addr = copy_addr_to_reg (from_addr);
372 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
374 + if (flag_propolice_protection)
375 + len = len - GET_MODE_SIZE (mode);
376 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
378 data.explicit_inc_to = -1;
379 @@ -1280,11 +1289,15 @@
380 from1 = adjust_address (data->from, mode, data->offset);
382 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
383 - emit_insn (gen_add2_insn (data->to_addr,
384 - GEN_INT (-(HOST_WIDE_INT)size)));
385 + /* The stack protector skips the first address decrement instruction
386 + at the reverse move. */
387 + if (!flag_propolice_protection || data->explicit_inc_to < -1)
388 + emit_insn (gen_add2_insn (data->to_addr,
389 + GEN_INT (-(HOST_WIDE_INT)size)));
390 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
391 - emit_insn (gen_add2_insn (data->from_addr,
392 - GEN_INT (-(HOST_WIDE_INT)size)));
393 + if (!flag_propolice_protection || data->explicit_inc_from < -1)
394 + emit_insn (gen_add2_insn (data->from_addr,
395 + GEN_INT (-(HOST_WIDE_INT)size)));
398 emit_insn ((*genfun) (to1, from1));
399 @@ -2475,7 +2488,12 @@
401 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
403 - data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
404 + int len = data->len;
405 + /* The stack protector starts the store instruction from
406 + the address within the region of a variable. */
407 + if (flag_propolice_protection)
408 + len -= GET_MODE_SIZE (mode);
409 + data->to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
411 data->explicit_inc_to = -1;
413 @@ -2544,8 +2562,11 @@
414 to1 = adjust_address (data->to, mode, data->offset);
416 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
417 - emit_insn (gen_add2_insn (data->to_addr,
418 - GEN_INT (-(HOST_WIDE_INT) size)));
419 + /* The stack protector skips the first address decrement instruction
420 + at the reverse store. */
421 + if (!flag_propolice_protection || data->explicit_inc_to < -1)
422 + emit_insn (gen_add2_insn (data->to_addr,
423 + GEN_INT (-(HOST_WIDE_INT) size)));
425 cst = (*data->constfun) (data->constfundata, data->offset, mode);
426 emit_insn ((*genfun) (to1, cst));
427 @@ -5701,7 +5722,9 @@
428 && GET_CODE (XEXP (value, 0)) == PLUS
429 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
430 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
431 - && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
432 + && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER
433 + && (!flag_propolice_protection
434 + || XEXP (XEXP (value, 0), 0) != virtual_stack_vars_rtx))
436 rtx temp = expand_simple_binop (GET_MODE (value), code,
437 XEXP (XEXP (value, 0), 0), op2,
438 diff -uNr gcc-3.4.3.orig/gcc/flags.h gcc-3.4.3/gcc/flags.h
439 --- gcc-3.4.3.orig/gcc/flags.h 2004-11-24 18:04:19.000000000 +0100
440 +++ gcc-3.4.3/gcc/flags.h 2004-11-24 18:35:31.492689688 +0100
443 extern bool warn_strict_aliasing;
445 +/* Warn when not issuing stack smashing protection for some reason. */
447 +extern bool warn_stack_protector;
449 /* Nonzero if generating code to do profiling. */
451 extern int profile_flag;
453 #define HONOR_SIGN_DEPENDENT_ROUNDING(MODE) \
454 (MODE_HAS_SIGN_DEPENDENT_ROUNDING (MODE) && flag_rounding_math)
456 +/* Nonzero means use propolice as a stack protection method. */
458 +extern int flag_propolice_protection;
460 +/* Nonzero means use a stack protection method for every function. */
462 +extern int flag_stack_protection;
464 #endif /* ! GCC_FLAGS_H */
465 diff -uNr gcc-3.4.3.orig/gcc/function.c gcc-3.4.3/gcc/function.c
466 --- gcc-3.4.3.orig/gcc/function.c 2004-10-14 01:18:13.000000000 +0200
467 +++ gcc-3.4.3/gcc/function.c 2004-11-24 18:35:31.542682088 +0100
469 #include "integrate.h"
470 #include "langhooks.h"
472 +#include "protector.h"
474 #ifndef TRAMPOLINE_ALIGNMENT
475 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
477 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
479 static GTY(()) varray_type sibcall_epilogue;
481 +/* Current boundary mark for character arrays. */
482 +static int temp_boundary_mark = 0;
485 /* In order to evaluate some expressions, such as function calls returning
486 structures in memory, we need to temporarily allocate stack locations.
488 /* The size of the slot, including extra space for alignment. This
489 info is for combine_temp_slots. */
490 HOST_WIDE_INT full_size;
491 + /* Boundary mark of a character array and the others. This info is for propolice. */
495 /* This structure is used to record MEMs or pseudos used to replace VAR, any
497 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
498 if we are to allocate something at an inner level to be treated as
499 a variable in the block (e.g., a SAVE_EXPR).
500 + KEEP is 5 if we allocate a place to return structure.
502 TYPE is the type that will be used for the stack slot. */
506 struct temp_slot *p, *best_p = 0;
508 + int char_array = (flag_propolice_protection
509 + && keep == 1 && search_string_def (type));
511 /* If SIZE is -1 it means that somebody tried to allocate a temporary
512 of a variable size. */
515 && objects_must_conflict_p (p->type, type)
516 && (best_p == 0 || best_p->size > p->size
517 - || (best_p->size == p->size && best_p->align > p->align)))
518 + || (best_p->size == p->size && best_p->align > p->align))
519 + && (! char_array || p->boundary_mark != 0))
521 if (p->align == align && p->size == size)
526 p->type = best_p->type;
527 + p->boundary_mark = best_p->boundary_mark;
528 p->next = temp_slots;
532 p->full_size = frame_offset - frame_offset_old;
535 + p->boundary_mark = char_array ? ++temp_boundary_mark : 0;
536 p->next = temp_slots;
539 @@ -932,14 +945,16 @@
541 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
543 - if (p->base_offset + p->full_size == q->base_offset)
544 + if (p->base_offset + p->full_size == q->base_offset &&
545 + p->boundary_mark == q->boundary_mark)
547 /* Q comes after P; combine Q into P. */
549 p->full_size += q->full_size;
552 - else if (q->base_offset + q->full_size == p->base_offset)
553 + else if (q->base_offset + q->full_size == p->base_offset &&
554 + p->boundary_mark == q->boundary_mark)
556 /* P comes after Q; combine P into Q. */
558 @@ -1449,7 +1464,9 @@
562 - new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
564 + assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func)
565 + : assign_stack_local_for_pseudo_reg (decl_mode, GET_MODE_SIZE (decl_mode), 0);
568 PUT_MODE (reg, decl_mode);
569 @@ -3937,10 +3954,13 @@
572 /* Otherwise copy the new constant into a register and replace
573 - constant with that register. */
574 + constant with that register.
575 + At the use of stack protection, stop to replace the frame
576 + offset with a register. */
577 temp = gen_reg_rtx (Pmode);
579 - if (validate_change (object, &XEXP (x, 1), temp, 0))
580 + if (validate_change (object, &XEXP (x, 1), temp, 0)
581 + && !flag_propolice_protection)
582 emit_insn_before (gen_move_insn (temp, new_offset), object);
585 diff -uNr gcc-3.4.3.orig/gcc/gcse.c gcc-3.4.3/gcc/gcse.c
586 --- gcc-3.4.3.orig/gcc/gcse.c 2004-10-30 20:02:53.000000000 +0200
587 +++ gcc-3.4.3/gcc/gcse.c 2004-11-24 18:35:31.583675856 +0100
588 @@ -4176,9 +4176,13 @@
591 /* Find an assignment that sets reg_used and is available
592 - at the start of the block. */
593 + at the start of the block.
595 + Skip the copy propagation not to eliminate the register that is
596 + the duplicated pointer of a function argument. It is used for
597 + the function argument protection. */
598 set = find_avail_set (regno, insn);
600 + if (! set || SET_VOLATILE_P (set->expr))
604 diff -uNr gcc-3.4.3.orig/gcc/integrate.c gcc-3.4.3/gcc/integrate.c
605 --- gcc-3.4.3.orig/gcc/integrate.c 2004-01-24 00:36:00.000000000 +0100
606 +++ gcc-3.4.3/gcc/integrate.c 2004-11-24 18:35:31.603672816 +0100
608 /* These args would always appear unused, if not for this. */
609 TREE_USED (copy) = 1;
611 + /* The inlined variable is marked as INLINE not to change the location
612 + by stack protector. */
613 + if (flag_propolice_protection && TREE_CODE (copy) == VAR_DECL)
614 + DECL_COPIED (copy) = 1;
616 /* Set the context for the new declaration. */
617 if (!DECL_CONTEXT (decl))
618 /* Globals stay global. */
619 @@ -1970,6 +1975,12 @@
623 +#ifdef ARGS_GROWS_DOWNWARD
624 + /* Mark this pointer as the top of the argument
625 + block. The pointer minus one is in the block. */
626 + if (flag_propolice_protection && GET_CODE (seq) == SET)
627 + RTX_INTEGRATED_P (SET_SRC (seq)) = 1;
629 emit_insn_after (seq, map->insns_at_start);
632 diff -uNr gcc-3.4.3.orig/gcc/libgcc2.c gcc-3.4.3/gcc/libgcc2.c
633 --- gcc-3.4.3.orig/gcc/libgcc2.c 2004-09-26 22:47:14.000000000 +0200
634 +++ gcc-3.4.3/gcc/libgcc2.c 2004-11-24 18:35:31.627669168 +0100
635 @@ -1678,3 +1678,124 @@
636 #endif /* no INIT_SECTION_ASM_OP and not CTOR_LISTS_DEFINED_EXTERNALLY */
640 +#ifdef L_stack_smash_handler
641 +#ifndef _LIBC_PROVIDES_SSP_
647 +#ifdef _POSIX_SOURCE
651 +#if defined(HAVE_SYSLOG)
652 +#include <sys/types.h>
653 +#include <sys/socket.h>
656 +#include <sys/syslog.h>
658 +#define _PATH_LOG "/dev/log"
662 +long __guard[8] = {0, 0, 0, 0, 0, 0, 0, 0};
663 +static void __guard_setup (void) __attribute__ ((constructor));
666 +__guard_setup (void)
669 + if (__guard[0] != 0)
671 + fd = open ("/dev/urandom", 0);
673 + ssize_t size = read (fd, (char*)&__guard, sizeof(__guard));
675 + if (size == sizeof(__guard))
678 + /* If a random generator can't be used, the protector switches the guard
679 + to the "terminator canary". */
680 + ((char*)__guard)[0] = 0;
681 + ((char*)__guard)[1] = 0;
682 + ((char*)__guard)[2] = '\n';
683 + ((char*)__guard)[3] = 255;
686 +extern void __stack_smash_handler (char func[], ATTRIBUTE_UNUSED int damaged);
688 +__stack_smash_handler (char func[], ATTRIBUTE_UNUSED int damaged)
690 +#if defined (__GNU_LIBRARY__)
691 + extern char * __progname;
693 + const char message[] = ": stack smashing attack in function ";
694 + int bufsz = 256, len;
696 +#if defined(HAVE_SYSLOG)
698 + struct sockaddr_un sys_log_addr; /* AF_UNIX address of local logger. */
700 +#ifdef _POSIX_SOURCE
703 + sigfillset (&mask);
704 + /* Block all signal handlers except SIGABRT. */
705 + sigdelset (&mask, SIGABRT);
706 + sigprocmask (SIG_BLOCK, &mask, NULL);
710 + /* send LOG_CRIT. */
711 + strcpy (buf, "<2>"); len=3;
712 +#if defined (__GNU_LIBRARY__)
713 + strncat (buf, __progname, bufsz - len - 1);
714 + len = strlen (buf);
718 + strncat (buf, message, bufsz - len - 1);
719 + len = strlen (buf);
723 + strncat (buf, func, bufsz - len - 1);
724 + len = strlen (buf);
727 + /* Print error message. */
728 + write (STDERR_FILENO, buf + 3, len - 3);
729 +#if defined(HAVE_SYSLOG)
730 + if ((log_file = socket (AF_UNIX, SOCK_DGRAM, 0)) != -1)
733 + /* Send "found" message to the "/dev/log" path. */
734 + sys_log_addr.sun_family = AF_UNIX;
735 + (void)strncpy (sys_log_addr.sun_path, _PATH_LOG,
736 + sizeof (sys_log_addr.sun_path) - 1);
737 + sys_log_addr.sun_path[sizeof (sys_log_addr.sun_path) - 1] = '\0';
738 + sendto(log_file, buf, len, 0, (struct sockaddr *)&sys_log_addr,
739 + sizeof (sys_log_addr));
743 +#ifdef _POSIX_SOURCE
745 + /* Make sure the default handler is associated with SIGABRT. */
746 + struct sigaction sa;
748 + memset (&sa, 0, sizeof(struct sigaction));
749 + sigfillset (&sa.sa_mask); /* Block all signals. */
751 + sa.sa_handler = SIG_DFL;
752 + sigaction (SIGABRT, &sa, NULL);
753 + (void)kill (getpid(), SIGABRT);
758 +#endif /* _LIBC_PROVIDES_SSP_ */
759 +#endif /* L_stack_smash_handler */
760 diff -uNr gcc-3.4.3.orig/gcc/libgcc-std.ver gcc-3.4.3/gcc/libgcc-std.ver
761 --- gcc-3.4.3.orig/gcc/libgcc-std.ver 2004-09-01 21:14:33.000000000 +0200
762 +++ gcc-3.4.3/gcc/libgcc-std.ver 2004-11-24 18:35:31.620670232 +0100
764 _Unwind_SjLj_RaiseException
765 _Unwind_SjLj_ForcedUnwind
768 +%if !defined(_LIBC_PROVIDES_SSP_)
769 + # stack smash handler symbols
771 + __stack_smash_handler
775 %inherit GCC_3.3 GCC_3.0
776 diff -uNr gcc-3.4.3.orig/gcc/loop.c gcc-3.4.3/gcc/loop.c
777 --- gcc-3.4.3.orig/gcc/loop.c 2004-07-13 17:29:08.000000000 +0200
778 +++ gcc-3.4.3/gcc/loop.c 2004-11-24 18:35:31.680661112 +0100
779 @@ -6514,6 +6514,14 @@
780 if (GET_CODE (*mult_val) == USE)
781 *mult_val = XEXP (*mult_val, 0);
783 +#ifndef FRAME_GROWS_DOWNWARD
784 + if (flag_propolice_protection
785 + && GET_CODE (*add_val) == PLUS
786 + && (XEXP (*add_val, 0) == frame_pointer_rtx
787 + || XEXP (*add_val, 1) == frame_pointer_rtx))
792 *pbenefit += address_cost (orig_x, addr_mode) - reg_address_cost;
794 diff -uNr gcc-3.4.3.orig/gcc/Makefile.in gcc-3.4.3/gcc/Makefile.in
795 --- gcc-3.4.3.orig/gcc/Makefile.in 2004-11-24 18:04:18.000000000 +0100
796 +++ gcc-3.4.3/gcc/Makefile.in 2004-11-24 18:35:31.038758696 +0100
798 sibcall.o simplify-rtx.o sreal.o stmt.o stor-layout.o stringpool.o \
799 targhooks.o timevar.o toplev.o tracer.o tree.o tree-dump.o unroll.o \
800 varasm.o varray.o version.o vmsdbgout.o xcoffout.o alloc-pool.o \
801 - et-forest.o cfghooks.o bt-load.o pretty-print.o $(GGC) web.o
802 + et-forest.o cfghooks.o bt-load.o pretty-print.o $(GGC) web.o protector.o
804 OBJS-md = $(out_object_file)
805 OBJS-archive = $(EXTRA_OBJS) $(host_hook_obj) hashtable.o tree-inline.o \
806 @@ -1549,7 +1549,7 @@
807 langhooks.h insn-flags.h cfglayout.h real.h cfgloop.h \
808 hosthooks.h $(LANGHOOKS_DEF_H) cgraph.h $(COVERAGE_H) alloc-pool.h
809 $(CC) $(ALL_CFLAGS) $(ALL_CPPFLAGS) $(INCLUDES) \
810 - -DTARGET_NAME=\"$(target_noncanonical)\" \
811 + -DTARGET_NAME=\"$(target_noncanonical)\" @ENABLESSP@ \
812 -c $(srcdir)/toplev.c $(OUTPUT_OPTION)
813 main.o : main.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) toplev.h
815 @@ -1852,6 +1852,10 @@
816 params.o : params.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(PARAMS_H) toplev.h
817 hooks.o: hooks.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(HOOKS_H)
818 pretty-print.o: $(CONFIG_H) $(SYSTEM_H) pretty-print.c $(PRETTY_PRINT_H)
819 +protector.o : protector.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
820 + flags.h function.h $(EXPR_H) $(OPTABS_H) $(REGS_H) toplev.h hard-reg-set.h \
821 + insn-config.h insn-flags.h $(RECOG_H) output.h toplev.h except.h reload.h \
822 + $(TM_P_H) conditions.h $(INSN_ATTR_H) real.h protector.h
824 $(out_object_file): $(out_file) $(CONFIG_H) coretypes.h $(TM_H) $(TREE_H) $(GGC_H) \
825 $(RTL_H) $(REGS_H) hard-reg-set.h real.h insn-config.h conditions.h \
826 diff -uNr gcc-3.4.3.orig/gcc/mklibgcc.in gcc-3.4.3/gcc/mklibgcc.in
827 --- gcc-3.4.3.orig/gcc/mklibgcc.in 2004-10-18 18:00:43.000000000 +0200
828 +++ gcc-3.4.3/gcc/mklibgcc.in 2004-11-24 18:35:31.699658224 +0100
830 _enable_execute_stack _trampoline __main _absvsi2 _absvdi2 _addvsi3
831 _addvdi3 _subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors
832 _ffssi2 _ffsdi2 _clz _clzsi2 _clzdi2 _ctzsi2 _ctzdi2 _popcount_tab
833 - _popcountsi2 _popcountdi2 _paritysi2 _paritydi2'
834 + _popcountsi2 _popcountdi2 _paritysi2 _paritydi2 _stack_smash_handler'
836 # Disable SHLIB_LINK if shared libgcc not enabled.
837 if [ "@enable_shared@" = "no" ]; then
838 diff -uNr gcc-3.4.3.orig/gcc/optabs.c gcc-3.4.3/gcc/optabs.c
839 --- gcc-3.4.3.orig/gcc/optabs.c 2004-03-03 01:45:01.000000000 +0100
840 +++ gcc-3.4.3/gcc/optabs.c 2004-11-24 18:35:31.739652144 +0100
843 target = protect_from_queue (target, 1);
845 + /* Keep the frame and offset pattern at the use of stack protection. */
846 + if (flag_propolice_protection
847 + && binoptab->code == PLUS
848 + && op0 == virtual_stack_vars_rtx
849 + && GET_CODE(op1) == CONST_INT)
851 + int icode = (int) binoptab->handlers[(int) mode].insn_code;
855 + temp = gen_reg_rtx (mode);
857 + if (! (*insn_data[icode].operand[0].predicate) (temp, mode)
858 + || GET_CODE (temp) != REG)
859 + temp = gen_reg_rtx (mode);
861 + emit_insn (gen_rtx_SET (VOIDmode, temp,
862 + gen_rtx_PLUS (GET_MODE (op0), op0, op1)));
868 /* Load duplicate non-volatile operands once. */
869 diff -uNr gcc-3.4.3.orig/gcc/opts.c gcc-3.4.3/gcc/opts.c
870 --- gcc-3.4.3.orig/gcc/opts.c 2004-11-24 18:04:19.000000000 +0100
871 +++ gcc-3.4.3/gcc/opts.c 2004-11-24 18:35:31.762648648 +0100
873 bool warn_unused_variable;
874 bool warn_unused_value;
876 +/* Warn when not issuing stack smashing protection for some reason */
877 +bool warn_stack_protector;
879 /* Hack for cooperation between set_Wunused and set_Wextra. */
880 static bool maybe_warn_unused_parameter;
883 warn_unused_variable = value;
886 + case OPT_Wstack_protector:
887 + warn_stack_protector = value;
892 aux_info_file_name = arg;
893 @@ -1367,6 +1374,14 @@
894 stack_limit_rtx = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (arg));
897 + case OPT_fstack_protector:
898 + flag_propolice_protection = value;
901 + case OPT_fstack_protector_all:
902 + flag_stack_protection = value;
905 case OPT_fstrength_reduce:
906 flag_strength_reduce = value;
908 diff -uNr gcc-3.4.3.orig/gcc/protector.c gcc-3.4.3/gcc/protector.c
909 --- gcc-3.4.3.orig/gcc/protector.c 1970-01-01 01:00:00.000000000 +0100
910 +++ gcc-3.4.3/gcc/protector.c 2004-09-02 11:36:11.000000000 +0200
912 +/* RTL buffer overflow protection function for GNU C compiler
913 + Copyright (C) 2003 Free Software Foundation, Inc.
915 +This file is part of GCC.
917 +GCC is free software; you can redistribute it and/or modify it under
918 +the terms of the GNU General Public License as published by the Free
919 +Software Foundation; either version 2, or (at your option) any later
922 +GCC is distributed in the hope that it will be useful, but WITHOUT ANY
923 +WARRANTY; without even the implied warranty of MERCHANTABILITY or
924 +FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
927 +You should have received a copy of the GNU General Public License
928 +along with GCC; see the file COPYING. If not, write to the Free
929 +Software Foundation, 59 Temple Place - Suite 330, Boston, MA
932 +/* This file contains several memory arrangement functions to protect
933 + the return address and the frame pointer of the stack
934 + from a stack-smashing attack. It also
935 + provides the function that protects pointer variables. */
939 +#include "coretypes.h"
941 +#include "machmode.h"
947 +#include "insn-config.h"
948 +#include "insn-flags.h"
952 +#include "hard-reg-set.h"
954 +#include "function.h"
957 +#include "conditions.h"
958 +#include "insn-attr.h"
961 +#include "protector.h"
964 +/* Round a value to the lowest integer less than it that is a multiple of
965 + the required alignment. Avoid using division in case the value is
966 + negative. Assume the alignment is a power of two. */
967 +#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
969 +/* Similar, but round to the next highest integer that meets the
971 +#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
974 +/* Nonzero if function being compiled can define string buffers that may be
975 + damaged by the stack-smash attack. */
976 +static int current_function_defines_vulnerable_string;
977 +static int current_function_defines_short_string;
978 +static int current_function_has_variable_string;
979 +static int current_function_defines_vsized_array;
980 +static int current_function_is_inlinable;
982 +/* Nonzero if search_string_def finds the variable which contains an array. */
983 +static int is_array;
985 +/* Nonzero if search_string_def finds a byte-pointer variable,
986 + which may be assigned to alloca output. */
987 +static int may_have_alloca_pointer;
989 +static rtx guard_area, _guard;
990 +static rtx function_first_insn, prologue_insert_point;
992 +/* Offset to end of swept area for gathering character arrays. */
993 +static HOST_WIDE_INT sweep_frame_offset;
995 +/* Offset to end of allocated area for instantiating pseudo registers. */
996 +static HOST_WIDE_INT push_allocated_offset = 0;
998 +/* Offset to end of assigned area for instantiating pseudo registers. */
999 +static HOST_WIDE_INT push_frame_offset = 0;
1001 +/* Set to 1 after cse_not_expected becomes nonzero. it is used to identify
1002 + which stage assign_stack_local_for_pseudo_reg is called from. */
1003 +static int saved_cse_not_expected = 0;
1005 +static int search_string_from_argsandvars (int);
1006 +static int search_string_from_local_vars (tree);
1007 +static int search_pointer_def (tree);
1008 +static int search_func_pointer (tree);
1009 +static int check_used_flag (rtx);
1010 +static void reset_used_flags_for_insns (rtx);
1011 +static void reset_used_flags_for_decls (tree);
1012 +static void reset_used_flags_of_plus (rtx);
1013 +static void rtl_prologue (rtx);
1014 +static void rtl_epilogue (rtx);
1015 +static void arrange_var_order (tree);
1016 +static void copy_args_for_protection (void);
1017 +static void sweep_string_variable (rtx, HOST_WIDE_INT);
1018 +static void sweep_string_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
1019 +static void sweep_string_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
1020 +static void sweep_string_use_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
1021 +static void sweep_string_in_operand (rtx, rtx *, HOST_WIDE_INT, HOST_WIDE_INT);
1022 +static void move_arg_location (rtx, rtx, rtx, HOST_WIDE_INT);
1023 +static void change_arg_use_of_insns (rtx, rtx, rtx *, HOST_WIDE_INT);
1024 +static void change_arg_use_in_operand (rtx, rtx, rtx, rtx *, HOST_WIDE_INT);
1025 +static void validate_insns_of_varrefs (rtx);
1026 +static void validate_operand_of_varrefs (rtx, rtx *);
1028 +/* Specify which size of buffers should be protected from a stack smashing
1029 + attack. Because small buffers are not used in situations which may
1030 + overflow the buffer, the default size is set to the size of a 64-bit register. */
1031 +#ifndef SUSPICIOUS_BUF_SIZE
1032 +#define SUSPICIOUS_BUF_SIZE 8
1035 +#define AUTO_BASEPTR(X) \
1036 + (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
1037 +#define AUTO_OFFSET(X) \
1038 + (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
1039 +#undef PARM_PASSED_IN_MEMORY
1040 +#define PARM_PASSED_IN_MEMORY(PARM) \
1041 + (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
1042 +#define TREE_VISITED(NODE) ((NODE)->common.unused_0)
1044 +/* Argument values for calling search_string_from_argsandvars. */
1045 +#define CALL_FROM_PREPARE_STACK_PROTECTION 0
1046 +#define CALL_FROM_PUSH_FRAME 1
1049 +/* Prepare several stack protection instruments for the current function
1050 + if the function has an array as a local variable, which may be vulnerable
1051 + to a stack smashing attack, and it is not inlinable.
1053 + The overall steps are as follows;
1054 + (1)search an array,
1055 + (2)insert guard_area on the stack,
1056 + (3)duplicate pointer arguments into local variables, and
1057 + (4)arrange the location of local variables. */
1059 +prepare_stack_protection (int inlinable)
1061 + tree blocks = DECL_INITIAL (current_function_decl);
1062 + current_function_is_inlinable = inlinable && !flag_no_inline;
1063 + push_frame_offset = push_allocated_offset = 0;
1064 + saved_cse_not_expected = 0;
1066 + /* Skip the protection if the function has no block
1067 + or it is an inline function. */
1068 + if (current_function_is_inlinable)
1069 + validate_insns_of_varrefs (get_insns ());
1070 + if (! blocks || current_function_is_inlinable)
1073 + current_function_defines_vulnerable_string
1074 + = search_string_from_argsandvars (CALL_FROM_PREPARE_STACK_PROTECTION);
1076 + if (current_function_defines_vulnerable_string
1077 + || flag_stack_protection)
1079 + function_first_insn = get_insns ();
1081 + if (current_function_contains_functions)
1083 + if (warn_stack_protector)
1084 + warning ("not protecting function: it contains functions");
1088 + /* Initialize recognition, indicating that volatile is OK. */
1091 + sweep_frame_offset = 0;
1093 +#ifdef STACK_GROWS_DOWNWARD
1094 + /* frame_offset: offset to end of allocated area of stack frame.
1095 + It is defined in the function.c. */
1097 + /* the location must be before buffers. */
1098 + guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
1099 + PUT_MODE (guard_area, GUARD_m);
1100 + MEM_VOLATILE_P (guard_area) = 1;
1102 +#ifndef FRAME_GROWS_DOWNWARD
1103 + sweep_frame_offset = frame_offset;
1106 + /* For making room for guard value, scan all insns and fix the offset
1107 + address of the variable that is based on frame pointer.
1108 + Scan all declarations of variables and fix the offset address
1109 + of the variable that is based on the frame pointer. */
1110 + sweep_string_variable (guard_area, UNITS_PER_GUARD);
1113 + /* the location of guard area moves to the beginning of stack frame. */
1114 + if (AUTO_OFFSET(XEXP (guard_area, 0)))
1115 + XEXP (XEXP (guard_area, 0), 1)
1116 + = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
1119 + /* Insert prologue rtl instructions. */
1120 + rtl_prologue (function_first_insn);
1122 + if (! current_function_has_variable_string)
1124 + /* Generate argument saving instruction. */
1125 + copy_args_for_protection ();
1127 +#ifndef FRAME_GROWS_DOWNWARD
1128 + /* If frame grows upward, character arrays for protecting args
1129 + may copy to the top of the guard variable.
1130 + So sweep the guard variable again. */
1131 + sweep_frame_offset = CEIL_ROUND (frame_offset,
1132 + BIGGEST_ALIGNMENT / BITS_PER_UNIT);
1133 + sweep_string_variable (guard_area, UNITS_PER_GUARD);
1136 + /* Variable can't be protected from the overflow of variable length
1137 + buffer. But variable reordering is still effective against
1138 + the overflow of fixed size character arrays. */
1139 + else if (warn_stack_protector)
1140 + warning ("not protecting variables: it has a variable length buffer");
1142 +#ifndef FRAME_GROWS_DOWNWARD
1143 + if (STARTING_FRAME_OFFSET == 0)
1145 + /* This part may be only for alpha. */
1146 + push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1147 + assign_stack_local (BLKmode, push_allocated_offset, -1);
1148 + sweep_frame_offset = frame_offset;
1149 + sweep_string_variable (const0_rtx, -push_allocated_offset);
1150 + sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
1154 + /* Arrange the order of local variables. */
1155 + arrange_var_order (blocks);
1157 +#ifdef STACK_GROWS_DOWNWARD
1158 + /* Insert epilogue rtl instructions. */
1159 + rtl_epilogue (get_last_insn ());
1161 + init_recog_no_volatile ();
1163 + else if (current_function_defines_short_string
1164 + && warn_stack_protector)
1165 + warning ("not protecting function: buffer is less than %d bytes long",
1166 + SUSPICIOUS_BUF_SIZE);
1170 + Search string from arguments and local variables.
1171 + caller: CALL_FROM_PREPARE_STACK_PROTECTION (0)
1172 + CALL_FROM_PUSH_FRAME (1)
1175 +search_string_from_argsandvars (int caller)
1177 + tree blocks, parms;
1180 + /* Saves the latest search result as cached information. */
1181 + static tree __latest_search_decl = 0;
1182 + static int __latest_search_result = FALSE;
1184 + if (__latest_search_decl == current_function_decl)
1185 + return __latest_search_result;
1187 + if (caller == CALL_FROM_PUSH_FRAME)
1190 + __latest_search_decl = current_function_decl;
1191 + __latest_search_result = TRUE;
1193 + current_function_defines_short_string = FALSE;
1194 + current_function_has_variable_string = FALSE;
1195 + current_function_defines_vsized_array = FALSE;
1196 + may_have_alloca_pointer = FALSE;
1198 + /* Search a string variable from local variables. */
1199 + blocks = DECL_INITIAL (current_function_decl);
1200 + string_p = search_string_from_local_vars (blocks);
1202 + if (! current_function_defines_vsized_array
1203 + && may_have_alloca_pointer
1204 + && current_function_calls_alloca)
1206 + current_function_has_variable_string = TRUE;
1213 +#ifdef STACK_GROWS_DOWNWARD
1214 + /* Search a string variable from arguments. */
1215 + parms = DECL_ARGUMENTS (current_function_decl);
1217 + for (; parms; parms = TREE_CHAIN (parms))
1218 + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1220 + if (PARM_PASSED_IN_MEMORY (parms))
1222 + string_p = search_string_def (TREE_TYPE(parms));
1229 + __latest_search_result = FALSE;
1234 +/* Search string from local variables in the specified scope. */
1236 +search_string_from_local_vars (tree block)
1239 + int found = FALSE;
1241 + while (block && TREE_CODE(block)==BLOCK)
1243 + for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
1245 + /* Skip the declaration that refers to an external variable. */
1246 + /* name: types.decl.name.identifier.id */
1247 + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
1248 + && TREE_CODE (types) == VAR_DECL
1249 + && ! DECL_ARTIFICIAL (types)
1250 + && DECL_RTL_SET_P (types)
1251 + && GET_CODE (DECL_RTL (types)) == MEM
1253 + && search_string_def (TREE_TYPE (types)))
1255 + rtx home = DECL_RTL (types);
1257 + if (GET_CODE (home) == MEM
1258 + && (GET_CODE (XEXP (home, 0)) == MEM
1259 + || (GET_CODE (XEXP (home, 0)) == REG
1260 + && XEXP (home, 0) != virtual_stack_vars_rtx
1261 + && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
1262 + && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
1263 +#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1264 + && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
1267 + /* If the value is indirect by memory or by a register
1268 + that isn't the frame pointer then it means the object is
1269 + variable-sized and address through
1270 + that register or stack slot.
1271 + The protection has no way to hide pointer variables
1272 + behind the array, so all we can do is preserve
1273 + the order of variables and arguments. */
1275 + current_function_has_variable_string = TRUE;
1278 + /* Found character array. */
1283 + if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
1288 + block = BLOCK_CHAIN (block);
1295 +/* Search a character array from the specified type tree. */
1297 +search_string_def (tree type)
1304 + switch (TREE_CODE (type))
1307 + /* Check if the array is a variable-sized array. */
1308 + if (TYPE_DOMAIN (type) == 0
1309 + || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1310 + && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
1311 + current_function_defines_vsized_array = TRUE;
1313 + /* Check if the array is related to char array. */
1314 + if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
1315 + || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
1316 + || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
1318 + /* Check if the string is a variable string. */
1319 + if (TYPE_DOMAIN (type) == 0
1320 + || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1321 + && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
1324 + /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE. */
1325 + if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1326 + && (TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1
1327 + >= SUSPICIOUS_BUF_SIZE))
1330 + current_function_defines_short_string = TRUE;
1333 + /* To protect all functions, sweep any arrays to the frame top. */
1336 + return search_string_def(TREE_TYPE(type));
1339 + case QUAL_UNION_TYPE:
1341 + /* Check if each field has character arrays. */
1342 + for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1344 + /* Omit here local type decls until we know how to support them. */
1345 + if ((TREE_CODE (tem) == TYPE_DECL)
1346 + || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
1349 + if (search_string_def(TREE_TYPE(tem)))
1354 + case POINTER_TYPE:
1355 + /* Check if pointer variables, which may be a pointer assigned
1356 + by alloca function call, are declared. */
1357 + if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
1358 + || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
1359 + || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
1360 + may_have_alloca_pointer = TRUE;
1363 + case REFERENCE_TYPE:
1373 +/* Examine whether the input contains frame pointer addressing. */
1375 +contains_fp (rtx op)
1377 + enum rtx_code code;
1386 + code = GET_CODE (x);
1391 + case CONST_DOUBLE:
1400 + /* This case is not generated at the stack protection.
1401 + see plus_constant_wide and simplify_plus_minus function. */
1402 + if (XEXP (x, 0) == virtual_stack_vars_rtx)
1406 + if (XEXP (x, 0) == virtual_stack_vars_rtx
1407 + && GET_CODE (XEXP (x, 1)) == CONST_INT)
1414 + /* Scan all subexpressions. */
1415 + fmt = GET_RTX_FORMAT (code);
1416 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1419 + if (contains_fp (XEXP (x, i)))
1422 + else if (*fmt == 'E')
1423 + for (j = 0; j < XVECLEN (x, i); j++)
1424 + if (contains_fp (XVECEXP (x, i, j)))
1431 +/* Examine whether the input contains any pointer. */
1433 +search_pointer_def (tree type)
1440 + switch (TREE_CODE (type))
1443 + case QUAL_UNION_TYPE:
1445 + /* Check if each field has a pointer. */
1446 + for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1448 + if ((TREE_CODE (tem) == TYPE_DECL)
1449 + || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
1452 + if (search_pointer_def (TREE_TYPE(tem)))
1458 + return search_pointer_def (TREE_TYPE(type));
1460 + case POINTER_TYPE:
1461 + case REFERENCE_TYPE:
1463 + if (TYPE_READONLY (TREE_TYPE (type)))
1465 + /* If this pointer contains function pointer,
1466 + it should be protected. */
1467 + return search_func_pointer (TREE_TYPE (type));
1479 +/* Examine whether the input contains function pointer. */
1481 +search_func_pointer (tree type)
1488 + switch (TREE_CODE (type))
1491 + case QUAL_UNION_TYPE:
1493 + if (! TREE_VISITED (type))
1495 + /* Mark the type as having been visited already. */
1496 + TREE_VISITED (type) = 1;
1498 + /* Check if each field has a function pointer. */
1499 + for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1501 + if (TREE_CODE (tem) == FIELD_DECL
1502 + && search_func_pointer (TREE_TYPE(tem)))
1504 + TREE_VISITED (type) = 0;
1509 + TREE_VISITED (type) = 0;
1514 + return search_func_pointer (TREE_TYPE(type));
1516 + case POINTER_TYPE:
1517 + case REFERENCE_TYPE:
1519 + if (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
1521 + return search_func_pointer (TREE_TYPE(type));
1531 +/* Check whether the specified rtx contains PLUS rtx with used flag. */
1533 +check_used_flag (rtx x)
1535 + register int i, j;
1536 + register enum rtx_code code;
1537 + register const char *format_ptr;
1542 + code = GET_CODE (x);
1549 + case CONST_DOUBLE:
1564 + format_ptr = GET_RTX_FORMAT (code);
1565 + for (i = 0; i < GET_RTX_LENGTH (code); i++)
1567 + switch (*format_ptr++)
1570 + if (check_used_flag (XEXP (x, i)))
1575 + for (j = 0; j < XVECLEN (x, i); j++)
1576 + if (check_used_flag (XVECEXP (x, i, j)))
1586 +/* Reset the used flag of every insn after the specified insn. */
1588 +reset_used_flags_for_insns (rtx insn)
1591 + enum rtx_code code;
1592 + const char *format_ptr;
1594 + for (; insn; insn = NEXT_INSN (insn))
1595 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1596 + || GET_CODE (insn) == CALL_INSN)
1598 + code = GET_CODE (insn);
1600 + format_ptr = GET_RTX_FORMAT (code);
1602 + for (i = 0; i < GET_RTX_LENGTH (code); i++)
1604 + switch (*format_ptr++)
1607 + reset_used_flags_of_plus (XEXP (insn, i));
1611 + for (j = 0; j < XVECLEN (insn, i); j++)
1612 + reset_used_flags_of_plus (XVECEXP (insn, i, j));
1620 +/* Reset used flag of every variables in the specified block. */
1622 +reset_used_flags_for_decls (tree block)
1627 + while (block && TREE_CODE(block)==BLOCK)
1629 + types = BLOCK_VARS(block);
1631 + for (types= BLOCK_VARS(block); types; types = TREE_CHAIN(types))
1633 + /* Skip the declaration that refers to an external variable and
1634 + also skip a global variable. */
1635 + if (! DECL_EXTERNAL (types))
1637 + if (! DECL_RTL_SET_P (types))
1639 + home = DECL_RTL (types);
1641 + if (GET_CODE (home) == MEM
1642 + && GET_CODE (XEXP (home, 0)) == PLUS
1643 + && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
1645 + XEXP (home, 0)->used = 0;
1650 + reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
1652 + block = BLOCK_CHAIN (block);
1657 +/* Reset the used flag of every PLUS rtx derived from the specified rtx. */
1659 +reset_used_flags_of_plus (rtx x)
1662 + enum rtx_code code;
1663 + const char *format_ptr;
1668 + code = GET_CODE (x);
1672 + /* These types may be freely shared so we needn't do any resetting
1677 + case CONST_DOUBLE:
1690 + /* The chain of insns is not being copied. */
1697 + case CALL_PLACEHOLDER:
1698 + reset_used_flags_for_insns (XEXP (x, 0));
1699 + reset_used_flags_for_insns (XEXP (x, 1));
1700 + reset_used_flags_for_insns (XEXP (x, 2));
1707 + format_ptr = GET_RTX_FORMAT (code);
1708 + for (i = 0; i < GET_RTX_LENGTH (code); i++)
1710 + switch (*format_ptr++)
1713 + reset_used_flags_of_plus (XEXP (x, i));
1717 + for (j = 0; j < XVECLEN (x, i); j++)
1718 + reset_used_flags_of_plus (XVECEXP (x, i, j));
1725 +/* Generate the prologue insns of the protector into the specified insn. */
1727 +rtl_prologue (rtx insn)
1729 +#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
1730 +#undef HAS_INIT_SECTION
1731 +#define HAS_INIT_SECTION
1736 + for (; insn; insn = NEXT_INSN (insn))
1737 + if (GET_CODE (insn) == NOTE
1738 + && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1741 +#if !defined (HAS_INIT_SECTION)
1742 + /* If this function is `main', skip a call to `__main'
1743 + to run guard instruments after global initializers, etc. */
1744 + if (DECL_NAME (current_function_decl)
1745 + && MAIN_NAME_P (DECL_NAME (current_function_decl))
1746 + && DECL_CONTEXT (current_function_decl) == NULL_TREE)
1748 + rtx fbinsn = insn;
1749 + for (; insn; insn = NEXT_INSN (insn))
1750 + if (GET_CODE (insn) == NOTE
1751 + && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
1758 + /* Mark the next insn of FUNCTION_BEG insn. */
1759 + prologue_insert_point = NEXT_INSN (insn);
1761 + start_sequence ();
1763 + _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
1764 + emit_move_insn ( guard_area, _guard);
1766 + _val = get_insns ();
1769 + emit_insn_before (_val, prologue_insert_point);
1773 +/* Generate the epilogue insns of the protector into the specified insn. */
1775 +rtl_epilogue (rtx insn)
1777 + rtx if_false_label;
1781 + int flag_have_return = FALSE;
1783 + start_sequence ();
1789 + return_label = gen_label_rtx ();
1791 + for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
1792 + if (GET_CODE (insn) == JUMP_INSN
1793 + && GET_CODE (PATTERN (insn)) == RETURN
1794 + && GET_MODE (PATTERN (insn)) == VOIDmode)
1796 + rtx pat = gen_rtx_SET (VOIDmode,
1798 + gen_rtx_LABEL_REF (VOIDmode,
1800 + PATTERN (insn) = pat;
1801 + flag_have_return = TRUE;
1805 + emit_label (return_label);
1809 + /* if (guard_area != _guard) */
1810 + compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX);
1812 + if_false_label = gen_label_rtx (); /* { */
1813 + emit_jump_insn ( gen_beq(if_false_label));
1815 + /* generate string for the current function name */
1816 + funcstr = build_string (strlen(current_function_name ())+1,
1817 + current_function_name ());
1818 + TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);
1819 + funcname = output_constant_def (funcstr, 1);
1821 + emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__stack_smash_handler"),
1823 + XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
1825 + /* generate RTL to return from the current function */
1827 + emit_barrier (); /* } */
1828 + emit_label (if_false_label);
1830 + /* generate RTL to return from the current function */
1831 + if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
1832 + use_return_register ();
1835 + if (HAVE_return && flag_have_return)
1837 + emit_jump_insn (gen_return ());
1842 + _val = get_insns ();
1845 + emit_insn_after (_val, insn);
1849 +/* For every variable whose type is a character array, move its location
1850 + in the stack frame to the sweep_frame_offset position. */
1852 +arrange_var_order (tree block)
1855 + HOST_WIDE_INT offset;
1857 + while (block && TREE_CODE(block)==BLOCK)
1859 + /* arrange the location of character arrays in depth first. */
1860 + arrange_var_order (BLOCK_SUBBLOCKS (block));
1862 + for (types = BLOCK_VARS (block); types; types = TREE_CHAIN(types))
1864 + /* Skip the declaration that refers to an external variable. */
1865 + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
1866 + && TREE_CODE (types) == VAR_DECL
1867 + && ! DECL_ARTIFICIAL (types)
1868 + && DECL_RTL_SET_P (types)
1869 + && GET_CODE (DECL_RTL (types)) == MEM
1870 + && GET_MODE (DECL_RTL (types)) == BLKmode
1873 + search_string_def (TREE_TYPE (types))
1874 + || (! current_function_defines_vulnerable_string && is_array)))
1876 + rtx home = DECL_RTL (types);
1878 + if (!(GET_CODE (home) == MEM
1879 + && (GET_CODE (XEXP (home, 0)) == MEM
1880 + || (GET_CODE (XEXP (home, 0)) == REG
1881 + && XEXP (home, 0) != virtual_stack_vars_rtx
1882 + && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
1883 + && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
1884 +#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1885 + && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
1889 + /* Found a string variable. */
1890 + HOST_WIDE_INT var_size =
1891 + ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
1894 + /* Confirmed it is BLKmode. */
1895 + int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1896 + var_size = CEIL_ROUND (var_size, alignment);
1898 + /* Skip the variable if it is top of the region
1899 + specified by sweep_frame_offset. */
1900 + offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
1901 + if (offset == sweep_frame_offset - var_size)
1902 + sweep_frame_offset -= var_size;
1904 + else if (offset < sweep_frame_offset - var_size)
1905 + sweep_string_variable (DECL_RTL (types), var_size);
1910 + block = BLOCK_CHAIN (block);
1915 +/* To protect every pointer argument and move character arrays in the argument,
1916 + Copy those variables to the top of the stack frame and move the location of
1917 + character arrays to the position of sweep_frame_offset. */
1919 +copy_args_for_protection (void)
1921 + tree parms = DECL_ARGUMENTS (current_function_decl);
1924 + parms = DECL_ARGUMENTS (current_function_decl);
1925 + for (; parms; parms = TREE_CHAIN (parms))
1926 + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1928 + if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1933 + string_p = search_string_def (TREE_TYPE(parms));
1935 + /* Check if it is a candidate to move. */
1936 + if (string_p || search_pointer_def (TREE_TYPE (parms)))
1939 + = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
1941 + tree passed_type = DECL_ARG_TYPE (parms);
1942 + tree nominal_type = TREE_TYPE (parms);
1944 + start_sequence ();
1946 + if (GET_CODE (DECL_RTL (parms)) == REG)
1950 + change_arg_use_of_insns (prologue_insert_point,
1951 + DECL_RTL (parms), &safe, 0);
1954 + /* Generate codes for copying the content. */
1955 + rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
1957 + /* Avoid register elimination in gcse.c. */
1958 + PATTERN (movinsn)->volatil = 1;
1960 + /* Save debugger info. */
1961 + SET_DECL_RTL (parms, safe);
1964 + else if (GET_CODE (DECL_RTL (parms)) == MEM
1965 + && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
1968 + rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
1970 + /* Generate codes for copying the content. */
1971 + movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
1972 + /* Avoid register elimination in gcse.c. */
1973 + PATTERN (movinsn)->volatil = 1;
1975 + /* Change the addressof information to the newly
1976 + allocated pseudo register. */
1977 + emit_move_insn (DECL_RTL (parms), safe);
1979 + /* Save debugger info. */
1980 + SET_DECL_RTL (parms, safe);
1983 + /* See if the frontend wants to pass this by invisible
1985 + else if (passed_type != nominal_type
1986 + && POINTER_TYPE_P (passed_type)
1987 + && TREE_TYPE (passed_type) == nominal_type)
1989 + rtx safe = 0, orig = XEXP (DECL_RTL (parms), 0);
1991 + change_arg_use_of_insns (prologue_insert_point,
1995 + /* Generate codes for copying the content. */
1996 + rtx movinsn = emit_move_insn (safe, orig);
1998 + /* Avoid register elimination in gcse.c */
1999 + PATTERN (movinsn)->volatil = 1;
2001 + /* Save debugger info. */
2002 + SET_DECL_RTL (parms, safe);
2008 + /* Declare temporary local variable for parms. */
2010 + = assign_stack_local (DECL_MODE (parms), arg_size,
2011 + DECL_MODE (parms) == BLKmode ?
2014 + MEM_IN_STRUCT_P (temp_rtx)
2015 + = AGGREGATE_TYPE_P (TREE_TYPE (parms));
2016 + set_mem_alias_set (temp_rtx, get_alias_set (parms));
2018 + /* Generate codes for copying the content. */
2019 + store_expr (parms, temp_rtx, 0);
2021 + /* Change the reference for each instructions. */
2022 + move_arg_location (prologue_insert_point, DECL_RTL (parms),
2023 + temp_rtx, arg_size);
2025 + /* Change the location of parms variable. */
2026 + SET_DECL_RTL (parms, temp_rtx);
2029 + seq = get_insns ();
2031 + emit_insn_before (seq, prologue_insert_point);
2033 +#ifdef FRAME_GROWS_DOWNWARD
2034 + /* Process the string argument. */
2035 + if (string_p && DECL_MODE (parms) == BLKmode)
2037 + int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2038 + arg_size = CEIL_ROUND (arg_size, alignment);
2040 + /* Change the reference for each instructions. */
2041 + sweep_string_variable (DECL_RTL (parms), arg_size);
2050 +/* Sweep a string variable to the position of sweep_frame_offset in the
2051 + stack frame, that is the last position of string variables. */
2053 +sweep_string_variable (rtx sweep_var, HOST_WIDE_INT var_size)
2055 + HOST_WIDE_INT sweep_offset;
2057 + switch (GET_CODE (sweep_var))
2060 + if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
2061 + && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
2063 + sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
2066 + sweep_offset = INTVAL (sweep_var);
2072 + /* Scan all declarations of variables and fix the offset address of
2073 + the variable based on the frame pointer. */
2074 + sweep_string_in_decls (DECL_INITIAL (current_function_decl),
2075 + sweep_offset, var_size);
2077 + /* Scan all argument variable and fix the offset address based on
2078 + the frame pointer. */
2079 + sweep_string_in_args (DECL_ARGUMENTS (current_function_decl),
2080 + sweep_offset, var_size);
2082 + /* For making room for sweep variable, scan all insns and
2083 + fix the offset address of the variable that is based on frame pointer. */
2084 + sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
2087 + /* Clear all the USED bits in operands of all insns and declarations of
2088 + local variables. */
2089 + reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
2090 + reset_used_flags_for_insns (function_first_insn);
2092 + sweep_frame_offset -= var_size;
2097 +/* Move an argument to the local variable addressed by frame_offset. */
2099 +move_arg_location (rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size)
2101 + /* For making room for sweep variable, scan all insns and
2102 + fix the offset address of the variable that is based on frame pointer. */
2103 + change_arg_use_of_insns (insn, orig, &new, var_size);
2106 + /* Clear all the USED bits in operands of all insns and declarations
2107 + of local variables. */
2108 + reset_used_flags_for_insns (insn);
2112 +/* Sweep character arrays declared as local variable. */
2114 +sweep_string_in_decls (tree block, HOST_WIDE_INT sweep_offset,
2115 + HOST_WIDE_INT sweep_size)
2118 + HOST_WIDE_INT offset;
2121 + while (block && TREE_CODE(block)==BLOCK)
2123 + for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
2125 + /* Skip the declaration that refers to an external variable and
2126 + also skip a global variable. */
2127 + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
2129 + if (! DECL_RTL_SET_P (types))
2132 + home = DECL_RTL (types);
2134 + /* Process for static local variable. */
2135 + if (GET_CODE (home) == MEM
2136 + && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
2139 + if (GET_CODE (home) == MEM
2140 + && XEXP (home, 0) == virtual_stack_vars_rtx)
2144 + /* the operand related to the sweep variable. */
2145 + if (sweep_offset <= offset
2146 + && offset < sweep_offset + sweep_size)
2148 + offset = sweep_frame_offset - sweep_size - sweep_offset;
2150 + XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
2152 + XEXP (home, 0)->used = 1;
2154 + else if (sweep_offset <= offset
2155 + && offset < sweep_frame_offset)
2157 + /* the rest of variables under sweep_frame_offset,
2158 + shift the location. */
2159 + XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
2161 + XEXP (home, 0)->used = 1;
2165 + if (GET_CODE (home) == MEM
2166 + && GET_CODE (XEXP (home, 0)) == MEM)
2168 + /* Process for dynamically allocated array. */
2169 + home = XEXP (home, 0);
2172 + if (GET_CODE (home) == MEM
2173 + && GET_CODE (XEXP (home, 0)) == PLUS
2174 + && XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
2175 + && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
2177 + if (! XEXP (home, 0)->used)
2179 + offset = AUTO_OFFSET(XEXP (home, 0));
2181 + /* the operand related to the sweep variable. */
2182 + if (sweep_offset <= offset
2183 + && offset < sweep_offset + sweep_size)
2187 + += sweep_frame_offset - sweep_size - sweep_offset;
2188 + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2192 + XEXP (home, 0)->used = 1;
2194 + else if (sweep_offset <= offset
2195 + && offset < sweep_frame_offset)
2197 + /* the rest of variables under sweep_frame_offset,
2198 + so shift the location. */
2200 + XEXP (XEXP (home, 0), 1)
2201 + = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
2204 + XEXP (home, 0)->used = 1;
2211 + sweep_string_in_decls (BLOCK_SUBBLOCKS (block),
2212 + sweep_offset, sweep_size);
2214 + block = BLOCK_CHAIN (block);
2219 +/* Sweep character arrays declared as argument. */
2221 +sweep_string_in_args (tree parms, HOST_WIDE_INT sweep_offset,
2222 + HOST_WIDE_INT sweep_size)
2225 + HOST_WIDE_INT offset;
2227 + for (; parms; parms = TREE_CHAIN (parms))
2228 + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
2230 + if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
2232 + home = DECL_INCOMING_RTL (parms);
2234 + if (XEXP (home, 0)->used)
2237 + offset = AUTO_OFFSET(XEXP (home, 0));
2239 + /* the operand related to the sweep variable. */
2240 + if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
2242 + if (sweep_offset <= offset
2243 + && offset < sweep_offset + sweep_size)
2245 + offset += sweep_frame_offset - sweep_size - sweep_offset;
2246 + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2250 + XEXP (home, 0)->used = 1;
2252 + else if (sweep_offset <= offset
2253 + && offset < sweep_frame_offset)
2255 + /* the rest of variables under sweep_frame_offset,
2256 + shift the location. */
2257 + XEXP (XEXP (home, 0), 1)
2258 + = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
2261 + XEXP (home, 0)->used = 1;
2269 +/* Set to 1 when the instruction contains virtual registers. */
2270 +static int has_virtual_reg;
2272 +/* Sweep the specified character array for all insns. The array starts from
2273 + the sweep_offset and its size is sweep_size. */
2275 +sweep_string_use_of_insns (rtx insn, HOST_WIDE_INT sweep_offset,
2276 + HOST_WIDE_INT sweep_size)
2278 + for (; insn; insn = NEXT_INSN (insn))
2279 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2280 + || GET_CODE (insn) == CALL_INSN)
2282 + has_virtual_reg = FALSE;
2283 + sweep_string_in_operand (insn, &PATTERN (insn),
2284 + sweep_offset, sweep_size);
2285 + sweep_string_in_operand (insn, ®_NOTES (insn),
2286 + sweep_offset, sweep_size);
2291 +/* Sweep the specified character array, which starts from the sweep_offset and
2292 + its size is sweep_size.
2294 + When a pointer is given,
2295 + if it points the address higher than the array, it stays.
2296 + if it points the address inside the array, it changes to point inside
2297 + the swept array.
2298 + if it points the address lower than the array, it shifts higher address by
2299 + the sweep_size. */
2301 +sweep_string_in_operand (rtx insn, rtx *loc,
2302 + HOST_WIDE_INT sweep_offset, HOST_WIDE_INT sweep_size)
2305 + enum rtx_code code;
2307 + HOST_WIDE_INT offset;
2313 + code = GET_CODE (x);
2318 + case CONST_DOUBLE:
2326 + case ADDR_DIFF_VEC:
2332 + if (x == virtual_incoming_args_rtx
2333 + || x == virtual_stack_vars_rtx
2334 + || x == virtual_stack_dynamic_rtx
2335 + || x == virtual_outgoing_args_rtx
2336 + || x == virtual_cfa_rtx)
2337 + has_virtual_reg = TRUE;
2342 + skip setjmp setup insn and setjmp restore insn
2344 + (set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
2345 + (set (virtual_stack_vars_rtx) (REG))
2347 + if (GET_CODE (XEXP (x, 0)) == MEM
2348 + && XEXP (x, 1) == virtual_stack_vars_rtx)
2350 + if (XEXP (x, 0) == virtual_stack_vars_rtx
2351 + && GET_CODE (XEXP (x, 1)) == REG)
2356 + /* Handle typical case of frame register plus constant. */
2357 + if (XEXP (x, 0) == virtual_stack_vars_rtx
2358 + && GET_CODE (XEXP (x, 1)) == CONST_INT)
2361 + goto single_use_of_virtual_reg;
2363 + offset = AUTO_OFFSET(x);
2365 + /* When arguments grow downward, the virtual incoming
2366 + args pointer points to the top of the argument block,
2367 + so block is identified by the pointer - 1.
2368 + The flag is set at the copy_rtx_and_substitute in integrate.c */
2369 + if (RTX_INTEGRATED_P (x))
2372 + /* the operand related to the sweep variable. */
2373 + if (sweep_offset <= offset + k
2374 + && offset + k < sweep_offset + sweep_size)
2376 + offset += sweep_frame_offset - sweep_size - sweep_offset;
2378 + XEXP (x, 0) = virtual_stack_vars_rtx;
2379 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2382 + else if (sweep_offset <= offset + k
2383 + && offset + k < sweep_frame_offset)
2385 + /* the rest of variables under sweep_frame_offset,
2386 + shift the location. */
2387 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
2391 + single_use_of_virtual_reg:
2392 + if (has_virtual_reg) {
2393 + /* excerpt from insn_invalid_p in recog.c */
2394 + int icode = recog_memoized (insn);
2396 + if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
2400 + start_sequence ();
2401 + temp = force_operand (x, NULL_RTX);
2402 + seq = get_insns ();
2405 + emit_insn_before (seq, insn);
2406 + if (! validate_change (insn, loc, temp, 0)
2407 + && !validate_replace_rtx (x, temp, insn))
2408 + fatal_insn ("sweep_string_in_operand", insn);
2412 + has_virtual_reg = TRUE;
2416 +#ifdef FRAME_GROWS_DOWNWARD
2417 + /* Alert the case of frame register plus constant given by reg. */
2418 + else if (XEXP (x, 0) == virtual_stack_vars_rtx
2419 + && GET_CODE (XEXP (x, 1)) == REG)
2420 + fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
2424 + process further subtree:
2425 + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2430 + case CALL_PLACEHOLDER:
2431 + for (i = 0; i < 3; i++)
2433 + rtx seq = XEXP (x, i);
2436 + push_to_sequence (seq);
2437 + sweep_string_use_of_insns (XEXP (x, i),
2438 + sweep_offset, sweep_size);
2439 + XEXP (x, i) = get_insns ();
2449 + /* Scan all subexpressions. */
2450 + fmt = GET_RTX_FORMAT (code);
2451 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2455 + virtual_stack_vars_rtx without offset
2457 + (set (reg:SI xx) (reg:SI 78))
2458 + (set (reg:SI xx) (MEM (reg:SI 78)))
2460 + if (XEXP (x, i) == virtual_stack_vars_rtx)
2461 + fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
2462 + sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
2464 + else if (*fmt == 'E')
2465 + for (j = 0; j < XVECLEN (x, i); j++)
2466 + sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
2470 +/* Change the use of an argument to the use of the duplicated variable for
2471 + every insns, The variable is addressed by new rtx. */
2473 +change_arg_use_of_insns (rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size)
2475 + for (; insn; insn = NEXT_INSN (insn))
2476 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2477 + || GET_CODE (insn) == CALL_INSN)
2481 + start_sequence ();
2482 + change_arg_use_in_operand (insn, PATTERN (insn), orig, new, size);
2484 + seq = get_insns ();
2486 + emit_insn_before (seq, insn);
2488 + /* load_multiple insn from virtual_incoming_args_rtx have several
2489 + load insns. If every insn change the load address of arg
2490 + to frame region, those insns are moved before the PARALLEL insn
2491 + and remove the PARALLEL insn. */
2492 + if (GET_CODE (PATTERN (insn)) == PARALLEL
2493 + && XVECLEN (PATTERN (insn), 0) == 0)
2494 + delete_insn (insn);
2499 +/* Change the use of an argument to the use of the duplicated variable for
2500 + every rtx derived from the x. */
2502 +change_arg_use_in_operand (rtx insn, rtx x, rtx orig, rtx *new, HOST_WIDE_INT size)
2504 + enum rtx_code code;
2506 + HOST_WIDE_INT offset;
2512 + code = GET_CODE (x);
2517 + case CONST_DOUBLE:
2525 + case ADDR_DIFF_VEC:
2532 + /* Handle special case of MEM (incoming_args). */
2533 + if (GET_CODE (orig) == MEM
2534 + && XEXP (x, 0) == virtual_incoming_args_rtx)
2538 + /* the operand related to the sweep variable. */
2539 + if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2540 + offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
2542 + offset = AUTO_OFFSET(XEXP (*new, 0))
2543 + + (offset - AUTO_OFFSET(XEXP (orig, 0)));
2545 + XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
2546 + XEXP (x, 0)->used = 1;
2554 + /* Handle special case of frame register plus constant. */
2555 + if (GET_CODE (orig) == MEM
2556 + && XEXP (x, 0) == virtual_incoming_args_rtx
2557 + && GET_CODE (XEXP (x, 1)) == CONST_INT
2560 + offset = AUTO_OFFSET(x);
2562 + /* the operand related to the sweep variable. */
2563 + if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2564 + offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
2567 + offset = (AUTO_OFFSET(XEXP (*new, 0))
2568 + + (offset - AUTO_OFFSET(XEXP (orig, 0))));
2570 + XEXP (x, 0) = virtual_stack_vars_rtx;
2571 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2578 + process further subtree:
2579 + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2586 + /* Handle special case of "set (REG or MEM) (incoming_args)".
2587 + It means that the address of the 1st argument is stored. */
2588 + if (GET_CODE (orig) == MEM
2589 + && XEXP (x, 1) == virtual_incoming_args_rtx)
2593 + /* the operand related to the sweep variable. */
2594 + if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2595 + offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
2597 + offset = (AUTO_OFFSET(XEXP (*new, 0))
2598 + + (offset - AUTO_OFFSET(XEXP (orig, 0))));
2600 + XEXP (x, 1) = force_operand (plus_constant (virtual_stack_vars_rtx,
2601 + offset), NULL_RTX);
2602 + XEXP (x, 1)->used = 1;
2609 + case CALL_PLACEHOLDER:
2610 + for (i = 0; i < 3; i++)
2612 + rtx seq = XEXP (x, i);
2615 + push_to_sequence (seq);
2616 + change_arg_use_of_insns (XEXP (x, i), orig, new, size);
2617 + XEXP (x, i) = get_insns ();
2624 + for (j = 0; j < XVECLEN (x, 0); j++)
2626 + change_arg_use_in_operand (insn, XVECEXP (x, 0, j), orig, new, size);
2628 + if (recog_memoized (insn) < 0)
2630 + for (i = 0, j = 0; j < XVECLEN (x, 0); j++)
2632 + /* If a parallel insn has an insn using virtual_incoming_args_rtx,
2633 + the insn is removed from this PARALLEL insn. */
2634 + if (check_used_flag (XVECEXP (x, 0, j)))
2636 + emit_insn (XVECEXP (x, 0, j));
2637 + XVECEXP (x, 0, j) = NULL;
2640 + XVECEXP (x, 0, i++) = XVECEXP (x, 0, j);
2642 + PUT_NUM_ELEM (XVEC (x, 0), i);
2650 + /* Scan all subexpressions. */
2651 + fmt = GET_RTX_FORMAT (code);
2652 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2655 + if (XEXP (x, i) == orig)
2658 + *new = gen_reg_rtx (GET_MODE (orig));
2659 + XEXP (x, i) = *new;
2662 + change_arg_use_in_operand (insn, XEXP (x, i), orig, new, size);
2664 + else if (*fmt == 'E')
2665 + for (j = 0; j < XVECLEN (x, i); j++)
2667 + if (XVECEXP (x, i, j) == orig)
2670 + *new = gen_reg_rtx (GET_MODE (orig));
2671 + XVECEXP (x, i, j) = *new;
2674 + change_arg_use_in_operand (insn, XVECEXP (x, i, j), orig, new, size);
2679 +/* Validate every instruction from the specified instruction.
2681 + The stack protector prohibits to generate machine specific frame addressing
2682 + for the first rtl generation. The prepare_stack_protection must convert
2683 + machine independent frame addressing to machine specific frame addressing,
2684 + so instructions for inline functions, which skip the conversion of
2685 + the stack protection, must validate every instruction. */
2687 +validate_insns_of_varrefs (rtx insn)
2691 + /* Initialize recognition, indicating that volatile is OK. */
2694 + for (; insn; insn = next)
2696 + next = NEXT_INSN (insn);
2697 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2698 + || GET_CODE (insn) == CALL_INSN)
2700 + /* excerpt from insn_invalid_p in recog.c */
2701 + int icode = recog_memoized (insn);
2703 + if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
2704 + validate_operand_of_varrefs (insn, &PATTERN (insn));
2708 + init_recog_no_volatile ();
2712 +/* Validate frame addressing of the rtx and convert it to a machine specific one. */
2714 +validate_operand_of_varrefs (rtx insn, rtx *loc)
2716 + enum rtx_code code;
2725 + code = GET_CODE (x);
2731 + case CONST_DOUBLE:
2739 + case ADDR_DIFF_VEC:
2746 + /* validate insn of frame register plus constant. */
2747 + if (GET_CODE (x) == PLUS
2748 + && XEXP (x, 0) == virtual_stack_vars_rtx
2749 + && GET_CODE (XEXP (x, 1)) == CONST_INT)
2751 + start_sequence ();
2753 + { /* excerpt from expand_binop in optabs.c */
2754 + optab binoptab = add_optab;
2755 + enum machine_mode mode = GET_MODE (x);
2756 + int icode = (int) binoptab->handlers[(int) mode].insn_code;
2757 + enum machine_mode mode1 = insn_data[icode].operand[2].mode;
2759 + rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
2760 + temp = gen_reg_rtx (mode);
2762 + /* Now, if insn's predicates don't allow offset operands,
2763 + put them into pseudo regs. */
2765 + if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
2766 + && mode1 != VOIDmode)
2767 + xop1 = copy_to_mode_reg (mode1, xop1);
2769 + pat = GEN_FCN (icode) (temp, xop0, xop1);
2773 + abort (); /* there must be add_optab handler. */
2775 + seq = get_insns ();
2778 + emit_insn_before (seq, insn);
2779 + if (! validate_change (insn, loc, temp, 0))
2786 + case CALL_PLACEHOLDER:
2787 + for (i = 0; i < 3; i++)
2789 + rtx seq = XEXP (x, i);
2792 + push_to_sequence (seq);
2793 + validate_insns_of_varrefs (XEXP (x, i));
2794 + XEXP (x, i) = get_insns ();
2804 + /* Scan all subexpressions. */
2805 + fmt = GET_RTX_FORMAT (code);
2806 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2808 + validate_operand_of_varrefs (insn, &XEXP (x, i));
2809 + else if (*fmt == 'E')
2810 + for (j = 0; j < XVECLEN (x, i); j++)
2811 + validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
2816 +/* Return size that is not allocated for stack frame. It will be allocated
2817 + to modify the home of pseudo registers called from global_alloc. */
2819 +get_frame_free_size (void)
2821 + if (! flag_propolice_protection)
2824 + return push_allocated_offset - push_frame_offset;
2828 +/* The following codes are invoked after the instantiation of pseudo registers.
2830 + Reorder local variables to place a pseudo register after buffers to avoid
2831 + the corruption of local variables that could be used to further corrupt
2832 + arbitrary memory locations. */
2833 +#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2834 +static void push_frame (HOST_WIDE_INT, HOST_WIDE_INT);
2835 +static void push_frame_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
2836 +static void push_frame_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
2837 +static void push_frame_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
2838 +static void push_frame_in_operand (rtx, rtx, HOST_WIDE_INT, HOST_WIDE_INT);
2839 +static void push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT, HOST_WIDE_INT);
2840 +static void push_frame_of_reg_equiv_constant (HOST_WIDE_INT, HOST_WIDE_INT);
2841 +static void reset_used_flags_for_push_frame (void);
2842 +static int check_out_of_frame_access (rtx, HOST_WIDE_INT);
2843 +static int check_out_of_frame_access_in_operand (rtx, HOST_WIDE_INT);
2847 +/* Assign stack local at the stage of register allocator. If a pseudo reg is
2848 + spilled out from such an allocation, it is allocated on the stack.
2849 + The protector keeps the location in a lower stack region than the location of
2850 + swept arrays. */
2852 +assign_stack_local_for_pseudo_reg (enum machine_mode mode,
2853 + HOST_WIDE_INT size, int align)
2855 +#if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
2856 + return assign_stack_local (mode, size, align);
2858 + tree blocks = DECL_INITIAL (current_function_decl);
2860 + HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
2861 + int first_call_from_purge_addressof, first_call_from_global_alloc;
2863 + if (! flag_propolice_protection
2866 + || current_function_is_inlinable
2867 + || ! search_string_from_argsandvars (CALL_FROM_PUSH_FRAME)
2868 + || current_function_contains_functions)
2869 + return assign_stack_local (mode, size, align);
2871 + first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
2872 + first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
2873 + saved_cse_not_expected = cse_not_expected;
2875 + starting_frame = ((STARTING_FRAME_OFFSET)
2876 + ? STARTING_FRAME_OFFSET : BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2877 + units_per_push = MAX (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2878 + GET_MODE_SIZE (mode));
2880 + if (first_call_from_purge_addressof)
2882 + push_frame_offset = push_allocated_offset;
2883 + if (check_out_of_frame_access (get_insns (), starting_frame))
2885 + /* After the purge_addressof stage, there may be an instruction which
2886 + has a pointer less than the starting_frame.
2887 + If there is an access below frame, push dummy region to separate
2888 + the address of instantiated variables. */
2889 + push_frame (GET_MODE_SIZE (DImode), 0);
2890 + assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2894 + if (first_call_from_global_alloc)
2896 + push_frame_offset = push_allocated_offset = 0;
2897 + if (check_out_of_frame_access (get_insns (), starting_frame))
2899 + if (STARTING_FRAME_OFFSET)
2901 + /* if there is an access below frame, push dummy region
2902 + to separate the address of instantiated variables. */
2903 + push_frame (GET_MODE_SIZE (DImode), 0);
2904 + assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2907 + push_allocated_offset = starting_frame;
2911 + saved_frame_offset = frame_offset;
2912 + frame_offset = push_frame_offset;
2914 + new = assign_stack_local (mode, size, align);
2916 + push_frame_offset = frame_offset;
2917 + frame_offset = saved_frame_offset;
2919 + if (push_frame_offset > push_allocated_offset)
2921 + push_frame (units_per_push,
2922 + push_allocated_offset + STARTING_FRAME_OFFSET);
2924 + assign_stack_local (BLKmode, units_per_push, -1);
2925 + push_allocated_offset += units_per_push;
2928 + /* At the second call from global alloc, alpha push frame and assign
2929 + a local variable to the top of the stack. */
2930 + if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
2931 + push_frame_offset = push_allocated_offset = 0;
2938 +#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2940 +/* Push frame information for instantiating pseudo register at the top of stack.
2941 + This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is
2944 + It is called by purge_addressof function and global_alloc (or reload)
2947 +push_frame (HOST_WIDE_INT var_size, HOST_WIDE_INT boundary)
2949 + reset_used_flags_for_push_frame();
2951 + /* Scan all declarations of variables and fix the offset address of
2952 + the variable based on the frame pointer. */
2953 + push_frame_in_decls (DECL_INITIAL (current_function_decl),
2954 + var_size, boundary);
2956 + /* Scan all argument variable and fix the offset address based on
2957 + the frame pointer. */
2958 + push_frame_in_args (DECL_ARGUMENTS (current_function_decl),
2959 + var_size, boundary);
2961 + /* Scan all operands of all insns and fix the offset address
2962 + based on the frame pointer. */
2963 + push_frame_of_insns (get_insns (), var_size, boundary);
2965 + /* Scan all reg_equiv_memory_loc and reg_equiv_constant. */
2966 + push_frame_of_reg_equiv_memory_loc (var_size, boundary);
2967 + push_frame_of_reg_equiv_constant (var_size, boundary);
2969 + reset_used_flags_for_push_frame();
2973 +/* Reset used flag of every insns, reg_equiv_memory_loc,
2974 + and reg_equiv_constant. */
2976 +reset_used_flags_for_push_frame(void)
2979 + extern rtx *reg_equiv_memory_loc;
2980 + extern rtx *reg_equiv_constant;
2982 + /* Clear all the USED bits in operands of all insns and declarations of
2984 + reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
2985 + reset_used_flags_for_insns (get_insns ());
2988 + /* The following codes are processed if the push_frame is called from
2989 + global_alloc (or reload) function. */
2990 + if (reg_equiv_memory_loc == 0)
2993 + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2994 + if (reg_equiv_memory_loc[i])
2996 + rtx x = reg_equiv_memory_loc[i];
2998 + if (GET_CODE (x) == MEM
2999 + && GET_CODE (XEXP (x, 0)) == PLUS
3000 + && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
3003 + XEXP (x, 0)->used = 0;
3008 + if (reg_equiv_constant == 0)
3011 + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
3012 + if (reg_equiv_constant[i])
3014 + rtx x = reg_equiv_constant[i];
3016 + if (GET_CODE (x) == PLUS
3017 + && AUTO_BASEPTR (x) == frame_pointer_rtx)
3026 +/* Push every variables declared as a local variable and make a room for
3027 + instantiated register. */
3029 +push_frame_in_decls (tree block, HOST_WIDE_INT push_size,
3030 + HOST_WIDE_INT boundary)
3033 + HOST_WIDE_INT offset;
3036 + while (block && TREE_CODE(block)==BLOCK)
3038 + for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
3040 + /* Skip the declaration that refers an external variable and
3041 + also skip an global variable. */
3042 + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
3044 + if (! DECL_RTL_SET_P (types))
3047 + home = DECL_RTL (types);
3049 + /* Process for static local variable. */
3050 + if (GET_CODE (home) == MEM
3051 + && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
3054 + if (GET_CODE (home) == MEM
3055 + && GET_CODE (XEXP (home, 0)) == REG)
3057 + if (XEXP (home, 0) != frame_pointer_rtx
3061 + XEXP (home, 0) = plus_constant (frame_pointer_rtx,
3065 + XEXP (home, 0)->used = 1;
3068 + if (GET_CODE (home) == MEM
3069 + && GET_CODE (XEXP (home, 0)) == MEM)
3071 + /* Process for dynamically allocated array. */
3072 + home = XEXP (home, 0);
3075 + if (GET_CODE (home) == MEM
3076 + && GET_CODE (XEXP (home, 0)) == PLUS
3077 + && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
3079 + offset = AUTO_OFFSET(XEXP (home, 0));
3081 + if (! XEXP (home, 0)->used
3082 + && offset >= boundary)
3084 + offset += push_size;
3085 + XEXP (XEXP (home, 0), 1)
3086 + = gen_rtx_CONST_INT (VOIDmode, offset);
3089 + XEXP (home, 0)->used = 1;
3095 + push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
3096 + block = BLOCK_CHAIN (block);
3101 +/* Push every variables declared as an argument and make a room for
3102 + instantiated register. */
3104 +push_frame_in_args (tree parms, HOST_WIDE_INT push_size,
3105 + HOST_WIDE_INT boundary)
3108 + HOST_WIDE_INT offset;
3110 + for (; parms; parms = TREE_CHAIN (parms))
3111 + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
3113 + if (PARM_PASSED_IN_MEMORY (parms))
3115 + home = DECL_INCOMING_RTL (parms);
3116 + offset = AUTO_OFFSET(XEXP (home, 0));
3118 + if (XEXP (home, 0)->used || offset < boundary)
3121 + /* the operand related to the sweep variable. */
3122 + if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
3124 + if (XEXP (home, 0) == frame_pointer_rtx)
3125 + XEXP (home, 0) = plus_constant (frame_pointer_rtx,
3128 + offset += push_size;
3129 + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
3134 + XEXP (home, 0)->used = 1;
3141 +/* Set to 1 when the instruction has the reference to be pushed. */
3142 +static int insn_pushed;
3144 +/* Tables of equivalent registers with frame pointer. */
3145 +static int *fp_equiv = 0;
3148 +/* Push the frame region to make a room for allocated local variable. */
3150 +push_frame_of_insns (rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
3152 + /* init fp_equiv */
3153 + fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
3155 + for (; insn; insn = NEXT_INSN (insn))
3156 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3157 + || GET_CODE (insn) == CALL_INSN)
3161 + insn_pushed = FALSE;
3163 + /* Push frame in INSN operation. */
3164 + push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
3166 + /* Push frame in NOTE. */
3167 + push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
3169 + /* Push frame in CALL EXPR_LIST. */
3170 + if (GET_CODE (insn) == CALL_INSN)
3171 + push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn),
3172 + push_size, boundary);
3174 + /* Pushed frame addressing style may not be machine specific one.
3175 + so the instruction should be converted to use the machine specific
3176 + frame addressing. */
3178 + && (last = try_split (PATTERN (insn), insn, 1)) != insn)
3180 + rtx first = NEXT_INSN (insn);
3181 + rtx trial = NEXT_INSN (first);
3182 + rtx pattern = PATTERN (trial);
3185 + /* Update REG_EQUIV info to the first splitted insn. */
3186 + if ((set = single_set (insn))
3187 + && find_reg_note (insn, REG_EQUIV, SET_SRC (set))
3188 + && GET_CODE (PATTERN (first)) == SET)
3191 + = gen_rtx_EXPR_LIST (REG_EQUIV,
3192 + SET_SRC (PATTERN (first)),
3193 + REG_NOTES (first));
3196 + /* copy the first insn of splitted insns to the original insn and
3197 + delete the first insn,
3198 + because the original insn is pointed from records:
3199 + insn_chain, reg_equiv_init, used for global_alloc. */
3200 + if (cse_not_expected)
3202 + add_insn_before (insn, first);
3204 + /* Copy the various flags, and other information. */
3205 + memcpy (insn, first, sizeof (struct rtx_def) - sizeof (rtunion));
3206 + PATTERN (insn) = PATTERN (first);
3207 + INSN_CODE (insn) = INSN_CODE (first);
3208 + LOG_LINKS (insn) = LOG_LINKS (first);
3209 + REG_NOTES (insn) = REG_NOTES (first);
3211 + /* then remove the first insn of splitted insns. */
3212 + remove_insn (first);
3213 + INSN_DELETED_P (first) = 1;
3216 + if (GET_CODE (pattern) == SET
3217 + && GET_CODE (XEXP (pattern, 0)) == REG
3218 + && GET_CODE (XEXP (pattern, 1)) == PLUS
3219 + && XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
3220 + && GET_CODE (XEXP (XEXP (pattern, 1), 1)) == CONST_INT)
3222 + rtx offset = XEXP (XEXP (pattern, 1), 1);
3223 + fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
3225 + delete_insn (trial);
3237 +/* Push the frame region by changing the operand that points the frame. */
3239 +push_frame_in_operand (rtx insn, rtx orig,
3240 + HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
3243 + enum rtx_code code;
3245 + HOST_WIDE_INT offset;
3251 + code = GET_CODE (x);
3256 + case CONST_DOUBLE:
3264 + case ADDR_DIFF_VEC:
3273 + Skip setjmp setup insn and setjmp restore insn
3275 + (set (MEM (reg:SI xx)) (frame_pointer_rtx)))
3276 + (set (frame_pointer_rtx) (REG))
3278 + if (GET_CODE (XEXP (x, 0)) == MEM
3279 + && XEXP (x, 1) == frame_pointer_rtx)
3281 + if (XEXP (x, 0) == frame_pointer_rtx
3282 + && GET_CODE (XEXP (x, 1)) == REG)
3286 + powerpc case: restores setjmp address
3287 + (set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
3289 + (set (reg) (plus frame_pointer_rtx const_int -n))
3290 + (set (frame_pointer_rtx) (reg))
3292 + if (GET_CODE (XEXP (x, 0)) == REG
3293 + && GET_CODE (XEXP (x, 1)) == PLUS
3294 + && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
3295 + && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3296 + && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
3299 + offset = AUTO_OFFSET(x);
3300 + if (x->used || -offset < boundary)
3303 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
3304 + x->used = 1; insn_pushed = TRUE;
3308 + /* Reset fp_equiv register. */
3309 + else if (GET_CODE (XEXP (x, 0)) == REG
3310 + && fp_equiv[REGNO (XEXP (x, 0))])
3311 + fp_equiv[REGNO (XEXP (x, 0))] = 0;
2313 + /* Propagate fp_equiv register. */
3314 + else if (GET_CODE (XEXP (x, 0)) == REG
3315 + && GET_CODE (XEXP (x, 1)) == REG
3316 + && fp_equiv[REGNO (XEXP (x, 1))])
3317 + if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
3318 + || reg_renumber[REGNO (XEXP (x, 0))] > 0)
3319 + fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
3323 + if (XEXP (x, 0) == frame_pointer_rtx
3326 + XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
3327 + XEXP (x, 0)->used = 1; insn_pushed = TRUE;
3333 + /* Handle special case of frame register plus constant. */
3334 + if (GET_CODE (XEXP (x, 1)) == CONST_INT
3335 + && XEXP (x, 0) == frame_pointer_rtx)
3337 + offset = AUTO_OFFSET(x);
3339 + if (x->used || offset < boundary)
3342 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
3343 + x->used = 1; insn_pushed = TRUE;
3348 + Handle alpha case:
3349 + (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
3351 + if (GET_CODE (XEXP (x, 1)) == CONST_INT
3352 + && GET_CODE (XEXP (x, 0)) == SUBREG
3353 + && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
3355 + offset = AUTO_OFFSET(x);
3357 + if (x->used || offset < boundary)
3360 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
3361 + x->used = 1; insn_pushed = TRUE;
3366 + Handle powerpc case:
3367 + (set (reg x) (plus fp const))
3368 + (set (.....) (... (plus (reg x) (const B))))
3370 + else if (GET_CODE (XEXP (x, 1)) == CONST_INT
3371 + && GET_CODE (XEXP (x, 0)) == REG
3372 + && fp_equiv[REGNO (XEXP (x, 0))])
3374 + offset = AUTO_OFFSET(x);
3379 + offset += fp_equiv[REGNO (XEXP (x, 0))];
3381 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
3382 + x->used = 1; insn_pushed = TRUE;
3387 + Handle special case of frame register plus reg (constant).
3388 + (set (reg x) (const B))
3389 + (set (....) (...(plus fp (reg x))))
3391 + else if (XEXP (x, 0) == frame_pointer_rtx
3392 + && GET_CODE (XEXP (x, 1)) == REG
3393 + && PREV_INSN (insn)
3394 + && PATTERN (PREV_INSN (insn))
3395 + && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
3396 + && GET_CODE (SET_SRC (PATTERN (PREV_INSN (insn)))) == CONST_INT)
3398 + offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
3400 + if (x->used || offset < boundary)
3403 + SET_SRC (PATTERN (PREV_INSN (insn)))
3404 + = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
3406 + XEXP (x, 1)->used = 1;
3411 + Handle special case of frame register plus reg (used).
3412 + The register already have a pushed offset, just mark this frame
3415 + else if (XEXP (x, 0) == frame_pointer_rtx
3416 + && XEXP (x, 1)->used)
3422 + Process further subtree:
3423 + Example: (plus:SI (mem/s:SI (plus:SI (FP) (const_int 8)))
3428 + case CALL_PLACEHOLDER:
3429 + push_frame_of_insns (XEXP (x, 0), push_size, boundary);
3430 + push_frame_of_insns (XEXP (x, 1), push_size, boundary);
3431 + push_frame_of_insns (XEXP (x, 2), push_size, boundary);
3438 + /* Scan all subexpressions. */
3439 + fmt = GET_RTX_FORMAT (code);
3440 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3443 + if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
3444 + fatal_insn ("push_frame_in_operand", insn);
3445 + push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
3447 + else if (*fmt == 'E')
3448 + for (j = 0; j < XVECLEN (x, i); j++)
3449 + push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
3453 +/* Change the location pointed in reg_equiv_memory_loc. */
3455 +push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT push_size,
3456 + HOST_WIDE_INT boundary)
3459 + extern rtx *reg_equiv_memory_loc;
3461 + /* This function is processed if the push_frame is called from
3462 + global_alloc (or reload) function. */
3463 + if (reg_equiv_memory_loc == 0)
3466 + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
3467 + if (reg_equiv_memory_loc[i])
3469 + rtx x = reg_equiv_memory_loc[i];
3472 + if (GET_CODE (x) == MEM
3473 + && GET_CODE (XEXP (x, 0)) == PLUS
3474 + && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
3476 + offset = AUTO_OFFSET(XEXP (x, 0));
3478 + if (! XEXP (x, 0)->used
3479 + && offset >= boundary)
3481 + offset += push_size;
3482 + XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
3485 + XEXP (x, 0)->used = 1;
3488 + else if (GET_CODE (x) == MEM
3489 + && XEXP (x, 0) == frame_pointer_rtx
3492 + XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
3493 + XEXP (x, 0)->used = 1; insn_pushed = TRUE;
3499 +/* Change the location pointed in reg_equiv_constant. */
3501 +push_frame_of_reg_equiv_constant (HOST_WIDE_INT push_size,
3502 + HOST_WIDE_INT boundary)
3505 + extern rtx *reg_equiv_constant;
3507 + /* This function is processed if the push_frame is called from
3508 + global_alloc (or reload) function. */
3509 + if (reg_equiv_constant == 0)
3512 + for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
3513 + if (reg_equiv_constant[i])
3515 + rtx x = reg_equiv_constant[i];
3518 + if (GET_CODE (x) == PLUS
3519 + && XEXP (x, 0) == frame_pointer_rtx)
3521 + offset = AUTO_OFFSET(x);
3524 + && offset >= boundary)
3526 + offset += push_size;
3527 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
3533 + else if (x == frame_pointer_rtx
3536 + reg_equiv_constant[i]
3537 + = plus_constant (frame_pointer_rtx, push_size);
3538 + reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
3544 +/* Check every instruction to see if its memory reference is out of frame. */
3546 +check_out_of_frame_access (rtx insn, HOST_WIDE_INT boundary)
3548 + for (; insn; insn = NEXT_INSN (insn))
3549 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3550 + || GET_CODE (insn) == CALL_INSN)
3552 + if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
3559 +/* Check every operands if the reference is out of frame. */
3561 +check_out_of_frame_access_in_operand (rtx orig, HOST_WIDE_INT boundary)
3564 + enum rtx_code code;
3571 + code = GET_CODE (x);
3576 + case CONST_DOUBLE:
3584 + case ADDR_DIFF_VEC:
3591 + if (XEXP (x, 0) == frame_pointer_rtx)
3597 + /* Handle special case of frame register plus constant. */
3598 + if (GET_CODE (XEXP (x, 1)) == CONST_INT
3599 + && XEXP (x, 0) == frame_pointer_rtx)
3601 + if (0 <= AUTO_OFFSET(x)
3602 + && AUTO_OFFSET(x) < boundary)
3607 + Process further subtree:
3608 + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
3613 + case CALL_PLACEHOLDER:
3614 + if (check_out_of_frame_access (XEXP (x, 0), boundary))
3616 + if (check_out_of_frame_access (XEXP (x, 1), boundary))
3618 + if (check_out_of_frame_access (XEXP (x, 2), boundary))
3626 + /* Scan all subexpressions. */
3627 + fmt = GET_RTX_FORMAT (code);
3628 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3631 + if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
3634 + else if (*fmt == 'E')
3635 + for (j = 0; j < XVECLEN (x, i); j++)
3636 + if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))
3642 diff -uNr gcc-3.4.3.orig/gcc/protector.h gcc-3.4.3/gcc/protector.h
3643 --- gcc-3.4.3.orig/gcc/protector.h 1970-01-01 01:00:00.000000000 +0100
3644 +++ gcc-3.4.3/gcc/protector.h 2004-01-20 03:01:39.000000000 +0100
3646 +/* RTL buffer overflow protection function for GNU C compiler
3647 + Copyright (C) 2003 Free Software Foundation, Inc.
3649 +This file is part of GCC.
3651 +GCC is free software; you can redistribute it and/or modify it under
3652 +the terms of the GNU General Public License as published by the Free
3653 +Software Foundation; either version 2, or (at your option) any later
3656 +GCC is distributed in the hope that it will be useful, but WITHOUT ANY
3657 +WARRANTY; without even the implied warranty of MERCHANTABILITY or
3658 +FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
3661 +You should have received a copy of the GNU General Public License
3662 +along with GCC; see the file COPYING. If not, write to the Free
3663 +Software Foundation, 59 Temple Place - Suite 330, Boston, MA
3664 +02111-1307, USA. */
3667 +/* Declare GUARD variable. */
3668 +#define GUARD_m Pmode
3669 +#define UNITS_PER_GUARD \
3670 + MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT, GET_MODE_SIZE (GUARD_m))
3672 +#ifndef L_stack_smash_handler
3674 +/* Insert a guard variable before a character buffer and change the order
3675 + of pointer variables, character buffers and pointer arguments. */
3677 +extern void prepare_stack_protection (int);
3680 +/* Search a character array from the specified type tree. */
3682 +extern int search_string_def (tree);
3685 +/* Examine whether the input contains frame pointer addressing. */
3687 +extern int contains_fp (rtx);
3689 +/* Return the size that is not allocated for the stack frame.  It will be
3690 + allocated to modify the home of pseudo registers, called from global_alloc. */
3692 +extern HOST_WIDE_INT get_frame_free_size (void);
3694 +/* Allocate a local variable in the stack area before character buffers
3695 + to avoid corrupting it. */
3697 +extern rtx assign_stack_local_for_pseudo_reg (enum machine_mode,
3698 + HOST_WIDE_INT, int);
3701 diff -uNr gcc-3.4.3.orig/gcc/reload1.c gcc-3.4.3/gcc/reload1.c
3702 --- gcc-3.4.3.orig/gcc/reload1.c 2004-05-02 14:37:17.000000000 +0200
3703 +++ gcc-3.4.3/gcc/reload1.c 2004-11-24 18:35:31.812641048 +0100
3708 +#include "protector.h"
3710 /* This file contains the reload pass of the compiler, which is
3711 run after register allocation has been done. It checks that
3713 if (cfun->stack_alignment_needed)
3714 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
3716 - starting_frame_size = get_frame_size ();
3717 + starting_frame_size = get_frame_size () - get_frame_free_size ();
3719 set_initial_elim_offsets ();
3720 set_initial_label_offsets ();
3722 setup_save_areas ();
3724 /* If we allocated another stack slot, redo elimination bookkeeping. */
3725 - if (starting_frame_size != get_frame_size ())
3726 + if (starting_frame_size != get_frame_size () - get_frame_free_size ())
3729 if (caller_save_needed)
3732 /* If we allocated any new memory locations, make another pass
3733 since it might have changed elimination offsets. */
3734 - if (starting_frame_size != get_frame_size ())
3735 + if (starting_frame_size != get_frame_size () - get_frame_free_size ())
3736 something_changed = 1;
3739 @@ -1066,11 +1067,11 @@
3740 if (insns_need_reload != 0 || something_needs_elimination
3741 || something_needs_operands_changed)
3743 - HOST_WIDE_INT old_frame_size = get_frame_size ();
3744 + HOST_WIDE_INT old_frame_size = get_frame_size () - get_frame_free_size ();
3746 reload_as_needed (global);
3748 - if (old_frame_size != get_frame_size ())
3749 + if (old_frame_size != get_frame_size () - get_frame_free_size ())
3753 @@ -1957,8 +1958,10 @@
3754 inherent space, and no less total space, then the previous slot. */
3757 - /* No known place to spill from => no slot to reuse. */
3758 - x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
3759 + /* No known place to spill from => no slot to reuse.
3760 + For the stack protection, an allocated slot should be placed in
3761 + the safe region from the stack smashing attack. */
3762 + x = assign_stack_local_for_pseudo_reg (GET_MODE (regno_reg_rtx[i]), total_size,
3763 inherent_size == total_size ? 0 : -1);
3764 if (BYTES_BIG_ENDIAN)
3765 /* Cancel the big-endian correction done in assign_stack_local.
3766 diff -uNr gcc-3.4.3.orig/gcc/rtl.h gcc-3.4.3/gcc/rtl.h
3767 --- gcc-3.4.3.orig/gcc/rtl.h 2004-10-13 01:35:32.000000000 +0200
3768 +++ gcc-3.4.3/gcc/rtl.h 2004-11-24 18:35:31.830638312 +0100
3769 @@ -473,6 +473,18 @@
3773 +#define RTL_FLAG_CHECK9(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8, C9) \
3775 +({ rtx const _rtx = (RTX); \
3776 + if (GET_CODE(_rtx) != C1 && GET_CODE(_rtx) != C2 \
3777 + && GET_CODE(_rtx) != C3 && GET_CODE(_rtx) != C4 \
3778 + && GET_CODE(_rtx) != C5 && GET_CODE(_rtx) != C6 \
3779 + && GET_CODE(_rtx) != C7 && GET_CODE(_rtx) != C8 \
3780 + && GET_CODE(_rtx) != C9) \
3781 + rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__, \
3785 extern void rtl_check_failed_flag (const char *, rtx, const char *,
3789 #define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6) (RTX)
3790 #define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7) (RTX)
3791 #define RTL_FLAG_CHECK8(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8) (RTX)
3792 +#define RTL_FLAG_CHECK9(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8, C9) (RTX)
3795 #define CLEAR_RTX_FLAGS(RTX) \
3797 #define LOG_LINKS(INSN) XEXP(INSN, 7)
3799 #define RTX_INTEGRATED_P(RTX) \
3800 - (RTL_FLAG_CHECK8("RTX_INTEGRATED_P", (RTX), INSN, CALL_INSN, \
3801 + (RTL_FLAG_CHECK9("RTX_INTEGRATED_P", (RTX), INSN, CALL_INSN, \
3802 JUMP_INSN, INSN_LIST, BARRIER, CODE_LABEL, CONST, \
3803 - NOTE)->integrated)
3804 + PLUS, NOTE)->integrated)
3805 #define RTX_UNCHANGING_P(RTX) \
3806 (RTL_FLAG_CHECK3("RTX_UNCHANGING_P", (RTX), REG, MEM, CONCAT)->unchanging)
3807 #define RTX_FRAME_RELATED_P(RTX) \
3808 @@ -1125,6 +1138,10 @@
3809 (RTL_FLAG_CHECK3("MEM_VOLATILE_P", (RTX), MEM, ASM_OPERANDS, \
3810 ASM_INPUT)->volatil)
3812 +/* 1 if RTX is a SET rtx that is not eliminated for the stack protection. */
3813 +#define SET_VOLATILE_P(RTX) \
3814 + (RTL_FLAG_CHECK1("SET_VOLATILE_P", (RTX), SET)->volatil)
3816 /* 1 if RTX is a mem that refers to an aggregate, either to the
3817 aggregate itself of to a field of the aggregate. If zero, RTX may
3818 or may not be such a reference. */
3819 diff -uNr gcc-3.4.3.orig/gcc/simplify-rtx.c gcc-3.4.3/gcc/simplify-rtx.c
3820 --- gcc-3.4.3.orig/gcc/simplify-rtx.c 2004-10-10 23:53:35.000000000 +0200
3821 +++ gcc-3.4.3/gcc/simplify-rtx.c 2004-11-24 18:35:31.858634056 +0100
3822 @@ -2287,6 +2287,7 @@
3823 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts;
3826 + HOST_WIDE_INT fp_offset = 0;
3828 memset (ops, 0, sizeof ops);
3830 @@ -2312,6 +2313,10 @@
3834 + if (flag_propolice_protection
3835 + && XEXP (this_op, 0) == virtual_stack_vars_rtx
3836 + && GET_CODE (XEXP (this_op, 1)) == CONST_INT)
3837 + fp_offset = INTVAL (XEXP (this_op, 1));
3841 @@ -2473,11 +2478,24 @@
3842 && GET_CODE (ops[n_ops - 1].op) == CONST_INT
3843 && CONSTANT_P (ops[n_ops - 2].op))
3845 - rtx value = ops[n_ops - 1].op;
3846 - if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
3847 - value = neg_const_int (mode, value);
3848 - ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
3850 + if (!flag_propolice_protection)
3852 + rtx value = ops[n_ops - 1].op;
3853 + if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
3854 + value = neg_const_int (mode, value);
3855 + ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
3858 + /* The stack protector keeps the addressing style of a local variable,
3859 + so it avoids calling the neg_const_int function in order not to change
3860 + the offset value. */
3862 + HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
3863 + if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
3865 + ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, value);
3870 /* Count the number of CONSTs that we generated. */
3871 @@ -2495,6 +2513,59 @@
3872 || (n_ops + n_consts == input_ops && n_consts <= input_consts)))
3875 + if (flag_propolice_protection)
3877 + /* Keep the addressing style of local variables
3878 + as (plus (virtual_stack_vars_rtx) (CONST_int x)).
3879 + For the case array[r-1],
3880 + converts from (+ (+VFP c1) (+r -1)) to (SET R (+VFP c1)) (+ R (+r -1)).
3882 + This loop finds ops[i] which is the register for the frame
3883 + addressing, Then, makes the frame addressing using the register and
3884 + the constant of ops[n_ops - 1]. */
3885 + for (i = 0; i < n_ops; i++)
3886 +#ifdef FRAME_GROWS_DOWNWARD
3887 + if (ops[i].op == virtual_stack_vars_rtx)
3889 + if (ops[i].op == virtual_stack_vars_rtx
3890 + || ops[i].op == frame_pointer_rtx)
3893 + if (GET_CODE (ops[n_ops - 1].op) == CONST_INT)
3895 + HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
3896 + if (value >= fp_offset)
3898 + ops[i].op = plus_constant (ops[i].op, value);
3904 + && (n_ops + 1 + n_consts > input_ops
3905 + || (n_ops + 1 + n_consts == input_ops
3906 + && n_consts <= input_consts)))
3908 + ops[n_ops - 1].op = GEN_INT (value-fp_offset);
3909 + ops[i].op = plus_constant (ops[i].op, fp_offset);
3912 + /* Keep the following address patterns:
3913 + (1) buf[BUFSIZE] is the first assigned variable.
3914 + (+ (+ fp -BUFSIZE) BUFSIZE)
3915 + (2) ((+ (+ fp 1) r) -1). */
3916 + else if (fp_offset != 0)
3918 + /* Keep the (+ fp 0) pattern for the following cases:
3919 + (1) buf[i]: i: REG, buf: (+ fp 0) in !FRAME_GROWS_DOWNWARD
3920 + (2) argument: the address is (+ fp 0). */
3921 + else if (fp_offset == 0)
3928 /* Put a non-negated operand first, if possible. */
3930 for (i = 0; i < n_ops && ops[i].neg; i++)
3931 diff -uNr gcc-3.4.3.orig/gcc/testsuite/gcc.dg/ssp-warn.c gcc-3.4.3/gcc/testsuite/gcc.dg/ssp-warn.c
3932 --- gcc-3.4.3.orig/gcc/testsuite/gcc.dg/ssp-warn.c 1970-01-01 01:00:00.000000000 +0100
3933 +++ gcc-3.4.3/gcc/testsuite/gcc.dg/ssp-warn.c 2003-11-21 09:41:19.000000000 +0100
3935 +/* { dg-do compile } */
3936 +/* { dg-options "-fstack-protector" } */
3940 + void intest1(int *a)
3948 +} /* { dg-bogus "not protecting function: it contains functions" } */
3958 +} /* { dg-bogus "not protecting variables: it has a variable length buffer" } */
3966 +} /* { dg-bogus "not protecting function: buffer is less than 8 bytes long" } */
3967 diff -uNr gcc-3.4.3.orig/gcc/testsuite/gcc.misc-tests/ssp-execute1.c gcc-3.4.3/gcc/testsuite/gcc.misc-tests/ssp-execute1.c
3968 --- gcc-3.4.3.orig/gcc/testsuite/gcc.misc-tests/ssp-execute1.c 1970-01-01 01:00:00.000000000 +0100
3969 +++ gcc-3.4.3/gcc/testsuite/gcc.misc-tests/ssp-execute1.c 2004-02-16 06:15:39.000000000 +0100
3971 +/* Test location changes of character array. */
3983 + /* c1: the frame offset of buf[0]
3984 + c2: the frame offset of buf2[0]
3986 + p= &buf[0]; *p=1; /* expected rtl: (+ fp -c1) */
3989 + p= &buf[5]; *p=2; /* expected rtl: (+ fp -c1+5) */
3992 + p= &buf[-1]; *p=3; /* expected rtl: (+ (+ fp -c1) -1) */
3993 + if (*p != buf[-1])
3995 + p= &buf[49]; *p=4; /* expected rtl: (+ fp -c1+49) */
3996 + if (*p != buf[49])
3998 + p = &buf[i+5]; *p=5; /* expected rtl: (+ (+ fp -c1) (+ i 5)) */
3999 + if (*p != buf[i+5])
4001 + p = buf - 1; *p=6; /* expected rtl: (+ (+ fp -c1) -1) */
4002 + if (*p != buf[-1])
4004 + p = 1 + buf; *p=7; /* expected rtl: (+ (+ fp -c1) 1) */
4007 + p = &buf[1] - 1; *p=8; /* expected rtl: (+ (+ fp -c1+1) -1) */
4011 + /* Test a big offset which is greater than the max value of a signed 16-bit integer. */
4012 + p = &buf2[45555]; *p=9; /* expected rtl: (+ fp -c2+45555) */
4013 + if (*p != buf2[45555])
4025 diff -uNr gcc-3.4.3.orig/gcc/testsuite/gcc.misc-tests/ssp-execute2.c gcc-3.4.3/gcc/testsuite/gcc.misc-tests/ssp-execute2.c
4026 --- gcc-3.4.3.orig/gcc/testsuite/gcc.misc-tests/ssp-execute2.c 1970-01-01 01:00:00.000000000 +0100
4027 +++ gcc-3.4.3/gcc/testsuite/gcc.misc-tests/ssp-execute2.c 2003-11-22 09:44:33.000000000 +0100
4030 +test(int i, char *j, int k)
4046 + /* overflow buffer */
4047 + for (n = 0; n < 120; n++)
4050 + if (j == 0 || *j != 2)
4073 + test(i, &j[39], k);
4078 diff -uNr gcc-3.4.3.orig/gcc/testsuite/gcc.misc-tests/ssp-execute.exp gcc-3.4.3/gcc/testsuite/gcc.misc-tests/ssp-execute.exp
4079 --- gcc-3.4.3.orig/gcc/testsuite/gcc.misc-tests/ssp-execute.exp 1970-01-01 01:00:00.000000000 +0100
4080 +++ gcc-3.4.3/gcc/testsuite/gcc.misc-tests/ssp-execute.exp 2004-06-02 13:23:36.000000000 +0200
4082 +# Copyright (C) 2003, 2004 Free Software Foundation, Inc.
4084 +# This program is free software; you can redistribute it and/or modify
4085 +# it under the terms of the GNU General Public License as published by
4086 +# the Free Software Foundation; either version 2 of the License, or
4087 +# (at your option) any later version.
4089 +# This program is distributed in the hope that it will be useful,
4090 +# but WITHOUT ANY WARRANTY; without even the implied warranty of
4091 +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
4092 +# GNU General Public License for more details.
4094 +# You should have received a copy of the GNU General Public License
4095 +# along with this program; if not, write to the Free Software
4096 +# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
4098 +if $tracelevel then {
4099 + strace $tracelevel
4102 +# Load support procs.
4103 +load_lib c-torture.exp
4109 +foreach src [lsort [glob -nocomplain $srcdir/$subdir/ssp-execute*.c]] {
4110 + # If we're only testing specific files and this isn't one of them, skip it.
4111 + if ![runtest_file_p $runtests $src] then {
4115 + c-torture-execute $src -fstack-protector
4117 diff -uNr gcc-3.4.3.orig/gcc/toplev.c gcc-3.4.3/gcc/toplev.c
4118 --- gcc-3.4.3.orig/gcc/toplev.c 2004-07-26 16:42:11.000000000 +0200
4119 +++ gcc-3.4.3/gcc/toplev.c 2004-11-24 18:35:31.000000000 +0100
4121 #include "coverage.h"
4122 #include "value-prof.h"
4123 #include "alloc-pool.h"
4124 +#include "protector.h"
4126 #if defined (DWARF2_UNWIND_INFO) || defined (DWARF2_DEBUGGING_INFO)
4127 #include "dwarf2out.h"
4129 declarations for e.g. AIX 4.x. */
4132 +#ifdef STACK_PROTECTOR
4133 +#include "protector.h"
4136 #ifndef HAVE_conditional_execution
4137 #define HAVE_conditional_execution 0
4139 @@ -979,6 +984,15 @@
4140 minimum function alignment. Zero means no alignment is forced. */
4141 int force_align_functions_log;
4143 +#if defined(STACK_PROTECTOR) && defined(STACK_GROWS_DOWNWARD)
4144 +/* Nonzero means use propolice as a stack protection method.  */
4145 +int flag_propolice_protection = 1;
4146 +int flag_stack_protection = 0;
4148 +int flag_propolice_protection = 0;
4149 +int flag_stack_protection = 0;
4154 const char *const string;
4155 @@ -1154,7 +1168,9 @@
4156 {"mem-report", &mem_report, 1 },
4157 { "trapv", &flag_trapv, 1 },
4158 { "wrapv", &flag_wrapv, 1 },
4159 - { "new-ra", &flag_new_regalloc, 1 }
4160 + { "new-ra", &flag_new_regalloc, 1 },
4161 + {"stack-protector", &flag_propolice_protection, 1 },
4162 + {"stack-protector-all", &flag_stack_protection, 1 }
4165 /* Here is a table, controlled by the tm.h file, listing each -m switch
4166 @@ -2686,6 +2702,9 @@
4168 insns = get_insns ();
4170 + if (flag_propolice_protection)
4171 + prepare_stack_protection (inlinable);
4173 /* Dump the rtl code if we are dumping rtl. */
4175 if (open_dump_file (DFI_rtl, decl))
4176 @@ -4483,6 +4502,12 @@
4177 /* The presence of IEEE signaling NaNs, implies all math can trap. */
4178 if (flag_signaling_nans)
4179 flag_trapping_math = 1;
4181 + /* This combination makes optimized frame addressing and causes
4182 + an internal compilation error at prepare_stack_protection,
4183 + so don't allow it. */
4184 + if (flag_stack_protection && !flag_propolice_protection)
4185 + flag_propolice_protection = TRUE;
4188 /* Initialize the compiler back end. */
4189 diff -uNr gcc-3.4.3.orig/gcc/tree.h gcc-3.4.3/gcc/tree.h
4190 --- gcc-3.4.3.orig/gcc/tree.h 2004-11-24 18:04:19.000000000 +0100
4191 +++ gcc-3.4.3/gcc/tree.h 2004-11-24 18:35:31.000000000 +0100
4192 @@ -1489,6 +1489,10 @@
4193 where it is called. */
4194 #define DECL_INLINE(NODE) (FUNCTION_DECL_CHECK (NODE)->decl.inline_flag)
4196 +/* In a VAR_DECL, nonzero if the declaration is copied for inlining.
4197 + The stack protector should keep its location in the stack. */
4198 +#define DECL_COPIED(NODE) (VAR_DECL_CHECK (NODE)->decl.inline_flag)
4200 /* Nonzero in a FUNCTION_DECL means that this function was declared inline,
4201 such as via the `inline' keyword in C/C++. This flag controls the linkage
4202 semantics of 'inline'; whether or not the function is inlined is