1 diff -uNr gcc-3.4.0.orig/gcc/calls.c gcc-3.4.0/gcc/calls.c
2 --- gcc-3.4.0.orig/gcc/calls.c 2004-03-16 00:22:42.000000000 +0100
3 +++ gcc-3.4.0/gcc/calls.c 2004-05-13 23:59:01.343404072 +0200
6 /* For variable-sized objects, we must be called with a target
7 specified. If we were to allocate space on the stack here,
8 - we would have no way of knowing when to free it. */
9 - rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
10 + we would have no way of knowing when to free it.
12 + This is the structure of a function return object and it isn't
13 + a character array for the stack protection, so it is
14 + marked using the assignment of the KEEP argument to 5. */
15 + rtx d = assign_temp (TREE_TYPE (exp), 5, 1, 1);
17 mark_temp_addr_taken (d);
18 structure_value_addr = XEXP (d, 0);
19 diff -uNr gcc-3.4.0.orig/gcc/combine.c gcc-3.4.0/gcc/combine.c
20 --- gcc-3.4.0.orig/gcc/combine.c 2004-02-21 14:24:43.000000000 +0100
21 +++ gcc-3.4.0/gcc/combine.c 2004-05-13 23:59:01.392396624 +0200
22 @@ -1401,6 +1401,10 @@
23 && ! fixed_regs[REGNO (dest)]
24 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
26 + /* Never combine loads and stores of a protected argument whose
27 + set insn has the used flag on. */
28 + if (SET_VOLATILE_P (set))
33 @@ -3780,7 +3784,20 @@
34 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
35 rtx inner_op1 = XEXP (x, 1);
39 +#ifndef FRAME_GROWS_DOWNWARD
40 + /* For the case where the frame grows upward,
41 + the stack protector keeps the offset of the frame pointer
42 + positive integer. */
43 + if (flag_propolice_protection
45 + && other == frame_pointer_rtx
46 + && GET_CODE (inner_op0) == CONST_INT
47 + && GET_CODE (inner_op1) == CONST_INT
48 + && INTVAL (inner_op0) > 0
49 + && INTVAL (inner_op0) + INTVAL (inner_op1) <= 0)
52 /* Make sure we pass the constant operand if any as the second
53 one if this is a commutative operation. */
54 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
55 @@ -4145,6 +4162,13 @@
56 they are now checked elsewhere. */
57 if (GET_CODE (XEXP (x, 0)) == PLUS
58 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
59 +#ifndef FRAME_GROWS_DOWNWARD
60 + /* The stack protector keeps the addressing style of a local variable
61 + to be able to change its stack position. */
62 + if (! (flag_propolice_protection
63 + && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx
64 + && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
66 return gen_binary (PLUS, mode,
67 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
69 @@ -4272,8 +4296,14 @@
72 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
74 - if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
77 + The stack protector keeps the addressing style of
78 + a local variable. */
79 + if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)
80 + && (! (flag_propolice_protection
81 + && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
82 + && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)))
83 return gen_binary (MINUS, mode,
84 gen_binary (MINUS, mode, XEXP (x, 0),
85 XEXP (XEXP (x, 1), 0)),
86 diff -uNr gcc-3.4.0.orig/gcc/common.opt gcc-3.4.0/gcc/common.opt
87 --- gcc-3.4.0.orig/gcc/common.opt 2004-02-18 01:09:04.000000000 +0100
88 +++ gcc-3.4.0/gcc/common.opt 2004-05-13 23:59:01.397395864 +0200
91 Warn when a variable is unused
95 +Warn when not issuing stack smashing protection for some reason
99 -aux-info <file> Emit declaration information into <file>
102 Put zero initialized data in the bss section
106 +Enables stack protection
108 +fstack-protector-all
110 +Enables stack protection of every function
113 Common JoinedOrMissing
114 Generate debug information in default format
115 diff -uNr gcc-3.4.0.orig/gcc/config/arm/arm.md gcc-3.4.0/gcc/config/arm/arm.md
116 --- gcc-3.4.0.orig/gcc/config/arm/arm.md 2004-01-13 14:24:37.000000000 +0100
117 +++ gcc-3.4.0/gcc/config/arm/arm.md 2004-05-13 23:59:01.879322600 +0200
118 @@ -3840,7 +3840,13 @@
119 (match_operand:DI 1 "general_operand" ""))]
125 + /* Everything except mem = const or mem = mem can be done easily */
126 + if (GET_CODE (operands[0]) == MEM)
127 + operands[1] = force_reg (DImode, operands[1]);
129 + else /* TARGET_THUMB.... */
133 diff -uNr gcc-3.4.0.orig/gcc/config/t-linux gcc-3.4.0/gcc/config/t-linux
134 --- gcc-3.4.0.orig/gcc/config/t-linux 2003-09-23 20:55:57.000000000 +0200
135 +++ gcc-3.4.0/gcc/config/t-linux 2004-05-13 23:59:01.839328680 +0200
137 # Compile crtbeginS.o and crtendS.o with pic.
138 CRTSTUFF_T_CFLAGS_S = $(CRTSTUFF_T_CFLAGS) -fPIC
139 # Compile libgcc2.a with pic.
140 -TARGET_LIBGCC2_CFLAGS = -fPIC
141 +TARGET_LIBGCC2_CFLAGS = -fPIC -DHAVE_SYSLOG
143 # Override t-slibgcc-elf-ver to export some libgcc symbols with
144 # the symbol versions that glibc used.
145 diff -uNr gcc-3.4.0.orig/gcc/cse.c gcc-3.4.0/gcc/cse.c
146 --- gcc-3.4.0.orig/gcc/cse.c 2004-03-22 00:01:49.000000000 +0100
147 +++ gcc-3.4.0/gcc/cse.c 2004-05-13 23:59:01.437389784 +0200
148 @@ -4203,7 +4203,14 @@
153 +#ifndef FRAME_GROWS_DOWNWARD
154 + if (flag_propolice_protection
155 + && GET_CODE (y) == PLUS
156 + && XEXP (y, 0) == frame_pointer_rtx
157 + && INTVAL (inner_const) > 0
158 + && INTVAL (new_const) <= 0)
161 /* If we are associating shift operations, don't let this
162 produce a shift of the size of the object or larger.
163 This could occur when we follow a sign-extend by a right
164 @@ -4735,6 +4742,14 @@
165 if (SET_DEST (x) == pc_rtx
166 && GET_CODE (SET_SRC (x)) == LABEL_REF)
168 + /* Cut the register propagation of a stack-protected argument. */
169 + else if (SET_VOLATILE_P (x)) {
170 + rtx x1 = SET_DEST (x);
171 + if (GET_CODE (x1) == SUBREG && GET_CODE (SUBREG_REG (x1)) == REG)
172 + x1 = SUBREG_REG (x1);
173 + if (! REGNO_QTY_VALID_P(REGNO (x1)))
174 + make_new_qty (REGNO (x1), GET_MODE (x1));
177 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
178 The hard function value register is used only once, to copy to
179 diff -uNr gcc-3.4.0.orig/gcc/explow.c gcc-3.4.0/gcc/explow.c
180 --- gcc-3.4.0.orig/gcc/explow.c 2004-04-03 01:05:26.000000000 +0200
181 +++ gcc-3.4.0/gcc/explow.c 2004-05-13 23:59:01.455387048 +0200
184 int all_constant = 0;
188 + && ! (flag_propolice_protection && x == virtual_stack_vars_rtx))
198 + || (flag_propolice_protection && x == virtual_stack_vars_rtx))
199 x = gen_rtx_PLUS (mode, x, GEN_INT (c));
201 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
203 if (memory_address_p (mode, oldx))
206 + /* The stack protector keeps the addressing style of a local variable.
207 + LEGITIMIZE_ADDRESS changes the addressing to the machine-dependent
208 + style, so the protector split the frame address to a register using
210 + if (flag_propolice_protection)
212 +#define FRAMEADDR_P(X) (GET_CODE (X) == PLUS \
213 + && XEXP (X, 0) == virtual_stack_vars_rtx \
214 + && GET_CODE (XEXP (X, 1)) == CONST_INT)
216 + if (FRAMEADDR_P (x))
218 + for (y = x; y != 0 && GET_CODE (y) == PLUS; y = XEXP (y, 0))
220 + if (FRAMEADDR_P (XEXP (y, 0)))
221 + XEXP (y, 0) = force_reg (GET_MODE (XEXP (y, 0)), XEXP (y, 0));
222 + if (FRAMEADDR_P (XEXP (y, 1)))
223 + XEXP (y, 1) = force_reg (GET_MODE (XEXP (y, 1)), XEXP (y, 1));
226 /* Perform machine-dependent transformations on X
227 in certain cases. This is not necessary since the code
228 below can handle all possible cases, but machine-dependent
229 diff -uNr gcc-3.4.0.orig/gcc/expr.c gcc-3.4.0/gcc/expr.c
230 --- gcc-3.4.0.orig/gcc/expr.c 2004-04-03 01:05:28.000000000 +0200
231 +++ gcc-3.4.0/gcc/expr.c 2004-05-13 23:59:01.495380968 +0200
236 +#include "protector.h"
238 /* Decide whether a function's arguments should be processed
239 from first to last or from last to first.
240 @@ -1060,7 +1061,11 @@
242 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
243 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
247 + When the stack protector is used at the reverse move, it starts the move
248 + instruction from the address within the region of a variable.
249 + So it eliminates the first address decrement instruction. */
252 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
253 @@ -1123,6 +1128,8 @@
255 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
257 + if (flag_propolice_protection)
258 + len = len - GET_MODE_SIZE (mode);
259 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
260 data.autinc_from = 1;
261 data.explicit_inc_from = -1;
262 @@ -1137,6 +1144,8 @@
263 data.from_addr = copy_addr_to_reg (from_addr);
264 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
266 + if (flag_propolice_protection)
267 + len = len - GET_MODE_SIZE (mode);
268 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
270 data.explicit_inc_to = -1;
271 @@ -1280,11 +1289,15 @@
272 from1 = adjust_address (data->from, mode, data->offset);
274 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
275 - emit_insn (gen_add2_insn (data->to_addr,
276 - GEN_INT (-(HOST_WIDE_INT)size)));
277 + /* The stack protector skips the first address decrement instruction
278 + at the reverse move. */
279 + if (!flag_propolice_protection || data->explicit_inc_to < -1)
280 + emit_insn (gen_add2_insn (data->to_addr,
281 + GEN_INT (-(HOST_WIDE_INT)size)));
282 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
283 - emit_insn (gen_add2_insn (data->from_addr,
284 - GEN_INT (-(HOST_WIDE_INT)size)));
285 + if (!flag_propolice_protection || data->explicit_inc_from < -1)
286 + emit_insn (gen_add2_insn (data->from_addr,
287 + GEN_INT (-(HOST_WIDE_INT)size)));
290 emit_insn ((*genfun) (to1, from1));
291 @@ -2475,7 +2488,12 @@
293 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
295 - data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
296 + int len = data->len;
297 + /* The stack protector starts the store instruction from
298 + the address within the region of a variable. */
299 + if (flag_propolice_protection)
300 + len -= GET_MODE_SIZE (mode);
301 + data->to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
303 data->explicit_inc_to = -1;
305 @@ -2544,8 +2562,11 @@
306 to1 = adjust_address (data->to, mode, data->offset);
308 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
309 - emit_insn (gen_add2_insn (data->to_addr,
310 - GEN_INT (-(HOST_WIDE_INT) size)));
311 + /* The stack protector skips the first address decrement instruction
312 + at the reverse store. */
313 + if (!flag_propolice_protection || data->explicit_inc_to < -1)
314 + emit_insn (gen_add2_insn (data->to_addr,
315 + GEN_INT (-(HOST_WIDE_INT) size)));
317 cst = (*data->constfun) (data->constfundata, data->offset, mode);
318 emit_insn ((*genfun) (to1, cst));
319 @@ -5700,7 +5721,9 @@
320 && GET_CODE (XEXP (value, 0)) == PLUS
321 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
322 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
323 - && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
324 + && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER
325 + && (!flag_propolice_protection
326 + || XEXP (XEXP (value, 0), 0) != virtual_stack_vars_rtx))
328 rtx temp = expand_simple_binop (GET_MODE (value), code,
329 XEXP (XEXP (value, 0), 0), op2,
330 diff -uNr gcc-3.4.0.orig/gcc/flags.h gcc-3.4.0/gcc/flags.h
331 --- gcc-3.4.0.orig/gcc/flags.h 2004-02-18 01:09:04.000000000 +0100
332 +++ gcc-3.4.0/gcc/flags.h 2004-05-13 23:59:01.507379144 +0200
335 extern bool warn_strict_aliasing;
337 +/* Warn when not issuing stack smashing protection for some reason. */
339 +extern bool warn_stack_protector;
341 /* Nonzero if generating code to do profiling. */
343 extern int profile_flag;
345 #define HONOR_SIGN_DEPENDENT_ROUNDING(MODE) \
346 (MODE_HAS_SIGN_DEPENDENT_ROUNDING (MODE) && flag_rounding_math)
348 +/* Nonzero means use propolice as a stack protection method. */
350 +extern int flag_propolice_protection;
352 +/* Nonzero means use a stack protection method for every function. */
354 +extern int flag_stack_protection;
356 #endif /* ! GCC_FLAGS_H */
357 diff -uNr gcc-3.4.0.orig/gcc/function.c gcc-3.4.0/gcc/function.c
358 --- gcc-3.4.0.orig/gcc/function.c 2004-03-16 00:22:47.000000000 +0100
359 +++ gcc-3.4.0/gcc/function.c 2004-05-13 23:59:01.545373368 +0200
361 #include "integrate.h"
362 #include "langhooks.h"
364 +#include "protector.h"
366 #ifndef TRAMPOLINE_ALIGNMENT
367 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
369 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
371 static GTY(()) varray_type sibcall_epilogue;
373 +/* Current boundary mark for character arrays. */
374 +static int temp_boundary_mark = 0;
377 /* In order to evaluate some expressions, such as function calls returning
378 structures in memory, we need to temporarily allocate stack locations.
380 /* The size of the slot, including extra space for alignment. This
381 info is for combine_temp_slots. */
382 HOST_WIDE_INT full_size;
383 + /* Boundary mark of a character array and the others. This info is for propolice. */
387 /* This structure is used to record MEMs or pseudos used to replace VAR, any
389 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
390 if we are to allocate something at an inner level to be treated as
391 a variable in the block (e.g., a SAVE_EXPR).
392 + KEEP is 5 if we allocate a place to return structure.
394 TYPE is the type that will be used for the stack slot. */
398 struct temp_slot *p, *best_p = 0;
400 + int char_array = (flag_propolice_protection
401 + && keep == 1 && search_string_def (type));
403 /* If SIZE is -1 it means that somebody tried to allocate a temporary
404 of a variable size. */
407 && objects_must_conflict_p (p->type, type)
408 && (best_p == 0 || best_p->size > p->size
409 - || (best_p->size == p->size && best_p->align > p->align)))
410 + || (best_p->size == p->size && best_p->align > p->align))
411 + && (! char_array || p->boundary_mark != 0))
413 if (p->align == align && p->size == size)
418 p->type = best_p->type;
419 + p->boundary_mark = best_p->boundary_mark;
420 p->next = temp_slots;
424 p->full_size = frame_offset - frame_offset_old;
427 + p->boundary_mark = char_array ? ++temp_boundary_mark : 0;
428 p->next = temp_slots;
431 @@ -932,14 +945,16 @@
433 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
435 - if (p->base_offset + p->full_size == q->base_offset)
436 + if (p->base_offset + p->full_size == q->base_offset &&
437 + p->boundary_mark == q->boundary_mark)
439 /* Q comes after P; combine Q into P. */
441 p->full_size += q->full_size;
444 - else if (q->base_offset + q->full_size == p->base_offset)
445 + else if (q->base_offset + q->full_size == p->base_offset &&
446 + p->boundary_mark == q->boundary_mark)
448 /* P comes after Q; combine P into Q. */
450 @@ -1449,7 +1464,9 @@
454 - new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
456 + assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func)
457 + : assign_stack_local_for_pseudo_reg (decl_mode, GET_MODE_SIZE (decl_mode), 0);
460 PUT_MODE (reg, decl_mode);
461 @@ -3930,7 +3947,8 @@
462 constant with that register. */
463 temp = gen_reg_rtx (Pmode);
465 - if (validate_change (object, &XEXP (x, 1), temp, 0))
466 + if (validate_change (object, &XEXP (x, 1), temp, 0)
467 + && !flag_propolice_protection)
468 emit_insn_before (gen_move_insn (temp, new_offset), object);
471 diff -uNr gcc-3.4.0.orig/gcc/gcse.c gcc-3.4.0/gcc/gcse.c
472 --- gcc-3.4.0.orig/gcc/gcse.c 2004-03-25 17:44:42.000000000 +0100
473 +++ gcc-3.4.0/gcc/gcse.c 2004-05-13 23:59:01.581367896 +0200
474 @@ -4178,7 +4178,7 @@
475 /* Find an assignment that sets reg_used and is available
476 at the start of the block. */
477 set = find_avail_set (regno, insn);
479 + if (! set || SET_VOLATILE_P (set->expr))
483 diff -uNr gcc-3.4.0.orig/gcc/integrate.c gcc-3.4.0/gcc/integrate.c
484 --- gcc-3.4.0.orig/gcc/integrate.c 2004-01-24 00:36:00.000000000 +0100
485 +++ gcc-3.4.0/gcc/integrate.c 2004-05-13 23:59:01.601364856 +0200
487 /* These args would always appear unused, if not for this. */
488 TREE_USED (copy) = 1;
490 + /* The inlined variable is marked as INLINE so it is not swept by propolice. */
491 + if (flag_propolice_protection && TREE_CODE (copy) == VAR_DECL)
492 + DECL_COPIED (copy) = 1;
494 /* Set the context for the new declaration. */
495 if (!DECL_CONTEXT (decl))
496 /* Globals stay global. */
497 @@ -1970,6 +1974,10 @@
501 +#ifdef ARGS_GROWS_DOWNWARD
502 + if (flag_propolice_protection && GET_CODE (seq) == SET)
503 + RTX_INTEGRATED_P (SET_SRC (seq)) = 1;
505 emit_insn_after (seq, map->insns_at_start);
508 diff -uNr gcc-3.4.0.orig/gcc/libgcc2.c gcc-3.4.0/gcc/libgcc2.c
509 --- gcc-3.4.0.orig/gcc/libgcc2.c 2003-11-14 03:23:13.000000000 +0100
510 +++ gcc-3.4.0/gcc/libgcc2.c 2004-05-13 23:59:01.612363184 +0200
511 @@ -1667,3 +1667,121 @@
512 #endif /* no INIT_SECTION_ASM_OP and not CTOR_LISTS_DEFINED_EXTERNALLY */
516 +#ifdef L_stack_smash_handler
522 +#ifdef _POSIX_SOURCE
526 +#if defined(HAVE_SYSLOG)
527 +#include <sys/types.h>
528 +#include <sys/socket.h>
531 +#include <sys/syslog.h>
533 +#define _PATH_LOG "/dev/log"
537 +long __guard[8] = {0, 0, 0, 0, 0, 0, 0, 0};
538 +static void __guard_setup (void) __attribute__ ((constructor));
541 +__guard_setup (void)
544 + if (__guard[0] != 0)
546 + fd = open ("/dev/urandom", 0);
548 + ssize_t size = read (fd, (char*)&__guard, sizeof(__guard));
550 + if (size == sizeof(__guard))
553 + /* If a random generator can't be used, the protector switches the guard
554 + to the "terminator canary". */
555 + ((char*)__guard)[0] = 0;
556 + ((char*)__guard)[1] = 0;
557 + ((char*)__guard)[2] = '\n';
558 + ((char*)__guard)[3] = 255;
562 +__stack_smash_handler (char func[], ATTRIBUTE_UNUSED int damaged)
564 +#if defined (__GNU_LIBRARY__)
565 + extern char * __progname;
567 + const char message[] = ": stack smashing attack in function ";
568 + int bufsz = 256, len;
570 +#if defined(HAVE_SYSLOG)
572 + struct sockaddr_un sys_log_addr; /* AF_UNIX address of local logger. */
574 +#ifdef _POSIX_SOURCE
577 + sigfillset (&mask);
578 + /* Block all signal handlers except SIGABRT. */
579 + sigdelset (&mask, SIGABRT);
580 + sigprocmask (SIG_BLOCK, &mask, NULL);
584 + /* send LOG_CRIT. */
585 + strcpy (buf, "<2>"); len=3;
586 +#if defined (__GNU_LIBRARY__)
587 + strncat (buf, __progname, bufsz - len - 1);
588 + len = strlen (buf);
592 + strncat (buf, message, bufsz - len - 1);
593 + len = strlen (buf);
597 + strncat (buf, func, bufsz - len - 1);
598 + len = strlen (buf);
601 + /* Print error message. */
602 + write (STDERR_FILENO, buf + 3, len - 3);
603 +#if defined(HAVE_SYSLOG)
604 + if ((log_file = socket (AF_UNIX, SOCK_DGRAM, 0)) != -1)
607 + /* Send "found" message to the "/dev/log" path. */
608 + sys_log_addr.sun_family = AF_UNIX;
609 + (void)strncpy (sys_log_addr.sun_path, _PATH_LOG,
610 + sizeof (sys_log_addr.sun_path) - 1);
611 + sys_log_addr.sun_path[sizeof (sys_log_addr.sun_path) - 1] = '\0';
612 + sendto(log_file, buf, len, 0, (struct sockaddr *)&sys_log_addr,
613 + sizeof (sys_log_addr));
617 +#ifdef _POSIX_SOURCE
619 + /* Make sure the default handler is associated with SIGABRT. */
620 + struct sigaction sa;
622 + memset (&sa, 0, sizeof(struct sigaction));
623 + sigfillset (&sa.sa_mask); /* Block all signals. */
625 + sa.sa_handler = SIG_DFL;
626 + sigaction (SIGABRT, &sa, NULL);
627 + (void)kill (getpid(), SIGABRT);
632 +#endif /* L_stack_smash_handler */
633 diff -uNr gcc-3.4.0.orig/gcc/libgcc-std.ver gcc-3.4.0/gcc/libgcc-std.ver
634 --- gcc-3.4.0.orig/gcc/libgcc-std.ver 2003-09-04 12:47:45.000000000 +0200
635 +++ gcc-3.4.0/gcc/libgcc-std.ver 2004-05-13 23:59:01.606364096 +0200
637 _Unwind_SjLj_RaiseException
638 _Unwind_SjLj_ForcedUnwind
641 + # stack smash handler symbols
643 + __stack_smash_handler
646 %inherit GCC_3.3 GCC_3.0
647 diff -uNr gcc-3.4.0.orig/gcc/loop.c gcc-3.4.0/gcc/loop.c
648 --- gcc-3.4.0.orig/gcc/loop.c 2004-02-14 15:46:03.000000000 +0100
649 +++ gcc-3.4.0/gcc/loop.c 2004-05-13 23:59:01.657356344 +0200
650 @@ -6513,6 +6513,14 @@
651 if (GET_CODE (*mult_val) == USE)
652 *mult_val = XEXP (*mult_val, 0);
654 +#ifndef FRAME_GROWS_DOWNWARD
655 + if (flag_propolice_protection
656 + && GET_CODE (*add_val) == PLUS
657 + && (XEXP (*add_val, 0) == frame_pointer_rtx
658 + || XEXP (*add_val, 1) == frame_pointer_rtx))
663 *pbenefit += address_cost (orig_x, addr_mode) - reg_address_cost;
665 diff -uNr gcc-3.4.0.orig/gcc/Makefile.in gcc-3.4.0/gcc/Makefile.in
666 --- gcc-3.4.0.orig/gcc/Makefile.in 2004-05-13 23:50:28.000000000 +0200
667 +++ gcc-3.4.0/gcc/Makefile.in 2004-05-13 23:59:01.310409088 +0200
669 sibcall.o simplify-rtx.o sreal.o stmt.o stor-layout.o stringpool.o \
670 targhooks.o timevar.o toplev.o tracer.o tree.o tree-dump.o unroll.o \
671 varasm.o varray.o version.o vmsdbgout.o xcoffout.o alloc-pool.o \
672 - et-forest.o cfghooks.o bt-load.o pretty-print.o $(GGC) web.o
673 + et-forest.o cfghooks.o bt-load.o pretty-print.o $(GGC) web.o protector.o
675 OBJS-md = $(out_object_file)
676 OBJS-archive = $(EXTRA_OBJS) $(host_hook_obj) hashtable.o tree-inline.o \
677 @@ -1834,6 +1834,10 @@
678 params.o : params.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(PARAMS_H) toplev.h
679 hooks.o: hooks.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(HOOKS_H)
680 pretty-print.o: $(CONFIG_H) $(SYSTEM_H) pretty-print.c $(PRETTY_PRINT_H)
681 +protector.o : protector.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) $(TREE_H) \
682 + flags.h function.h $(EXPR_H) $(OPTABS_H) $(REGS_H) toplev.h hard-reg-set.h \
683 + insn-config.h insn-flags.h $(RECOG_H) output.h toplev.h except.h reload.h \
684 + $(TM_P_H) conditions.h $(INSN_ATTR_H) real.h protector.h
686 $(out_object_file): $(out_file) $(CONFIG_H) coretypes.h $(TM_H) $(TREE_H) $(GGC_H) \
687 $(RTL_H) $(REGS_H) hard-reg-set.h real.h insn-config.h conditions.h \
688 diff -uNr gcc-3.4.0.orig/gcc/mklibgcc.in gcc-3.4.0/gcc/mklibgcc.in
689 --- gcc-3.4.0.orig/gcc/mklibgcc.in 2003-11-21 05:53:09.000000000 +0100
690 +++ gcc-3.4.0/gcc/mklibgcc.in 2004-05-13 23:59:01.673353912 +0200
692 _trampoline __main _absvsi2 _absvdi2 _addvsi3 _addvdi3
693 _subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors
694 _ffssi2 _ffsdi2 _clz _clzsi2 _clzdi2 _ctzsi2 _ctzdi2 _popcount_tab
695 - _popcountsi2 _popcountdi2 _paritysi2 _paritydi2'
696 + _popcountsi2 _popcountdi2 _paritysi2 _paritydi2 _stack_smash_handler'
698 # Disable SHLIB_LINK if shared libgcc not enabled.
699 if [ "@enable_shared@" = "no" ]; then
700 diff -uNr gcc-3.4.0.orig/gcc/optabs.c gcc-3.4.0/gcc/optabs.c
701 --- gcc-3.4.0.orig/gcc/optabs.c 2004-03-03 01:45:01.000000000 +0100
702 +++ gcc-3.4.0/gcc/optabs.c 2004-05-13 23:59:01.711348136 +0200
705 target = protect_from_queue (target, 1);
707 + if (flag_propolice_protection
708 + && binoptab->code == PLUS
709 + && op0 == virtual_stack_vars_rtx
710 + && GET_CODE(op1) == CONST_INT)
712 + int icode = (int) binoptab->handlers[(int) mode].insn_code;
716 + temp = gen_reg_rtx (mode);
718 + if (! (*insn_data[icode].operand[0].predicate) (temp, mode)
719 + || GET_CODE (temp) != REG)
720 + temp = gen_reg_rtx (mode);
722 + emit_insn (gen_rtx_SET (VOIDmode, temp,
723 + gen_rtx_PLUS (GET_MODE (op0), op0, op1)));
729 /* Load duplicate non-volatile operands once. */
730 diff -uNr gcc-3.4.0.orig/gcc/opts.c gcc-3.4.0/gcc/opts.c
731 --- gcc-3.4.0.orig/gcc/opts.c 2004-02-18 01:09:04.000000000 +0100
732 +++ gcc-3.4.0/gcc/opts.c 2004-05-13 23:59:01.724346160 +0200
734 bool warn_unused_variable;
735 bool warn_unused_value;
737 +/* Warn when not issuing stack smashing protection for some reason. */
738 +bool warn_stack_protector;
740 /* Hack for cooperation between set_Wunused and set_Wextra. */
741 static bool maybe_warn_unused_parameter;
744 warn_unused_variable = value;
747 + case OPT_Wstack_protector:
748 + warn_stack_protector = value;
753 aux_info_file_name = arg;
754 @@ -1361,6 +1368,14 @@
755 stack_limit_rtx = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (arg));
758 + case OPT_fstack_protector:
759 + flag_propolice_protection = value;
762 + case OPT_fstack_protector_all:
763 + flag_stack_protection = value;
766 case OPT_fstrength_reduce:
767 flag_strength_reduce = value;
769 diff -uNr gcc-3.4.0.orig/gcc/protector.c gcc-3.4.0/gcc/protector.c
770 --- gcc-3.4.0.orig/gcc/protector.c 1970-01-01 01:00:00.000000000 +0100
771 +++ gcc-3.4.0/gcc/protector.c 2004-03-22 08:34:40.000000000 +0100
773 +/* RTL buffer overflow protection function for GNU C compiler
774 + Copyright (C) 2003 Free Software Foundation, Inc.
776 +This file is part of GCC.
778 +GCC is free software; you can redistribute it and/or modify it under
779 +the terms of the GNU General Public License as published by the Free
780 +Software Foundation; either version 2, or (at your option) any later
783 +GCC is distributed in the hope that it will be useful, but WITHOUT ANY
784 +WARRANTY; without even the implied warranty of MERCHANTABILITY or
785 +FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
788 +You should have received a copy of the GNU General Public License
789 +along with GCC; see the file COPYING. If not, write to the Free
790 +Software Foundation, 59 Temple Place - Suite 330, Boston, MA
793 +/* This file contains several memory arrangement functions to protect
794 + the return address and the frame pointer of the stack
795 + from a stack-smashing attack. It also
796 + provides the function that protects pointer variables. */
800 +#include "coretypes.h"
802 +#include "machmode.h"
808 +#include "insn-config.h"
809 +#include "insn-flags.h"
813 +#include "hard-reg-set.h"
815 +#include "function.h"
818 +#include "conditions.h"
819 +#include "insn-attr.h"
822 +#include "protector.h"
825 +/* Round a value to the lowest integer less than it that is a multiple of
826 + the required alignment. Avoid using division in case the value is
827 + negative. Assume the alignment is a power of two. */
828 +#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
830 +/* Similar, but round to the next highest integer that meets the
832 +#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
835 +/* Nonzero if function being compiled can define string buffers that may be
836 + damaged by the stack-smash attack. */
837 +static int current_function_defines_vulnerable_string;
838 +static int current_function_defines_short_string;
839 +static int current_function_has_variable_string;
840 +static int current_function_defines_vsized_array;
841 +static int current_function_is_inlinable;
842 +static int is_array;
844 +static rtx guard_area, _guard;
845 +static rtx function_first_insn, prologue_insert_point;
847 +/* Offset to end of swept area for gathering character arrays. */
848 +static HOST_WIDE_INT sweep_frame_offset;
850 +/* Offset to end of allocated area for instantiating pseudo registers. */
851 +static HOST_WIDE_INT push_allocated_offset = 0;
853 +/* Offset to end of assigned area for instantiating pseudo registers. */
854 +static HOST_WIDE_INT push_frame_offset = 0;
856 +/* Set to 1 after cse_not_expected becomes nonzero. It is used to identify
857 + which stage assign_stack_local_for_pseudo_reg is called from. */
858 +static int saved_cse_not_expected = 0;
860 +static int search_string_from_argsandvars (int);
861 +static int search_string_from_local_vars (tree);
862 +static int search_pointer_def (tree);
863 +static int search_func_pointer (tree);
864 +static int check_used_flag (rtx);
865 +static void reset_used_flags_for_insns (rtx);
866 +static void reset_used_flags_for_decls (tree);
867 +static void reset_used_flags_of_plus (rtx);
868 +static void rtl_prologue (rtx);
869 +static void rtl_epilogue (rtx);
870 +static void arrange_var_order (tree);
871 +static void copy_args_for_protection (void);
872 +static void sweep_string_variable (rtx, HOST_WIDE_INT);
873 +static void sweep_string_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
874 +static void sweep_string_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
875 +static void sweep_string_use_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
876 +static void sweep_string_in_operand (rtx, rtx *, HOST_WIDE_INT, HOST_WIDE_INT);
877 +static void move_arg_location (rtx, rtx, rtx, HOST_WIDE_INT);
878 +static void change_arg_use_of_insns (rtx, rtx, rtx *, HOST_WIDE_INT);
879 +static void change_arg_use_in_operand (rtx, rtx, rtx, rtx *, HOST_WIDE_INT);
880 +static void validate_insns_of_varrefs (rtx);
881 +static void validate_operand_of_varrefs (rtx, rtx *);
883 +/* Specify which size of buffers should be protected from a stack smashing
884 + attack. Because small buffers are not used in situations which may
885 + overflow buffer, the default size sets to the size of 64 bit register. */
886 +#ifndef SUSPICIOUS_BUF_SIZE
887 +#define SUSPICIOUS_BUF_SIZE 8
890 +#define AUTO_BASEPTR(X) \
891 + (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
892 +#define AUTO_OFFSET(X) \
893 + (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
894 +#undef PARM_PASSED_IN_MEMORY
895 +#define PARM_PASSED_IN_MEMORY(PARM) \
896 + (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
897 +#define TREE_VISITED(NODE) ((NODE)->common.unused_0)
899 +/* Argument values for calling search_string_from_argsandvars. */
900 +#define CALL_FROM_PREPARE_STACK_PROTECTION 0
901 +#define CALL_FROM_PUSH_FRAME 1
904 +/* Prepare several stack protection instruments for the current function
905 + if the function has an array as a local variable, which may be vulnerable
906 + from a stack smashing attack, and it is not inlinable.
908 + The overall steps are as follows;
909 + (1)search an array,
910 + (2)insert guard_area on the stack,
911 + (3)duplicate pointer arguments into local variables, and
912 + (4)arrange the location of local variables. */
914 +prepare_stack_protection (int inlinable)
916 + tree blocks = DECL_INITIAL (current_function_decl);
917 + current_function_is_inlinable = inlinable && !flag_no_inline;
918 + push_frame_offset = push_allocated_offset = 0;
919 + saved_cse_not_expected = 0;
921 + /* Skip the protection if the function has no block
922 + or it is an inline function. */
923 + if (current_function_is_inlinable)
924 + validate_insns_of_varrefs (get_insns ());
925 + if (! blocks || current_function_is_inlinable)
928 + current_function_defines_vulnerable_string
929 + = search_string_from_argsandvars (CALL_FROM_PREPARE_STACK_PROTECTION);
931 + if (current_function_defines_vulnerable_string
932 + || flag_stack_protection)
934 + function_first_insn = get_insns ();
936 + if (current_function_contains_functions)
938 + if (warn_stack_protector)
939 + warning ("not protecting function: it contains functions");
943 + /* Initialize recognition, indicating that volatile is OK. */
946 + sweep_frame_offset = 0;
948 +#ifdef STACK_GROWS_DOWNWARD
949 + /* frame_offset: offset to end of allocated area of stack frame.
950 + It is defined in the function.c. */
952 + /* The location must be before buffers. */
953 + guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
954 + PUT_MODE (guard_area, GUARD_m);
955 + MEM_VOLATILE_P (guard_area) = 1;
957 +#ifndef FRAME_GROWS_DOWNWARD
958 + sweep_frame_offset = frame_offset;
961 + /* For making room for guard value, scan all insns and fix the offset
962 + address of the variable that is based on frame pointer.
963 + Scan all declarations of variables and fix the offset address
964 + of the variable that is based on the frame pointer. */
965 + sweep_string_variable (guard_area, UNITS_PER_GUARD);
968 + /* The location of the guard area moves to the beginning of the stack frame. */
969 + if (AUTO_OFFSET(XEXP (guard_area, 0)))
970 + XEXP (XEXP (guard_area, 0), 1)
971 + = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
974 + /* Insert prologue rtl instructions. */
975 + rtl_prologue (function_first_insn);
977 + if (! current_function_has_variable_string)
979 + /* Generate argument saving instruction. */
980 + copy_args_for_protection ();
982 +#ifndef FRAME_GROWS_DOWNWARD
983 + /* If frame grows upward, character arrays for protecting args
984 + may copy to the top of the guard variable.
985 + So sweep the guard variable again. */
986 + sweep_frame_offset = CEIL_ROUND (frame_offset,
987 + BIGGEST_ALIGNMENT / BITS_PER_UNIT);
988 + sweep_string_variable (guard_area, UNITS_PER_GUARD);
991 + /* Variable can't be protected from the overflow of variable length
992 + buffer. But variable reordering is still effective against
993 + the overflow of fixed size character arrays. */
994 + else if (warn_stack_protector)
995 + warning ("not protecting variables: it has a variable length buffer");
997 +#ifndef FRAME_GROWS_DOWNWARD
998 + if (STARTING_FRAME_OFFSET == 0)
1000 + /* This part may be only for alpha. */
1001 + push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1002 + assign_stack_local (BLKmode, push_allocated_offset, -1);
1003 + sweep_frame_offset = frame_offset;
1004 + sweep_string_variable (const0_rtx, -push_allocated_offset);
1005 + sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
1009 + /* Arrange the order of local variables. */
1010 + arrange_var_order (blocks);
1012 +#ifdef STACK_GROWS_DOWNWARD
1013 + /* Insert epilogue rtl instructions. */
1014 + rtl_epilogue (get_last_insn ());
1016 + init_recog_no_volatile ();
1018 + else if (current_function_defines_short_string
1019 + && warn_stack_protector)
1020 + warning ("not protecting function: buffer is less than %d bytes long",
1021 + SUSPICIOUS_BUF_SIZE);
1025 + Search string from arguments and local variables.
1026 + caller: CALL_FROM_PREPARE_STACK_PROTECTION (0)
1027 + CALL_FROM_PUSH_FRAME (1)
1030 +search_string_from_argsandvars (int caller)
1032 + tree blocks, parms;
 1035 + /* Saves the latest search result as cached information. */
1036 + static tree __latest_search_decl = 0;
1037 + static int __latest_search_result = FALSE;
1039 + if (__latest_search_decl == current_function_decl)
1040 + return __latest_search_result;
1042 + if (caller == CALL_FROM_PUSH_FRAME)
1045 + __latest_search_decl = current_function_decl;
1046 + __latest_search_result = TRUE;
1048 + current_function_defines_short_string = FALSE;
1049 + current_function_has_variable_string = FALSE;
1050 + current_function_defines_vsized_array = FALSE;
1052 + /* Search a string variable from local variables. */
1053 + blocks = DECL_INITIAL (current_function_decl);
1054 + string_p = search_string_from_local_vars (blocks);
1056 + if (! current_function_defines_vsized_array && current_function_calls_alloca)
1058 + current_function_has_variable_string = TRUE;
1065 +#ifdef STACK_GROWS_DOWNWARD
1066 + /* Search a string variable from arguments. */
1067 + parms = DECL_ARGUMENTS (current_function_decl);
1069 + for (; parms; parms = TREE_CHAIN (parms))
1070 + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1072 + if (PARM_PASSED_IN_MEMORY (parms))
1074 + string_p = search_string_def (TREE_TYPE(parms));
1081 + __latest_search_result = FALSE;
1086 +/* Search string from local variables in the specified scope. */
1088 +search_string_from_local_vars (tree block)
1091 + int found = FALSE;
1093 + while (block && TREE_CODE(block)==BLOCK)
1095 + for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
 1097 + /* Skip the declaration that refers to an external variable. */
1098 + /* name: types.decl.name.identifier.id */
1099 + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
1100 + && TREE_CODE (types) == VAR_DECL
1101 + && ! DECL_ARTIFICIAL (types)
1102 + && DECL_RTL_SET_P (types)
1103 + && GET_CODE (DECL_RTL (types)) == MEM
1105 + && search_string_def (TREE_TYPE (types)))
1107 + rtx home = DECL_RTL (types);
1109 + if (GET_CODE (home) == MEM
1110 + && (GET_CODE (XEXP (home, 0)) == MEM
1111 + || (GET_CODE (XEXP (home, 0)) == REG
1112 + && XEXP (home, 0) != virtual_stack_vars_rtx
1113 + && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
1114 + && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
1115 +#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1116 + && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
1119 + /* If the value is indirect by memory or by a register
1120 + that isn't the frame pointer then it means the object is
1121 + variable-sized and address through
1122 + that register or stack slot.
 1123 + The protection has no way to hide pointer variables
 1124 + behind the array, so all we can do is preserve
 1125 + the order of variables and arguments. */
1127 + current_function_has_variable_string = TRUE;
1130 + /* Found character array. */
1135 + if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
1140 + block = BLOCK_CHAIN (block);
1147 +/* Search a character array from the specified type tree. */
1149 +search_string_def (tree type)
1156 + switch (TREE_CODE (type))
1159 + /* Check if the array is a variable-sized array. */
1160 + if (TYPE_DOMAIN (type) == 0
1161 + || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1162 + && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
1163 + current_function_defines_vsized_array = TRUE;
1165 + /* Check if the array is related to char array. */
1166 + if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
1167 + || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
1168 + || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
1170 + /* Check if the string is a variable string. */
1171 + if (TYPE_DOMAIN (type) == 0
1172 + || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1173 + && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
1176 + /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE. */
1177 + if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1178 + && (TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1
1179 + >= SUSPICIOUS_BUF_SIZE))
1182 + current_function_defines_short_string = TRUE;
 1185 + /* To protect every function, sweep any arrays to the frame top. */
1188 + return search_string_def(TREE_TYPE(type));
1191 + case QUAL_UNION_TYPE:
1193 + /* Check if each field has character arrays. */
1194 + for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1196 + /* Omit here local type decls until we know how to support them. */
1197 + if ((TREE_CODE (tem) == TYPE_DECL)
1198 + || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
1201 + if (search_string_def(TREE_TYPE(tem)))
1206 + case POINTER_TYPE:
1207 + case REFERENCE_TYPE:
1217 +/* Examine whether the input contains frame pointer addressing. */
1219 +contains_fp (rtx op)
1221 + enum rtx_code code;
1230 + code = GET_CODE (x);
1235 + case CONST_DOUBLE:
1244 + /* This case is not generated at the stack protection.
1245 + see plus_constant_wide and simplify_plus_minus function. */
1246 + if (XEXP (x, 0) == virtual_stack_vars_rtx)
1250 + if (XEXP (x, 0) == virtual_stack_vars_rtx
1251 + && GET_CODE (XEXP (x, 1)) == CONST_INT)
1258 + /* Scan all subexpressions. */
1259 + fmt = GET_RTX_FORMAT (code);
1260 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1263 + if (contains_fp (XEXP (x, i)))
1266 + else if (*fmt == 'E')
1267 + for (j = 0; j < XVECLEN (x, i); j++)
1268 + if (contains_fp (XVECEXP (x, i, j)))
1275 +/* Examine whether the input contains any pointer. */
1277 +search_pointer_def (tree type)
1284 + switch (TREE_CODE (type))
1287 + case QUAL_UNION_TYPE:
1289 + /* Check if each field has a pointer. */
1290 + for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1292 + if ((TREE_CODE (tem) == TYPE_DECL)
1293 + || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
1296 + if (search_pointer_def (TREE_TYPE(tem)))
1302 + return search_pointer_def (TREE_TYPE(type));
1304 + case POINTER_TYPE:
1305 + case REFERENCE_TYPE:
1307 + if (TYPE_READONLY (TREE_TYPE (type)))
1309 + /* If this pointer contains function pointer,
1310 + it should be protected. */
1311 + return search_func_pointer (TREE_TYPE (type));
1323 +/* Examine whether the input contains function pointer. */
1325 +search_func_pointer (tree type)
1332 + switch (TREE_CODE (type))
1335 + case QUAL_UNION_TYPE:
1337 + if (! TREE_VISITED (type))
1339 + /* Mark the type as having been visited already. */
1340 + TREE_VISITED (type) = 1;
1342 + /* Check if each field has a function pointer. */
1343 + for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1345 + if (TREE_CODE (tem) == FIELD_DECL
1346 + && search_func_pointer (TREE_TYPE(tem)))
1348 + TREE_VISITED (type) = 0;
1353 + TREE_VISITED (type) = 0;
1358 + return search_func_pointer (TREE_TYPE(type));
1360 + case POINTER_TYPE:
1361 + case REFERENCE_TYPE:
1363 + if (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
1365 + return search_func_pointer (TREE_TYPE(type));
1375 +/* Check whether the specified rtx contains PLUS rtx with used flag. */
1377 +check_used_flag (rtx x)
1379 + register int i, j;
1380 + register enum rtx_code code;
1381 + register const char *format_ptr;
1386 + code = GET_CODE (x);
1393 + case CONST_DOUBLE:
1408 + format_ptr = GET_RTX_FORMAT (code);
1409 + for (i = 0; i < GET_RTX_LENGTH (code); i++)
1411 + switch (*format_ptr++)
1414 + if (check_used_flag (XEXP (x, i)))
1419 + for (j = 0; j < XVECLEN (x, i); j++)
1420 + if (check_used_flag (XVECEXP (x, i, j)))
 1430 +/* Reset the used flag of every insn after the specified insn. */
1432 +reset_used_flags_for_insns (rtx insn)
1435 + enum rtx_code code;
1436 + const char *format_ptr;
1438 + for (; insn; insn = NEXT_INSN (insn))
1439 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1440 + || GET_CODE (insn) == CALL_INSN)
1442 + code = GET_CODE (insn);
1444 + format_ptr = GET_RTX_FORMAT (code);
1446 + for (i = 0; i < GET_RTX_LENGTH (code); i++)
1448 + switch (*format_ptr++)
1451 + reset_used_flags_of_plus (XEXP (insn, i));
1455 + for (j = 0; j < XVECLEN (insn, i); j++)
1456 + reset_used_flags_of_plus (XVECEXP (insn, i, j));
1464 +/* Reset used flag of every variables in the specified block. */
1466 +reset_used_flags_for_decls (tree block)
1471 + while (block && TREE_CODE(block)==BLOCK)
1473 + types = BLOCK_VARS(block);
1475 + for (types= BLOCK_VARS(block); types; types = TREE_CHAIN(types))
 1477 + /* Skip the declaration that refers to an external variable and
 1478 + also skip a global variable. */
1479 + if (! DECL_EXTERNAL (types))
1481 + if (! DECL_RTL_SET_P (types))
1483 + home = DECL_RTL (types);
1485 + if (GET_CODE (home) == MEM
1486 + && GET_CODE (XEXP (home, 0)) == PLUS
1487 + && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
1489 + XEXP (home, 0)->used = 0;
1494 + reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
1496 + block = BLOCK_CHAIN (block);
1501 +/* Reset the used flag of every PLUS rtx derived from the specified rtx. */
1503 +reset_used_flags_of_plus (rtx x)
1506 + enum rtx_code code;
1507 + const char *format_ptr;
1512 + code = GET_CODE (x);
1516 + /* These types may be freely shared so we needn't do any resetting
1521 + case CONST_DOUBLE:
1534 + /* The chain of insns is not being copied. */
1541 + case CALL_PLACEHOLDER:
1542 + reset_used_flags_for_insns (XEXP (x, 0));
1543 + reset_used_flags_for_insns (XEXP (x, 1));
1544 + reset_used_flags_for_insns (XEXP (x, 2));
1551 + format_ptr = GET_RTX_FORMAT (code);
1552 + for (i = 0; i < GET_RTX_LENGTH (code); i++)
1554 + switch (*format_ptr++)
1557 + reset_used_flags_of_plus (XEXP (x, i));
1561 + for (j = 0; j < XVECLEN (x, i); j++)
1562 + reset_used_flags_of_plus (XVECEXP (x, i, j));
1569 +/* Generate the prologue insns of the protector into the specified insn. */
1571 +rtl_prologue (rtx insn)
1573 +#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
1574 +#undef HAS_INIT_SECTION
1575 +#define HAS_INIT_SECTION
1580 + for (; insn; insn = NEXT_INSN (insn))
1581 + if (GET_CODE (insn) == NOTE
1582 + && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1585 +#if !defined (HAS_INIT_SECTION)
1586 + /* If this function is `main', skip a call to `__main'
1587 + to run guard instruments after global initializers, etc. */
1588 + if (DECL_NAME (current_function_decl)
1589 + && MAIN_NAME_P (DECL_NAME (current_function_decl))
1590 + && DECL_CONTEXT (current_function_decl) == NULL_TREE)
1592 + rtx fbinsn = insn;
1593 + for (; insn; insn = NEXT_INSN (insn))
1594 + if (GET_CODE (insn) == NOTE
1595 + && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
1602 + /* Mark the next insn of FUNCTION_BEG insn. */
1603 + prologue_insert_point = NEXT_INSN (insn);
1605 + start_sequence ();
1607 + _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
1608 + emit_move_insn ( guard_area, _guard);
1610 + _val = get_insns ();
1613 + emit_insn_before (_val, prologue_insert_point);
1617 +/* Generate the epilogue insns of the protector into the specified insn. */
1619 +rtl_epilogue (rtx insn)
1621 + rtx if_false_label;
1625 + int flag_have_return = FALSE;
1627 + start_sequence ();
1633 + return_label = gen_label_rtx ();
1635 + for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
1636 + if (GET_CODE (insn) == JUMP_INSN
1637 + && GET_CODE (PATTERN (insn)) == RETURN
1638 + && GET_MODE (PATTERN (insn)) == VOIDmode)
1640 + rtx pat = gen_rtx_SET (VOIDmode,
1642 + gen_rtx_LABEL_REF (VOIDmode,
1644 + PATTERN (insn) = pat;
1645 + flag_have_return = TRUE;
1649 + emit_label (return_label);
1653 + /* if (guard_area != _guard) */
1654 + compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX);
1656 + if_false_label = gen_label_rtx (); /* { */
1657 + emit_jump_insn ( gen_beq(if_false_label));
1659 + /* generate string for the current function name */
1660 + funcstr = build_string (strlen(current_function_name ())+1,
1661 + current_function_name ());
1662 + TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);
1663 + funcname = output_constant_def (funcstr, 1);
1665 + emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__stack_smash_handler"),
1667 + XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
1669 + /* generate RTL to return from the current function */
1671 + emit_barrier (); /* } */
1672 + emit_label (if_false_label);
1674 + /* generate RTL to return from the current function */
1675 + if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
1676 + use_return_register ();
1679 + if (HAVE_return && flag_have_return)
1681 + emit_jump_insn (gen_return ());
1686 + _val = get_insns ();
1689 + emit_insn_after (_val, insn);
1693 +/* For every variable which type is character array, moves its location
1694 + in the stack frame to the sweep_frame_offset position. */
1696 +arrange_var_order (tree block)
1699 + HOST_WIDE_INT offset;
1701 + while (block && TREE_CODE(block)==BLOCK)
1703 + /* arrange the location of character arrays in depth first. */
1704 + arrange_var_order (BLOCK_SUBBLOCKS (block));
1706 + for (types = BLOCK_VARS (block); types; types = TREE_CHAIN(types))
 1708 + /* Skip the declaration that refers to an external variable. */
1709 + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
1710 + && TREE_CODE (types) == VAR_DECL
1711 + && ! DECL_ARTIFICIAL (types)
1712 + /* && ! DECL_COPIED (types): gcc3.4 can sweep inlined string. */
1713 + && DECL_RTL_SET_P (types)
1714 + && GET_CODE (DECL_RTL (types)) == MEM
1715 + && GET_MODE (DECL_RTL (types)) == BLKmode
1718 + search_string_def (TREE_TYPE (types))
1719 + || (! current_function_defines_vulnerable_string && is_array)))
1721 + rtx home = DECL_RTL (types);
1723 + if (!(GET_CODE (home) == MEM
1724 + && (GET_CODE (XEXP (home, 0)) == MEM
1725 + || (GET_CODE (XEXP (home, 0)) == REG
1726 + && XEXP (home, 0) != virtual_stack_vars_rtx
1727 + && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
1728 + && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
1729 +#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1730 + && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
1734 + /* Found a string variable. */
1735 + HOST_WIDE_INT var_size =
1736 + ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
1739 + /* Confirmed it is BLKmode. */
1740 + int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1741 + var_size = CEIL_ROUND (var_size, alignment);
1743 + /* Skip the variable if it is top of the region
1744 + specified by sweep_frame_offset. */
1745 + offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
1746 + if (offset == sweep_frame_offset - var_size)
1747 + sweep_frame_offset -= var_size;
1749 + else if (offset < sweep_frame_offset - var_size)
1750 + sweep_string_variable (DECL_RTL (types), var_size);
1755 + block = BLOCK_CHAIN (block);
1760 +/* To protect every pointer argument and move character arrays in the argument,
1761 + Copy those variables to the top of the stack frame and move the location of
 1762 + character arrays to the position of sweep_frame_offset. */
1764 +copy_args_for_protection (void)
1766 + tree parms = DECL_ARGUMENTS (current_function_decl);
1769 + parms = DECL_ARGUMENTS (current_function_decl);
1770 + for (; parms; parms = TREE_CHAIN (parms))
1771 + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1773 + if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1778 + string_p = search_string_def (TREE_TYPE(parms));
1780 + /* Check if it is a candidate to move. */
1781 + if (string_p || search_pointer_def (TREE_TYPE (parms)))
1784 + = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
1786 + tree passed_type = DECL_ARG_TYPE (parms);
1787 + tree nominal_type = TREE_TYPE (parms);
1789 + start_sequence ();
1791 + if (GET_CODE (DECL_RTL (parms)) == REG)
1795 + change_arg_use_of_insns (prologue_insert_point,
1796 + DECL_RTL (parms), &safe, 0);
1799 + /* Generate codes for copying the content. */
1800 + rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
1802 + /* Avoid register elimination in gcse.c. */
1803 + PATTERN (movinsn)->volatil = 1;
1805 + /* Save debugger info. */
1806 + SET_DECL_RTL (parms, safe);
1809 + else if (GET_CODE (DECL_RTL (parms)) == MEM
1810 + && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
1813 + rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
1815 + /* Generate codes for copying the content. */
1816 + movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
1817 + /* Avoid register elimination in gcse.c. */
1818 + PATTERN (movinsn)->volatil = 1;
1820 + /* Change the addressof information to the newly
1821 + allocated pseudo register. */
1822 + emit_move_insn (DECL_RTL (parms), safe);
1824 + /* Save debugger info. */
1825 + SET_DECL_RTL (parms, safe);
1828 + /* See if the frontend wants to pass this by invisible
1830 + else if (passed_type != nominal_type
1831 + && POINTER_TYPE_P (passed_type)
1832 + && TREE_TYPE (passed_type) == nominal_type)
1834 + rtx safe = 0, orig = XEXP (DECL_RTL (parms), 0);
1836 + change_arg_use_of_insns (prologue_insert_point,
1840 + /* Generate codes for copying the content. */
1841 + rtx movinsn = emit_move_insn (safe, orig);
1843 + /* Avoid register elimination in gcse.c */
1844 + PATTERN (movinsn)->volatil = 1;
1846 + /* Save debugger info. */
1847 + SET_DECL_RTL (parms, safe);
1853 + /* Declare temporary local variable for parms. */
1855 + = assign_stack_local (DECL_MODE (parms), arg_size,
1856 + DECL_MODE (parms) == BLKmode ?
1859 + MEM_IN_STRUCT_P (temp_rtx)
1860 + = AGGREGATE_TYPE_P (TREE_TYPE (parms));
1861 + set_mem_alias_set (temp_rtx, get_alias_set (parms));
1863 + /* Generate codes for copying the content. */
1864 + store_expr (parms, temp_rtx, 0);
1866 + /* Change the reference for each instructions. */
1867 + move_arg_location (prologue_insert_point, DECL_RTL (parms),
1868 + temp_rtx, arg_size);
1870 + /* Change the location of parms variable. */
1871 + SET_DECL_RTL (parms, temp_rtx);
1874 + seq = get_insns ();
1876 + emit_insn_before (seq, prologue_insert_point);
1878 +#ifdef FRAME_GROWS_DOWNWARD
1879 + /* Process the string argument. */
1880 + if (string_p && DECL_MODE (parms) == BLKmode)
1882 + int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1883 + arg_size = CEIL_ROUND (arg_size, alignment);
1885 + /* Change the reference for each instructions. */
1886 + sweep_string_variable (DECL_RTL (parms), arg_size);
 1895 +/* Sweep a string variable to the position of sweep_frame_offset in the
1896 + stack frame, that is a last position of string variables. */
1898 +sweep_string_variable (rtx sweep_var, HOST_WIDE_INT var_size)
1900 + HOST_WIDE_INT sweep_offset;
1902 + switch (GET_CODE (sweep_var))
1905 + if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
1906 + && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
1908 + sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
1911 + sweep_offset = INTVAL (sweep_var);
1917 + /* Scan all declarations of variables and fix the offset address of
1918 + the variable based on the frame pointer. */
1919 + sweep_string_in_decls (DECL_INITIAL (current_function_decl),
1920 + sweep_offset, var_size);
1922 + /* Scan all argument variable and fix the offset address based on
1923 + the frame pointer. */
1924 + sweep_string_in_args (DECL_ARGUMENTS (current_function_decl),
1925 + sweep_offset, var_size);
1927 + /* For making room for sweep variable, scan all insns and
1928 + fix the offset address of the variable that is based on frame pointer. */
1929 + sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
1932 + /* Clear all the USED bits in operands of all insns and declarations of
1933 + local variables. */
1934 + reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
1935 + reset_used_flags_for_insns (function_first_insn);
1937 + sweep_frame_offset -= var_size;
1942 +/* Move an argument to the local variable addressed by frame_offset. */
1944 +move_arg_location (rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size)
1946 + /* For making room for sweep variable, scan all insns and
1947 + fix the offset address of the variable that is based on frame pointer. */
1948 + change_arg_use_of_insns (insn, orig, &new, var_size);
1951 + /* Clear all the USED bits in operands of all insns and declarations
1952 + of local variables. */
1953 + reset_used_flags_for_insns (insn);
1957 +/* Sweep character arrays declared as local variable. */
1959 +sweep_string_in_decls (tree block, HOST_WIDE_INT sweep_offset,
1960 + HOST_WIDE_INT sweep_size)
1963 + HOST_WIDE_INT offset;
1966 + while (block && TREE_CODE(block)==BLOCK)
1968 + for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
 1970 + /* Skip the declaration that refers to an external variable and
 1971 + also skip a global variable. */
1972 + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
1974 + if (! DECL_RTL_SET_P (types))
1977 + home = DECL_RTL (types);
1979 + /* Process for static local variable. */
1980 + if (GET_CODE (home) == MEM
1981 + && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
1984 + if (GET_CODE (home) == MEM
1985 + && XEXP (home, 0) == virtual_stack_vars_rtx)
1989 + /* the operand related to the sweep variable. */
1990 + if (sweep_offset <= offset
1991 + && offset < sweep_offset + sweep_size)
1993 + offset = sweep_frame_offset - sweep_size - sweep_offset;
1995 + XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
1997 + XEXP (home, 0)->used = 1;
1999 + else if (sweep_offset <= offset
2000 + && offset < sweep_frame_offset)
2002 + /* the rest of variables under sweep_frame_offset,
2003 + shift the location. */
2004 + XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx,
2006 + XEXP (home, 0)->used = 1;
2010 + if (GET_CODE (home) == MEM
2011 + && GET_CODE (XEXP (home, 0)) == MEM)
2013 + /* Process for dynamically allocated array. */
2014 + home = XEXP (home, 0);
2017 + if (GET_CODE (home) == MEM
2018 + && GET_CODE (XEXP (home, 0)) == PLUS
2019 + && XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
2020 + && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
2022 + if (! XEXP (home, 0)->used)
2024 + offset = AUTO_OFFSET(XEXP (home, 0));
2026 + /* the operand related to the sweep variable. */
2027 + if (sweep_offset <= offset
2028 + && offset < sweep_offset + sweep_size)
2032 + += sweep_frame_offset - sweep_size - sweep_offset;
2033 + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2037 + XEXP (home, 0)->used = 1;
2039 + else if (sweep_offset <= offset
2040 + && offset < sweep_frame_offset)
2042 + /* the rest of variables under sweep_frame_offset,
2043 + so shift the location. */
2045 + XEXP (XEXP (home, 0), 1)
2046 + = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
2049 + XEXP (home, 0)->used = 1;
2056 + sweep_string_in_decls (BLOCK_SUBBLOCKS (block),
2057 + sweep_offset, sweep_size);
2059 + block = BLOCK_CHAIN (block);
2064 +/* Sweep character arrays declared as argument. */
2066 +sweep_string_in_args (tree parms, HOST_WIDE_INT sweep_offset,
2067 + HOST_WIDE_INT sweep_size)
2070 + HOST_WIDE_INT offset;
2072 + for (; parms; parms = TREE_CHAIN (parms))
2073 + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
2075 + if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
2077 + home = DECL_INCOMING_RTL (parms);
2079 + if (XEXP (home, 0)->used)
2082 + offset = AUTO_OFFSET(XEXP (home, 0));
2084 + /* the operand related to the sweep variable. */
2085 + if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
2087 + if (sweep_offset <= offset
2088 + && offset < sweep_offset + sweep_size)
2090 + offset += sweep_frame_offset - sweep_size - sweep_offset;
2091 + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2095 + XEXP (home, 0)->used = 1;
2097 + else if (sweep_offset <= offset
2098 + && offset < sweep_frame_offset)
2100 + /* the rest of variables under sweep_frame_offset,
2101 + shift the location. */
2102 + XEXP (XEXP (home, 0), 1)
2103 + = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
2106 + XEXP (home, 0)->used = 1;
2114 +/* Set to 1 when the instruction contains virtual registers. */
2115 +static int has_virtual_reg;
2117 +/* Sweep the specified character array for every insns. The array starts from
2118 + the sweep_offset and its size is sweep_size. */
2120 +sweep_string_use_of_insns (rtx insn, HOST_WIDE_INT sweep_offset,
2121 + HOST_WIDE_INT sweep_size)
2123 + for (; insn; insn = NEXT_INSN (insn))
2124 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2125 + || GET_CODE (insn) == CALL_INSN)
2127 + has_virtual_reg = FALSE;
2128 + sweep_string_in_operand (insn, &PATTERN (insn),
2129 + sweep_offset, sweep_size);
 2130 + sweep_string_in_operand (insn, &REG_NOTES (insn),
2131 + sweep_offset, sweep_size);
2136 +/* Sweep the specified character array, which starts from the sweep_offset and
2137 + its size is sweep_size.
2139 + When a pointer is given,
2140 + if it points the address higher than the array, it stays.
2141 + if it points the address inside the array, it changes to point inside
2142 + the sweeped array.
2143 + if it points the address lower than the array, it shifts higher address by
2144 + the sweep_size. */
2146 +sweep_string_in_operand (rtx insn, rtx *loc,
2147 + HOST_WIDE_INT sweep_offset, HOST_WIDE_INT sweep_size)
2150 + enum rtx_code code;
2152 + HOST_WIDE_INT offset;
2158 + code = GET_CODE (x);
2163 + case CONST_DOUBLE:
2171 + case ADDR_DIFF_VEC:
2177 + if (x == virtual_incoming_args_rtx
2178 + || x == virtual_stack_vars_rtx
2179 + || x == virtual_stack_dynamic_rtx
2180 + || x == virtual_outgoing_args_rtx
2181 + || x == virtual_cfa_rtx)
2182 + has_virtual_reg = TRUE;
2187 + skip setjmp setup insn and setjmp restore insn
2189 + (set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
2190 + (set (virtual_stack_vars_rtx) (REG))
2192 + if (GET_CODE (XEXP (x, 0)) == MEM
2193 + && XEXP (x, 1) == virtual_stack_vars_rtx)
2195 + if (XEXP (x, 0) == virtual_stack_vars_rtx
2196 + && GET_CODE (XEXP (x, 1)) == REG)
2201 + /* Handle typical case of frame register plus constant. */
2202 + if (XEXP (x, 0) == virtual_stack_vars_rtx
2203 + && GET_CODE (XEXP (x, 1)) == CONST_INT)
2206 + goto single_use_of_virtual_reg;
2208 + offset = AUTO_OFFSET(x);
2210 + /* When arguments grow downward, the virtual incoming
2211 + args pointer points to the top of the argument block,
2212 + so block is identified by the pointer - 1.
2213 + The flag is set at the copy_rtx_and_substitute in integrate.c */
2214 + if (RTX_INTEGRATED_P (x))
2217 + /* the operand related to the sweep variable. */
2218 + if (sweep_offset <= offset + k
2219 + && offset + k < sweep_offset + sweep_size)
2221 + offset += sweep_frame_offset - sweep_size - sweep_offset;
2223 + XEXP (x, 0) = virtual_stack_vars_rtx;
2224 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2227 + else if (sweep_offset <= offset + k
2228 + && offset + k < sweep_frame_offset)
2230 + /* the rest of variables under sweep_frame_offset,
2231 + shift the location. */
2232 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
2236 + single_use_of_virtual_reg:
2237 + if (has_virtual_reg) {
2238 + /* excerpt from insn_invalid_p in recog.c */
2239 + int icode = recog_memoized (insn);
2241 + if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
2245 + start_sequence ();
2246 + temp = force_operand (x, NULL_RTX);
2247 + seq = get_insns ();
2250 + emit_insn_before (seq, insn);
2251 + if (! validate_change (insn, loc, temp, 0)
2252 + && !validate_replace_rtx (x, temp, insn))
2253 + fatal_insn ("sweep_string_in_operand", insn);
2257 + has_virtual_reg = TRUE;
2261 +#ifdef FRAME_GROWS_DOWNWARD
2262 + /* Alert the case of frame register plus constant given by reg. */
2263 + else if (XEXP (x, 0) == virtual_stack_vars_rtx
2264 + && GET_CODE (XEXP (x, 1)) == REG)
2265 + fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
2269 + process further subtree:
2270 + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2275 + case CALL_PLACEHOLDER:
2276 + for (i = 0; i < 3; i++)
2278 + rtx seq = XEXP (x, i);
2281 + push_to_sequence (seq);
2282 + sweep_string_use_of_insns (XEXP (x, i),
2283 + sweep_offset, sweep_size);
2284 + XEXP (x, i) = get_insns ();
2294 + /* Scan all subexpressions. */
2295 + fmt = GET_RTX_FORMAT (code);
2296 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2300 + virtual_stack_vars_rtx without offset
2302 + (set (reg:SI xx) (reg:SI 78))
2303 + (set (reg:SI xx) (MEM (reg:SI 78)))
2305 + if (XEXP (x, i) == virtual_stack_vars_rtx)
2306 + fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
2307 + sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
2309 + else if (*fmt == 'E')
2310 + for (j = 0; j < XVECLEN (x, i); j++)
2311 + sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
2315 +/* Change the use of an argument to the use of the duplicated variable for
2316 + every insns, The variable is addressed by new rtx. */
2318 +change_arg_use_of_insns (rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size)
2320 + for (; insn; insn = NEXT_INSN (insn))
2321 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2322 + || GET_CODE (insn) == CALL_INSN)
2326 + start_sequence ();
2327 + change_arg_use_in_operand (insn, PATTERN (insn), orig, new, size);
2329 + seq = get_insns ();
2331 + emit_insn_before (seq, insn);
2333 + /* load_multiple insn from virtual_incoming_args_rtx have several
2334 + load insns. If every insn change the load address of arg
2335 + to frame region, those insns are moved before the PARALLEL insn
2336 + and remove the PARALLEL insn. */
2337 + if (GET_CODE (PATTERN (insn)) == PARALLEL
2338 + && XVECLEN (PATTERN (insn), 0) == 0)
2339 + delete_insn (insn);
2344 +/* Change the use of an argument to the use of the duplicated variable for
2345 + every rtx derived from the x. */
2347 +change_arg_use_in_operand (rtx insn, rtx x, rtx orig, rtx *new, HOST_WIDE_INT size)
2349 + enum rtx_code code;
2351 + HOST_WIDE_INT offset;
2357 + code = GET_CODE (x);
2362 + case CONST_DOUBLE:
2370 + case ADDR_DIFF_VEC:
2377 + /* Handle special case of MEM (incoming_args). */
2378 + if (GET_CODE (orig) == MEM
2379 + && XEXP (x, 0) == virtual_incoming_args_rtx)
2383 + /* the operand related to the sweep variable. */
2384 + if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2385 + offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
2387 + offset = AUTO_OFFSET(XEXP (*new, 0))
2388 + + (offset - AUTO_OFFSET(XEXP (orig, 0)));
2390 + XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
2391 + XEXP (x, 0)->used = 1;
2399 + /* Handle special case of frame register plus constant. */
2400 + if (GET_CODE (orig) == MEM
2401 + && XEXP (x, 0) == virtual_incoming_args_rtx
2402 + && GET_CODE (XEXP (x, 1)) == CONST_INT
2405 + offset = AUTO_OFFSET(x);
2407 + /* the operand related to the sweep variable. */
2408 + if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2409 + offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
2412 + offset = (AUTO_OFFSET(XEXP (*new, 0))
2413 + + (offset - AUTO_OFFSET(XEXP (orig, 0))));
2415 + XEXP (x, 0) = virtual_stack_vars_rtx;
2416 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2423 + process further subtree:
2424 + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2431 + /* Handle special case of "set (REG or MEM) (incoming_args)".
2432 + It means that the address of the 1st argument is stored. */
2433 + if (GET_CODE (orig) == MEM
2434 + && XEXP (x, 1) == virtual_incoming_args_rtx)
2438 + /* the operand related to the sweep variable. */
2439 + if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2440 + offset < AUTO_OFFSET(XEXP (orig, 0)) + size)
2442 + offset = (AUTO_OFFSET(XEXP (*new, 0))
2443 + + (offset - AUTO_OFFSET(XEXP (orig, 0))));
2445 + XEXP (x, 1) = force_operand (plus_constant (virtual_stack_vars_rtx,
2446 + offset), NULL_RTX);
2447 + XEXP (x, 1)->used = 1;
2454 + case CALL_PLACEHOLDER:
2455 + for (i = 0; i < 3; i++)
2457 + rtx seq = XEXP (x, i);
2460 + push_to_sequence (seq);
2461 + change_arg_use_of_insns (XEXP (x, i), orig, new, size);
2462 + XEXP (x, i) = get_insns ();
2469 + for (j = 0; j < XVECLEN (x, 0); j++)
2471 + change_arg_use_in_operand (insn, XVECEXP (x, 0, j), orig, new, size);
2473 + if (recog_memoized (insn) < 0)
2475 + for (i = 0, j = 0; j < XVECLEN (x, 0); j++)
2477 + /* If a parallel insn has an insn that uses virtual_incoming_args_rtx,
2478 + the insn is removed from this PARALLEL insn. */
2479 + if (check_used_flag (XVECEXP (x, 0, j)))
2481 + emit_insn (XVECEXP (x, 0, j));
2482 + XVECEXP (x, 0, j) = NULL;
2485 + XVECEXP (x, 0, i++) = XVECEXP (x, 0, j);
2487 + PUT_NUM_ELEM (XVEC (x, 0), i);
2495 + /* Scan all subexpressions. */
2496 + fmt = GET_RTX_FORMAT (code);
2497 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2500 + if (XEXP (x, i) == orig)
2503 + *new = gen_reg_rtx (GET_MODE (orig));
2504 + XEXP (x, i) = *new;
2507 + change_arg_use_in_operand (insn, XEXP (x, i), orig, new, size);
2509 + else if (*fmt == 'E')
2510 + for (j = 0; j < XVECLEN (x, i); j++)
2512 + if (XVECEXP (x, i, j) == orig)
2515 + *new = gen_reg_rtx (GET_MODE (orig));
2516 + XVECEXP (x, i, j) = *new;
2519 + change_arg_use_in_operand (insn, XVECEXP (x, i, j), orig, new, size);
2524 +/* Validate every instruction from the specified instruction.
2526 + The stack protector prohibits generating machine specific frame addressing
2527 + for the first rtl generation. The prepare_stack_protection must convert
2528 + machine independent frame addressing to machine specific frame addressing,
2529 + so for the instructions of inline functions, which skip the conversion of
2530 + the stack protection, every instruction must be validated. */
2532 +validate_insns_of_varrefs (rtx insn)
2536 + /* Initialize recognition, indicating that volatile is OK. */
2539 + for (; insn; insn = next)
2541 + next = NEXT_INSN (insn);
2542 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2543 + || GET_CODE (insn) == CALL_INSN)
2545 + /* excerpt from insn_invalid_p in recog.c */
2546 + int icode = recog_memoized (insn);
2548 + if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
2549 + validate_operand_of_varrefs (insn, &PATTERN (insn));
2553 + init_recog_no_volatile ();
2557 +/* Validate frame addressing of the rtx and convert it to a machine specific one. */
2559 +validate_operand_of_varrefs (rtx insn, rtx *loc)
2561 + enum rtx_code code;
2570 + code = GET_CODE (x);
2576 + case CONST_DOUBLE:
2584 + case ADDR_DIFF_VEC:
2591 + /* validate insn of frame register plus constant. */
2592 + if (GET_CODE (x) == PLUS
2593 + && XEXP (x, 0) == virtual_stack_vars_rtx
2594 + && GET_CODE (XEXP (x, 1)) == CONST_INT)
2596 + start_sequence ();
2598 + { /* excerpt from expand_binop in optabs.c */
2599 + optab binoptab = add_optab;
2600 + enum machine_mode mode = GET_MODE (x);
2601 + int icode = (int) binoptab->handlers[(int) mode].insn_code;
2602 + enum machine_mode mode1 = insn_data[icode].operand[2].mode;
2604 + rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
2605 + temp = gen_reg_rtx (mode);
2607 + /* Now, if insn's predicates don't allow offset operands,
2608 + put them into pseudo regs. */
2610 + if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
2611 + && mode1 != VOIDmode)
2612 + xop1 = copy_to_mode_reg (mode1, xop1);
2614 + pat = GEN_FCN (icode) (temp, xop0, xop1);
2618 + abort (); /* there must be add_optab handler. */
2620 + seq = get_insns ();
2623 + emit_insn_before (seq, insn);
2624 + if (! validate_change (insn, loc, temp, 0))
2631 + case CALL_PLACEHOLDER:
2632 + for (i = 0; i < 3; i++)
2634 + rtx seq = XEXP (x, i);
2637 + push_to_sequence (seq);
2638 + validate_insns_of_varrefs (XEXP (x, i));
2639 + XEXP (x, i) = get_insns ();
2649 + /* Scan all subexpressions. */
2650 + fmt = GET_RTX_FORMAT (code);
2651 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2653 + validate_operand_of_varrefs (insn, &XEXP (x, i));
2654 + else if (*fmt == 'E')
2655 + for (j = 0; j < XVECLEN (x, i); j++)
2656 + validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
2661 +/* Return size that is not allocated for stack frame. It will be allocated
2662 + to modify the home of pseudo registers called from global_alloc. */
2664 +get_frame_free_size (void)
2666 + if (! flag_propolice_protection)
2669 + return push_allocated_offset - push_frame_offset;
2673 +/* The following code is invoked after the instantiation of pseudo registers.
2675 + Reorder local variables to place a pseudo register after buffers to avoid
2676 + the corruption of local variables that could be used to further corrupt
2677 + arbitrary memory locations. */
2678 +#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2679 +static void push_frame (HOST_WIDE_INT, HOST_WIDE_INT);
2680 +static void push_frame_in_decls (tree, HOST_WIDE_INT, HOST_WIDE_INT);
2681 +static void push_frame_in_args (tree, HOST_WIDE_INT, HOST_WIDE_INT);
2682 +static void push_frame_of_insns (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
2683 +static void push_frame_in_operand (rtx, rtx, HOST_WIDE_INT, HOST_WIDE_INT);
2684 +static void push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT, HOST_WIDE_INT);
2685 +static void push_frame_of_reg_equiv_constant (HOST_WIDE_INT, HOST_WIDE_INT);
2686 +static void reset_used_flags_for_push_frame (void);
2687 +static int check_out_of_frame_access (rtx, HOST_WIDE_INT);
2688 +static int check_out_of_frame_access_in_operand (rtx, HOST_WIDE_INT);
2692 +/* Assign a stack local at the stage of register allocation. If a pseudo reg
2693 + is spilled out from such an allocation, it is allocated on the stack.
2694 + The protector keeps the location in a lower stack region than the location
2695 + of swept arrays. */
2697 +assign_stack_local_for_pseudo_reg (enum machine_mode mode,
2698 + HOST_WIDE_INT size, int align)
2700 +#if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
2701 + return assign_stack_local (mode, size, align);
2703 + tree blocks = DECL_INITIAL (current_function_decl);
2705 + HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
2706 + int first_call_from_purge_addressof, first_call_from_global_alloc;
2708 + if (! flag_propolice_protection
2711 + || current_function_is_inlinable
2712 + || ! search_string_from_argsandvars (CALL_FROM_PUSH_FRAME)
2713 + || current_function_contains_functions)
2714 + return assign_stack_local (mode, size, align);
2716 + first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
2717 + first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
2718 + saved_cse_not_expected = cse_not_expected;
2720 + starting_frame = ((STARTING_FRAME_OFFSET)
2721 + ? STARTING_FRAME_OFFSET : BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2722 + units_per_push = MAX (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2723 + GET_MODE_SIZE (mode));
2725 + if (first_call_from_purge_addressof)
2727 + push_frame_offset = push_allocated_offset;
2728 + if (check_out_of_frame_access (get_insns (), starting_frame))
2730 + /* After the purge_addressof stage, there may be an instruction which
2731 + has a pointer less than the starting_frame.
2732 + If there is an access below the frame, push a dummy region to separate
2733 + the address of instantiated variables. */
2734 + push_frame (GET_MODE_SIZE (DImode), 0);
2735 + assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2739 + if (first_call_from_global_alloc)
2741 + push_frame_offset = push_allocated_offset = 0;
2742 + if (check_out_of_frame_access (get_insns (), starting_frame))
2744 + if (STARTING_FRAME_OFFSET)
2746 + /* If there is an access below the frame, push a dummy region
2747 + to separate the address of instantiated variables. */
2748 + push_frame (GET_MODE_SIZE (DImode), 0);
2749 + assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2752 + push_allocated_offset = starting_frame;
2756 + saved_frame_offset = frame_offset;
2757 + frame_offset = push_frame_offset;
2759 + new = assign_stack_local (mode, size, align);
2761 + push_frame_offset = frame_offset;
2762 + frame_offset = saved_frame_offset;
2764 + if (push_frame_offset > push_allocated_offset)
2766 + push_frame (units_per_push,
2767 + push_allocated_offset + STARTING_FRAME_OFFSET);
2769 + assign_stack_local (BLKmode, units_per_push, -1);
2770 + push_allocated_offset += units_per_push;
2773 + /* At the second call from global alloc, alpha pushes the frame and assigns
2774 + a local variable to the top of the stack. */
2775 + if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
2776 + push_frame_offset = push_allocated_offset = 0;
2783 +#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2785 +/* Push frame information for instantiating a pseudo register at the top of stack.
2786 + This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is
2789 + It is called by purge_addressof function and global_alloc (or reload)
2792 +push_frame (HOST_WIDE_INT var_size, HOST_WIDE_INT boundary)
2794 + reset_used_flags_for_push_frame();
2796 + /* Scan all declarations of variables and fix the offset address of
2797 + the variable based on the frame pointer. */
2798 + push_frame_in_decls (DECL_INITIAL (current_function_decl),
2799 + var_size, boundary);
2801 + /* Scan all argument variable and fix the offset address based on
2802 + the frame pointer. */
2803 + push_frame_in_args (DECL_ARGUMENTS (current_function_decl),
2804 + var_size, boundary);
2806 + /* Scan all operands of all insns and fix the offset address
2807 + based on the frame pointer. */
2808 + push_frame_of_insns (get_insns (), var_size, boundary);
2810 + /* Scan all reg_equiv_memory_loc and reg_equiv_constant. */
2811 + push_frame_of_reg_equiv_memory_loc (var_size, boundary);
2812 + push_frame_of_reg_equiv_constant (var_size, boundary);
2814 + reset_used_flags_for_push_frame();
2818 +/* Reset used flag of every insns, reg_equiv_memory_loc,
2819 + and reg_equiv_constant. */
2821 +reset_used_flags_for_push_frame(void)
2824 + extern rtx *reg_equiv_memory_loc;
2825 + extern rtx *reg_equiv_constant;
2827 + /* Clear all the USED bits in operands of all insns and declarations of
2829 + reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
2830 + reset_used_flags_for_insns (get_insns ());
2833 + /* The following code is processed if push_frame is called from the
2834 + global_alloc (or reload) function. */
2835 + if (reg_equiv_memory_loc == 0)
2838 + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2839 + if (reg_equiv_memory_loc[i])
2841 + rtx x = reg_equiv_memory_loc[i];
2843 + if (GET_CODE (x) == MEM
2844 + && GET_CODE (XEXP (x, 0)) == PLUS
2845 + && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
2848 + XEXP (x, 0)->used = 0;
2853 + if (reg_equiv_constant == 0)
2856 + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2857 + if (reg_equiv_constant[i])
2859 + rtx x = reg_equiv_constant[i];
2861 + if (GET_CODE (x) == PLUS
2862 + && AUTO_BASEPTR (x) == frame_pointer_rtx)
2871 +/* Push every variable declared as a local variable and make room for
2872 + the instantiated register. */
2874 +push_frame_in_decls (tree block, HOST_WIDE_INT push_size,
2875 + HOST_WIDE_INT boundary)
2878 + HOST_WIDE_INT offset;
2881 + while (block && TREE_CODE(block)==BLOCK)
2883 + for (types = BLOCK_VARS(block); types; types = TREE_CHAIN(types))
2885 + /* Skip the declaration that refers to an external variable and
2886 + also skip a global variable. */
2887 + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
2889 + if (! DECL_RTL_SET_P (types))
2892 + home = DECL_RTL (types);
2894 + /* Process for static local variable. */
2895 + if (GET_CODE (home) == MEM
2896 + && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
2899 + if (GET_CODE (home) == MEM
2900 + && GET_CODE (XEXP (home, 0)) == REG)
2902 + if (XEXP (home, 0) != frame_pointer_rtx
2906 + XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2910 + XEXP (home, 0)->used = 1;
2913 + if (GET_CODE (home) == MEM
2914 + && GET_CODE (XEXP (home, 0)) == MEM)
2916 + /* Process for dynamically allocated array. */
2917 + home = XEXP (home, 0);
2920 + if (GET_CODE (home) == MEM
2921 + && GET_CODE (XEXP (home, 0)) == PLUS
2922 + && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
2924 + offset = AUTO_OFFSET(XEXP (home, 0));
2926 + if (! XEXP (home, 0)->used
2927 + && offset >= boundary)
2929 + offset += push_size;
2930 + XEXP (XEXP (home, 0), 1)
2931 + = gen_rtx_CONST_INT (VOIDmode, offset);
2934 + XEXP (home, 0)->used = 1;
2940 + push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
2941 + block = BLOCK_CHAIN (block);
2946 +/* Push every variable declared as an argument and make room for
2947 + the instantiated register. */
2949 +push_frame_in_args (tree parms, HOST_WIDE_INT push_size,
2950 + HOST_WIDE_INT boundary)
2953 + HOST_WIDE_INT offset;
2955 + for (; parms; parms = TREE_CHAIN (parms))
2956 + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
2958 + if (PARM_PASSED_IN_MEMORY (parms))
2960 + home = DECL_INCOMING_RTL (parms);
2961 + offset = AUTO_OFFSET(XEXP (home, 0));
2963 + if (XEXP (home, 0)->used || offset < boundary)
2966 + /* the operand related to the sweep variable. */
2967 + if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
2969 + if (XEXP (home, 0) == frame_pointer_rtx)
2970 + XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2973 + offset += push_size;
2974 + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2979 + XEXP (home, 0)->used = 1;
2986 +/* Set to 1 when the instruction has the reference to be pushed. */
2987 +static int insn_pushed;
2989 +/* Tables of equivalent registers with frame pointer. */
2990 +static int *fp_equiv = 0;
2993 +/* Push the frame region to make a room for allocated local variable. */
2995 +push_frame_of_insns (rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
2997 + /* init fp_equiv */
2998 + fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
3000 + for (; insn; insn = NEXT_INSN (insn))
3001 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3002 + || GET_CODE (insn) == CALL_INSN)
3006 + insn_pushed = FALSE;
3008 + /* Push frame in INSN operation. */
3009 + push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
3011 + /* Push frame in NOTE. */
3012 + push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
3014 + /* Push frame in CALL EXPR_LIST. */
3015 + if (GET_CODE (insn) == CALL_INSN)
3016 + push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn),
3017 + push_size, boundary);
3019 + /* The pushed frame addressing style may not be a machine specific one,
3020 + so the instruction should be converted to use the machine specific
3021 + frame addressing. */
3023 + && (last = try_split (PATTERN (insn), insn, 1)) != insn)
3025 + rtx first = NEXT_INSN (insn);
3026 + rtx trial = NEXT_INSN (first);
3027 + rtx pattern = PATTERN (trial);
3030 + /* Update REG_EQUIV info to the first split insn. */
3031 + if ((set = single_set (insn))
3032 + && find_reg_note (insn, REG_EQUIV, SET_SRC (set))
3033 + && GET_CODE (PATTERN (first)) == SET)
3036 + = gen_rtx_EXPR_LIST (REG_EQUIV,
3037 + SET_SRC (PATTERN (first)),
3038 + REG_NOTES (first));
3041 + /* Copy the first insn of the split insns to the original insn and
3042 + delete the first insn,
3043 + because the original insn is pointed to from records:
3044 + insn_chain, reg_equiv_init, used for global_alloc. */
3045 + if (cse_not_expected)
3047 + add_insn_before (insn, first);
3049 + /* Copy the various flags, and other information. */
3050 + memcpy (insn, first, sizeof (struct rtx_def) - sizeof (rtunion));
3051 + PATTERN (insn) = PATTERN (first);
3052 + REG_NOTES (insn) = REG_NOTES (first);
3054 + /* Then remove the first insn of the split insns. */
3055 + remove_insn (first);
3056 + INSN_DELETED_P (first) = 1;
3059 + if (GET_CODE (pattern) == SET
3060 + && GET_CODE (XEXP (pattern, 0)) == REG
3061 + && GET_CODE (XEXP (pattern, 1)) == PLUS
3062 + && XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
3063 + && GET_CODE (XEXP (XEXP (pattern, 1), 1)) == CONST_INT)
3065 + rtx offset = XEXP (XEXP (pattern, 1), 1);
3066 + fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
3068 + delete_insn (trial);
3080 +/* Push the frame region by changing the operand that points the frame. */
3082 +push_frame_in_operand (rtx insn, rtx orig,
3083 + HOST_WIDE_INT push_size, HOST_WIDE_INT boundary)
3086 + enum rtx_code code;
3088 + HOST_WIDE_INT offset;
3094 + code = GET_CODE (x);
3099 + case CONST_DOUBLE:
3107 + case ADDR_DIFF_VEC:
3116 + Skip setjmp setup insn and setjmp restore insn
3118 + (set (MEM (reg:SI xx)) (frame_pointer_rtx)))
3119 + (set (frame_pointer_rtx) (REG))
3121 + if (GET_CODE (XEXP (x, 0)) == MEM
3122 + && XEXP (x, 1) == frame_pointer_rtx)
3124 + if (XEXP (x, 0) == frame_pointer_rtx
3125 + && GET_CODE (XEXP (x, 1)) == REG)
3129 + powerpc case: restores setjmp address
3130 + (set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
3132 + (set (reg) (plus frame_pointer_rtx const_int -n))
3133 + (set (frame_pointer_rtx) (reg))
3135 + if (GET_CODE (XEXP (x, 0)) == REG
3136 + && GET_CODE (XEXP (x, 1)) == PLUS
3137 + && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
3138 + && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3139 + && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
3142 + offset = AUTO_OFFSET(x);
3143 + if (x->used || -offset < boundary)
3146 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
3147 + x->used = 1; insn_pushed = TRUE;
3151 + /* Reset fp_equiv register. */
3152 + else if (GET_CODE (XEXP (x, 0)) == REG
3153 + && fp_equiv[REGNO (XEXP (x, 0))])
3154 + fp_equiv[REGNO (XEXP (x, 0))] = 0;
3156 + /* Propagate the fp_equiv register. */
3157 + else if (GET_CODE (XEXP (x, 0)) == REG
3158 + && GET_CODE (XEXP (x, 1)) == REG
3159 + && fp_equiv[REGNO (XEXP (x, 1))])
3160 + if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
3161 + || reg_renumber[REGNO (XEXP (x, 0))] > 0)
3162 + fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
3166 + if (XEXP (x, 0) == frame_pointer_rtx
3169 + XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
3170 + XEXP (x, 0)->used = 1; insn_pushed = TRUE;
3176 + /* Handle special case of frame register plus constant. */
3177 + if (GET_CODE (XEXP (x, 1)) == CONST_INT
3178 + && XEXP (x, 0) == frame_pointer_rtx)
3180 + offset = AUTO_OFFSET(x);
3182 + if (x->used || offset < boundary)
3185 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
3186 + x->used = 1; insn_pushed = TRUE;
3191 + Handle alpha case:
3192 + (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
3194 + if (GET_CODE (XEXP (x, 1)) == CONST_INT
3195 + && GET_CODE (XEXP (x, 0)) == SUBREG
3196 + && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
3198 + offset = AUTO_OFFSET(x);
3200 + if (x->used || offset < boundary)
3203 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
3204 + x->used = 1; insn_pushed = TRUE;
3209 + Handle powerpc case:
3210 + (set (reg x) (plus fp const))
3211 + (set (.....) (... (plus (reg x) (const B))))
3213 + else if (GET_CODE (XEXP (x, 1)) == CONST_INT
3214 + && GET_CODE (XEXP (x, 0)) == REG
3215 + && fp_equiv[REGNO (XEXP (x, 0))])
3217 + offset = AUTO_OFFSET(x);
3222 + offset += fp_equiv[REGNO (XEXP (x, 0))];
3224 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
3225 + x->used = 1; insn_pushed = TRUE;
3230 + Handle special case of frame register plus reg (constant).
3231 + (set (reg x) (const B))
3232 + (set (....) (...(plus fp (reg x))))
3234 + else if (XEXP (x, 0) == frame_pointer_rtx
3235 + && GET_CODE (XEXP (x, 1)) == REG
3236 + && PREV_INSN (insn)
3237 + && PATTERN (PREV_INSN (insn))
3238 + && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
3239 + && GET_CODE (SET_SRC (PATTERN (PREV_INSN (insn)))) == CONST_INT)
3241 + offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
3243 + if (x->used || offset < boundary)
3246 + SET_SRC (PATTERN (PREV_INSN (insn)))
3247 + = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
3249 + XEXP (x, 1)->used = 1;
3254 + Handle special case of frame register plus reg (used).
3255 + The register already have a pushed offset, just mark this frame
3258 + else if (XEXP (x, 0) == frame_pointer_rtx
3259 + && XEXP (x, 1)->used)
3265 + Process further subtree:
3266 + Example: (plus:SI (mem/s:SI (plus:SI (FP) (const_int 8)))
3271 + case CALL_PLACEHOLDER:
3272 + push_frame_of_insns (XEXP (x, 0), push_size, boundary);
3273 + push_frame_of_insns (XEXP (x, 1), push_size, boundary);
3274 + push_frame_of_insns (XEXP (x, 2), push_size, boundary);
3281 + /* Scan all subexpressions. */
3282 + fmt = GET_RTX_FORMAT (code);
3283 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3286 + if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
3287 + fatal_insn ("push_frame_in_operand", insn);
3288 + push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
3290 + else if (*fmt == 'E')
3291 + for (j = 0; j < XVECLEN (x, i); j++)
3292 + push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
3296 +/* Change the location pointed in reg_equiv_memory_loc. */
3298 +push_frame_of_reg_equiv_memory_loc (HOST_WIDE_INT push_size,
3299 + HOST_WIDE_INT boundary)
3302 + extern rtx *reg_equiv_memory_loc;
3304 + /* This function is processed if the push_frame is called from
3305 + global_alloc (or reload) function. */
3306 + if (reg_equiv_memory_loc == 0)
3309 + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
3310 + if (reg_equiv_memory_loc[i])
3312 + rtx x = reg_equiv_memory_loc[i];
3315 + if (GET_CODE (x) == MEM
3316 + && GET_CODE (XEXP (x, 0)) == PLUS
3317 + && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
3319 + offset = AUTO_OFFSET(XEXP (x, 0));
3321 + if (! XEXP (x, 0)->used
3322 + && offset >= boundary)
3324 + offset += push_size;
3325 + XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
3328 + XEXP (x, 0)->used = 1;
3331 + else if (GET_CODE (x) == MEM
3332 + && XEXP (x, 0) == frame_pointer_rtx
3335 + XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
3336 + XEXP (x, 0)->used = 1; insn_pushed = TRUE;
3342 +/* Change the location pointed in reg_equiv_constant. */
3344 +push_frame_of_reg_equiv_constant (HOST_WIDE_INT push_size,
3345 + HOST_WIDE_INT boundary)
3348 + extern rtx *reg_equiv_constant;
3350 + /* This function is processed if the push_frame is called from
3351 + global_alloc (or reload) function. */
3352 + if (reg_equiv_constant == 0)
3355 + for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
3356 + if (reg_equiv_constant[i])
3358 + rtx x = reg_equiv_constant[i];
3361 + if (GET_CODE (x) == PLUS
3362 + && XEXP (x, 0) == frame_pointer_rtx)
3364 + offset = AUTO_OFFSET(x);
3367 + && offset >= boundary)
3369 + offset += push_size;
3370 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
3376 + else if (x == frame_pointer_rtx
3379 + reg_equiv_constant[i]
3380 + = plus_constant (frame_pointer_rtx, push_size);
3381 + reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
3389 +/* Check every instruction to see if its memory reference is out of frame. */
3389 +check_out_of_frame_access (rtx insn, HOST_WIDE_INT boundary)
3391 + for (; insn; insn = NEXT_INSN (insn))
3392 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3393 + || GET_CODE (insn) == CALL_INSN)
3395 + if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
3404 +/* Check every operand to see if the reference is out of frame. */
3404 +check_out_of_frame_access_in_operand (rtx orig, HOST_WIDE_INT boundary)
3407 + enum rtx_code code;
3414 + code = GET_CODE (x);
3419 + case CONST_DOUBLE:
3427 + case ADDR_DIFF_VEC:
3434 + if (XEXP (x, 0) == frame_pointer_rtx)
3440 + /* Handle special case of frame register plus constant. */
3441 + if (GET_CODE (XEXP (x, 1)) == CONST_INT
3442 + && XEXP (x, 0) == frame_pointer_rtx)
3444 + if (0 <= AUTO_OFFSET(x)
3445 + && AUTO_OFFSET(x) < boundary)
3450 + Process further subtree:
3451 + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
3456 + case CALL_PLACEHOLDER:
3457 + if (check_out_of_frame_access (XEXP (x, 0), boundary))
3459 + if (check_out_of_frame_access (XEXP (x, 1), boundary))
3461 + if (check_out_of_frame_access (XEXP (x, 2), boundary))
3469 + /* Scan all subexpressions. */
3470 + fmt = GET_RTX_FORMAT (code);
3471 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3474 + if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
3477 + else if (*fmt == 'E')
3478 + for (j = 0; j < XVECLEN (x, i); j++)
3479 + if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))
3485 diff -uNr gcc-3.4.0.orig/gcc/protector.h gcc-3.4.0/gcc/protector.h
3486 --- gcc-3.4.0.orig/gcc/protector.h 1970-01-01 01:00:00.000000000 +0100
3487 +++ gcc-3.4.0/gcc/protector.h 2004-01-20 03:01:39.000000000 +0100
3489 +/* RTL buffer overflow protection function for GNU C compiler
3490 + Copyright (C) 2003 Free Software Foundation, Inc.
3492 +This file is part of GCC.
3494 +GCC is free software; you can redistribute it and/or modify it under
3495 +the terms of the GNU General Public License as published by the Free
3496 +Software Foundation; either version 2, or (at your option) any later
3499 +GCC is distributed in the hope that it will be useful, but WITHOUT ANY
3500 +WARRANTY; without even the implied warranty of MERCHANTABILITY or
3501 +FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
3504 +You should have received a copy of the GNU General Public License
3505 +along with GCC; see the file COPYING. If not, write to the Free
3506 +Software Foundation, 59 Temple Place - Suite 330, Boston, MA
3507 +02111-1307, USA. */
3510 +/* Declare GUARD variable. */
3511 +#define GUARD_m Pmode
3512 +#define UNITS_PER_GUARD \
3513 + MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT, GET_MODE_SIZE (GUARD_m))
3515 +#ifndef L_stack_smash_handler
3517 +/* Insert a guard variable before a character buffer and change the order
3518 + of pointer variables, character buffers and pointer arguments. */
3520 +extern void prepare_stack_protection (int);
3523 +/* Search a character array from the specified type tree. */
3525 +extern int search_string_def (tree);
3528 +/* Examine whether the input contains frame pointer addressing. */
3530 +extern int contains_fp (rtx);
3532 +/* Return size that is not allocated for stack frame. It will be allocated
3533 + to modify the home of pseudo registers called from global_alloc. */
3535 +extern HOST_WIDE_INT get_frame_free_size (void);
3537 +/* Allocate a local variable in the stack area before character buffers
3538 + to avoid the corruption of it. */
3540 +extern rtx assign_stack_local_for_pseudo_reg (enum machine_mode,
3541 + HOST_WIDE_INT, int);
3544 diff -uNr gcc-3.4.0.orig/gcc/reload1.c gcc-3.4.0/gcc/reload1.c
3545 --- gcc-3.4.0.orig/gcc/reload1.c 2004-03-25 17:44:42.000000000 +0100
3546 +++ gcc-3.4.0/gcc/reload1.c 2004-05-13 23:59:01.761340536 +0200
3551 +#include "protector.h"
3553 /* This file contains the reload pass of the compiler, which is
3554 run after register allocation has been done. It checks that
3556 if (cfun->stack_alignment_needed)
3557 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
3559 - starting_frame_size = get_frame_size ();
3560 + starting_frame_size = get_frame_size () - get_frame_free_size ();
3562 set_initial_elim_offsets ();
3563 set_initial_label_offsets ();
3565 setup_save_areas ();
3567 /* If we allocated another stack slot, redo elimination bookkeeping. */
3568 - if (starting_frame_size != get_frame_size ())
3569 + if (starting_frame_size != get_frame_size () - get_frame_free_size ())
3572 if (caller_save_needed)
3575 /* If we allocated any new memory locations, make another pass
3576 since it might have changed elimination offsets. */
3577 - if (starting_frame_size != get_frame_size ())
3578 + if (starting_frame_size != get_frame_size () - get_frame_free_size ())
3579 something_changed = 1;
3582 @@ -1056,11 +1057,11 @@
3583 if (insns_need_reload != 0 || something_needs_elimination
3584 || something_needs_operands_changed)
3586 - HOST_WIDE_INT old_frame_size = get_frame_size ();
3587 + HOST_WIDE_INT old_frame_size = get_frame_size () - get_frame_free_size ();
3589 reload_as_needed (global);
3591 - if (old_frame_size != get_frame_size ())
3592 + if (old_frame_size != get_frame_size () - get_frame_free_size ())
3596 @@ -1948,7 +1949,7 @@
3599 /* No known place to spill from => no slot to reuse. */
3600 - x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
3601 + x = assign_stack_local_for_pseudo_reg (GET_MODE (regno_reg_rtx[i]), total_size,
3602 inherent_size == total_size ? 0 : -1);
3603 if (BYTES_BIG_ENDIAN)
3604 /* Cancel the big-endian correction done in assign_stack_local.
3605 diff -uNr gcc-3.4.0.orig/gcc/rtl.h gcc-3.4.0/gcc/rtl.h
3606 --- gcc-3.4.0.orig/gcc/rtl.h 2004-03-25 17:44:43.000000000 +0100
3607 +++ gcc-3.4.0/gcc/rtl.h 2004-05-13 23:59:01.767339624 +0200
3608 @@ -473,6 +473,18 @@
3612 +#define RTL_FLAG_CHECK9(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8, C9) \
3614 +({ rtx const _rtx = (RTX); \
3615 + if (GET_CODE(_rtx) != C1 && GET_CODE(_rtx) != C2 \
3616 + && GET_CODE(_rtx) != C3 && GET_CODE(_rtx) != C4 \
3617 + && GET_CODE(_rtx) != C5 && GET_CODE(_rtx) != C6 \
3618 + && GET_CODE(_rtx) != C7 && GET_CODE(_rtx) != C8 \
3619 + && GET_CODE(_rtx) != C9) \
3620 + rtl_check_failed_flag (NAME, _rtx, __FILE__, __LINE__, \
3624 extern void rtl_check_failed_flag (const char *, rtx, const char *,
3628 #define RTL_FLAG_CHECK6(NAME, RTX, C1, C2, C3, C4, C5, C6) (RTX)
3629 #define RTL_FLAG_CHECK7(NAME, RTX, C1, C2, C3, C4, C5, C6, C7) (RTX)
3630 #define RTL_FLAG_CHECK8(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8) (RTX)
3631 +#define RTL_FLAG_CHECK9(NAME, RTX, C1, C2, C3, C4, C5, C6, C7, C8, C9) (RTX)
3634 #define CLEAR_RTX_FLAGS(RTX) \
3636 #define LOG_LINKS(INSN) XEXP(INSN, 7)
3638 #define RTX_INTEGRATED_P(RTX) \
3639 - (RTL_FLAG_CHECK8("RTX_INTEGRATED_P", (RTX), INSN, CALL_INSN, \
3640 + (RTL_FLAG_CHECK9("RTX_INTEGRATED_P", (RTX), INSN, CALL_INSN, \
3641 JUMP_INSN, INSN_LIST, BARRIER, CODE_LABEL, CONST, \
3642 - NOTE)->integrated)
3643 + PLUS, NOTE)->integrated)
3644 #define RTX_UNCHANGING_P(RTX) \
3645 (RTL_FLAG_CHECK3("RTX_UNCHANGING_P", (RTX), REG, MEM, CONCAT)->unchanging)
3646 #define RTX_FRAME_RELATED_P(RTX) \
3647 @@ -1125,6 +1138,10 @@
3648 (RTL_FLAG_CHECK3("MEM_VOLATILE_P", (RTX), MEM, ASM_OPERANDS, \
3649 ASM_INPUT)->volatil)
3651 +/* 1 if RTX is a SET rtx that is not eliminated for the stack protection. */
3652 +#define SET_VOLATILE_P(RTX) \
3653 + (RTL_FLAG_CHECK1("SET_VOLATILE_P", (RTX), SET)->volatil)
3655 /* 1 if RTX is a mem that refers to an aggregate, either to the
3656 aggregate itself of to a field of the aggregate. If zero, RTX may
3657 or may not be such a reference. */
3658 diff -uNr gcc-3.4.0.orig/gcc/simplify-rtx.c gcc-3.4.0/gcc/simplify-rtx.c
3659 --- gcc-3.4.0.orig/gcc/simplify-rtx.c 2004-01-24 12:05:08.000000000 +0100
3660 +++ gcc-3.4.0/gcc/simplify-rtx.c 2004-05-13 23:59:01.785336888 +0200
3661 @@ -2252,6 +2252,7 @@
3662 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts;
3665 + HOST_WIDE_INT fp_offset = 0;
3667 memset (ops, 0, sizeof ops);
3669 @@ -2277,6 +2278,10 @@
3673 + if (flag_propolice_protection
3674 + && XEXP (this_op, 0) == virtual_stack_vars_rtx
3675 + && GET_CODE (XEXP (this_op, 1)) == CONST_INT)
3676 + fp_offset = INTVAL (XEXP (this_op, 1));
3680 @@ -2438,11 +2443,24 @@
3681 && GET_CODE (ops[n_ops - 1].op) == CONST_INT
3682 && CONSTANT_P (ops[n_ops - 2].op))
3684 - rtx value = ops[n_ops - 1].op;
3685 - if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
3686 - value = neg_const_int (mode, value);
3687 - ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
3689 + if (!flag_propolice_protection)
3691 + rtx value = ops[n_ops - 1].op;
3692 + if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
3693 + value = neg_const_int (mode, value);
3694 + ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
3697 + /* The stack protector keeps the addressing style of a local variable,
3698 + so it does not use the neg_const_int function, in order not to change
3699 + the offset value. */
3701 + HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
3702 + if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
3704 + ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, value);
3709 /* Count the number of CONSTs that we generated. */
3710 @@ -2460,6 +2478,59 @@
3711 || (n_ops + n_consts == input_ops && n_consts <= input_consts)))
3714 + if (flag_propolice_protection)
3716 + /* keep the addressing style of local variables
3717 + as (plus (virtual_stack_vars_rtx) (CONST_int x)).
3718 + For the case array[r-1],
3719 + converts from (+ (+VFP c1) (+r -1)) to (SET R (+VFP c1)) (+ R (+r -1)).
3721 + This loop finds ops[i], which is the register for the frame
3722 + addressing; then it makes the frame addressing using that register and
3723 + the constant of ops[n_ops - 1]. */
3724 + for (i = 0; i < n_ops; i++)
3725 +#ifdef FRAME_GROWS_DOWNWARD
3726 + if (ops[i].op == virtual_stack_vars_rtx)
3728 + if (ops[i].op == virtual_stack_vars_rtx
3729 + || ops[i].op == frame_pointer_rtx)
3732 + if (GET_CODE (ops[n_ops - 1].op) == CONST_INT)
3734 + HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
3735 + if (value >= fp_offset)
3737 + ops[i].op = plus_constant (ops[i].op, value);
3743 + && (n_ops + 1 + n_consts > input_ops
3744 + || (n_ops + 1 + n_consts == input_ops
3745 + && n_consts <= input_consts)))
3747 + ops[n_ops - 1].op = GEN_INT (value-fp_offset);
3748 + ops[i].op = plus_constant (ops[i].op, fp_offset);
3751 + /* keep the following address pattern;
3752 + (1) buf[BUFSIZE] is the first assigned variable.
3753 + (+ (+ fp -BUFSIZE) BUFSIZE)
3754 + (2) ((+ (+ fp 1) r) -1). */
3755 + else if (fp_offset != 0)
3757 + /* keep the (+ fp 0) pattern for the following case;
3758 + (1) buf[i]: i: REG, buf: (+ fp 0) in !FRAME_GROWS_DOWNWARD
3759 + (2) argument: the address is (+ fp 0). */
3760 + else if (fp_offset == 0)
3767 /* Put a non-negated operand first, if possible. */
3769 for (i = 0; i < n_ops && ops[i].neg; i++)
3770 diff -uNr gcc-3.4.0.orig/gcc/testsuite/gcc.dg/ssp-warn.c gcc-3.4.0/gcc/testsuite/gcc.dg/ssp-warn.c
3771 --- gcc-3.4.0.orig/gcc/testsuite/gcc.dg/ssp-warn.c 1970-01-01 01:00:00.000000000 +0100
3772 +++ gcc-3.4.0/gcc/testsuite/gcc.dg/ssp-warn.c 2003-11-21 09:41:19.000000000 +0100
3774 +/* { dg-do compile } */
3775 +/* { dg-options "-fstack-protector" } */
3779 + void intest1(int *a)
3787 +} /* { dg-bogus "not protecting function: it contains functions" } */
3797 +} /* { dg-bogus "not protecting variables: it has a variable length buffer" } */
3805 +} /* { dg-bogus "not protecting function: buffer is less than 8 bytes long" } */
3806 diff -uNr gcc-3.4.0.orig/gcc/testsuite/gcc.misc-tests/ssp-execute1.c gcc-3.4.0/gcc/testsuite/gcc.misc-tests/ssp-execute1.c
3807 --- gcc-3.4.0.orig/gcc/testsuite/gcc.misc-tests/ssp-execute1.c 1970-01-01 01:00:00.000000000 +0100
3808 +++ gcc-3.4.0/gcc/testsuite/gcc.misc-tests/ssp-execute1.c 2004-02-16 06:15:39.000000000 +0100
3810 +/* Test location changes of character array. */
3822 + /* c1: the frame offset of buf[0]
3823 + c2: the frame offset of buf2[0]
3825 + p= &buf[0]; *p=1; /* expected rtl: (+ fp -c1) */
3828 + p= &buf[5]; *p=2; /* expected rtl: (+ fp -c1+5) */
3831 + p= &buf[-1]; *p=3; /* expected rtl: (+ (+ fp -c1) -1) */
3832 + if (*p != buf[-1])
3834 + p= &buf[49]; *p=4; /* expected rtl: (+ fp -c1+49) */
3835 + if (*p != buf[49])
3837 + p = &buf[i+5]; *p=5; /* expected rtl: (+ (+ fp -c1) (+ i 5)) */
3838 + if (*p != buf[i+5])
3840 + p = buf - 1; *p=6; /* expected rtl: (+ (+ fp -c1) -1) */
3841 + if (*p != buf[-1])
3843 + p = 1 + buf; *p=7; /* expected rtl: (+ (+ fp -c1) 1) */
3846 + p = &buf[1] - 1; *p=8; /* expected rtl: (+ (+ fp -c1+1) -1) */
3850 + /* test big offset which is greater than the max value of signed 16 bit integer. */
3851 + p = &buf2[45555]; *p=9; /* expected rtl: (+ fp -c2+45555) */
3852 + if (*p != buf2[45555])
3864 diff -uNr gcc-3.4.0.orig/gcc/testsuite/gcc.misc-tests/ssp-execute2.c gcc-3.4.0/gcc/testsuite/gcc.misc-tests/ssp-execute2.c
3865 --- gcc-3.4.0.orig/gcc/testsuite/gcc.misc-tests/ssp-execute2.c 1970-01-01 01:00:00.000000000 +0100
3866 +++ gcc-3.4.0/gcc/testsuite/gcc.misc-tests/ssp-execute2.c 2003-11-22 09:44:33.000000000 +0100
3869 +test(int i, char *j, int k)
3885 + /* overflow buffer */
3886 + for (n = 0; n < 120; n++)
3889 + if (j == 0 || *j != 2)
3912 + test(i, &j[39], k);
3917 diff -uNr gcc-3.4.0.orig/gcc/testsuite/gcc.misc-tests/ssp-execute.exp gcc-3.4.0/gcc/testsuite/gcc.misc-tests/ssp-execute.exp
3918 --- gcc-3.4.0.orig/gcc/testsuite/gcc.misc-tests/ssp-execute.exp 1970-01-01 01:00:00.000000000 +0100
3919 +++ gcc-3.4.0/gcc/testsuite/gcc.misc-tests/ssp-execute.exp 2003-11-22 01:28:12.000000000 +0100
3921 +# Copyright (C) 2003 Free Software Foundation, Inc.
3923 +# This program is free software; you can redistribute it and/or modify
3924 +# it under the terms of the GNU General Public License as published by
3925 +# the Free Software Foundation; either version 2 of the License, or
3926 +# (at your option) any later version.
3928 +# This program is distributed in the hope that it will be useful,
3929 +# but WITHOUT ANY WARRANTY; without even the implied warranty of
3930 +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
3931 +# GNU General Public License for more details.
3933 +# You should have received a copy of the GNU General Public License
3934 +# along with this program; if not, write to the Free Software
3935 +# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
3937 +# Test the functionality of programs compiled with -fstack-protector.
3938 +# The options are ordered like the c-torture options.
3939 +set TORTURE_OPTIONS [list \
3940 + { -fstack-protector -O0 } \
3941 + { -fstack-protector -O1 } \
3942 + { -fstack-protector -O2 } \
3943 + { -fstack-protector -O2 -fomit-frame-pointer } \
3944 + { -fstack-protector -O3 -fomit-frame-pointer } \
3945 + { -fstack-protector -O3 -fomit-frame-pointer -funroll-loops } \
3946 + { -fstack-protector -O3 -fomit-frame-pointer -funroll-all-loops -finline-functions } \
3947 + { -fstack-protector -O3 -g } \
3948 + { -fstack-protector -Os } ]
3950 +if $tracelevel then {
3951 + strace $tracelevel
3954 +# Load support procs.
3955 +load_lib c-torture.exp
3961 +foreach src [lsort [glob -nocomplain $srcdir/$subdir/ssp-execute*.c]] {
3962 + # If we're only testing specific files and this isn't one of them, skip it.
3963 + if ![runtest_file_p $runtests $src] then {
3967 + c-torture-execute $src
3969 diff -uNr gcc-3.4.0.orig/gcc/toplev.c gcc-3.4.0/gcc/toplev.c
3970 --- gcc-3.4.0.orig/gcc/toplev.c 2004-02-20 09:40:49.000000000 +0100
3971 +++ gcc-3.4.0/gcc/toplev.c 2004-05-13 23:59:01.806333696 +0200
3973 declarations for e.g. AIX 4.x. */
3976 +#ifdef STACK_PROTECTOR
3977 +#include "protector.h"
3980 #ifndef HAVE_conditional_execution
3981 #define HAVE_conditional_execution 0
3983 @@ -979,6 +983,15 @@
3984 minimum function alignment. Zero means no alignment is forced. */
3985 int force_align_functions_log;
3987 +#if defined(STACK_PROTECTOR) && defined(STACK_GROWS_DOWNWARD)
3988 +/* Nonzero means use propolice as a stack protection method */
3989 +int flag_propolice_protection = 1;
3990 +int flag_stack_protection = 0;
3992 +int flag_propolice_protection = 0;
3993 +int flag_stack_protection = 0;
3998 const char *const string;
3999 @@ -1154,7 +1167,9 @@
4000 {"mem-report", &mem_report, 1 },
4001 { "trapv", &flag_trapv, 1 },
4002 { "wrapv", &flag_wrapv, 1 },
4003 - { "new-ra", &flag_new_regalloc, 1 }
4004 + { "new-ra", &flag_new_regalloc, 1 },
4005 + {"stack-protector", &flag_propolice_protection, 1 },
4006 + {"stack-protector-all", &flag_stack_protection, 1 }
4009 /* Here is a table, controlled by the tm.h file, listing each -m switch
4010 @@ -2687,6 +2702,9 @@
4012 insns = get_insns ();
4014 + if (flag_propolice_protection)
4015 + prepare_stack_protection (inlinable);
4017 /* Dump the rtl code if we are dumping rtl. */
4019 if (open_dump_file (DFI_rtl, decl))
4020 @@ -4482,6 +4500,12 @@
4021 /* The presence of IEEE signaling NaNs, implies all math can trap. */
4022 if (flag_signaling_nans)
4023 flag_trapping_math = 1;
4025 + /* This combination produces optimized frame addressings and causes
4026 + an internal compilation error at prepare_stack_protection,
4027 + so don't allow it. */
4028 + if (flag_stack_protection && !flag_propolice_protection)
4029 + flag_propolice_protection = TRUE;
4032 /* Initialize the compiler back end. */
4033 diff -uNr gcc-3.4.0.orig/gcc/tree.h gcc-3.4.0/gcc/tree.h
4034 --- gcc-3.4.0.orig/gcc/tree.h 2004-02-08 02:52:43.000000000 +0100
4035 +++ gcc-3.4.0/gcc/tree.h 2004-05-13 23:59:01.827330504 +0200
4036 @@ -1489,6 +1489,10 @@
4037 where it is called. */
4038 #define DECL_INLINE(NODE) (FUNCTION_DECL_CHECK (NODE)->decl.inline_flag)
4040 +/* In a VAR_DECL, nonzero if the declaration is copied for inlining.
4041 + The stack protector should keep its location in the stack. */
4042 +#define DECL_COPIED(NODE) (VAR_DECL_CHECK (NODE)->decl.inline_flag)
4044 /* Nonzero in a FUNCTION_DECL means that this function was declared inline,
4045 such as via the `inline' keyword in C/C++. This flag controls the linkage
4046 semantics of 'inline'; whether or not the function is inlined is