1 diff -urN gcc-3.3.1/gcc/Makefile.in gcc-3.3.1-pp/gcc/Makefile.in
2 --- gcc-3.3.1/gcc/Makefile.in 2003-08-03 15:48:36.000000000 +0000
3 +++ gcc-3.3.1-pp/gcc/Makefile.in 2003-09-12 13:40:28.000000000 +0000
5 # Options to use when compiling libgcc2.a.
7 LIBGCC2_DEBUG_CFLAGS = -g
8 -LIBGCC2_CFLAGS = -O2 $(LIBGCC2_INCLUDES) $(GCC_CFLAGS) $(TARGET_LIBGCC2_CFLAGS) $(LIBGCC2_DEBUG_CFLAGS) $(GTHREAD_FLAGS) -DIN_LIBGCC2 -D__GCC_FLOAT_NOT_NEEDED @inhibit_libc@
9 +LIBGCC2_CFLAGS = -O2 $(LIBGCC2_INCLUDES) $(GCC_CFLAGS) $(TARGET_LIBGCC2_CFLAGS) $(LIBGCC2_DEBUG_CFLAGS) $(GTHREAD_FLAGS) -DIN_LIBGCC2 -D__GCC_FLOAT_NOT_NEEDED @inhibit_libc@ -fno-stack-protector
11 # Additional options to use when compiling libgcc2.a.
12 # Some targets override this to -isystem include
14 sibcall.o simplify-rtx.o ssa.o ssa-ccp.o ssa-dce.o stmt.o \
15 stor-layout.o stringpool.o timevar.o toplev.o tracer.o tree.o tree-dump.o \
16 tree-inline.o unroll.o varasm.o varray.o version.o vmsdbgout.o xcoffout.o \
17 - et-forest.o $(GGC) $(out_object_file) $(EXTRA_OBJS)
18 + et-forest.o protector.o $(GGC) $(out_object_file) $(EXTRA_OBJS)
20 BACKEND = main.o libbackend.a
24 LIB2FUNCS_2 = _floatdixf _fixunsxfsi _fixtfdi _fixunstfdi _floatditf \
25 _clear_cache _trampoline __main _exit _absvsi2 _absvdi2 _addvsi3 \
26 - _addvdi3 _subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors
27 + _addvdi3 _subvsi3 _subvdi3 _mulvsi3 _mulvdi3 _negvsi2 _negvdi2 _ctors _stack_smash_handler
29 # Defined in libgcc2.c, included only in the static library.
30 LIB2FUNCS_ST = _eprintf _bb __gcc_bcmp
32 ssa.h $(PARAMS_H) $(TM_P_H) reload.h dwarf2asm.h $(TARGET_H) \
33 langhooks.h insn-flags.h options.h cfglayout.h real.h
34 $(CC) $(ALL_CFLAGS) $(ALL_CPPFLAGS) $(INCLUDES) \
35 - -DTARGET_NAME=\"$(target_alias)\" \
36 + -DSTACK_PROTECTOR -DTARGET_NAME=\"$(target_alias)\" \
37 -c $(srcdir)/toplev.c $(OUTPUT_OPTION)
38 main.o : main.c $(CONFIG_H) $(SYSTEM_H) toplev.h
41 output.h except.h $(TM_P_H) real.h
42 params.o : params.c $(CONFIG_H) $(SYSTEM_H) $(PARAMS_H) toplev.h
43 hooks.o: hooks.c $(CONFIG_H) $(SYSTEM_H) $(HOOKS_H)
44 +protector.o: protector.c $(CONFIG_H)
46 $(out_object_file): $(out_file) $(CONFIG_H) $(TREE_H) $(GGC_H) \
47 $(RTL_H) $(REGS_H) hard-reg-set.h real.h insn-config.h conditions.h \
48 diff -urN gcc-3.3.1/gcc/calls.c gcc-3.3.1-pp/gcc/calls.c
49 --- gcc-3.3.1/gcc/calls.c 2003-07-18 02:58:25.000000000 +0000
50 +++ gcc-3.3.1-pp/gcc/calls.c 2003-09-12 13:40:28.000000000 +0000
52 /* For variable-sized objects, we must be called with a target
53 specified. If we were to allocate space on the stack here,
54 we would have no way of knowing when to free it. */
55 - rtx d = assign_temp (TREE_TYPE (exp), 1, 1, 1);
56 + rtx d = assign_temp (TREE_TYPE (exp), 5, 1, 1);
58 mark_temp_addr_taken (d);
59 structure_value_addr = XEXP (d, 0);
60 diff -urN gcc-3.3.1/gcc/combine.c gcc-3.3.1-pp/gcc/combine.c
61 --- gcc-3.3.1/gcc/combine.c 2003-03-24 11:37:32.000000000 +0000
62 +++ gcc-3.3.1-pp/gcc/combine.c 2003-09-12 13:40:28.000000000 +0000
63 @@ -3859,7 +3859,17 @@
64 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
65 rtx inner_op1 = XEXP (x, 1);
69 +#ifndef FRAME_GROWS_DOWNWARD
70 + if (flag_propolice_protection
72 + && other == frame_pointer_rtx
73 + && GET_CODE (inner_op0) == CONST_INT
74 + && GET_CODE (inner_op1) == CONST_INT
75 + && INTVAL (inner_op0) > 0
76 + && INTVAL (inner_op0) + INTVAL (inner_op1) <= 0)
79 /* Make sure we pass the constant operand if any as the second
80 one if this is a commutative operation. */
81 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
82 @@ -4272,6 +4282,11 @@
83 they are now checked elsewhere. */
84 if (GET_CODE (XEXP (x, 0)) == PLUS
85 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
86 +#ifndef FRAME_GROWS_DOWNWARD
87 + if (! (flag_propolice_protection
88 + && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx
89 + && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
91 return gen_binary (PLUS, mode,
92 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
94 @@ -4400,7 +4415,10 @@
96 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
98 - if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
99 + if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)
100 + && (! (flag_propolice_protection
101 + && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
102 + && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)))
103 return gen_binary (MINUS, mode,
104 gen_binary (MINUS, mode, XEXP (x, 0),
105 XEXP (XEXP (x, 1), 0)),
106 diff -urN gcc-3.3.1/gcc/config/t-linux gcc-3.3.1-pp/gcc/config/t-linux
107 --- gcc-3.3.1/gcc/config/t-linux 2003-06-04 16:56:11.000000000 +0000
108 +++ gcc-3.3.1-pp/gcc/config/t-linux 2003-09-12 13:40:28.000000000 +0000
110 # Compile crtbeginS.o and crtendS.o with pic.
111 CRTSTUFF_T_CFLAGS_S = $(CRTSTUFF_T_CFLAGS) -fPIC
112 # Compile libgcc2.a with pic.
113 -TARGET_LIBGCC2_CFLAGS = -fPIC
114 +TARGET_LIBGCC2_CFLAGS = -fPIC -DHAVE_SYSLOG
116 # Override t-slibgcc-elf-ver to export some libgcc symbols with
117 # the symbol versions that glibc used.
118 diff -urN gcc-3.3.1/gcc/cse.c gcc-3.3.1-pp/gcc/cse.c
119 --- gcc-3.3.1/gcc/cse.c 2003-04-29 19:16:40.000000000 +0000
120 +++ gcc-3.3.1-pp/gcc/cse.c 2003-09-12 13:40:28.000000000 +0000
121 @@ -4288,7 +4288,14 @@
126 +#ifndef FRAME_GROWS_DOWNWARD
127 + if (flag_propolice_protection
128 + && GET_CODE (y) == PLUS
129 + && XEXP (y, 0) == frame_pointer_rtx
130 + && INTVAL (inner_const) > 0
131 + && INTVAL (new_const) <= 0)
134 /* If we are associating shift operations, don't let this
135 produce a shift of the size of the object or larger.
136 This could occur when we follow a sign-extend by a right
137 @@ -4823,6 +4830,13 @@
138 if (SET_DEST (x) == pc_rtx
139 && GET_CODE (SET_SRC (x)) == LABEL_REF)
141 + /* cut off register propagation of a stack-protected argument */
142 + else if (x->volatil) {
143 + rtx x1 = SET_DEST (x);
144 + if (GET_CODE (x1) == SUBREG && GET_CODE (SUBREG_REG (x1)) == REG)
145 + x1 = SUBREG_REG (x1);
146 + make_new_qty (REGNO (x1), GET_MODE (x1));
149 /* Don't count call-insns, (set (reg 0) (call ...)), as a set.
150 The hard function value register is used only once, to copy to
151 diff -urN gcc-3.3.1/gcc/explow.c gcc-3.3.1-pp/gcc/explow.c
152 --- gcc-3.3.1/gcc/explow.c 2003-04-07 22:58:12.000000000 +0000
153 +++ gcc-3.3.1-pp/gcc/explow.c 2003-09-12 13:40:28.000000000 +0000
156 int all_constant = 0;
160 + && !(flag_propolice_protection && x == virtual_stack_vars_rtx))
170 + || (flag_propolice_protection && x == virtual_stack_vars_rtx))
171 x = gen_rtx_PLUS (mode, x, GEN_INT (c));
173 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
175 in certain cases. This is not necessary since the code
176 below can handle all possible cases, but machine-dependent
177 transformations can make better code. */
178 + if (flag_propolice_protection)
180 +#define FRAMEADDR_P(X) (GET_CODE (X) == PLUS \
181 + && XEXP (X, 0) == virtual_stack_vars_rtx \
182 + && GET_CODE (XEXP (X, 1)) == CONST_INT)
184 + if (FRAMEADDR_P (x)) goto win;
185 + for (y=x; y!=0 && GET_CODE (y)==PLUS; y = XEXP (y, 0))
187 + if (FRAMEADDR_P (XEXP (y, 0)))
188 + XEXP (y, 0) = force_reg (GET_MODE (XEXP (y, 0)), XEXP (y, 0));
189 + if (FRAMEADDR_P (XEXP (y, 1)))
190 + XEXP (y, 1) = force_reg (GET_MODE (XEXP (y, 1)), XEXP (y, 1));
193 LEGITIMIZE_ADDRESS (x, oldx, mode, win);
195 /* PLUS and MULT can appear in special ways
196 diff -urN gcc-3.3.1/gcc/expr.c gcc-3.3.1-pp/gcc/expr.c
197 --- gcc-3.3.1/gcc/expr.c 2003-07-24 19:11:20.000000000 +0000
198 +++ gcc-3.3.1-pp/gcc/expr.c 2003-09-12 13:40:28.000000000 +0000
200 #include "langhooks.h"
203 +#include "protector.h"
205 /* Decide whether a function's arguments should be processed
206 from first to last or from last to first.
207 @@ -1518,7 +1519,7 @@
209 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
211 - data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
212 + data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len-GET_MODE_SIZE (mode)));
213 data.autinc_from = 1;
214 data.explicit_inc_from = -1;
216 @@ -1532,7 +1533,7 @@
217 data.from_addr = copy_addr_to_reg (from_addr);
218 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
220 - data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
221 + data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len-GET_MODE_SIZE (mode)));
223 data.explicit_inc_to = -1;
225 @@ -1649,11 +1650,13 @@
226 from1 = adjust_address (data->from, mode, data->offset);
228 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
229 - emit_insn (gen_add2_insn (data->to_addr,
230 - GEN_INT (-(HOST_WIDE_INT)size)));
231 + if (data->explicit_inc_to < -1)
232 + emit_insn (gen_add2_insn (data->to_addr,
233 + GEN_INT (-(HOST_WIDE_INT)size)));
234 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
235 - emit_insn (gen_add2_insn (data->from_addr,
236 - GEN_INT (-(HOST_WIDE_INT)size)));
237 + if (data->explicit_inc_from < -1)
238 + emit_insn (gen_add2_insn (data->from_addr,
239 + GEN_INT (-(HOST_WIDE_INT)size)));
242 emit_insn ((*genfun) (to1, from1));
243 @@ -2826,7 +2829,7 @@
245 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
247 - data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
248 + data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len-GET_MODE_SIZE (mode)));
250 data->explicit_inc_to = -1;
252 @@ -2897,8 +2900,9 @@
253 to1 = adjust_address (data->to, mode, data->offset);
255 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
256 - emit_insn (gen_add2_insn (data->to_addr,
257 - GEN_INT (-(HOST_WIDE_INT) size)));
258 + if (data->explicit_inc_to < -1)
259 + emit_insn (gen_add2_insn (data->to_addr,
260 + GEN_INT (-(HOST_WIDE_INT) size)));
262 cst = (*data->constfun) (data->constfundata, data->offset, mode);
263 emit_insn ((*genfun) (to1, cst));
264 @@ -5894,7 +5898,9 @@
265 && GET_CODE (XEXP (value, 0)) == PLUS
266 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
267 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
268 - && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
269 + && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER
270 + && (!flag_propolice_protection
271 + || XEXP (XEXP (value, 0), 0) != virtual_stack_vars_rtx))
273 rtx temp = expand_simple_binop (GET_MODE (value), code,
274 XEXP (XEXP (value, 0), 0), op2,
275 @@ -8070,7 +8076,8 @@
276 /* If adding to a sum including a constant,
277 associate it to put the constant outside. */
278 if (GET_CODE (op1) == PLUS
279 - && CONSTANT_P (XEXP (op1, 1)))
280 + && CONSTANT_P (XEXP (op1, 1))
281 + && !(flag_propolice_protection && (contains_fp (op0) || contains_fp (op1))))
283 rtx constant_term = const0_rtx;
285 diff -urN gcc-3.3.1/gcc/flags.h gcc-3.3.1-pp/gcc/flags.h
286 --- gcc-3.3.1/gcc/flags.h 2003-06-20 21:18:41.000000000 +0000
287 +++ gcc-3.3.1-pp/gcc/flags.h 2003-09-12 13:40:28.000000000 +0000
289 #define HONOR_SIGN_DEPENDENT_ROUNDING(MODE) \
290 (MODE_HAS_SIGN_DEPENDENT_ROUNDING (MODE) && !flag_unsafe_math_optimizations)
292 +/* Nonzero means use propolice as a stack protection method */
294 +extern int flag_propolice_protection;
296 +/* Warn when not issuing stack smashing protection for some reason */
298 +extern int warn_stack_protector;
300 #endif /* ! GCC_FLAGS_H */
301 diff -urN gcc-3.3.1/gcc/function.c gcc-3.3.1-pp/gcc/function.c
302 --- gcc-3.3.1/gcc/function.c 2003-04-10 22:26:04.000000000 +0000
303 +++ gcc-3.3.1-pp/gcc/function.c 2003-09-12 13:40:28.000000000 +0000
306 #include "integrate.h"
307 #include "langhooks.h"
308 +#include "protector.h"
310 #ifndef TRAMPOLINE_ALIGNMENT
311 #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
313 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
315 static GTY(()) varray_type sibcall_epilogue;
317 +/* Current boundary mark for character arrays. */
318 +int temp_boundary_mark = 0;
321 /* In order to evaluate some expressions, such as function calls returning
322 structures in memory, we need to temporarily allocate stack locations.
324 /* The size of the slot, including extra space for alignment. This
325 info is for combine_temp_slots. */
326 HOST_WIDE_INT full_size;
327 + /* Boundary mark separating character arrays from other slots. This info is for propolice */
331 /* This structure is used to record MEMs or pseudos used to replace VAR, any
333 whose lifetime is controlled by CLEANUP_POINT_EXPRs. KEEP is 3
334 if we are to allocate something at an inner level to be treated as
335 a variable in the block (e.g., a SAVE_EXPR).
336 + KEEP is 5 if we allocate a place to return structure.
338 TYPE is the type that will be used for the stack slot. */
342 struct temp_slot *p, *best_p = 0;
344 + int char_array = (flag_propolice_protection
345 + && keep == 1 && search_string_def (type));
347 /* If SIZE is -1 it means that somebody tried to allocate a temporary
348 of a variable size. */
351 && objects_must_conflict_p (p->type, type)
352 && (best_p == 0 || best_p->size > p->size
353 - || (best_p->size == p->size && best_p->align > p->align)))
354 + || (best_p->size == p->size && best_p->align > p->align))
355 + && (! char_array || p->boundary_mark != 0))
357 if (p->align == align && p->size == size)
362 p->type = best_p->type;
363 + p->boundary_mark = best_p->boundary_mark;
364 p->next = temp_slots;
368 p->full_size = frame_offset - frame_offset_old;
371 + p->boundary_mark = char_array?++temp_boundary_mark:0;
372 p->next = temp_slots;
375 @@ -932,14 +945,16 @@
377 if (! q->in_use && GET_MODE (q->slot) == BLKmode)
379 - if (p->base_offset + p->full_size == q->base_offset)
380 + if (p->base_offset + p->full_size == q->base_offset &&
381 + p->boundary_mark == q->boundary_mark)
383 /* Q comes after P; combine Q into P. */
385 p->full_size += q->full_size;
388 - else if (q->base_offset + q->full_size == p->base_offset)
389 + else if (q->base_offset + q->full_size == p->base_offset &&
390 + p->boundary_mark == q->boundary_mark)
392 /* P comes after Q; combine P into Q. */
394 @@ -1497,7 +1512,9 @@
395 new = func->x_parm_reg_stack_loc[regno];
398 - new = assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func);
400 + assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode), 0, func):
401 + assign_stack_local_for_pseudo_reg (decl_mode, GET_MODE_SIZE (decl_mode), 0);
404 PUT_MODE (reg, decl_mode);
405 @@ -3961,7 +3978,8 @@
406 constant with that register. */
407 temp = gen_reg_rtx (Pmode);
409 - if (validate_change (object, &XEXP (x, 1), temp, 0))
410 + if (validate_change (object, &XEXP (x, 1), temp, 0)
411 + && ! flag_propolice_protection)
412 emit_insn_before (gen_move_insn (temp, new_offset), object);
415 diff -urN gcc-3.3.1/gcc/gcse.c gcc-3.3.1-pp/gcc/gcse.c
416 --- gcc-3.3.1/gcc/gcse.c 2003-07-14 09:21:22.000000000 +0000
417 +++ gcc-3.3.1-pp/gcc/gcse.c 2003-09-12 13:40:28.000000000 +0000
418 @@ -4211,7 +4211,7 @@
419 /* Find an assignment that sets reg_used and is available
420 at the start of the block. */
421 set = find_avail_set (regno, insn);
423 + if (! set || set->expr->volatil)
427 diff -urN gcc-3.3.1/gcc/integrate.c gcc-3.3.1-pp/gcc/integrate.c
428 --- gcc-3.3.1/gcc/integrate.c 2003-07-15 01:05:43.000000000 +0000
429 +++ gcc-3.3.1-pp/gcc/integrate.c 2003-09-12 13:40:28.000000000 +0000
431 /* These args would always appear unused, if not for this. */
432 TREE_USED (copy) = 1;
434 + /* The inlined variable is marked as INLINE so it is not swept by propolice */
435 + if (flag_propolice_protection && TREE_CODE (copy) == VAR_DECL)
436 + DECL_INLINE (copy) = 1;
438 /* Set the context for the new declaration. */
439 if (!DECL_CONTEXT (decl))
440 /* Globals stay global. */
441 @@ -1965,6 +1969,10 @@
445 +#ifdef FRAME_GROWS_DOWNWARD
446 + if (flag_propolice_protection && GET_CODE (seq) == SET)
447 + RTX_INTEGRATED_P (SET_SRC (seq)) = 1;
449 emit_insn_after (seq, map->insns_at_start);
452 diff -urN gcc-3.3.1/gcc/libgcc-std.ver gcc-3.3.1-pp/gcc/libgcc-std.ver
453 --- gcc-3.3.1/gcc/libgcc-std.ver 2003-07-13 21:25:09.000000000 +0000
454 +++ gcc-3.3.1-pp/gcc/libgcc-std.ver 2003-09-12 13:40:28.000000000 +0000
456 _Unwind_SjLj_RaiseException
457 _Unwind_SjLj_ForcedUnwind
460 + # stack smash handler symbols
462 + __stack_smash_handler
465 %inherit GCC_3.3 GCC_3.0
466 diff -urN gcc-3.3.1/gcc/libgcc2.c gcc-3.3.1-pp/gcc/libgcc2.c
467 --- gcc-3.3.1/gcc/libgcc2.c 2002-10-23 10:47:24.000000000 +0000
468 +++ gcc-3.3.1-pp/gcc/libgcc2.c 2003-09-12 13:40:28.000000000 +0000
469 @@ -1993,3 +1993,102 @@
470 #endif /* NEED_ATEXIT */
474 +#ifdef L_stack_smash_handler
480 +#ifdef _POSIX_SOURCE
484 +#if defined(HAVE_SYSLOG)
485 +#include <sys/types.h>
486 +#include <sys/socket.h>
489 +#include <sys/syslog.h>
491 +#define _PATH_LOG "/dev/log"
495 +long __guard[8] = {0,0,0,0,0,0,0,0};
496 +static void __guard_setup (void) __attribute__ ((constructor)) ;
497 +static void __guard_setup (void)
500 + if (__guard[0]!=0) return;
501 + fd = open ("/dev/urandom", 0);
503 + ssize_t size = read (fd, (char*)&__guard, sizeof(__guard));
505 + if (size == sizeof(__guard)) return;
507 + /* If a random generator can't be used, the protector switches the guard
508 + to the "terminator canary" */
509 + ((char*)__guard)[0] = 0; ((char*)__guard)[1] = 0;
510 + ((char*)__guard)[2] = '\n'; ((char*)__guard)[3] = 255;
512 +void __stack_smash_handler (char func[], int damaged ATTRIBUTE_UNUSED)
514 +#if defined (__GNU_LIBRARY__)
515 + extern char * __progname;
517 + const char message[] = ": stack smashing attack in function ";
518 + int bufsz = 256, len;
520 +#if defined(HAVE_SYSLOG)
522 + struct sockaddr_un SyslogAddr; /* AF_UNIX address of local logger */
524 +#ifdef _POSIX_SOURCE
528 + sigdelset(&mask, SIGABRT); /* Block all signal handlers */
529 + sigprocmask(SIG_BLOCK, &mask, NULL); /* except SIGABRT */
533 + strcpy(buf, "<2>"); len=3; /* send LOG_CRIT */
534 +#if defined (__GNU_LIBRARY__)
535 + strncat(buf, __progname, bufsz-len-1); len = strlen(buf);
537 + if (bufsz>len) {strncat(buf, message, bufsz-len-1); len = strlen(buf);}
538 + if (bufsz>len) {strncat(buf, func, bufsz-len-1); len = strlen(buf);}
540 + /* print error message */
541 + write (STDERR_FILENO, buf+3, len-3);
542 +#if defined(HAVE_SYSLOG)
543 + if ((LogFile = socket(AF_UNIX, SOCK_DGRAM, 0)) != -1) {
546 + * Send "found" message to the "/dev/log" path
548 + SyslogAddr.sun_family = AF_UNIX;
549 + (void)strncpy(SyslogAddr.sun_path, _PATH_LOG,
550 + sizeof(SyslogAddr.sun_path) - 1);
551 + SyslogAddr.sun_path[sizeof(SyslogAddr.sun_path) - 1] = '\0';
552 + sendto(LogFile, buf, len, 0, (struct sockaddr *)&SyslogAddr,
553 + sizeof(SyslogAddr));
557 +#ifdef _POSIX_SOURCE
558 + { /* Make sure the default handler is associated with SIGABRT */
559 + struct sigaction sa;
561 + memset(&sa, 0, sizeof(struct sigaction));
562 + sigfillset(&sa.sa_mask); /* Block all signals */
564 + sa.sa_handler = SIG_DFL;
565 + sigaction(SIGABRT, &sa, NULL);
566 + (void)kill(getpid(), SIGABRT);
572 diff -urN gcc-3.3.1/gcc/loop.c gcc-3.3.1-pp/gcc/loop.c
573 --- gcc-3.3.1/gcc/loop.c 2003-07-11 06:47:05.000000000 +0000
574 +++ gcc-3.3.1-pp/gcc/loop.c 2003-09-12 13:40:28.000000000 +0000
575 @@ -6516,6 +6516,14 @@
576 if (GET_CODE (*mult_val) == USE)
577 *mult_val = XEXP (*mult_val, 0);
579 +#ifndef FRAME_GROWS_DOWNWARD
580 + if (flag_propolice_protection
581 + && GET_CODE (*add_val) == PLUS
582 + && (XEXP (*add_val, 0) == frame_pointer_rtx
583 + || XEXP (*add_val, 1) == frame_pointer_rtx))
588 *pbenefit += address_cost (orig_x, addr_mode) - reg_address_cost;
590 diff -urN gcc-3.3.1/gcc/optabs.c gcc-3.3.1-pp/gcc/optabs.c
591 --- gcc-3.3.1/gcc/optabs.c 2003-07-19 00:25:25.000000000 +0000
592 +++ gcc-3.3.1-pp/gcc/optabs.c 2003-09-12 13:40:28.000000000 +0000
595 target = protect_from_queue (target, 1);
597 + if (flag_propolice_protection
598 + && binoptab->code == PLUS
599 + && op0 == virtual_stack_vars_rtx
600 + && GET_CODE(op1) == CONST_INT)
602 + int icode = (int) binoptab->handlers[(int) mode].insn_code;
606 + temp = gen_reg_rtx (mode);
608 + if (! (*insn_data[icode].operand[0].predicate) (temp, mode)
609 + || GET_CODE (temp) != REG)
610 + temp = gen_reg_rtx (mode);
612 + emit_insn (gen_rtx_SET (VOIDmode, temp,
613 + gen_rtx_PLUS (GET_MODE (op0), op0, op1)));
619 op0 = force_not_mem (op0);
620 diff -urN gcc-3.3.1/gcc/protector.c gcc-3.3.1-pp/gcc/protector.c
621 --- gcc-3.3.1/gcc/protector.c 1970-01-01 00:00:00.000000000 +0000
622 +++ gcc-3.3.1-pp/gcc/protector.c 2003-09-12 13:40:28.000000000 +0000
624 +/* RTL buffer overflow protection function for GNU C compiler
625 + Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.
627 +This file is part of GCC.
629 +GCC is free software; you can redistribute it and/or modify it under
630 +the terms of the GNU General Public License as published by the Free
631 +Software Foundation; either version 2, or (at your option) any later
634 +GCC is distributed in the hope that it will be useful, but WITHOUT ANY
635 +WARRANTY; without even the implied warranty of MERCHANTABILITY or
636 +FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
639 +You should have received a copy of the GNU General Public License
640 +along with GCC; see the file COPYING. If not, write to the Free
641 +Software Foundation, 59 Temple Place - Suite 330, Boston, MA
646 +#include "machmode.h"
652 +#include "insn-config.h"
653 +#include "insn-flags.h"
657 +#include "hard-reg-set.h"
660 +#include "function.h"
662 +#include "conditions.h"
663 +#include "insn-attr.h"
666 +#include "protector.h"
669 +void prepare_stack_protection PARAMS ((int inlinable));
670 +int search_string_def PARAMS ((tree names));
671 +rtx assign_stack_local_for_pseudo_reg PARAMS ((enum machine_mode, HOST_WIDE_INT, int));
674 +/* Warn when not issuing stack smashing protection for some reason */
675 +int warn_stack_protector;
677 +/* Round a value to the lowest integer less than it that is a multiple of
678 + the required alignment. Avoid using division in case the value is
679 + negative. Assume the alignment is a power of two. */
680 +#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
682 +/* Similar, but round to the next highest integer that meets the
684 +#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
687 +/* Nonzero means use propolice as a stack protection method */
688 +extern int flag_propolice_protection;
690 +/* This file contains several memory arrangement functions to protect
691 + the return address and the frame pointer of the stack
692 + from a stack-smashing attack. It also
693 + provides the function that protects pointer variables. */
695 +/* Nonzero if function being compiled can define string buffers that may be
696 + damaged by the stack-smash attack */
697 +static int current_function_defines_vulnerable_string;
698 +static int current_function_defines_short_string;
699 +static int current_function_has_variable_string;
700 +static int current_function_defines_vsized_array;
701 +static int current_function_is_inlinable;
703 +static rtx guard_area, _guard;
704 +static rtx function_first_insn, prologue_insert_point;
707 +static HOST_WIDE_INT sweep_frame_offset;
708 +static HOST_WIDE_INT push_allocated_offset = 0;
709 +static HOST_WIDE_INT push_frame_offset = 0;
710 +static int saved_cse_not_expected = 0;
712 +static int search_string_from_argsandvars PARAMS ((int caller));
713 +static int search_string_from_local_vars PARAMS ((tree block));
714 +static int search_pointer_def PARAMS ((tree names));
715 +static int search_func_pointer PARAMS ((tree type, int mark));
716 +static void reset_used_flags_for_insns PARAMS ((rtx insn));
717 +static void reset_used_flags_for_decls PARAMS ((tree block));
718 +static void reset_used_flags_of_plus PARAMS ((rtx x));
719 +static void rtl_prologue PARAMS ((rtx insn));
720 +static void rtl_epilogue PARAMS ((rtx fnlastinsn));
721 +static void arrange_var_order PARAMS ((tree blocks));
722 +static void copy_args_for_protection PARAMS ((void));
723 +static void sweep_string_variable PARAMS ((rtx sweep_var, HOST_WIDE_INT var_size));
724 +static void sweep_string_in_decls PARAMS ((tree block, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
725 +static void sweep_string_in_args PARAMS ((tree parms, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
726 +static void sweep_string_use_of_insns PARAMS ((rtx insn, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
727 +static void sweep_string_in_operand PARAMS ((rtx insn, rtx *loc, HOST_WIDE_INT sweep_offset, HOST_WIDE_INT size));
728 +static void move_arg_location PARAMS ((rtx insn, rtx orig, rtx new, HOST_WIDE_INT var_size));
729 +static void change_arg_use_of_insns PARAMS ((rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size));
730 +static void change_arg_use_of_insns_2 PARAMS ((rtx insn, rtx orig, rtx *new, HOST_WIDE_INT size));
731 +static void change_arg_use_in_operand PARAMS ((rtx x, rtx orig, rtx *new, HOST_WIDE_INT size));
732 +static void validate_insns_of_varrefs PARAMS ((rtx insn));
733 +static void validate_operand_of_varrefs PARAMS ((rtx insn, rtx *loc));
735 +#define SUSPICIOUS_BUF_SIZE 8
737 +#define AUTO_BASEPTR(X) \
738 + (GET_CODE (X) == PLUS ? XEXP (X, 0) : X)
739 +#define AUTO_OFFSET(X) \
740 + (GET_CODE (X) == PLUS ? INTVAL (XEXP (X, 1)) : 0)
741 +#undef PARM_PASSED_IN_MEMORY
742 +#define PARM_PASSED_IN_MEMORY(PARM) \
743 + (GET_CODE (DECL_INCOMING_RTL (PARM)) == MEM)
744 +#define VIRTUAL_STACK_VARS_P(X) \
745 + ((X) == virtual_stack_vars_rtx || (GET_CODE (X) == REG && (X)->used))
750 +prepare_stack_protection (inlinable)
753 + tree blocks = DECL_INITIAL (current_function_decl);
754 + current_function_is_inlinable = inlinable && !flag_no_inline;
755 + push_frame_offset = push_allocated_offset = 0;
756 + saved_cse_not_expected = 0;
759 + skip the protection if the function has no block or it is an inline function
761 + if (current_function_is_inlinable) validate_insns_of_varrefs (get_insns ());
762 + if (! blocks || current_function_is_inlinable) return;
764 + current_function_defines_vulnerable_string = search_string_from_argsandvars (0);
766 + if (current_function_defines_vulnerable_string)
768 + HOST_WIDE_INT offset;
769 + function_first_insn = get_insns ();
771 + if (current_function_contains_functions) {
772 + if (warn_stack_protector)
773 + warning ("not protecting function: it contains functions");
777 + /* Initialize recognition, indicating that volatile is OK. */
780 + sweep_frame_offset = 0;
782 +#ifdef STACK_GROWS_DOWNWARD
784 + frame_offset: offset to end of allocated area of stack frame.
785 + It is defined in the function.c
788 + /* the location must be before buffers */
789 + guard_area = assign_stack_local (BLKmode, UNITS_PER_GUARD, -1);
790 + PUT_MODE (guard_area, GUARD_m);
791 + MEM_VOLATILE_P (guard_area) = 1;
793 +#ifndef FRAME_GROWS_DOWNWARD
794 + sweep_frame_offset = frame_offset;
797 + /* For making room for guard value, scan all insns and fix the offset address
798 + of the variable that is based on frame pointer.
799 + Scan all declarations of variables and fix the offset address of the variable that
800 + is based on the frame pointer */
801 + sweep_string_variable (guard_area, UNITS_PER_GUARD);
804 + /* the location of guard area moves to the beginning of stack frame */
805 + if ((offset = AUTO_OFFSET(XEXP (guard_area, 0))))
806 + XEXP (XEXP (guard_area, 0), 1) = gen_rtx_CONST_INT (VOIDmode, sweep_frame_offset);
809 + /* Insert prologue rtl instructions */
810 + rtl_prologue (function_first_insn);
812 + if (! current_function_has_variable_string)
814 + /* Generate argument saving instruction */
815 + copy_args_for_protection ();
817 +#ifndef FRAME_GROWS_DOWNWARD
818 + /* If the frame grows upward, a character string copied from an arg stays above
819 + the guard variable, so sweep the guard variable again */
820 + sweep_frame_offset = CEIL_ROUND (frame_offset, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
821 + sweep_string_variable (guard_area, UNITS_PER_GUARD);
824 + else if (warn_stack_protector)
825 + warning ("not protecting variables: it has a variable length buffer");
827 +#ifndef FRAME_GROWS_DOWNWARD
828 + if (STARTING_FRAME_OFFSET == 0)
830 + /* this may be only for alpha */
831 + push_allocated_offset = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
832 + assign_stack_local (BLKmode, push_allocated_offset, -1);
833 + sweep_frame_offset = frame_offset;
834 + sweep_string_variable (const0_rtx, -push_allocated_offset);
835 + sweep_frame_offset = AUTO_OFFSET (XEXP (guard_area, 0));
839 + /* Arrange the order of local variables */
840 + arrange_var_order (blocks);
842 +#ifdef STACK_GROWS_DOWNWARD
843 + /* Insert epilogue rtl instructions */
844 + rtl_epilogue (get_last_insn ());
846 + init_recog_no_volatile ();
848 + else if (current_function_defines_short_string
849 + && warn_stack_protector)
850 + warning ("not protecting function: buffer is less than %d bytes long",
851 + SUSPICIOUS_BUF_SIZE);
855 + search string from arguments and local variables
856 + caller: 0 means call from protector_stack_protection
857 + 1 means call from push_frame
860 +search_string_from_argsandvars (caller)
863 + tree blocks, parms;
866 + /* saves the latest search result as cached information */
867 + static tree __latest_search_decl = 0;
868 + static int __latest_search_result = FALSE;
870 + if (__latest_search_decl == current_function_decl)
871 + return __latest_search_result;
872 + else if (caller) return FALSE;
873 + __latest_search_decl = current_function_decl;
874 + __latest_search_result = TRUE;
876 + current_function_defines_short_string = FALSE;
877 + current_function_has_variable_string = FALSE;
878 + current_function_defines_vsized_array = FALSE;
881 + search a string variable from local variables
883 + blocks = DECL_INITIAL (current_function_decl);
884 + string_p = search_string_from_local_vars (blocks);
886 + if (!current_function_defines_vsized_array && current_function_calls_alloca)
888 + current_function_has_variable_string = TRUE;
892 + if (string_p) return TRUE;
894 +#ifdef STACK_GROWS_DOWNWARD
896 + search a string variable from arguments
898 + parms = DECL_ARGUMENTS (current_function_decl);
900 + for (; parms; parms = TREE_CHAIN (parms))
901 + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
903 + if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
905 + string_p = search_string_def (TREE_TYPE(parms));
906 + if (string_p) return TRUE;
911 + __latest_search_result = FALSE;
917 +search_string_from_local_vars (block)
923 + while (block && TREE_CODE(block)==BLOCK)
925 + types = BLOCK_VARS(block);
929 + /* skip the declaration that refers to an external variable */
930 + /* name: types.decl.name.identifier.id */
931 + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
932 + && TREE_CODE (types) == VAR_DECL
933 + && ! DECL_ARTIFICIAL (types)
934 + && DECL_RTL_SET_P (types)
935 + && GET_CODE (DECL_RTL (types)) == MEM)
937 + if (search_string_def (TREE_TYPE (types)))
939 + rtx home = DECL_RTL (types);
941 + if (GET_CODE (home) == MEM
942 + && (GET_CODE (XEXP (home, 0)) == MEM
943 + || (GET_CODE (XEXP (home, 0)) == REG
944 + && XEXP (home, 0) != virtual_stack_vars_rtx
945 + && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
946 + && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
947 +#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
948 + && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
951 + /* If the value is indirect through memory or through a register
952 + that isn't the frame pointer,
953 + then it means the object is variable-sized and addressed through
954 + that register or stack slot. The protection has no way to hide pointer variables
955 + behind the array, so all we can do is preserve the order of variables and arguments. */
957 + current_function_has_variable_string = TRUE;
960 + /* found character array */
965 + types = TREE_CHAIN(types);
968 + if (search_string_from_local_vars (BLOCK_SUBBLOCKS (block)))
973 + block = BLOCK_CHAIN (block);
981 + * search a character array from the specified type tree
984 +search_string_def (type)
992 + switch (TREE_CODE (type))
995 + /* Check if the array is a variable-sized array */
996 + if (TYPE_DOMAIN (type) == 0
997 + || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
998 + && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
999 + current_function_defines_vsized_array = TRUE;
1001 + /* TREE_CODE( TREE_TYPE(type) ) == INTEGER_TYPE */
1002 + if (TYPE_MAIN_VARIANT (TREE_TYPE(type)) == char_type_node
1003 + || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == signed_char_type_node
1004 + || TYPE_MAIN_VARIANT (TREE_TYPE(type)) == unsigned_char_type_node)
1006 + /* Check if the string is a variable string */
1007 + if (TYPE_DOMAIN (type) == 0
1008 + || (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1009 + && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) == NOP_EXPR))
1012 + /* Check if the string size is greater than SUSPICIOUS_BUF_SIZE */
1013 + if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != 0
1014 + && TREE_INT_CST_LOW(TYPE_MAX_VALUE(TYPE_DOMAIN(type)))+1 >= SUSPICIOUS_BUF_SIZE)
1017 + current_function_defines_short_string = TRUE;
1019 + return search_string_def(TREE_TYPE(type));
1022 + case QUAL_UNION_TYPE:
1024 + /* Output the name, type, position (in bits), size (in bits) of each
1026 + for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1028 + /* Omit here local type decls until we know how to support them. */
1029 + if ((TREE_CODE (tem) == TYPE_DECL)
1030 + || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
1033 + if (search_string_def(TREE_TYPE(tem))) return TRUE;
1037 + case POINTER_TYPE:
1038 + case REFERENCE_TYPE:
1039 + /* I'm not sure whether OFFSET_TYPE needs this treatment,
1040 + so I'll play safe and return 1. */
1050 + * examine whether the input contains frame pointer addressing
1056 + register enum rtx_code code;
1065 + code = GET_CODE (x);
1070 + case CONST_DOUBLE:
1079 + if (XEXP (x, 0) == virtual_stack_vars_rtx
1080 + && CONSTANT_P (XEXP (x, 1)))
1087 + /* Scan all subexpressions. */
1088 + fmt = GET_RTX_FORMAT (code);
1089 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1092 + if (contains_fp (XEXP (x, i))) return TRUE;
1094 + else if (*fmt == 'E')
1095 + for (j = 0; j < XVECLEN (x, i); j++)
1096 + if (contains_fp (XVECEXP (x, i, j))) return TRUE;
1103 +search_pointer_def (type)
1111 + switch (TREE_CODE (type))
1114 + case QUAL_UNION_TYPE:
1116 + /* Output the name, type, position (in bits), size (in bits) of each
1118 + for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1120 + /* Omit here local type decls until we know how to support them. */
1121 + if ((TREE_CODE (tem) == TYPE_DECL)
1122 + || (TREE_CODE (tem) == VAR_DECL && TREE_STATIC (tem)))
1125 + if (search_pointer_def (TREE_TYPE(tem))) return TRUE;
1130 + return search_pointer_def (TREE_TYPE(type));
1132 + case POINTER_TYPE:
1133 + case REFERENCE_TYPE:
1134 + /* I'm not sure whether OFFSET_TYPE needs this treatment,
1135 + so I'll play safe and return 1. */
1137 + if (TYPE_READONLY (TREE_TYPE (type)))
1139 + int funcp = search_func_pointer (TREE_TYPE (type), 1);
1140 + /* Un-mark the type as having been visited already */
1141 + search_func_pointer (TREE_TYPE (type), 0);
1155 +search_func_pointer (type, mark)
1164 + switch (TREE_CODE (type))
1167 + case QUAL_UNION_TYPE:
1169 + if (TREE_ASM_WRITTEN (type) != mark)
1171 + /* mark the type as having been visited already */
1172 + TREE_ASM_WRITTEN (type) = mark;
1174 + /* Output the name, type, position (in bits), size (in bits) of
1176 + for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
1178 + /* Omit here local type decls until we know how to support them. */
1179 + if (TREE_CODE (tem) == FIELD_DECL
1180 + && search_func_pointer (TREE_TYPE(tem), mark)) return TRUE;
1186 + return search_func_pointer (TREE_TYPE(type), mark);
1188 + case POINTER_TYPE:
1189 + case REFERENCE_TYPE:
1190 + /* I'm not sure whether OFFSET_TYPE needs this treatment,
1191 + so I'll play safe and return 1. */
1193 + return TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE;
1204 +reset_used_flags_for_insns (insn)
1207 + register int i, j;
1208 + register enum rtx_code code;
1209 + register const char *format_ptr;
1211 + for (; insn; insn = NEXT_INSN (insn))
1212 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1213 + || GET_CODE (insn) == CALL_INSN)
1215 + code = GET_CODE (insn);
1217 + format_ptr = GET_RTX_FORMAT (code);
1219 + for (i = 0; i < GET_RTX_LENGTH (code); i++)
1221 + switch (*format_ptr++) {
1223 + reset_used_flags_of_plus (XEXP (insn, i));
1227 + for (j = 0; j < XVECLEN (insn, i); j++)
1228 + reset_used_flags_of_plus (XVECEXP (insn, i, j));
1236 +reset_used_flags_for_decls (block)
1242 + while (block && TREE_CODE(block)==BLOCK)
1244 + types = BLOCK_VARS(block);
1248 + /* skip the declaration that refers an external variable and
1249 + also skip an global variable */
1250 + if (! DECL_EXTERNAL (types))
1252 + if (!DECL_RTL_SET_P (types)) goto next;
1253 + home = DECL_RTL (types);
1255 + if (GET_CODE (home) == MEM
1256 + && GET_CODE (XEXP (home, 0)) == PLUS
1257 + && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
1259 + XEXP (home, 0)->used = 0;
1263 + types = TREE_CHAIN(types);
1266 + reset_used_flags_for_decls (BLOCK_SUBBLOCKS (block));
1268 + block = BLOCK_CHAIN (block);
1272 +/* Clear the USED bits only of type PLUS in X */
1275 +reset_used_flags_of_plus (x)
1278 + register int i, j;
1279 + register enum rtx_code code;
1280 + register const char *format_ptr;
1285 + code = GET_CODE (x);
1287 + /* These types may be freely shared so we needn't do any resetting
1295 + case CONST_DOUBLE:
1308 + /* The chain of insns is not being copied. */
1315 + case CALL_PLACEHOLDER:
1316 + reset_used_flags_for_insns (XEXP (x, 0));
1317 + reset_used_flags_for_insns (XEXP (x, 1));
1318 + reset_used_flags_for_insns (XEXP (x, 2));
1325 + format_ptr = GET_RTX_FORMAT (code);
1326 + for (i = 0; i < GET_RTX_LENGTH (code); i++)
1328 + switch (*format_ptr++)
1331 + reset_used_flags_of_plus (XEXP (x, i));
1335 + for (j = 0; j < XVECLEN (x, i); j++)
1336 + reset_used_flags_of_plus (XVECEXP (x, i, j));
1344 +rtl_prologue (insn)
1347 +#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
1348 +#undef HAS_INIT_SECTION
1349 +#define HAS_INIT_SECTION
1354 + for (; insn; insn = NEXT_INSN (insn))
1355 + if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1358 +#if !defined (HAS_INIT_SECTION)
1359 + /* If this function is `main', skip a call to `__main'
1360 + to run guard instruments after global initializers, etc. */
1361 + if (DECL_NAME (current_function_decl)
1362 + && MAIN_NAME_P (DECL_NAME (current_function_decl))
1363 + && DECL_CONTEXT (current_function_decl) == NULL_TREE)
1365 + rtx fbinsn = insn;
1366 + for (; insn; insn = NEXT_INSN (insn))
1367 + if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
1369 + if (insn == 0) insn = fbinsn;
1373 + prologue_insert_point = NEXT_INSN (insn); /* mark the next insn of FUNCTION_BEG insn */
1375 + start_sequence ();
1377 + _guard = gen_rtx_MEM (GUARD_m, gen_rtx_SYMBOL_REF (Pmode, "__guard"));
1378 + emit_move_insn ( guard_area, _guard);
1380 + _val = get_insns ();
1383 + emit_insn_before (_val, prologue_insert_point);
1387 +rtl_epilogue (insn)
1390 + rtx if_false_label;
1394 + int flag_have_return = FALSE;
1396 + start_sequence ();
1402 + return_label = gen_label_rtx ();
1404 + for (insn = prologue_insert_point; insn; insn = NEXT_INSN (insn))
1405 + if (GET_CODE (insn) == JUMP_INSN
1406 + && GET_CODE (PATTERN (insn)) == RETURN
1407 + && GET_MODE (PATTERN (insn)) == VOIDmode)
1409 + rtx pat = gen_rtx_SET (VOIDmode,
1411 + gen_rtx_LABEL_REF (VOIDmode,
1413 + PATTERN (insn) = pat;
1414 + flag_have_return = TRUE;
1418 + emit_label (return_label);
1422 + compare_from_rtx (guard_area, _guard, NE, 0, GUARD_m, NULL_RTX); /* if (guard_area != _guard) */
1424 + if_false_label = gen_label_rtx (); /* { */
1425 + emit_jump_insn ( gen_beq(if_false_label));
1427 + /* generate string for the current function name */
1428 + funcstr = build_string (strlen(current_function_name)+1, current_function_name);
1429 + TREE_TYPE (funcstr) = build_array_type (char_type_node, 0);/* = char_array_type_node;*/
1430 + funcname = output_constant_def (funcstr, 1);
1432 + emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "__stack_smash_handler"),
1434 + XEXP (funcname, 0), Pmode, guard_area, GUARD_m);
1436 + /* generate RTL to return from the current function */
1438 + emit_barrier (); /* } */
1439 + emit_label (if_false_label);
1441 + /* generate RTL to return from the current function */
1442 + if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
1443 + use_return_register ();
1446 + if (HAVE_return && flag_have_return)
1448 + emit_jump_insn (gen_return ());
1453 + _val = get_insns ();
1456 + emit_insn_after (_val, insn);
1461 +arrange_var_order (block)
1465 + HOST_WIDE_INT offset;
1467 + while (block && TREE_CODE(block)==BLOCK)
1469 + types = BLOCK_VARS (block);
1473 + /* skip the declaration that refers an external variable */
1474 + /* name: types.decl.assembler_name.id */
1475 + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)
1476 + && TREE_CODE (types) == VAR_DECL
1477 + && ! DECL_ARTIFICIAL (types)
1478 + && ! DECL_INLINE (types) /* don't sweep inlined string */
1479 + && DECL_RTL_SET_P (types)
1480 + && GET_CODE (DECL_RTL (types)) == MEM)
1482 + if (search_string_def (TREE_TYPE (types)))
1484 + rtx home = DECL_RTL (types);
1486 + if (! (GET_CODE (home) == MEM
1487 + && (GET_CODE (XEXP (home, 0)) == MEM
1488 + || (GET_CODE (XEXP (home, 0)) == REG
1489 + && XEXP (home, 0) != virtual_stack_vars_rtx
1490 + && REGNO (XEXP (home, 0)) != HARD_FRAME_POINTER_REGNUM
1491 + && REGNO (XEXP (home, 0)) != STACK_POINTER_REGNUM
1492 +#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
1493 + && REGNO (XEXP (home, 0)) != ARG_POINTER_REGNUM
1497 + /* found a string variable */
1498 + HOST_WIDE_INT var_size =
1499 + ((TREE_INT_CST_LOW (DECL_SIZE (types)) + BITS_PER_UNIT - 1)
1502 + if (GET_MODE (DECL_RTL (types)) == BLKmode)
1504 + int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1505 + var_size = CEIL_ROUND (var_size, alignment);
1508 + /* skip the variable if it is top of the region
1509 + specified by sweep_frame_offset */
1510 + offset = AUTO_OFFSET (XEXP (DECL_RTL (types), 0));
1511 + if (offset == sweep_frame_offset - var_size)
1512 + sweep_frame_offset -= var_size;
1514 + else if (offset < sweep_frame_offset - var_size)
1515 + sweep_string_variable (DECL_RTL (types), var_size);
1520 + types = TREE_CHAIN(types);
1523 + arrange_var_order (BLOCK_SUBBLOCKS (block));
1525 + block = BLOCK_CHAIN (block);
1531 +copy_args_for_protection ()
1533 + tree parms = DECL_ARGUMENTS (current_function_decl);
1536 + parms = DECL_ARGUMENTS (current_function_decl);
1537 + for (; parms; parms = TREE_CHAIN (parms))
1538 + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1540 + if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1545 + skip argument protection if the last argument is used
1546 + for the variable argument
1550 + if (TREE_CHAIN (parms) == 0)
1552 + fntype = TREE_TYPE (current_function_decl);
1554 + if ((TYPE_ARG_TYPES (fntype) != 0 &&
1555 + TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) != void_type_node)
1556 + || current_function_varargs)
1561 + string_p = search_string_def (TREE_TYPE(parms));
1563 + /* check if it is a candidate to move */
1564 + if (string_p || search_pointer_def (TREE_TYPE (parms)))
1567 + = ((TREE_INT_CST_LOW (DECL_SIZE (parms)) + BITS_PER_UNIT - 1)
1570 + start_sequence ();
1572 + if (GET_CODE (DECL_RTL (parms)) == REG)
1576 + change_arg_use_of_insns (prologue_insert_point, DECL_RTL (parms), &safe, 0);
1579 + /* generate codes for copying the content */
1580 + rtx movinsn = emit_move_insn (safe, DECL_RTL (parms));
1582 + /* avoid register elimination in gcse.c (COPY-PROP)*/
1583 + PATTERN (movinsn)->volatil = 1;
1585 + /* save debugger info */
1586 + DECL_INCOMING_RTL (parms) = safe;
1590 + else if (GET_CODE (DECL_RTL (parms)) == MEM
1591 + && GET_CODE (XEXP (DECL_RTL (parms), 0)) == ADDRESSOF)
1594 + rtx safe = gen_reg_rtx (GET_MODE (DECL_RTL (parms)));
1596 + /* generate codes for copying the content */
1597 + movinsn = emit_move_insn (safe, DECL_INCOMING_RTL (parms));
1598 + PATTERN (movinsn)->volatil = 1; /* avoid register elimination in gcse.c (COPY-PROP)*/
1600 + /* change the addressof information to the newly allocated pseudo register */
1601 + emit_move_insn (DECL_RTL (parms), safe);
1603 + /* save debugger info */
1604 + DECL_INCOMING_RTL (parms) = safe;
1609 + /* declare temporary local variable DECL_NAME (parms) for it */
1611 + = assign_stack_local (DECL_MODE (parms), arg_size,
1612 + DECL_MODE (parms) == BLKmode ? -1 : 0);
1614 + MEM_IN_STRUCT_P (temp_rtx) = AGGREGATE_TYPE_P (TREE_TYPE (parms));
1615 + set_mem_alias_set (temp_rtx, get_alias_set (parms));
1617 + /* generate codes for copying the content */
1618 + store_expr (parms, temp_rtx, 0);
1620 + /* change the reference for each instructions */
1621 + move_arg_location (prologue_insert_point, DECL_RTL (parms),
1622 + temp_rtx, arg_size);
1624 + /* change the location of parms variable */
1625 + SET_DECL_RTL (parms, temp_rtx);
1627 + /* change debugger info */
1628 + DECL_INCOMING_RTL (parms) = temp_rtx;
1631 + emit_insn_before (get_insns (), prologue_insert_point);
1634 +#ifdef FRAME_GROWS_DOWNWARD
1635 + /* process the string argument */
1636 + if (string_p && DECL_MODE (parms) == BLKmode)
1638 + int alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
1639 + arg_size = CEIL_ROUND (arg_size, alignment);
1641 + /* change the reference for each instructions */
1642 + sweep_string_variable (DECL_RTL (parms), arg_size);
1652 + sweep a string variable to the local variable addressed by sweep_frame_offset, which is
1653 + the last position of the string variables.
1656 +sweep_string_variable (sweep_var, var_size)
1658 + HOST_WIDE_INT var_size;
1660 + HOST_WIDE_INT sweep_offset;
1662 + switch (GET_CODE (sweep_var))
1665 + if (GET_CODE (XEXP (sweep_var, 0)) == ADDRESSOF
1666 + && GET_CODE (XEXP (XEXP (sweep_var, 0), 0)) == REG)
1668 + sweep_offset = AUTO_OFFSET(XEXP (sweep_var, 0));
1671 + sweep_offset = INTVAL (sweep_var);
1677 + /* scan all declarations of variables and fix the offset address of
1678 + the variable based on the frame pointer */
1679 + sweep_string_in_decls (DECL_INITIAL (current_function_decl), sweep_offset, var_size);
1681 + /* scan all argument variable and fix the offset address based on the frame pointer */
1682 + sweep_string_in_args (DECL_ARGUMENTS (current_function_decl), sweep_offset, var_size);
1684 + /* For making room for sweep variable, scan all insns and fix the offset address
1685 + of the variable that is based on frame pointer*/
1686 + sweep_string_use_of_insns (function_first_insn, sweep_offset, var_size);
1689 + /* Clear all the USED bits in operands of all insns and declarations of local vars */
1690 + reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
1691 + reset_used_flags_for_insns (function_first_insn);
1693 + sweep_frame_offset -= var_size;
1699 + move an argument to the local variable addressed by frame_offset
1702 +move_arg_location (insn, orig, new, var_size)
1703 + rtx insn, orig, new;
1704 + HOST_WIDE_INT var_size;
1706 + /* For making room for sweep variable, scan all insns and fix the offset address
1707 + of the variable that is based on frame pointer*/
1708 + change_arg_use_of_insns (insn, orig, &new, var_size);
1711 + /* Clear all the USED bits in operands of all insns and declarations of local vars */
1712 + reset_used_flags_for_insns (insn);
1717 +sweep_string_in_decls (block, sweep_offset, sweep_size)
1719 + HOST_WIDE_INT sweep_offset, sweep_size;
1722 + HOST_WIDE_INT offset;
1725 + while (block && TREE_CODE(block)==BLOCK)
1727 + types = BLOCK_VARS(block);
1731 + /* skip the declaration that refers an external variable and
1732 + also skip an global variable */
1733 + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types)) {
1735 + if (!DECL_RTL_SET_P (types)) goto next;
1736 + home = DECL_RTL (types);
1738 + /* process for static local variable */
1739 + if (GET_CODE (home) == MEM
1740 + && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
1743 + if (GET_CODE (home) == MEM
1744 + && XEXP (home, 0) == virtual_stack_vars_rtx)
1748 + /* the operand related to the sweep variable */
1749 + if (sweep_offset <= offset
1750 + && offset < sweep_offset + sweep_size)
1752 + offset = sweep_frame_offset - sweep_size - sweep_offset;
1754 + XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx, offset);
1755 + XEXP (home, 0)->used = 1;
1757 + else if (sweep_offset <= offset
1758 + && offset < sweep_frame_offset)
1759 + { /* the rest of variables under sweep_frame_offset, so shift the location */
1760 + XEXP (home, 0) = plus_constant (virtual_stack_vars_rtx, -sweep_size);
1761 + XEXP (home, 0)->used = 1;
1765 + if (GET_CODE (home) == MEM
1766 + && GET_CODE (XEXP (home, 0)) == MEM)
1768 + /* process for dynamically allocated array */
1769 + home = XEXP (home, 0);
1772 + if (GET_CODE (home) == MEM
1773 + && GET_CODE (XEXP (home, 0)) == PLUS
1774 + && XEXP (XEXP (home, 0), 0) == virtual_stack_vars_rtx
1775 + && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
1777 + if (! XEXP (home, 0)->used)
1779 + offset = AUTO_OFFSET(XEXP (home, 0));
1781 + /* the operand related to the sweep variable */
1782 + if (sweep_offset <= offset
1783 + && offset < sweep_offset + sweep_size)
1786 + offset += sweep_frame_offset - sweep_size - sweep_offset;
1787 + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1790 + XEXP (home, 0)->used = 1;
1792 + else if (sweep_offset <= offset
1793 + && offset < sweep_frame_offset)
1794 + { /* the rest of variables under sweep_frame_offset,
1795 + so shift the location */
1797 + XEXP (XEXP (home, 0), 1)
1798 + = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1801 + XEXP (home, 0)->used = 1;
1808 + types = TREE_CHAIN(types);
1811 + sweep_string_in_decls (BLOCK_SUBBLOCKS (block), sweep_offset, sweep_size);
1812 + block = BLOCK_CHAIN (block);
1818 +sweep_string_in_args (parms, sweep_offset, sweep_size)
1820 + HOST_WIDE_INT sweep_offset, sweep_size;
1823 + HOST_WIDE_INT offset;
1825 + for (; parms; parms = TREE_CHAIN (parms))
1826 + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
1828 + if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
1830 + home = DECL_INCOMING_RTL (parms);
1832 + if (XEXP (home, 0)->used) continue;
1834 + offset = AUTO_OFFSET(XEXP (home, 0));
1836 + /* the operand related to the sweep variable */
1837 + if (AUTO_BASEPTR (XEXP (home, 0)) == virtual_stack_vars_rtx)
1839 + if (sweep_offset <= offset
1840 + && offset < sweep_offset + sweep_size)
1842 + offset += sweep_frame_offset - sweep_size - sweep_offset;
1843 + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1846 + XEXP (home, 0)->used = 1;
1848 + else if (sweep_offset <= offset
1849 + && offset < sweep_frame_offset)
1850 + { /* the rest of variables under sweep_frame_offset, so shift the location */
1851 + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1854 + XEXP (home, 0)->used = 1;
1862 +static int has_virtual_reg;
1865 +sweep_string_use_of_insns (insn, sweep_offset, sweep_size)
1867 + HOST_WIDE_INT sweep_offset, sweep_size;
1869 + for (; insn; insn = NEXT_INSN (insn))
1870 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
1871 + || GET_CODE (insn) == CALL_INSN)
1873 + has_virtual_reg = FALSE;
1874 + sweep_string_in_operand (insn, &PATTERN (insn), sweep_offset, sweep_size);
1875 + sweep_string_in_operand (insn, &REG_NOTES (insn), sweep_offset, sweep_size);
1881 +sweep_string_in_operand (insn, loc, sweep_offset, sweep_size)
1883 + HOST_WIDE_INT sweep_offset, sweep_size;
1885 + register rtx x = *loc;
1886 + register enum rtx_code code;
1888 + HOST_WIDE_INT offset;
1894 + code = GET_CODE (x);
1899 + case CONST_DOUBLE:
1907 + case ADDR_DIFF_VEC:
1913 + if (x == virtual_incoming_args_rtx
1914 + || x == virtual_stack_vars_rtx
1915 + || x == virtual_stack_dynamic_rtx
1916 + || x == virtual_outgoing_args_rtx
1917 + || x == virtual_cfa_rtx)
1918 + has_virtual_reg = TRUE;
1923 + skip setjmp setup insn and setjmp restore insn
1925 + (set (MEM (reg:SI xx)) (virtual_stack_vars_rtx)))
1926 + (set (virtual_stack_vars_rtx) (REG))
1928 + if (GET_CODE (XEXP (x, 0)) == MEM
1929 + && XEXP (x, 1) == virtual_stack_vars_rtx)
1931 + if (XEXP (x, 0) == virtual_stack_vars_rtx
1932 + && GET_CODE (XEXP (x, 1)) == REG)
1937 + /* Handle typical case of frame register plus constant. */
1938 + if (XEXP (x, 0) == virtual_stack_vars_rtx
1939 + && CONSTANT_P (XEXP (x, 1)))
1941 + if (x->used) goto single_use_of_virtual_reg;
1943 + offset = AUTO_OFFSET(x);
1944 + if (RTX_INTEGRATED_P (x)) k = -1; /* for inline base ptr */
1946 + /* the operand related to the sweep variable */
1947 + if (sweep_offset <= offset + k
1948 + && offset + k < sweep_offset + sweep_size)
1950 + offset += sweep_frame_offset - sweep_size - sweep_offset;
1952 + XEXP (x, 0) = virtual_stack_vars_rtx;
1953 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
1956 + else if (sweep_offset <= offset + k
1957 + && offset + k < sweep_frame_offset)
1958 + { /* the rest of variables under sweep_frame_offset, so shift the location */
1959 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - sweep_size);
1963 + single_use_of_virtual_reg:
1964 + if (has_virtual_reg) {
1965 + /* excerpt from insn_invalid_p in recog.c */
1966 + int icode = recog_memoized (insn);
1968 + if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
1972 + start_sequence ();
1973 + temp = force_operand (x, NULL_RTX);
1974 + seq = get_insns ();
1977 + emit_insn_before (seq, insn);
1978 + if (! validate_change (insn, loc, temp, 0)
1979 + && ! validate_replace_rtx (x, temp, insn))
1980 + fatal_insn ("sweep_string_in_operand", insn);
1984 + has_virtual_reg = TRUE;
1988 +#ifdef FRAME_GROWS_DOWNWARD
1990 + alert the case of frame register plus constant given by reg.
1992 + else if (XEXP (x, 0) == virtual_stack_vars_rtx
1993 + && GET_CODE (XEXP (x, 1)) == REG)
1994 + fatal_insn ("sweep_string_in_operand: unknown addressing", insn);
1998 + process further subtree:
1999 + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2004 + case CALL_PLACEHOLDER:
2005 + sweep_string_use_of_insns (XEXP (x, 0), sweep_offset, sweep_size);
2006 + sweep_string_use_of_insns (XEXP (x, 1), sweep_offset, sweep_size);
2007 + sweep_string_use_of_insns (XEXP (x, 2), sweep_offset, sweep_size);
2014 + /* Scan all subexpressions. */
2015 + fmt = GET_RTX_FORMAT (code);
2016 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2020 + virtual_stack_vars_rtx without offset
2022 + (set (reg:SI xx) (reg:SI 78))
2023 + (set (reg:SI xx) (MEM (reg:SI 78)))
2025 + if (XEXP (x, i) == virtual_stack_vars_rtx)
2026 + fatal_insn ("sweep_string_in_operand: unknown fp usage", insn);
2027 + sweep_string_in_operand (insn, &XEXP (x, i), sweep_offset, sweep_size);
2029 + else if (*fmt == 'E')
2030 + for (j = 0; j < XVECLEN (x, i); j++)
2031 + sweep_string_in_operand (insn, &XVECEXP (x, i, j), sweep_offset, sweep_size);
2036 + change an argument variable to the local variable addressed by the "new" variable.
2038 +static int flag_caui_exit;
2041 +change_arg_use_of_insns (insn, orig, new, size)
2042 + rtx insn, orig, *new;
2043 + HOST_WIDE_INT size;
2045 + flag_caui_exit = FALSE;
2046 + change_arg_use_of_insns_2 (insn, orig, new, size);
2050 +change_arg_use_of_insns_2 (insn, orig, new, size)
2051 + rtx insn, orig, *new;
2052 + HOST_WIDE_INT size;
2054 + for (; insn && !flag_caui_exit; insn = NEXT_INSN (insn))
2055 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2056 + || GET_CODE (insn) == CALL_INSN)
2058 + change_arg_use_in_operand (PATTERN (insn), orig, new, size);
2065 +change_arg_use_in_operand (x, orig, new, size)
2066 + rtx x, orig, *new;
2067 + HOST_WIDE_INT size;
2069 + register enum rtx_code code;
2071 + HOST_WIDE_INT offset;
2077 + code = GET_CODE (x);
2082 + case CONST_DOUBLE:
2090 + case ADDR_DIFF_VEC:
2097 + /* Handle special case of MEM (incoming_args) */
2098 + if (GET_CODE (orig) == MEM
2099 + && XEXP (x, 0) == virtual_incoming_args_rtx)
2103 + /* the operand related to the sweep variable */
2104 + if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2105 + offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
2107 + offset = AUTO_OFFSET(XEXP (*new, 0))
2108 + + (offset - AUTO_OFFSET(XEXP (orig, 0)));
2110 + XEXP (x, 0) = plus_constant (virtual_stack_vars_rtx, offset);
2111 + XEXP (x, 0)->used = 1;
2119 + /* Handle special case of frame register plus constant. */
2120 + if (GET_CODE (orig) == MEM /* skip if orig is register variable in the optimization */
2121 + && XEXP (x, 0) == virtual_incoming_args_rtx && CONSTANT_P (XEXP (x, 1))
2124 + offset = AUTO_OFFSET(x);
2126 + /* the operand related to the sweep variable */
2127 + if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2128 + offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
2130 + offset = AUTO_OFFSET(XEXP (*new, 0))
2131 + + (offset - AUTO_OFFSET(XEXP (orig, 0)));
2133 + XEXP (x, 0) = virtual_stack_vars_rtx;
2134 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2141 + process further subtree:
2142 + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2149 + /* Handle special case of "set (REG or MEM) (incoming_args)".
2150 + It means that the address of the 1st argument is stored. */
2151 + if (GET_CODE (orig) == MEM
2152 + && XEXP (x, 1) == virtual_incoming_args_rtx)
2156 + /* the operand related to the sweep variable */
2157 + if (AUTO_OFFSET(XEXP (orig, 0)) <= offset &&
2158 + offset < AUTO_OFFSET(XEXP (orig, 0)) + size) {
2160 + offset = AUTO_OFFSET(XEXP (*new, 0))
2161 + + (offset - AUTO_OFFSET(XEXP (orig, 0)));
2163 + XEXP (x, 1) = plus_constant (virtual_stack_vars_rtx, offset);
2164 + XEXP (x, 1)->used = 1;
2171 + case CALL_PLACEHOLDER:
2172 + change_arg_use_of_insns_2 (XEXP (x, 0), orig, new, size); if (flag_caui_exit) return;
2173 + change_arg_use_of_insns_2 (XEXP (x, 1), orig, new, size); if (flag_caui_exit) return;
2174 + change_arg_use_of_insns_2 (XEXP (x, 2), orig, new, size); if (flag_caui_exit) return;
2183 + && SET_SRC (x) == orig
2184 + && GET_CODE (SET_DEST (x)) == REG)
2186 + /* exit to the change_arg_use_of_insns */
2187 + flag_caui_exit = TRUE;
2188 + x->volatil = 1; /* avoid register elimination in gcse.c (COPY-PROP)*/
2192 + /* Scan all subexpressions. */
2193 + fmt = GET_RTX_FORMAT (code);
2194 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2197 + if (XEXP (x, i) == orig)
2199 + if (*new == 0) *new = gen_reg_rtx (GET_MODE (orig));
2200 + XEXP (x, i) = *new;
2203 + change_arg_use_in_operand (XEXP (x, i), orig, new, size);
2205 + else if (*fmt == 'E')
2206 + for (j = 0; j < XVECLEN (x, i); j++)
2209 + if (XVECEXP (x, i, j) == orig)
2211 + if (*new == 0) *new = gen_reg_rtx (GET_MODE (orig));
2212 + XVECEXP (x, i, j) = *new;
2215 + change_arg_use_in_operand (XVECEXP (x, i, j), orig, new, size);
2221 +validate_insns_of_varrefs (insn)
2226 + /* Initialize recognition, indicating that volatile is OK. */
2229 + for (; insn; insn = next)
2231 + next = NEXT_INSN (insn);
2232 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2233 + || GET_CODE (insn) == CALL_INSN)
2235 + /* excerpt from insn_invalid_p in recog.c */
2236 + int icode = recog_memoized (insn);
2238 + if (icode < 0 && asm_noperands (PATTERN (insn)) < 0)
2239 + validate_operand_of_varrefs (insn, &PATTERN (insn));
2243 + init_recog_no_volatile ();
2248 +validate_operand_of_varrefs (insn, loc)
2251 + register enum rtx_code code;
2260 + code = GET_CODE (x);
2266 + case CONST_DOUBLE:
2274 + case ADDR_DIFF_VEC:
2281 + /* validate insn of frame register plus constant. */
2282 + if (GET_CODE (x) == PLUS
2283 + && XEXP (x, 0) == virtual_stack_vars_rtx
2284 + && CONSTANT_P (XEXP (x, 1)))
2286 + start_sequence ();
2287 + /* temp = force_operand (x, NULL_RTX); */
2288 + { /* excerpt from expand_binop in optabs.c */
2289 + optab binoptab = add_optab;
2290 + enum machine_mode mode = GET_MODE (x);
2291 + int icode = (int) binoptab->handlers[(int) mode].insn_code;
2292 + enum machine_mode mode1 = insn_data[icode].operand[2].mode;
2294 + rtx xop0 = XEXP (x, 0), xop1 = XEXP (x, 1);
2295 + temp = gen_reg_rtx (mode);
2297 + /* Now, if insn's predicates don't allow offset operands, put them into
2300 + if (! (*insn_data[icode].operand[2].predicate) (xop1, mode1)
2301 + && mode1 != VOIDmode)
2302 + xop1 = copy_to_mode_reg (mode1, xop1);
2304 + pat = GEN_FCN (icode) (temp, xop0, xop1);
2308 + seq = get_insns ();
2311 + emit_insn_before (seq, insn);
2312 + if (! validate_change (insn, loc, temp, 0))
2319 + case CALL_PLACEHOLDER:
2320 + validate_insns_of_varrefs (XEXP (x, 0));
2321 + validate_insns_of_varrefs (XEXP (x, 1));
2322 + validate_insns_of_varrefs (XEXP (x, 2));
2329 + /* Scan all subexpressions. */
2330 + fmt = GET_RTX_FORMAT (code);
2331 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2333 + validate_operand_of_varrefs (insn, &XEXP (x, i));
2334 + else if (*fmt == 'E')
2335 + for (j = 0; j < XVECLEN (x, i); j++)
2336 + validate_operand_of_varrefs (insn, &XVECEXP (x, i, j));
2343 + The following code is invoked after the instantiation of pseudo registers.
2345 + Reorder local variables to place a pseudo register after buffers to avoid
2346 + the corruption of local variables that could be used to further corrupt
2347 + arbitrary memory locations.
2349 +#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2350 +static void push_frame PARAMS ((HOST_WIDE_INT var_size, HOST_WIDE_INT boundary));
2351 +static void push_frame_in_decls PARAMS ((tree block, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
2352 +static void push_frame_in_args PARAMS ((tree parms, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
2353 +static void push_frame_of_insns PARAMS ((rtx insn, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
2354 +static void push_frame_in_operand PARAMS ((rtx insn, rtx orig, HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
2355 +static void push_frame_of_reg_equiv_memory_loc PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
2356 +static void push_frame_of_reg_equiv_constant PARAMS ((HOST_WIDE_INT push_size, HOST_WIDE_INT boundary));
2357 +static void reset_used_flags_for_push_frame PARAMS ((void));
2358 +static int check_out_of_frame_access PARAMS ((rtx insn, HOST_WIDE_INT boundary));
2359 +static int check_out_of_frame_access_in_operand PARAMS ((rtx, HOST_WIDE_INT boundary));
2363 +assign_stack_local_for_pseudo_reg (mode, size, align)
2364 + enum machine_mode mode;
2365 + HOST_WIDE_INT size;
2368 +#if defined(FRAME_GROWS_DOWNWARD) || !defined(STACK_GROWS_DOWNWARD)
2369 + return assign_stack_local (mode, size, align);
2371 + tree blocks = DECL_INITIAL (current_function_decl);
2373 + HOST_WIDE_INT saved_frame_offset, units_per_push, starting_frame;
2374 + int first_call_from_purge_addressof, first_call_from_global_alloc;
2376 + if (! flag_propolice_protection
2379 + || current_function_is_inlinable
2380 + || ! search_string_from_argsandvars (1)
2381 + || current_function_contains_functions)
2382 + return assign_stack_local (mode, size, align);
2384 + first_call_from_purge_addressof = !push_frame_offset && !cse_not_expected;
2385 + first_call_from_global_alloc = !saved_cse_not_expected && cse_not_expected;
2386 + saved_cse_not_expected = cse_not_expected;
2388 + starting_frame = (STARTING_FRAME_OFFSET)?STARTING_FRAME_OFFSET:BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2389 + units_per_push = MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2390 + GET_MODE_SIZE (mode));
2392 + if (first_call_from_purge_addressof)
2394 + push_frame_offset = push_allocated_offset;
2395 + if (check_out_of_frame_access (get_insns (), starting_frame))
2397 + /* if there is an access beyond frame, push dummy region to separate
2398 + the address of instantiated variables */
2399 + push_frame (GET_MODE_SIZE (DImode), 0);
2400 + assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2404 + if (first_call_from_global_alloc)
2406 + push_frame_offset = push_allocated_offset = 0;
2407 + if (check_out_of_frame_access (get_insns (), starting_frame))
2409 + if (STARTING_FRAME_OFFSET)
2411 + /* if there is an access beyond frame, push dummy region
2412 + to separate the address of instantiated variables */
2413 + push_frame (GET_MODE_SIZE (DImode), 0);
2414 + assign_stack_local (BLKmode, GET_MODE_SIZE (DImode), -1);
2417 + push_allocated_offset = starting_frame;
2421 + saved_frame_offset = frame_offset;
2422 + frame_offset = push_frame_offset;
2424 + new = assign_stack_local (mode, size, align);
2426 + push_frame_offset = frame_offset;
2427 + frame_offset = saved_frame_offset;
2429 + if (push_frame_offset > push_allocated_offset)
2431 + push_frame (units_per_push, push_allocated_offset + STARTING_FRAME_OFFSET);
2433 + assign_stack_local (BLKmode, units_per_push, -1);
2434 + push_allocated_offset += units_per_push;
2437 + /* At the second call from global alloc, alpha push frame and assign
2438 + a local variable to the top of the stack */
2439 + if (first_call_from_global_alloc && STARTING_FRAME_OFFSET == 0)
2440 + push_frame_offset = push_allocated_offset = 0;
2447 +#if !defined(FRAME_GROWS_DOWNWARD) && defined(STACK_GROWS_DOWNWARD)
2449 + push frame information for instantiating pseudo register at the top of stack.
2450 + This is only for the "frame grows upward", it means FRAME_GROWS_DOWNWARD is
2453 + It is called by purge_addressof function and global_alloc (or reload)
2457 +push_frame (var_size, boundary)
2458 + HOST_WIDE_INT var_size, boundary;
2460 + reset_used_flags_for_push_frame();
2462 + /* scan all declarations of variables and fix the offset address of the variable based on the frame pointer */
2463 + push_frame_in_decls (DECL_INITIAL (current_function_decl), var_size, boundary);
2465 + /* scan all argument variable and fix the offset address based on the frame pointer */
2466 + push_frame_in_args (DECL_ARGUMENTS (current_function_decl), var_size, boundary);
2468 + /* scan all operands of all insns and fix the offset address based on the frame pointer */
2469 + push_frame_of_insns (get_insns (), var_size, boundary);
2471 + /* scan all reg_equiv_memory_loc and reg_equiv_constant*/
2472 + push_frame_of_reg_equiv_memory_loc (var_size, boundary);
2473 + push_frame_of_reg_equiv_constant (var_size, boundary);
2475 + reset_used_flags_for_push_frame();
2479 +reset_used_flags_for_push_frame()
2482 + extern rtx *reg_equiv_memory_loc;
2483 + extern rtx *reg_equiv_constant;
2485 + /* Clear all the USED bits in operands of all insns and declarations of local vars */
2486 + reset_used_flags_for_decls (DECL_INITIAL (current_function_decl));
2487 + reset_used_flags_for_insns (get_insns ());
2490 + /* The following codes are processed if the push_frame is called from
2491 + global_alloc (or reload) function */
2492 + if (reg_equiv_memory_loc == 0) return;
2494 + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2495 + if (reg_equiv_memory_loc[i])
2497 + rtx x = reg_equiv_memory_loc[i];
2499 + if (GET_CODE (x) == MEM
2500 + && GET_CODE (XEXP (x, 0)) == PLUS
2501 + && AUTO_BASEPTR (XEXP (x, 0)) == frame_pointer_rtx)
2504 + XEXP (x, 0)->used = 0;
2509 + if (reg_equiv_constant == 0) return;
2511 + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2512 + if (reg_equiv_constant[i])
2514 + rtx x = reg_equiv_constant[i];
2516 + if (GET_CODE (x) == PLUS
2517 + && AUTO_BASEPTR (x) == frame_pointer_rtx)
2526 +push_frame_in_decls (block, push_size, boundary)
2528 + HOST_WIDE_INT push_size, boundary;
2531 + HOST_WIDE_INT offset;
2534 + while (block && TREE_CODE(block)==BLOCK)
2536 + types = BLOCK_VARS(block);
2540 + /* skip the declaration that refers an external variable and
2541 + also skip an global variable */
2542 + if (! DECL_EXTERNAL (types) && ! TREE_STATIC (types))
2545 + if (!DECL_RTL_SET_P (types)) goto next;
2546 + home = DECL_RTL (types);
2548 + /* process for static local variable */
2549 + if (GET_CODE (home) == MEM
2550 + && GET_CODE (XEXP (home, 0)) == SYMBOL_REF)
2553 + if (GET_CODE (home) == MEM
2554 + && GET_CODE (XEXP (home, 0)) == REG)
2556 + if (XEXP (home, 0) != frame_pointer_rtx
2560 + XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2564 + XEXP (home, 0)->used = 1;
2567 + if (GET_CODE (home) == MEM
2568 + && GET_CODE (XEXP (home, 0)) == MEM)
2571 + /* process for dynamically allocated array */
2572 + home = XEXP (home, 0);
2575 + if (GET_CODE (home) == MEM
2576 + && GET_CODE (XEXP (home, 0)) == PLUS
2577 + && GET_CODE (XEXP (XEXP (home, 0), 1)) == CONST_INT)
2579 + offset = AUTO_OFFSET(XEXP (home, 0));
2581 + if (! XEXP (home, 0)->used
2582 + && offset >= boundary)
2584 + offset += push_size;
2585 + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2588 + XEXP (home, 0)->used = 1;
2594 + types = TREE_CHAIN(types);
2597 + push_frame_in_decls (BLOCK_SUBBLOCKS (block), push_size, boundary);
2598 + block = BLOCK_CHAIN (block);
2604 +push_frame_in_args (parms, push_size, boundary)
2606 + HOST_WIDE_INT push_size, boundary;
2609 + HOST_WIDE_INT offset;
2611 + for (; parms; parms = TREE_CHAIN (parms))
2612 + if (DECL_NAME (parms) && TREE_TYPE (parms) != error_mark_node)
2614 + if (PARM_PASSED_IN_MEMORY (parms) && DECL_NAME (parms))
2616 + home = DECL_INCOMING_RTL (parms);
2617 + offset = AUTO_OFFSET(XEXP (home, 0));
2619 + if (XEXP (home, 0)->used || offset < boundary) continue;
2621 + /* the operand related to the sweep variable */
2622 + if (AUTO_BASEPTR (XEXP (home, 0)) == frame_pointer_rtx)
2624 + if (XEXP (home, 0) == frame_pointer_rtx)
2625 + XEXP (home, 0) = plus_constant (frame_pointer_rtx,
2628 + offset += push_size;
2629 + XEXP (XEXP (home, 0), 1) = gen_rtx_CONST_INT (VOIDmode,
2634 + XEXP (home, 0)->used = 1;
2641 +static int insn_pushed;
2642 +static int *fp_equiv = 0;
2645 +push_frame_of_insns (insn, push_size, boundary)
2647 + HOST_WIDE_INT push_size, boundary;
2649 + /* init fp_equiv */
2650 + fp_equiv = (int *) xcalloc (max_reg_num (), sizeof (int));
2652 + for (; insn; insn = NEXT_INSN (insn))
2653 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
2654 + || GET_CODE (insn) == CALL_INSN)
2656 + insn_pushed = FALSE;
2657 + push_frame_in_operand (insn, PATTERN (insn), push_size, boundary);
2662 + rtx before = PREV_INSN (trial);
2663 + rtx after = NEXT_INSN (trial);
2664 + int has_barrier = 0;
2666 + rtx seq = split_insns (PATTERN (insn), insn);
2668 + /* If we are splitting a JUMP_INSN, it might be followed by a
2669 + BARRIER. We may need to handle this specially. */
2670 + if (after && GET_CODE (after) == BARRIER)
2673 + after = NEXT_INSN (after);
2676 + if (seq && GET_CODE (seq) == SEQUENCE)
2678 + if (XVECLEN (seq, 0) == 2)
2680 + rtx pattern = PATTERN (XVECEXP (seq, 0, 1));
2682 + if (GET_CODE (pattern) == SET
2683 + && GET_CODE (XEXP (pattern, 0)) == REG
2684 + && GET_CODE (XEXP (pattern, 1)) == PLUS
2685 + && XEXP (pattern, 0) == XEXP (XEXP (pattern, 1), 0)
2686 + && CONSTANT_P (XEXP (XEXP (pattern, 1), 1)))
2688 + rtx offset = XEXP (XEXP (pattern, 1), 1);
2689 + fp_equiv[REGNO (XEXP (pattern, 0))] = INTVAL (offset);
2691 + /* replace the pattern of the insn */
2692 + add_insn_after (XVECEXP (seq, 0, 0), before);
2693 + delete_insn (trial);
2698 + /* excerpt from emit-rtl.c: L3320 */
2699 + tem = emit_insn_after (seq, trial);
2701 + delete_related_insns (trial);
2703 + emit_barrier_after (tem);
2705 + /* Recursively call try_split for each new insn created */
2706 + for (tem = NEXT_INSN (before); tem != after;
2707 + tem = NEXT_INSN (tem))
2708 + if (! INSN_DELETED_P (tem) && INSN_P (tem))
2709 + tem = try_split (PATTERN (tem), tem, 1);
2714 + /* push frame in NOTE */
2715 + push_frame_in_operand (insn, REG_NOTES (insn), push_size, boundary);
2717 + /* push frame in CALL EXPR_LIST */
2718 + if (GET_CODE (insn) == CALL_INSN)
2719 + push_frame_in_operand (insn, CALL_INSN_FUNCTION_USAGE (insn), push_size, boundary);
2728 +push_frame_in_operand (insn, orig, push_size, boundary)
2730 + HOST_WIDE_INT push_size, boundary;
2732 + register rtx x = orig;
2733 + register enum rtx_code code;
2735 + HOST_WIDE_INT offset;
2741 + code = GET_CODE (x);
2746 + case CONST_DOUBLE:
2754 + case ADDR_DIFF_VEC:
2763 + skip setjmp setup insn and setjmp restore insn
2765 + (set (MEM (reg:SI xx)) (frame_pointer_rtx)))
2766 + (set (frame_pointer_rtx) (REG))
2768 + if (GET_CODE (XEXP (x, 0)) == MEM
2769 + && XEXP (x, 1) == frame_pointer_rtx)
2771 + if (XEXP (x, 0) == frame_pointer_rtx
2772 + && GET_CODE (XEXP (x, 1)) == REG)
2776 + powerpc case: restores setjmp address
2777 + (set (frame_pointer_rtx) (plus frame_pointer_rtx const_int -n))
2779 + (set (reg) (plus frame_pointer_rtx const_int -n))
2780 + (set (frame_pointer_rtx) (reg))
2782 + if (GET_CODE (XEXP (x, 0)) == REG
2783 + && GET_CODE (XEXP (x, 1)) == PLUS
2784 + && XEXP (XEXP (x, 1), 0) == frame_pointer_rtx
2785 + && CONSTANT_P (XEXP (XEXP (x, 1), 1))
2786 + && INTVAL (XEXP (XEXP (x, 1), 1)) < 0)
2789 + offset = AUTO_OFFSET(x);
2790 + if (x->used || abs (offset) < boundary)
2793 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset - push_size);
2794 + x->used = 1; insn_pushed = TRUE;
2798 + /* reset fp_equiv register */
2799 + else if (GET_CODE (XEXP (x, 0)) == REG
2800 + && fp_equiv[REGNO (XEXP (x, 0))])
2801 + fp_equiv[REGNO (XEXP (x, 0))] = 0;
2803 + /* propagate fp_equiv register */
2804 + else if (GET_CODE (XEXP (x, 0)) == REG
2805 + && GET_CODE (XEXP (x, 1)) == REG
2806 + && fp_equiv[REGNO (XEXP (x, 1))])
2807 + if (REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER
2808 + || reg_renumber[REGNO (XEXP (x, 0))] > 0)
2809 + fp_equiv[REGNO (XEXP (x, 0))] = fp_equiv[REGNO (XEXP (x, 1))];
2813 + if (XEXP (x, 0) == frame_pointer_rtx
2816 + XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2817 + XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2823 + offset = AUTO_OFFSET(x);
2825 + /* Handle special case of frame register plus constant. */
2826 + if (CONSTANT_P (XEXP (x, 1))
2827 + && XEXP (x, 0) == frame_pointer_rtx)
2829 + if (x->used || offset < boundary)
2832 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2833 + x->used = 1; insn_pushed = TRUE;
2838 + Handle alpha case:
2839 + (plus:SI (subreg:SI (reg:DI 63 FP) 0) (const_int 64 [0x40]))
2841 + if (CONSTANT_P (XEXP (x, 1))
2842 + && GET_CODE (XEXP (x, 0)) == SUBREG
2843 + && SUBREG_REG (XEXP (x, 0)) == frame_pointer_rtx)
2845 + if (x->used || offset < boundary)
2848 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2849 + x->used = 1; insn_pushed = TRUE;
2854 + Handle powerpc case:
2855 + (set (reg x) (plus fp const))
2856 + (set (.....) (... (plus (reg x) (const B))))
2858 + else if (CONSTANT_P (XEXP (x, 1))
2859 + && GET_CODE (XEXP (x, 0)) == REG
2860 + && fp_equiv[REGNO (XEXP (x, 0))])
2862 + if (x->used) return;
2864 + offset += fp_equiv[REGNO (XEXP (x, 0))];
2866 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2867 + x->used = 1; insn_pushed = TRUE;
2872 + Handle special case of frame register plus reg (constant).
2873 + (set (reg x) (const B))
2874 + (set (....) (...(plus fp (reg x))))
2876 + else if (XEXP (x, 0) == frame_pointer_rtx
2877 + && GET_CODE (XEXP (x, 1)) == REG
2878 + && PREV_INSN (insn)
2879 + && PATTERN (PREV_INSN (insn))
2880 + && SET_DEST (PATTERN (PREV_INSN (insn))) == XEXP (x, 1)
2881 + && CONSTANT_P (SET_SRC (PATTERN (PREV_INSN (insn)))))
2883 + HOST_WIDE_INT offset = INTVAL (SET_SRC (PATTERN (PREV_INSN (insn))));
2885 + if (x->used || offset < boundary)
2888 + SET_SRC (PATTERN (PREV_INSN (insn)))
2889 + = gen_rtx_CONST_INT (VOIDmode, offset + push_size);
2891 + XEXP (x, 1)->used = 1;
2895 + /* Handle special case of frame register plus reg (used). */
2896 + else if (XEXP (x, 0) == frame_pointer_rtx
2897 + && XEXP (x, 1)->used)
2903 + process further subtree:
2904 + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
2909 + case CALL_PLACEHOLDER:
2910 + push_frame_of_insns (XEXP (x, 0), push_size, boundary);
2911 + push_frame_of_insns (XEXP (x, 1), push_size, boundary);
2912 + push_frame_of_insns (XEXP (x, 2), push_size, boundary);
2919 + /* Scan all subexpressions. */
2920 + fmt = GET_RTX_FORMAT (code);
2921 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2924 + if (XEXP (x, i) == frame_pointer_rtx && boundary == 0)
2925 + fatal_insn ("push_frame_in_operand", insn);
2926 + push_frame_in_operand (insn, XEXP (x, i), push_size, boundary);
2928 + else if (*fmt == 'E')
2929 + for (j = 0; j < XVECLEN (x, i); j++)
2930 + push_frame_in_operand (insn, XVECEXP (x, i, j), push_size, boundary);
2934 +push_frame_of_reg_equiv_memory_loc (push_size, boundary)
2935 + HOST_WIDE_INT push_size, boundary;
2938 + extern rtx *reg_equiv_memory_loc;
2940 + /* This function is processed if the push_frame is called from
2941 + global_alloc (or reload) function */
2942 + if (reg_equiv_memory_loc == 0) return;
2944 + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2945 + if (reg_equiv_memory_loc[i])
2947 + rtx x = reg_equiv_memory_loc[i];
2950 + if (GET_CODE (x) == MEM
2951 + && GET_CODE (XEXP (x, 0)) == PLUS
2952 + && XEXP (XEXP (x, 0), 0) == frame_pointer_rtx)
2954 + offset = AUTO_OFFSET(XEXP (x, 0));
2956 + if (! XEXP (x, 0)->used
2957 + && offset >= boundary)
2959 + offset += push_size;
2960 + XEXP (XEXP (x, 0), 1) = gen_rtx_CONST_INT (VOIDmode, offset);
2963 + XEXP (x, 0)->used = 1;
2966 + else if (GET_CODE (x) == MEM
2967 + && XEXP (x, 0) == frame_pointer_rtx
2970 + XEXP (x, 0) = plus_constant (frame_pointer_rtx, push_size);
2971 + XEXP (x, 0)->used = 1; insn_pushed = TRUE;
2977 +push_frame_of_reg_equiv_constant (push_size, boundary)
2978 + HOST_WIDE_INT push_size, boundary;
2981 + extern rtx *reg_equiv_constant;
2983 + /* This function is processed if the push_frame is called from
2984 + global_alloc (or reload) function */
2985 + if (reg_equiv_constant == 0) return;
2987 + for (i=LAST_VIRTUAL_REGISTER+1; i < max_regno; i++)
2988 + if (reg_equiv_constant[i])
2990 + rtx x = reg_equiv_constant[i];
2993 + if (GET_CODE (x) == PLUS
2994 + && XEXP (x, 0) == frame_pointer_rtx)
2996 + offset = AUTO_OFFSET(x);
2999 + && offset >= boundary)
3001 + offset += push_size;
3002 + XEXP (x, 1) = gen_rtx_CONST_INT (VOIDmode, offset);
3008 + else if (x == frame_pointer_rtx
3011 + reg_equiv_constant[i]
3012 + = plus_constant (frame_pointer_rtx, push_size);
3013 + reg_equiv_constant[i]->used = 1; insn_pushed = TRUE;
3019 +check_out_of_frame_access (insn, boundary)
3021 + HOST_WIDE_INT boundary;
3023 + for (; insn; insn = NEXT_INSN (insn))
3024 + if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3025 + || GET_CODE (insn) == CALL_INSN)
3027 + if (check_out_of_frame_access_in_operand (PATTERN (insn), boundary))
3035 +check_out_of_frame_access_in_operand (orig, boundary)
3037 + HOST_WIDE_INT boundary;
3039 + register rtx x = orig;
3040 + register enum rtx_code code;
3047 + code = GET_CODE (x);
3052 + case CONST_DOUBLE:
3060 + case ADDR_DIFF_VEC:
3067 + if (XEXP (x, 0) == frame_pointer_rtx)
3068 + if (0 < boundary) return TRUE;
3072 + /* Handle special case of frame register plus constant. */
3073 + if (CONSTANT_P (XEXP (x, 1))
3074 + && XEXP (x, 0) == frame_pointer_rtx)
3076 + if (0 <= AUTO_OFFSET(x)
3077 + && AUTO_OFFSET(x) < boundary) return TRUE;
3081 + process further subtree:
3082 + Example: (plus:SI (mem/s:SI (plus:SI (reg:SI 17) (const_int 8)))
3087 + case CALL_PLACEHOLDER:
3088 + if (check_out_of_frame_access (XEXP (x, 0), boundary)) return TRUE;
3089 + if (check_out_of_frame_access (XEXP (x, 1), boundary)) return TRUE;
3090 + if (check_out_of_frame_access (XEXP (x, 2), boundary)) return TRUE;
3097 + /* Scan all subexpressions. */
3098 + fmt = GET_RTX_FORMAT (code);
3099 + for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3102 + if (check_out_of_frame_access_in_operand (XEXP (x, i), boundary))
3105 + else if (*fmt == 'E')
3106 + for (j = 0; j < XVECLEN (x, i); j++)
3107 + if (check_out_of_frame_access_in_operand (XVECEXP (x, i, j), boundary))
3113 diff -urN gcc-3.3.1/gcc/protector.h gcc-3.3.1-pp/gcc/protector.h
3114 --- gcc-3.3.1/gcc/protector.h 1970-01-01 00:00:00.000000000 +0000
3115 +++ gcc-3.3.1-pp/gcc/protector.h 2003-09-12 13:40:28.000000000 +0000
3117 +/* RTL buffer overflow protection function for GNU C compiler
3118 + Copyright (C) 1987, 88, 89, 92-7, 1998 Free Software Foundation, Inc.
3120 +This file is part of GCC.
3122 +GCC is free software; you can redistribute it and/or modify it under
3123 +the terms of the GNU General Public License as published by the Free
3124 +Software Foundation; either version 2, or (at your option) any later
3127 +GCC is distributed in the hope that it will be useful, but WITHOUT ANY
3128 +WARRANTY; without even the implied warranty of MERCHANTABILITY or
3129 +FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
3132 +You should have received a copy of the GNU General Public License
3133 +along with GCC; see the file COPYING. If not, write to the Free
3134 +Software Foundation, 59 Temple Place - Suite 330, Boston, MA
3135 +02111-1307, USA. */
3138 +/* declaration of GUARD variable */
3139 +#define GUARD_m Pmode
3140 +#define UNITS_PER_GUARD MAX(BIGGEST_ALIGNMENT / BITS_PER_UNIT, GET_MODE_SIZE (GUARD_m))
3142 +#ifndef L_stack_smash_handler
3144 +/* insert a guard variable before a character buffer and change the order
3145 + of pointer variables, character buffers and pointer arguments */
3147 +extern void prepare_stack_protection PARAMS ((int inlinable));
3150 +/* search a character array from the specified type tree */
3152 +extern int search_string_def PARAMS ((tree names));
3155 +/* examine whether the input contains frame pointer addressing */
3157 +extern int contains_fp PARAMS ((rtx op));
3159 +/* allocate a local variable in the stack area before character buffers
3160 + to avoid the corruption of it */
3162 +extern rtx assign_stack_local_for_pseudo_reg PARAMS ((enum machine_mode, HOST_WIDE_INT, int));
3165 diff -urN gcc-3.3.1/gcc/reload1.c gcc-3.3.1-pp/gcc/reload1.c
3166 --- gcc-3.3.1/gcc/reload1.c 2003-06-07 05:30:09.000000000 +0000
3167 +++ gcc-3.3.1-pp/gcc/reload1.c 2003-09-12 13:40:28.000000000 +0000
3172 +#include "protector.h"
3174 /* This file contains the reload pass of the compiler, which is
3175 run after register allocation has been done. It checks that
3176 @@ -1992,7 +1993,7 @@
3179 /* No known place to spill from => no slot to reuse. */
3180 - x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
3181 + x = assign_stack_local_for_pseudo_reg (GET_MODE (regno_reg_rtx[i]), total_size,
3182 inherent_size == total_size ? 0 : -1);
3183 if (BYTES_BIG_ENDIAN)
3184 /* Cancel the big-endian correction done in assign_stack_local.
3185 diff -urN gcc-3.3.1/gcc/simplify-rtx.c gcc-3.3.1-pp/gcc/simplify-rtx.c
3186 --- gcc-3.3.1/gcc/simplify-rtx.c 2003-07-03 07:38:22.000000000 +0000
3187 +++ gcc-3.3.1-pp/gcc/simplify-rtx.c 2003-09-12 13:40:28.000000000 +0000
3188 @@ -1670,7 +1670,8 @@
3189 int n_ops = 2, input_ops = 2, input_consts = 0, n_consts;
3190 int first, negate, changed;
3193 + HOST_WIDE_INT fp_offset = 0;
3195 memset ((char *) ops, 0, sizeof ops);
3197 /* Set up the two operands and then expand them until nothing has been
3198 @@ -1695,6 +1696,10 @@
3202 + if (flag_propolice_protection
3203 + && XEXP (this_op, 0) == virtual_stack_vars_rtx
3204 + && GET_CODE (XEXP (this_op, 1)) == CONST_INT)
3205 + fp_offset = INTVAL (XEXP (this_op, 1));
3209 @@ -1849,10 +1854,10 @@
3210 && GET_CODE (ops[n_ops - 1].op) == CONST_INT
3211 && CONSTANT_P (ops[n_ops - 2].op))
3213 - rtx value = ops[n_ops - 1].op;
3214 + int value = INTVAL (ops[n_ops - 1].op);
3215 if (ops[n_ops - 1].neg ^ ops[n_ops - 2].neg)
3216 - value = neg_const_int (mode, value);
3217 - ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, INTVAL (value));
3219 + ops[n_ops - 2].op = plus_constant (ops[n_ops - 2].op, value);
3223 @@ -1871,6 +1876,54 @@
3224 || (n_ops + n_consts == input_ops && n_consts <= input_consts)))
3227 + if (flag_propolice_protection)
3229 + /* keep the addressing style of local variables
3230 + as (plus (virtual_stack_vars_rtx) (CONST_int x))
3231 + (1) inline function is expanded, (+ (+VFP c1) -c2)=>(+ VFP c1-c2)
3232 + (2) the case ary[r-1], (+ (+VFP c1) (+r -1))=>(+ R (+r -1))
3234 + for (i = 0; i < n_ops; i++)
3235 +#ifdef FRAME_GROWS_DOWNWARD
3236 + if (ops[i].op == virtual_stack_vars_rtx)
3238 + if (ops[i].op == virtual_stack_vars_rtx
3239 + || ops[i].op == frame_pointer_rtx)
3242 + if (GET_CODE (ops[n_ops - 1].op) == CONST_INT)
3244 + HOST_WIDE_INT value = INTVAL (ops[n_ops - 1].op);
3245 + if (n_ops < 3 || value >= fp_offset)
3247 + ops[i].op = plus_constant (ops[i].op, value);
3253 + && (n_ops+1 + n_consts > input_ops
3254 + || (n_ops+1 + n_consts == input_ops && n_consts <= input_consts)))
3256 + ops[n_ops - 1].op = GEN_INT (value-fp_offset);
3257 + ops[i].op = plus_constant (ops[i].op, fp_offset);
3260 + /* buf[BUFSIZE]: buf is the first local variable (+ (+ fp -S) S)
3261 + or (+ (fp 0) r) ==> ((+ (+fp 1) r) -1) */
3262 + else if (fp_offset != 0)
3264 +#ifndef FRAME_GROWS_DOWNWARD
3266 + * For the case of buf[i], i: REG, buf: (plus fp 0),
3268 + else if (fp_offset == 0)
3275 /* Put a non-negated operand first. If there aren't any, make all
3276 operands positive and negate the whole thing later. */
3278 diff -urN gcc-3.3.1/gcc/toplev.c gcc-3.3.1-pp/gcc/toplev.c
3279 --- gcc-3.3.1/gcc/toplev.c 2003-07-18 06:59:16.000000000 +0000
3280 +++ gcc-3.3.1-pp/gcc/toplev.c 2003-09-12 13:40:28.000000000 +0000
3281 @@ -904,6 +904,13 @@
3282 minimum function alignment. Zero means no alignment is forced. */
3283 int force_align_functions_log;
3285 +#if defined(STACK_PROTECTOR) && defined(STACK_GROWS_DOWNWARD)
3286 +/* Nonzero means use propolice as a stack protection method */
3287 +int flag_propolice_protection = 1;
3289 +int flag_propolice_protection = 0;
3292 /* Table of supported debugging formats. */
3295 @@ -1188,6 +1195,10 @@
3296 N_("Trap for signed overflow in addition / subtraction / multiplication") },
3297 { "new-ra", &flag_new_regalloc, 1,
3298 N_("Use graph coloring register allocation.") },
3299 + {"stack-protector", &flag_propolice_protection, 1,
3300 + N_("Enables stack protection") },
3301 + {"no-stack-protector", &flag_propolice_protection, 0,
3302 + N_("Disables stack protection") },
3305 /* Table of language-specific options. */
3306 @@ -1547,7 +1558,9 @@
3307 {"missing-noreturn", &warn_missing_noreturn, 1,
3308 N_("Warn about functions which might be candidates for attribute noreturn") },
3309 {"strict-aliasing", &warn_strict_aliasing, 1,
3310 - N_ ("Warn about code which might break the strict aliasing rules") }
3311 + N_ ("Warn about code which might break the strict aliasing rules") },
3312 + {"stack-protector", &warn_stack_protector, 1,
3313 + N_("Warn when disabling stack protector for some reason")}
3317 @@ -2449,6 +2462,8 @@
3319 insns = get_insns ();
3321 + if (flag_propolice_protection) prepare_stack_protection (inlinable);
3323 /* Dump the rtl code if we are dumping rtl. */
3325 if (open_dump_file (DFI_rtl, decl))